repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
BedrockDev/Sunrin2017 | Software/Web Programming/Project01/test02.py | 1 | 1146 | # Chapter 3
def getadditionalworkhour(work_hour):
    """Return pay-weighted hours: hours beyond 40 count at time-and-a-half.

    Hours up to 40 are returned unchanged; the portion above 40 is
    multiplied by 1.5 (overtime), and the result is returned as a float.
    """
    if work_hour >= 40:
        # BUG FIX: the original read the module-level ``workHour`` global
        # instead of the ``work_hour`` parameter, so the function silently
        # depended on (and broke without) the caller's global state.
        return float(work_hour + (work_hour - 40) * 1.5)
    else:
        return work_hour
# Exercise 3.1: compute pay with overtime weighting (Python 2 script).
print "Exercise 3.1"
# NOTE: Python 2 input() eval()s the entered text; float() then coerces it.
workHour = float(input("Enter hours : "))
workRate = float(input("Enter rate : "))
# Overtime-adjusted hours times the hourly rate.
pay = getadditionalworkhour(workHour) * workRate
print "Pay : " + str(pay)
print "Exercise 3.2"
try:
workRate = input("Enter rates : ")
except:
print "Error, you are a retard, please enter numeric input"
workRate = input("Enter rates : ")
try:
workHour = input("Enter hours : ")
except:
print "Error, you are a retard, please enter numeric input"
workHour = input("Enter hours : ")
pay = workHour * workRate
print "Pay : " + pay
# Exercise 3.3: map a fractional score to a letter grade (Python 2 script).
print "Exercise 3.3"
print "Score | Grade"
print ">= 0.9 A"
print ">= 0.8 B"
print ">= 0.7 C"
print ">= 0.6 D"
print "< 0.6 E"
# NOTE(review): the bare except reports "Bad score" for *any* error raised
# here, not just non-numeric input -- consider narrowing the exception types.
try:
    # Python 2 input() eval()s the text, so a numeric entry arrives as a number.
    score = input("Enter score : ")
    # Thresholds checked highest-first so each branch implies "and < next tier".
    if score >= 0.9:
        print "A"
    elif score >= 0.8:
        print "B"
    elif score >= 0.7:
        print "C"
    elif score >= 0.6:
        print "D"
    else:
        print "E"
except:
    print "Bad score"
| mit |
sahutd/youtube-dl | youtube_dl/extractor/nationalgeographic.py | 18 | 1419 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
smuggle_url,
url_basename,
)
class NationalGeographicIE(InfoExtractor):
    _VALID_URL = r'http://video\.nationalgeographic\.com/video/.*?'
    _TEST = {
        'url': 'http://video.nationalgeographic.com/video/news/150210-news-crab-mating-vin?source=featuredvideo',
        'info_dict': {
            'id': '4DmDACA6Qtk_',
            'ext': 'flv',
            'title': 'Mating Crabs Busted by Sharks',
            'description': 'md5:16f25aeffdeba55aaa8ec37e093ad8b3',
        },
        'add_ie': ['ThePlatform'],
    }

    def _real_extract(self, url):
        # The last URL path segment doubles as the display id for logging.
        display_id = url_basename(url)
        page = self._download_webpage(url, display_id)

        # The page embeds the MRSS feed location and the video GUID as
        # data attributes; combine them to fetch this video's feed entry.
        feed_url = self._search_regex(
            r'data-feed-url="([^"]+)"', page, 'feed url')
        guid = self._search_regex(
            r'data-video-guid="([^"]+)"', page, 'guid')
        feed = self._download_xml(
            '%s?byGuid=%s' % (feed_url, guid), display_id)

        media = feed.find('.//{http://search.yahoo.com/mrss/}content')
        theplatform_id = url_basename(media.attrib.get('url'))

        # For some reason, the normal links don't work and we must force
        # the use of f4m.
        theplatform_url = (
            'http://link.theplatform.com/s/ngs/%s'
            '?format=SMIL&formats=MPEG4&manifest=f4m' % theplatform_id)
        return self.url_result(
            smuggle_url(theplatform_url, {'force_smil_url': True}))
| unlicense |
ryfeus/lambda-packs | Sklearn_scipy_numpy/source/setuptools/ssl_support.py | 86 | 8131 | import os
import socket
import atexit
import re
from setuptools.extern.six.moves import urllib, http_client, map
import pkg_resources
from pkg_resources import ResolutionError, ExtractionError
# ``ssl`` may be absent on minimal Python builds; features degrade gracefully
# and ``is_available`` below reports whether verification is usable.
try:
    import ssl
except ImportError:
    ssl = None
__all__ = [
    'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
    'opener_for'
]
# Well-known CA bundle locations across common Linux/BSD/OS X layouts,
# searched in order by find_ca_bundle().
cert_paths = """
/etc/pki/tls/certs/ca-bundle.crt
/etc/ssl/certs/ca-certificates.crt
/usr/share/ssl/certs/ca-bundle.crt
/usr/local/share/certs/ca-root.crt
/etc/ssl/cert.pem
/System/Library/OpenSSL/certs/cert.pem
/usr/local/share/certs/ca-root-nss.crt
/etc/ssl/ca-bundle.pem
""".strip().split()
# Some interpreters lack HTTPS plumbing entirely; fall back to ``object`` so
# the subclass definitions below still import (they just become unusable).
try:
    HTTPSHandler = urllib.request.HTTPSHandler
    HTTPSConnection = http_client.HTTPSConnection
except AttributeError:
    HTTPSHandler = HTTPSConnection = object
# True only when both the ssl module and the HTTPS classes are present.
is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)
# Prefer the stdlib hostname matcher, then the backports package, then the
# local re-implementation guarded below.
try:
    from ssl import CertificateError, match_hostname
except ImportError:
    try:
        from backports.ssl_match_hostname import CertificateError
        from backports.ssl_match_hostname import match_hostname
    except ImportError:
        CertificateError = None
        match_hostname = None
if not CertificateError:
    class CertificateError(ValueError):
        pass
if not match_hostname:
    # Local backport of ssl.match_hostname / _dnsname_match for Pythons that
    # lack both the stdlib and the ``backports`` implementations.  Security
    # sensitive: kept byte-for-byte in sync with the upstream logic.
    def _dnsname_match(dn, hostname, max_wildcards=1):
        """Matching according to RFC 6125, section 6.4.3
        http://tools.ietf.org/html/rfc6125#section-6.4.3
        """
        pats = []
        if not dn:
            return False
        # Ported from python3-syntax:
        # leftmost, *remainder = dn.split(r'.')
        parts = dn.split(r'.')
        leftmost = parts[0]
        remainder = parts[1:]
        wildcards = leftmost.count('*')
        if wildcards > max_wildcards:
            # Issue #17980: avoid denials of service by refusing more
            # than one wildcard per fragment.  A survey of established
            # policy among SSL implementations showed it to be a
            # reasonable choice.
            raise CertificateError(
                "too many wildcards in certificate DNS name: " + repr(dn))
        # speed up common case w/o wildcards
        if not wildcards:
            return dn.lower() == hostname.lower()
        # RFC 6125, section 6.4.3, subitem 1.
        # The client SHOULD NOT attempt to match a presented identifier in which
        # the wildcard character comprises a label other than the left-most label.
        if leftmost == '*':
            # When '*' is a fragment by itself, it matches a non-empty dotless
            # fragment.
            pats.append('[^.]+')
        elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
            # RFC 6125, section 6.4.3, subitem 3.
            # The client SHOULD NOT attempt to match a presented identifier
            # where the wildcard character is embedded within an A-label or
            # U-label of an internationalized domain name.
            pats.append(re.escape(leftmost))
        else:
            # Otherwise, '*' matches any dotless string, e.g. www*
            pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
        # add the remaining fragments, ignore any wildcards
        for frag in remainder:
            pats.append(re.escape(frag))
        pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
        return pat.match(hostname)

    def match_hostname(cert, hostname):
        """Verify that *cert* (in decoded format as returned by
        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
        rules are followed, but IP addresses are not accepted for *hostname*.
        CertificateError is raised on failure. On success, the function
        returns nothing.
        """
        if not cert:
            raise ValueError("empty or no certificate")
        dnsnames = []
        # subjectAltName DNS entries take precedence over the subject CN.
        san = cert.get('subjectAltName', ())
        for key, value in san:
            if key == 'DNS':
                if _dnsname_match(value, hostname):
                    return
                dnsnames.append(value)
        if not dnsnames:
            # The subject is only checked when there is no dNSName entry
            # in subjectAltName
            for sub in cert.get('subject', ()):
                for key, value in sub:
                    # XXX according to RFC 2818, the most specific Common Name
                    # must be used.
                    if key == 'commonName':
                        if _dnsname_match(value, hostname):
                            return
                        dnsnames.append(value)
        # No candidate matched; report every name that was tried.
        if len(dnsnames) > 1:
            raise CertificateError("hostname %r "
                "doesn't match either of %s"
                % (hostname, ', '.join(map(repr, dnsnames))))
        elif len(dnsnames) == 1:
            raise CertificateError("hostname %r "
                "doesn't match %r"
                % (hostname, dnsnames[0]))
        else:
            raise CertificateError("no appropriate commonName or "
                "subjectAltName fields were found")
class VerifyingHTTPSHandler(HTTPSHandler):
    """urllib handler that opens certificate-verifying HTTPS connections.

    No auth, subclasses, timeouts, etc. -- intentionally minimal.
    """

    def __init__(self, ca_bundle):
        self.ca_bundle = ca_bundle
        HTTPSHandler.__init__(self)

    def https_open(self, req):
        def make_connection(host, **kw):
            # Every connection created for this handler verifies the peer
            # certificate against our CA bundle.
            return VerifyingHTTPSConn(host, self.ca_bundle, **kw)

        return self.do_open(make_connection, req)
class VerifyingHTTPSConn(HTTPSConnection):
    """Simple verifying connection: no auth, subclasses, timeouts, etc."""
    def __init__(self, host, ca_bundle, **kw):
        HTTPSConnection.__init__(self, host, **kw)
        # Path to the CA bundle used for peer certificate verification.
        self.ca_bundle = ca_bundle
    def connect(self):
        # NOTE(review): create_connection's second positional parameter is
        # *timeout*, not source_address -- if ``source_address`` is ever
        # non-None it would be misused here; verify against callers.
        sock = socket.create_connection(
            (self.host, self.port), getattr(self, 'source_address', None)
        )

        # Handle the socket if a (proxy) tunnel is present
        if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
            self.sock = sock
            self._tunnel()
            # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
            # change self.host to mean the proxy server host when tunneling is
            # being used. Adapt, since we are interested in the destination
            # host for the match_hostname() comparison.
            actual_host = self._tunnel_host
        else:
            actual_host = self.host

        # NOTE(review): ssl.wrap_socket is deprecated and removed in
        # Python 3.12; an SSLContext-based rewrite is needed for modern
        # interpreters.
        self.sock = ssl.wrap_socket(
            sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
        )

        # Verify the peer's hostname; tear the connection down on mismatch
        # so no data is ever exchanged with an unverified peer.
        try:
            match_hostname(self.sock.getpeercert(), actual_host)
        except CertificateError:
            self.sock.shutdown(socket.SHUT_RDWR)
            self.sock.close()
            raise
def opener_for(ca_bundle=None):
    """Get a urlopen() replacement that uses ca_bundle for verification"""
    # Fall back to the first discoverable system bundle when none is given.
    bundle = ca_bundle or find_ca_bundle()
    handler = VerifyingHTTPSHandler(bundle)
    return urllib.request.build_opener(handler).open
# Module-level cache: the materialized Windows cert file, created at most once.
_wincerts = None
def get_win_certfile():
    # Return the path of a CA bundle assembled from the Windows certificate
    # stores, or None when the optional ``wincertstore`` package is missing.
    global _wincerts
    if _wincerts is not None:
        return _wincerts.name
    try:
        from wincertstore import CertFile
    except ImportError:
        return None
    class MyCertFile(CertFile):
        def __init__(self, stores=(), certs=()):
            CertFile.__init__(self)
            for store in stores:
                self.addstore(store)
            self.addcerts(certs)
            # Remove the temp cert file when the interpreter exits.
            atexit.register(self.close)
        def close(self):
            try:
                super(MyCertFile, self).close()
            except OSError:
                # Already closed/removed; nothing left to clean up.
                pass
    _wincerts = MyCertFile(stores=['CA', 'ROOT'])
    return _wincerts.name
def find_ca_bundle():
    """Return an existing CA bundle path, or None"""
    if os.name == 'nt':
        # Windows: build a bundle from the system certificate stores.
        return get_win_certfile()
    # Elsewhere: first well-known bundle path that exists on disk wins.
    found = next(
        (candidate for candidate in cert_paths if os.path.isfile(candidate)),
        None)
    if found is not None:
        return found
    # Last resort: the certifi package's bundled CA file, if installed.
    try:
        import certifi
        return certifi.where()
    except (ImportError, ResolutionError, ExtractionError):
        return None
| mit |
scs/uclinux | user/python/python-2.4.4/Lib/whrandom.py | 12 | 4910 | """Wichman-Hill random number generator.
Wichmann, B. A. & Hill, I. D. (1982)
Algorithm AS 183:
An efficient and portable pseudo-random number generator
Applied Statistics 31 (1982) 188-190
see also:
Correction to Algorithm AS 183
Applied Statistics 33 (1984) 123
McLeod, A. I. (1985)
A remark on Algorithm AS 183
Applied Statistics 34 (1985),198-200
USE:
whrandom.random() yields double precision random numbers
uniformly distributed between 0 and 1.
whrandom.seed(x, y, z) must be called before whrandom.random()
to seed the generator
There is also an interface to create multiple independent
random generators, and to choose from other ranges.
Multi-threading note: the random number generator used here is not
thread-safe; it is possible that nearly simultaneous calls in
different theads return the same random value. To avoid this, you
have to use a lock around all calls. (I didn't want to slow this
down in the serial case by using a lock here.)
"""
import warnings
# Deprecation shim: whrandom was superseded by the stdlib ``random`` module;
# warn once at import time.
warnings.warn("the whrandom module is deprecated; please use the random module",
              DeprecationWarning)
# Translated by Guido van Rossum from C source provided by
# Adrian Baddeley.
class whrandom:
    # Wichmann-Hill pseudo-random generator (Python 2 only: uses the
    # ``raise E, msg`` statement, ``long`` and classic integer division).
    # State is a 3-tuple of small ints advanced by three linear congruences.
    def __init__(self, x = 0, y = 0, z = 0):
        """Initialize an instance.
        Without arguments, initialize from current time.
        With arguments (x, y, z), initialize from them."""
        self.seed(x, y, z)
    def seed(self, x = 0, y = 0, z = 0):
        """Set the seed from (x, y, z).
        These must be integers in the range [0, 256)."""
        if not type(x) == type(y) == type(z) == type(0):
            raise TypeError, 'seeds must be integers'
        if not (0 <= x < 256 and 0 <= y < 256 and 0 <= z < 256):
            raise ValueError, 'seeds must be in range(0, 256)'
        if 0 == x == y == z:
            # Initialize from current time
            import time
            t = long(time.time() * 256)
            t = int((t&0xffffff) ^ (t>>24))
            t, x = divmod(t, 256)
            t, y = divmod(t, 256)
            t, z = divmod(t, 256)
        # Zero is a poor seed, so substitute 1
        self._seed = (x or 1, y or 1, z or 1)
    def random(self):
        """Get the next random number in the range [0.0, 1.0)."""
        # This part is thread-unsafe:
        # BEGIN CRITICAL SECTION
        x, y, z = self._seed
        # The three AS 183 congruential generators; moduli 30269/30307/30323.
        x = (171 * x) % 30269
        y = (172 * y) % 30307
        z = (170 * z) % 30323
        #
        self._seed = x, y, z
        # END CRITICAL SECTION
        # Combine the three streams; the fractional part is uniform in [0, 1).
        return (x/30269.0 + y/30307.0 + z/30323.0) % 1.0
    def uniform(self, a, b):
        """Get a random number in the range [a, b)."""
        return a + (b-a) * self.random()
    def randint(self, a, b):
        """Get a random integer in the range [a, b] including
        both end points.
        (Deprecated; use randrange below.)"""
        return self.randrange(a, b+1)
    def choice(self, seq):
        """Choose a random element from a non-empty sequence."""
        return seq[int(self.random() * len(seq))]
    def randrange(self, start, stop=None, step=1, int=int, default=None):
        """Choose a random item from range(start, stop[, step]).
        This fixes the problem with randint() which includes the
        endpoint; in Python this is usually not what you want.
        Do not supply the 'int' and 'default' arguments."""
        # This code is a bit messy to make it fast for the
        # common case while still doing adequate error checking
        istart = int(start)
        if istart != start:
            raise ValueError, "non-integer arg 1 for randrange()"
        if stop is default:
            # Single-argument form: randrange(stop).
            if istart > 0:
                return int(self.random() * istart)
            raise ValueError, "empty range for randrange()"
        istop = int(stop)
        if istop != stop:
            raise ValueError, "non-integer stop for randrange()"
        if step == 1:
            if istart < istop:
                return istart + int(self.random() *
                                    (istop - istart))
            raise ValueError, "empty range for randrange()"
        istep = int(step)
        if istep != step:
            raise ValueError, "non-integer step for randrange()"
        # n = number of items in range(istart, istop, istep), sign-aware.
        if istep > 0:
            n = (istop - istart + istep - 1) / istep
        elif istep < 0:
            n = (istop - istart + istep + 1) / istep
        else:
            raise ValueError, "zero step for randrange()"
        if n <= 0:
            raise ValueError, "empty range for randrange()"
        return istart + istep*int(self.random() * n)
# Initialize from the current time
# Module-level convenience API: one shared generator instance backs the
# module functions below (not thread-safe; see the module docstring).
_inst = whrandom()
seed = _inst.seed
random = _inst.random
uniform = _inst.uniform
randint = _inst.randint
choice = _inst.choice
randrange = _inst.randrange
| gpl-2.0 |
morissette/devopsdays-hackathon-2016 | venv/lib/python2.7/site-packages/alembic/operations/ops.py | 9 | 72690 | from .. import util
from ..util import sqla_compat
from . import schemaobj
from sqlalchemy.types import NULLTYPE
from .base import Operations, BatchOperations
import re
class MigrateOperation(object):
    """base class for migration command and organization objects.
    This system is part of the operation extensibility API.
    .. versionadded:: 0.8.0
    .. seealso::
        :ref:`operation_objects`
        :ref:`operation_plugins`
        :ref:`customizing_revision`
    """
    # memoized_property: the dict is created on first access, then cached on
    # the instance so repeated reads share the same mapping.
    @util.memoized_property
    def info(self):
        """A dictionary that may be used to store arbitrary information
        along with this :class:`.MigrateOperation` object.
        """
        return {}
class AddConstraintOp(MigrateOperation):
    """Represent an add constraint operation."""

    @property
    def constraint_type(self):
        # Subclasses advertise which kind of constraint they create.
        raise NotImplementedError()

    @classmethod
    def from_constraint(cls, constraint):
        # Route the SQLAlchemy constraint to the matching Create*Op factory,
        # keyed on its ``__visit_name__``.  Both check-constraint visit names
        # share one factory.
        check_factory = CreateCheckConstraintOp.from_constraint
        dispatch = {
            "unique_constraint": CreateUniqueConstraintOp.from_constraint,
            "foreign_key_constraint": CreateForeignKeyOp.from_constraint,
            "primary_key_constraint": CreatePrimaryKeyOp.from_constraint,
            "check_constraint": check_factory,
            "column_check_constraint": check_factory,
        }
        return dispatch[constraint.__visit_name__](constraint)

    def reverse(self):
        # The inverse of adding a constraint is dropping that same constraint.
        return DropConstraintOp.from_constraint(self.to_constraint())

    def to_diff_tuple(self):
        return ("add_constraint", self.to_constraint())
@Operations.register_operation("drop_constraint")
@BatchOperations.register_operation("drop_constraint", "batch_drop_constraint")
class DropConstraintOp(MigrateOperation):
    """Represent a drop constraint operation."""
    def __init__(
            self,
            constraint_name, table_name, type_=None, schema=None,
            _orig_constraint=None):
        self.constraint_name = constraint_name
        self.table_name = table_name
        self.constraint_type = type_
        self.schema = schema
        # The original Constraint object, when available; required by
        # reverse() and to_constraint() to reproduce the constraint exactly.
        self._orig_constraint = _orig_constraint
    def reverse(self):
        if self._orig_constraint is None:
            raise ValueError(
                "operation is not reversible; "
                "original constraint is not present")
        return AddConstraintOp.from_constraint(self._orig_constraint)
    def to_diff_tuple(self):
        # Foreign keys get their own diff tag; everything else is generic.
        if self.constraint_type == "foreignkey":
            return ("remove_fk", self.to_constraint())
        else:
            return ("remove_constraint", self.to_constraint())
    @classmethod
    def from_constraint(cls, constraint):
        # Map SQLAlchemy ``__visit_name__`` -> the shorthand accepted by
        # drop_constraint()'s ``type_`` argument.
        types = {
            "unique_constraint": "unique",
            "foreign_key_constraint": "foreignkey",
            "primary_key_constraint": "primary",
            "check_constraint": "check",
            "column_check_constraint": "check",
        }
        constraint_table = sqla_compat._table_for_constraint(constraint)
        return cls(
            constraint.name,
            constraint_table.name,
            schema=constraint_table.schema,
            type_=types[constraint.__visit_name__],
            _orig_constraint=constraint
        )
    def to_constraint(self):
        if self._orig_constraint is not None:
            return self._orig_constraint
        else:
            raise ValueError(
                "constraint cannot be produced; "
                "original constraint is not present")
    # _with_legacy_names keeps the pre-0.8 keyword names working (see the
    # "versionchanged" notes in the docstring below).
    @classmethod
    @util._with_legacy_names([
        ("type", "type_"),
        ("name", "constraint_name"),
    ])
    def drop_constraint(
            cls, operations, constraint_name, table_name,
            type_=None, schema=None):
        """Drop a constraint of the given name, typically via DROP CONSTRAINT.
        :param constraint_name: name of the constraint.
        :param table_name: table name.
        :param type_: optional, required on MySQL. can be
         'foreignkey', 'primary', 'unique', or 'check'.
        :param schema: Optional schema name to operate within. To control
         quoting of the schema outside of the default behavior, use
         the SQLAlchemy construct
         :class:`~sqlalchemy.sql.elements.quoted_name`.
        .. versionadded:: 0.7.0 'schema' can now accept a
           :class:`~sqlalchemy.sql.elements.quoted_name` construct.
        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:
           * name -> constraint_name
        """
        op = cls(constraint_name, table_name, type_=type_, schema=schema)
        return operations.invoke(op)
    @classmethod
    def batch_drop_constraint(cls, operations, constraint_name, type_=None):
        """Issue a "drop constraint" instruction using the
        current batch migration context.
        The batch form of this call omits the ``table_name`` and ``schema``
        arguments from the call.
        .. seealso::
            :meth:`.Operations.drop_constraint`
        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:
           * name -> constraint_name
        """
        # Table name and schema come from the enclosing batch context.
        op = cls(
            constraint_name, operations.impl.table_name,
            type_=type_, schema=operations.impl.schema
        )
        return operations.invoke(op)
@Operations.register_operation("create_primary_key")
@BatchOperations.register_operation(
    "create_primary_key", "batch_create_primary_key")
class CreatePrimaryKeyOp(AddConstraintOp):
    """Represent a create primary key operation."""
    constraint_type = "primarykey"
    def __init__(
            self, constraint_name, table_name, columns,
            schema=None, _orig_constraint=None, **kw):
        self.constraint_name = constraint_name
        self.table_name = table_name
        self.columns = columns
        self.schema = schema
        # Original Constraint, when available; short-circuits to_constraint().
        self._orig_constraint = _orig_constraint
        self.kw = kw
    @classmethod
    def from_constraint(cls, constraint):
        constraint_table = sqla_compat._table_for_constraint(constraint)
        return cls(
            constraint.name,
            constraint_table.name,
            constraint.columns,
            schema=constraint_table.schema,
            _orig_constraint=constraint
        )
    def to_constraint(self, migration_context=None):
        if self._orig_constraint is not None:
            return self._orig_constraint
        # Rebuild the constraint from the stored parameters.
        schema_obj = schemaobj.SchemaObjects(migration_context)
        return schema_obj.primary_key_constraint(
            self.constraint_name, self.table_name,
            self.columns, schema=self.schema)
    @classmethod
    @util._with_legacy_names([
        ('name', 'constraint_name'),
        ('cols', 'columns')
    ])
    def create_primary_key(
            cls, operations,
            constraint_name, table_name, columns, schema=None):
        """Issue a "create primary key" instruction using the current
        migration context.
        e.g.::
            from alembic import op
            op.create_primary_key(
                "pk_my_table", "my_table",
                ["id", "version"]
            )
        This internally generates a :class:`~sqlalchemy.schema.Table` object
        containing the necessary columns, then generates a new
        :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
        object which it then associates with the
        :class:`~sqlalchemy.schema.Table`.
        Any event listeners associated with this action will be fired
        off normally. The :class:`~sqlalchemy.schema.AddConstraint`
        construct is ultimately used to generate the ALTER statement.
        :param name: Name of the primary key constraint. The name is necessary
         so that an ALTER statement can be emitted. For setups that
         use an automated naming scheme such as that described at
         :ref:`sqla:constraint_naming_conventions`
         ``name`` here can be ``None``, as the event listener will
         apply the name to the constraint object when it is associated
         with the table.
        :param table_name: String name of the target table.
        :param columns: a list of string column names to be applied to the
         primary key constraint.
        :param schema: Optional schema name to operate within. To control
         quoting of the schema outside of the default behavior, use
         the SQLAlchemy construct
         :class:`~sqlalchemy.sql.elements.quoted_name`.
        .. versionadded:: 0.7.0 'schema' can now accept a
           :class:`~sqlalchemy.sql.elements.quoted_name` construct.
        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:
           * name -> constraint_name
           * cols -> columns
        """
        # NOTE(review): ``schema`` is passed positionally here while sibling
        # ops pass ``schema=schema``; it lands on the right parameter but the
        # keyword form would be more consistent.
        op = cls(constraint_name, table_name, columns, schema)
        return operations.invoke(op)
    @classmethod
    def batch_create_primary_key(cls, operations, constraint_name, columns):
        """Issue a "create primary key" instruction using the
        current batch migration context.
        The batch form of this call omits the ``table_name`` and ``schema``
        arguments from the call.
        .. seealso::
            :meth:`.Operations.create_primary_key`
        """
        # Table name and schema come from the enclosing batch context.
        op = cls(
            constraint_name, operations.impl.table_name, columns,
            schema=operations.impl.schema
        )
        return operations.invoke(op)
@Operations.register_operation("create_unique_constraint")
@BatchOperations.register_operation(
    "create_unique_constraint", "batch_create_unique_constraint")
class CreateUniqueConstraintOp(AddConstraintOp):
    """Represent a create unique constraint operation."""
    constraint_type = "unique"
    def __init__(
            self, constraint_name, table_name,
            columns, schema=None, _orig_constraint=None, **kw):
        self.constraint_name = constraint_name
        self.table_name = table_name
        self.columns = columns
        self.schema = schema
        # Original Constraint, when available; short-circuits to_constraint().
        self._orig_constraint = _orig_constraint
        self.kw = kw
    @classmethod
    def from_constraint(cls, constraint):
        constraint_table = sqla_compat._table_for_constraint(constraint)
        # Only carry over the optional DDL modifiers that are actually set.
        kw = {}
        if constraint.deferrable:
            kw['deferrable'] = constraint.deferrable
        if constraint.initially:
            kw['initially'] = constraint.initially
        return cls(
            constraint.name,
            constraint_table.name,
            [c.name for c in constraint.columns],
            schema=constraint_table.schema,
            _orig_constraint=constraint,
            **kw
        )
    def to_constraint(self, migration_context=None):
        if self._orig_constraint is not None:
            return self._orig_constraint
        # Rebuild the constraint from the stored parameters.
        schema_obj = schemaobj.SchemaObjects(migration_context)
        return schema_obj.unique_constraint(
            self.constraint_name, self.table_name, self.columns,
            schema=self.schema, **self.kw)
    @classmethod
    @util._with_legacy_names([
        ('name', 'constraint_name'),
        ('source', 'table_name'),
        ('local_cols', 'columns'),
    ])
    def create_unique_constraint(
            cls, operations, constraint_name, table_name, columns,
            schema=None, **kw):
        """Issue a "create unique constraint" instruction using the
        current migration context.
        e.g.::
            from alembic import op
            op.create_unique_constraint("uq_user_name", "user", ["name"])
        This internally generates a :class:`~sqlalchemy.schema.Table` object
        containing the necessary columns, then generates a new
        :class:`~sqlalchemy.schema.UniqueConstraint`
        object which it then associates with the
        :class:`~sqlalchemy.schema.Table`.
        Any event listeners associated with this action will be fired
        off normally. The :class:`~sqlalchemy.schema.AddConstraint`
        construct is ultimately used to generate the ALTER statement.
        :param name: Name of the unique constraint. The name is necessary
         so that an ALTER statement can be emitted. For setups that
         use an automated naming scheme such as that described at
         :ref:`sqla:constraint_naming_conventions`,
         ``name`` here can be ``None``, as the event listener will
         apply the name to the constraint object when it is associated
         with the table.
        :param table_name: String name of the source table.
        :param columns: a list of string column names in the
         source table.
        :param deferrable: optional bool. If set, emit DEFERRABLE or
         NOT DEFERRABLE when issuing DDL for this constraint.
        :param initially: optional string. If set, emit INITIALLY <value>
         when issuing DDL for this constraint.
        :param schema: Optional schema name to operate within. To control
         quoting of the schema outside of the default behavior, use
         the SQLAlchemy construct
         :class:`~sqlalchemy.sql.elements.quoted_name`.
        .. versionadded:: 0.7.0 'schema' can now accept a
           :class:`~sqlalchemy.sql.elements.quoted_name` construct.
        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:
           * name -> constraint_name
           * source -> table_name
           * local_cols -> columns
        """
        op = cls(
            constraint_name, table_name, columns,
            schema=schema, **kw
        )
        return operations.invoke(op)
    @classmethod
    @util._with_legacy_names([('name', 'constraint_name')])
    def batch_create_unique_constraint(
            cls, operations, constraint_name, columns, **kw):
        """Issue a "create unique constraint" instruction using the
        current batch migration context.
        The batch form of this call omits the ``source`` and ``schema``
        arguments from the call.
        .. seealso::
            :meth:`.Operations.create_unique_constraint`
        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:
           * name -> constraint_name
        """
        # Table name and schema come from the enclosing batch context.
        kw['schema'] = operations.impl.schema
        op = cls(
            constraint_name, operations.impl.table_name, columns,
            **kw
        )
        return operations.invoke(op)
@Operations.register_operation("create_foreign_key")
@BatchOperations.register_operation(
    "create_foreign_key", "batch_create_foreign_key")
class CreateForeignKeyOp(AddConstraintOp):
    """Represent a create foreign key constraint operation."""
    constraint_type = "foreignkey"
    def __init__(
            self, constraint_name, source_table, referent_table, local_cols,
            remote_cols, _orig_constraint=None, **kw):
        self.constraint_name = constraint_name
        self.source_table = source_table
        self.referent_table = referent_table
        self.local_cols = local_cols
        self.remote_cols = remote_cols
        # Original Constraint, when available; short-circuits to_constraint().
        self._orig_constraint = _orig_constraint
        self.kw = kw
    def to_diff_tuple(self):
        return ("add_fk", self.to_constraint())
    @classmethod
    def from_constraint(cls, constraint):
        # Only carry over the optional FK modifiers that are actually set.
        kw = {}
        if constraint.onupdate:
            kw['onupdate'] = constraint.onupdate
        if constraint.ondelete:
            kw['ondelete'] = constraint.ondelete
        if constraint.initially:
            kw['initially'] = constraint.initially
        if constraint.deferrable:
            kw['deferrable'] = constraint.deferrable
        if constraint.use_alter:
            kw['use_alter'] = constraint.use_alter
        # _fk_spec unpacks both sides of the FK in one call.
        source_schema, source_table, \
            source_columns, target_schema, \
            target_table, target_columns,\
            onupdate, ondelete, deferrable, initially \
            = sqla_compat._fk_spec(constraint)
        kw['source_schema'] = source_schema
        kw['referent_schema'] = target_schema
        return cls(
            constraint.name,
            source_table,
            target_table,
            source_columns,
            target_columns,
            _orig_constraint=constraint,
            **kw
        )
    def to_constraint(self, migration_context=None):
        if self._orig_constraint is not None:
            return self._orig_constraint
        # Rebuild the constraint from the stored parameters.
        schema_obj = schemaobj.SchemaObjects(migration_context)
        return schema_obj.foreign_key_constraint(
            self.constraint_name,
            self.source_table, self.referent_table,
            self.local_cols, self.remote_cols,
            **self.kw)
    # _with_legacy_names keeps the pre-0.8 keyword names working (see the
    # "versionchanged" notes in the docstring below).
    @classmethod
    @util._with_legacy_names([
        ('name', 'constraint_name'),
        ('source', 'source_table'),
        ('referent', 'referent_table'),
    ])
    def create_foreign_key(cls, operations, constraint_name,
                           source_table, referent_table, local_cols,
                           remote_cols, onupdate=None, ondelete=None,
                           deferrable=None, initially=None, match=None,
                           source_schema=None, referent_schema=None,
                           **dialect_kw):
        """Issue a "create foreign key" instruction using the
        current migration context.
        e.g.::
            from alembic import op
            op.create_foreign_key(
                "fk_user_address", "address",
                "user", ["user_id"], ["id"])
        This internally generates a :class:`~sqlalchemy.schema.Table` object
        containing the necessary columns, then generates a new
        :class:`~sqlalchemy.schema.ForeignKeyConstraint`
        object which it then associates with the
        :class:`~sqlalchemy.schema.Table`.
        Any event listeners associated with this action will be fired
        off normally. The :class:`~sqlalchemy.schema.AddConstraint`
        construct is ultimately used to generate the ALTER statement.
        :param name: Name of the foreign key constraint. The name is necessary
         so that an ALTER statement can be emitted. For setups that
         use an automated naming scheme such as that described at
         :ref:`sqla:constraint_naming_conventions`,
         ``name`` here can be ``None``, as the event listener will
         apply the name to the constraint object when it is associated
         with the table.
        :param source_table: String name of the source table.
        :param referent_table: String name of the destination table.
        :param local_cols: a list of string column names in the
         source table.
        :param remote_cols: a list of string column names in the
         remote table.
        :param onupdate: Optional string. If set, emit ON UPDATE <value> when
         issuing DDL for this constraint. Typical values include CASCADE,
         DELETE and RESTRICT.
        :param ondelete: Optional string. If set, emit ON DELETE <value> when
         issuing DDL for this constraint. Typical values include CASCADE,
         DELETE and RESTRICT.
        :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
         DEFERRABLE when issuing DDL for this constraint.
        :param source_schema: Optional schema name of the source table.
        :param referent_schema: Optional schema name of the destination table.
        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:
           * name -> constraint_name
           * source -> source_table
           * referent -> referent_table
        """
        op = cls(
            constraint_name,
            source_table, referent_table,
            local_cols, remote_cols,
            onupdate=onupdate, ondelete=ondelete,
            deferrable=deferrable,
            source_schema=source_schema,
            referent_schema=referent_schema,
            initially=initially, match=match,
            **dialect_kw
        )
        return operations.invoke(op)
    @classmethod
    @util._with_legacy_names([
        ('name', 'constraint_name'),
        ('referent', 'referent_table')
    ])
    def batch_create_foreign_key(
            cls, operations, constraint_name, referent_table,
            local_cols, remote_cols,
            referent_schema=None,
            onupdate=None, ondelete=None,
            deferrable=None, initially=None, match=None,
            **dialect_kw):
        """Issue a "create foreign key" instruction using the
        current batch migration context.
        The batch form of this call omits the ``source`` and ``source_schema``
        arguments from the call.
        e.g.::
            with batch_alter_table("address") as batch_op:
                batch_op.create_foreign_key(
                    "fk_user_address",
                    "user", ["user_id"], ["id"])
        .. seealso::
            :meth:`.Operations.create_foreign_key`
        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:
           * name -> constraint_name
           * referent -> referent_table
        """
        # Source table and schema come from the enclosing batch context.
        op = cls(
            constraint_name,
            operations.impl.table_name, referent_table,
            local_cols, remote_cols,
            onupdate=onupdate, ondelete=ondelete,
            deferrable=deferrable,
            source_schema=operations.impl.schema,
            referent_schema=referent_schema,
            initially=initially, match=match,
            **dialect_kw
        )
        return operations.invoke(op)
@Operations.register_operation("create_check_constraint")
@BatchOperations.register_operation(
    "create_check_constraint", "batch_create_check_constraint")
class CreateCheckConstraintOp(AddConstraintOp):
    """Represent a create check constraint operation."""

    constraint_type = "check"

    def __init__(
            self, constraint_name, table_name,
            condition, schema=None, _orig_constraint=None, **kw):
        # ``_orig_constraint`` carries the live CheckConstraint object
        # when this op was derived from existing metadata.
        self.constraint_name = constraint_name
        self.table_name = table_name
        self.schema = schema
        self.condition = condition
        self.kw = kw
        self._orig_constraint = _orig_constraint

    @classmethod
    def from_constraint(cls, constraint):
        """Build this op from an existing ``CheckConstraint`` object."""
        owner_table = sqla_compat._table_for_constraint(constraint)
        return cls(
            constraint.name,
            owner_table.name,
            constraint.sqltext,
            schema=owner_table.schema,
            _orig_constraint=constraint,
        )

    def to_constraint(self, migration_context=None):
        """Return a ``CheckConstraint``, reusing the original if present."""
        if self._orig_constraint is not None:
            return self._orig_constraint
        return schemaobj.SchemaObjects(migration_context).check_constraint(
            self.constraint_name, self.table_name,
            self.condition, schema=self.schema, **self.kw)

    @classmethod
    @util._with_legacy_names([
        ('name', 'constraint_name'),
        ('source', 'table_name')
    ])
    def create_check_constraint(
            cls, operations,
            constraint_name, table_name, condition,
            schema=None, **kw):
        """Issue a "create check constraint" instruction using the
        current migration context.

        e.g.::

            from alembic import op
            from sqlalchemy.sql import column, func

            op.create_check_constraint(
                "ck_user_name_len",
                "user",
                func.len(column('name')) > 5
            )

        CHECK constraints are usually against a SQL expression, so ad-hoc
        table metadata is usually needed.  The given arguments are
        converted into a :class:`sqlalchemy.schema.CheckConstraint` bound
        to an anonymous table in order to emit the CREATE statement.

        :param constraint_name: name of the check constraint; may be
         ``None`` when a naming convention per
         :ref:`sqla:constraint_naming_conventions` is in use.
        :param table_name: string name of the source table.
        :param condition: SQL expression that's the condition of the
         constraint; a string or SQLAlchemy expression language structure.
        :param deferrable: optional bool; if set, emit DEFERRABLE or
         NOT DEFERRABLE when issuing DDL.
        :param initially: optional string; if set, emit INITIALLY <value>
         when issuing DDL.
        :param schema: optional schema name to operate within; accepts a
         :class:`~sqlalchemy.sql.elements.quoted_name` construct.

        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:

           * name -> constraint_name
           * source -> table_name

        """
        return operations.invoke(
            cls(constraint_name, table_name, condition, schema=schema, **kw))

    @classmethod
    @util._with_legacy_names([('name', 'constraint_name')])
    def batch_create_check_constraint(
            cls, operations, constraint_name, condition, **kw):
        """Issue a "create check constraint" instruction using the
        current batch migration context.

        The batch form omits the ``source`` and ``schema`` arguments,
        which are taken from the batch context itself.

        .. seealso::

            :meth:`.Operations.create_check_constraint`

        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:

           * name -> constraint_name

        """
        return operations.invoke(cls(
            constraint_name, operations.impl.table_name,
            condition, schema=operations.impl.schema, **kw))
@Operations.register_operation("create_index")
@BatchOperations.register_operation("create_index", "batch_create_index")
class CreateIndexOp(MigrateOperation):
    """Represent a create index operation."""

    def __init__(
            self, index_name, table_name, columns, schema=None,
            unique=False, _orig_index=None, **kw):
        # ``_orig_index`` carries the live Index object when known.
        self.index_name = index_name
        self.table_name = table_name
        self.columns = columns
        self.schema = schema
        self.unique = unique
        self._orig_index = _orig_index
        self.kw = kw

    def reverse(self):
        """Return the inverse operation, i.e. a drop of this index."""
        return DropIndexOp.from_index(self.to_index())

    def to_diff_tuple(self):
        """Render this op as an autogenerate-style diff tuple."""
        return ("add_index", self.to_index())

    @classmethod
    def from_index(cls, index):
        """Build this op from an existing ``Index`` object."""
        return cls(
            index.name,
            index.table.name,
            sqla_compat._get_index_expressions(index),
            schema=index.table.schema,
            unique=index.unique,
            _orig_index=index,
            **index.kwargs
        )

    def to_index(self, migration_context=None):
        """Return an ``Index``, reusing the original if present."""
        if self._orig_index:
            return self._orig_index
        return schemaobj.SchemaObjects(migration_context).index(
            self.index_name, self.table_name, self.columns,
            schema=self.schema, unique=self.unique, **self.kw)

    @classmethod
    @util._with_legacy_names([('name', 'index_name')])
    def create_index(
            cls, operations,
            index_name, table_name, columns, schema=None,
            unique=False, **kw):
        """Issue a "create index" instruction using the current
        migration context.

        e.g.::

            from alembic import op
            op.create_index('ik_test', 't1', ['foo', 'bar'])

        Functional indexes can be produced by using the
        :func:`sqlalchemy.sql.expression.text` construct::

            from alembic import op
            from sqlalchemy import text
            op.create_index('ik_test', 't1', [text('lower(foo)')])

        :param index_name: name of the index.
        :param table_name: name of the owning table.
        :param columns: a list consisting of string column names and/or
         :func:`~sqlalchemy.sql.expression.text` constructs.
        :param schema: optional schema name to operate within; accepts a
         :class:`~sqlalchemy.sql.elements.quoted_name` construct.
        :param unique: if True, create a unique index.
        :param quote: force quoting of the column's name on or off; the
         default of ``None`` quotes according to case sensitivity and
         reserved-word status.
        :param ``**kw``: additional keyword arguments are dialect
         specific, passed in the form ``<dialectname>_<argname>``; see
         :ref:`dialect_toplevel` for documented arguments.

        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:

           * name -> index_name

        """
        return operations.invoke(cls(
            index_name, table_name, columns, schema=schema,
            unique=unique, **kw))

    @classmethod
    def batch_create_index(cls, operations, index_name, columns, **kw):
        """Issue a "create index" instruction using the
        current batch migration context.

        .. seealso::

            :meth:`.Operations.create_index`

        """
        return operations.invoke(cls(
            index_name, operations.impl.table_name, columns,
            schema=operations.impl.schema, **kw))
@Operations.register_operation("drop_index")
@BatchOperations.register_operation("drop_index", "batch_drop_index")
class DropIndexOp(MigrateOperation):
    """Represent a drop index operation."""

    def __init__(
            self, index_name, table_name=None, schema=None, _orig_index=None):
        # ``_orig_index`` is needed to make this op reversible.
        self.index_name = index_name
        self.table_name = table_name
        self.schema = schema
        self._orig_index = _orig_index

    def to_diff_tuple(self):
        """Render this op as an autogenerate-style diff tuple."""
        return ("remove_index", self.to_index())

    def reverse(self):
        """Return the inverse operation; requires the original index."""
        if self._orig_index is None:
            raise ValueError(
                "operation is not reversible; "
                "original index is not present")
        return CreateIndexOp.from_index(self._orig_index)

    @classmethod
    def from_index(cls, index):
        """Build this op from an existing ``Index`` object."""
        return cls(
            index.name,
            index.table.name,
            schema=index.table.schema,
            _orig_index=index
        )

    def to_index(self, migration_context=None):
        """Return an ``Index``, reusing the original if present."""
        if self._orig_index is not None:
            return self._orig_index
        # need a dummy column name here since SQLAlchemy
        # 0.7.6 and further raises on Index with no columns
        return schemaobj.SchemaObjects(migration_context).index(
            self.index_name, self.table_name, ['x'], schema=self.schema)

    @classmethod
    @util._with_legacy_names([
        ('name', 'index_name'),
        ('tablename', 'table_name')
    ])
    def drop_index(cls, operations, index_name, table_name=None, schema=None):
        """Issue a "drop index" instruction using the current
        migration context.

        e.g.::

            drop_index("accounts")

        :param index_name: name of the index.
        :param table_name: name of the owning table; required by some
         backends such as Microsoft SQL Server.
        :param schema: optional schema name to operate within; accepts a
         :class:`~sqlalchemy.sql.elements.quoted_name` construct.

        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:

           * name -> index_name

        """
        return operations.invoke(
            cls(index_name, table_name=table_name, schema=schema))

    @classmethod
    @util._with_legacy_names([('name', 'index_name')])
    def batch_drop_index(cls, operations, index_name, **kw):
        """Issue a "drop index" instruction using the
        current batch migration context.

        .. seealso::

            :meth:`.Operations.drop_index`

        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:

           * name -> index_name

        """
        # note: extra **kw is accepted for signature compatibility but is
        # not forwarded to the op (matches the established behavior).
        return operations.invoke(cls(
            index_name, table_name=operations.impl.table_name,
            schema=operations.impl.schema))
@Operations.register_operation("create_table")
class CreateTableOp(MigrateOperation):
    """Represent a create table operation."""

    def __init__(
            self, table_name, columns, schema=None, _orig_table=None, **kw):
        # ``columns`` holds Column and Constraint objects; ``_orig_table``
        # carries the live Table object when known.
        self.table_name = table_name
        self.columns = columns
        self.schema = schema
        self.kw = kw
        self._orig_table = _orig_table

    def reverse(self):
        """Return the inverse operation, i.e. a drop of this table."""
        return DropTableOp.from_table(self.to_table())

    def to_diff_tuple(self):
        """Render this op as an autogenerate-style diff tuple."""
        return ("add_table", self.to_table())

    @classmethod
    def from_table(cls, table):
        """Build this op from an existing ``Table`` object."""
        members = list(table.c) + list(table.constraints)
        return cls(
            table.name, members,
            schema=table.schema, _orig_table=table, **table.kwargs)

    def to_table(self, migration_context=None):
        """Return a ``Table``, reusing the original if present."""
        if self._orig_table is not None:
            return self._orig_table
        return schemaobj.SchemaObjects(migration_context).table(
            self.table_name, *self.columns,
            schema=self.schema, **self.kw)

    @classmethod
    @util._with_legacy_names([('name', 'table_name')])
    def create_table(cls, operations, table_name, *columns, **kw):
        """Issue a "create table" instruction using the current migration
        context.

        This directive receives an argument list similar to that of the
        traditional :class:`sqlalchemy.schema.Table` construct, but without
        the metadata::

            from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
            from alembic import op

            op.create_table(
                'account',
                Column('id', INTEGER, primary_key=True),
                Column('name', VARCHAR(50), nullable=False),
                Column('description', NVARCHAR(200)),
                Column('timestamp', TIMESTAMP, server_default=func.now())
            )

        :class:`~sqlalchemy.schema.Column` constructs are accepted
        directly from the SQLAlchemy library.  In particular, default
        values to be created on the database side are specified using
        the ``server_default`` parameter, and not ``default`` which only
        specifies Python-side defaults::

            from alembic import op
            from sqlalchemy import Column, TIMESTAMP, func

            # specify "DEFAULT NOW" along with the "timestamp" column
            op.create_table('account',
                Column('id', INTEGER, primary_key=True),
                Column('timestamp', TIMESTAMP, server_default=func.now())
            )

        The function also returns a newly created
        :class:`~sqlalchemy.schema.Table` object, corresponding to the
        table specification given, which is suitable for immediate SQL
        operations, in particular :meth:`.Operations.bulk_insert`.

        .. versionadded:: 0.7.0

        :param table_name: Name of the table
        :param ``*columns``: collection of
         :class:`~sqlalchemy.schema.Column` objects within the table, as
         well as optional :class:`~sqlalchemy.schema.Constraint` and
         :class:`~sqlalchemy.schema.Index` objects.
        :param schema: optional schema name to operate within; accepts a
         :class:`~sqlalchemy.sql.elements.quoted_name` construct.
        :param ``**kw``: other keyword arguments are passed to the
         underlying :class:`sqlalchemy.schema.Table` object created for
         the command.
        :return: the :class:`~sqlalchemy.schema.Table` object
         corresponding to the parameters given.

        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:

           * name -> table_name

        """
        # schema, if given, arrives via **kw and is accepted by __init__
        return operations.invoke(cls(table_name, columns, **kw))
@Operations.register_operation("drop_table")
class DropTableOp(MigrateOperation):
    """Represent a drop table operation."""

    def __init__(
            self, table_name, schema=None, table_kw=None, _orig_table=None):
        # ``_orig_table`` is needed to make this op reversible.
        self.table_name = table_name
        self.schema = schema
        self.table_kw = table_kw or {}
        self._orig_table = _orig_table

    def to_diff_tuple(self):
        """Render this op as an autogenerate-style diff tuple."""
        return ("remove_table", self.to_table())

    def reverse(self):
        """Return the inverse operation; requires the original table."""
        if self._orig_table is None:
            raise ValueError(
                "operation is not reversible; "
                "original table is not present")
        return CreateTableOp.from_table(self._orig_table)

    @classmethod
    def from_table(cls, table):
        """Build this op from an existing ``Table`` object."""
        return cls(table.name, schema=table.schema, _orig_table=table)

    def to_table(self, migration_context=None):
        """Return a ``Table``, reusing the original if present."""
        if self._orig_table is not None:
            return self._orig_table
        schema_obj = schemaobj.SchemaObjects(migration_context)
        return schema_obj.table(
            self.table_name,
            schema=self.schema,
            **self.table_kw)

    @classmethod
    @util._with_legacy_names([('name', 'table_name')])
    def drop_table(cls, operations, table_name, schema=None, **kw):
        """Issue a "drop table" instruction using the current
        migration context.

        e.g.::

            drop_table("accounts")

        :param table_name: Name of the table
        :param schema: Optional schema name to operate within.  To control
         quoting of the schema outside of the default behavior, use
         the SQLAlchemy construct
         :class:`~sqlalchemy.sql.elements.quoted_name`.

         .. versionadded:: 0.7.0 'schema' can now accept a
            :class:`~sqlalchemy.sql.elements.quoted_name` construct.

        :param ``**kw``: Other keyword arguments are passed to the
         underlying :class:`sqlalchemy.schema.Table` object created for
         the command.

        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:

           * name -> table_name

        """
        op = cls(table_name, schema=schema, table_kw=kw)
        # return the invoke() result for consistency with the other
        # Operations methods (it is None for a table drop).
        return operations.invoke(op)
class AlterTableOp(MigrateOperation):
    """Represent an alter table operation."""
    # Common base for operations that ALTER an existing table; subclasses
    # add the operation-specific attributes.
    def __init__(self, table_name, schema=None):
        # name of the table being altered
        self.table_name = table_name
        # optional schema qualifier; None means the default schema
        self.schema = schema
@Operations.register_operation("rename_table")
class RenameTableOp(AlterTableOp):
    """Represent a rename table operation."""

    def __init__(self, old_table_name, new_table_name, schema=None):
        # the base class stores the *old* name as ``table_name``
        super(RenameTableOp, self).__init__(old_table_name, schema=schema)
        self.new_table_name = new_table_name

    @classmethod
    def rename_table(
            cls, operations, old_table_name, new_table_name, schema=None):
        """Emit an ALTER TABLE to rename a table.

        :param old_table_name: old name.
        :param new_table_name: new name.
        :param schema: optional schema name to operate within; accepts a
         :class:`~sqlalchemy.sql.elements.quoted_name` construct.

        """
        return operations.invoke(
            cls(old_table_name, new_table_name, schema=schema))
@Operations.register_operation("alter_column")
@BatchOperations.register_operation("alter_column", "batch_alter_column")
class AlterColumnOp(AlterTableOp):
    """Represent an alter column operation."""

    def __init__(
            self, table_name, column_name, schema=None,
            existing_type=None,
            existing_server_default=False,
            existing_nullable=None,
            modify_nullable=None,
            modify_server_default=False,
            modify_name=None,
            modify_type=None,
            **kw
    ):
        super(AlterColumnOp, self).__init__(table_name, schema=schema)
        self.column_name = column_name
        # "existing_*" attributes describe the column as it currently is;
        # needed by backends (e.g. MySQL) requiring a full specification.
        self.existing_type = existing_type
        self.existing_server_default = existing_server_default
        self.existing_nullable = existing_nullable
        # "modify_*" attributes describe the requested changes; ``None``
        # (or ``False`` for the server default) means "no change".
        self.modify_nullable = modify_nullable
        self.modify_server_default = modify_server_default
        self.modify_name = modify_name
        self.modify_type = modify_type
        self.kw = kw

    def to_diff_tuple(self):
        """Render the requested changes as autogenerate-style diff tuples."""
        schema, tname, cname = self.schema, self.table_name, self.column_name
        diffs = []
        if self.modify_type is not None:
            diffs.append(
                ("modify_type", schema, tname, cname,
                 {
                     "existing_nullable": self.existing_nullable,
                     "existing_server_default": self.existing_server_default,
                 },
                 self.existing_type,
                 self.modify_type)
            )
        if self.modify_nullable is not None:
            diffs.append(
                ("modify_nullable", schema, tname, cname,
                 {
                     "existing_type": self.existing_type,
                     "existing_server_default": self.existing_server_default
                 },
                 self.existing_nullable,
                 self.modify_nullable)
            )
        if self.modify_server_default is not False:
            diffs.append(
                ("modify_default", schema, tname, cname,
                 {
                     "existing_nullable": self.existing_nullable,
                     "existing_type": self.existing_type
                 },
                 self.existing_server_default,
                 self.modify_server_default)
            )
        return diffs

    def has_changes(self):
        """Return True if any modification is requested by this op."""
        if (self.modify_nullable is not None or
                self.modify_server_default is not False or
                self.modify_type is not None):
            return True
        return any(key.startswith('modify_') for key in self.kw)

    def reverse(self):
        """Return an :class:`.AlterColumnOp` that undoes this one."""
        kw = self.kw.copy()
        kw['existing_type'] = self.existing_type
        kw['existing_nullable'] = self.existing_nullable
        kw['existing_server_default'] = self.existing_server_default
        if self.modify_type is not None:
            kw['modify_type'] = self.modify_type
        if self.modify_nullable is not None:
            kw['modify_nullable'] = self.modify_nullable
        if self.modify_server_default is not False:
            kw['modify_server_default'] = self.modify_server_default

        # swap each "existing_<attr>" value with its "modify_<attr>"
        # counterpart, for every attribute that is being modified
        suffixes = set()
        for key in kw:
            if key.startswith('existing_'):
                suffix = key[len('existing_'):]
            elif key.startswith('modify_'):
                suffix = key[len('modify_'):]
            else:
                continue
            if suffix:
                suffixes.add(suffix)
        for suffix in suffixes:
            modify_key = 'modify_%s' % suffix
            if modify_key in kw:
                existing_key = 'existing_%s' % suffix
                kw[existing_key], kw[modify_key] = \
                    kw[modify_key], kw[existing_key]

        return self.__class__(
            self.table_name, self.column_name, schema=self.schema,
            **kw
        )

    @classmethod
    @util._with_legacy_names([('name', 'new_column_name')])
    def alter_column(
            cls, operations, table_name, column_name,
            nullable=None,
            server_default=False,
            new_column_name=None,
            type_=None,
            existing_type=None,
            existing_server_default=False,
            existing_nullable=None,
            schema=None, **kw
    ):
        """Issue an "alter column" instruction using the
        current migration context.

        Generally, only the aspect of the column which is being changed,
        i.e. name, type, nullability, default, needs to be specified.
        Multiple changes can also be specified at once and the backend
        should "do the right thing", emitting each change either
        separately or together as the backend allows.

        MySQL has special requirements here, since MySQL cannot ALTER a
        column without a full specification.  When producing
        MySQL-compatible migration files, it is recommended that the
        ``existing_type``, ``existing_server_default``, and
        ``existing_nullable`` parameters be present, if not being
        altered.

        Type changes which are against the SQLAlchemy "schema" types
        :class:`~sqlalchemy.types.Boolean` and
        :class:`~sqlalchemy.types.Enum` may also add or drop constraints
        which accompany those types on backends that don't support them
        natively; the ``existing_type`` argument is used in this case to
        identify and remove a previous constraint bound to the type
        object.

        :param table_name: string name of the target table.
        :param column_name: string name of the target column, as it
         exists before the operation begins.
        :param nullable: optional; specify ``True`` or ``False`` to alter
         the column's nullability.
        :param server_default: optional; specify a string SQL expression,
         :func:`~sqlalchemy.sql.expression.text`, or
         :class:`~sqlalchemy.schema.DefaultClause` to indicate an
         alteration to the column's default value; set to ``None`` to
         have the default removed.
        :param new_column_name: optional; specify a string name here to
         indicate the new name within a column rename operation.
        :param type_: optional; a :class:`~sqlalchemy.types.TypeEngine`
         type object to specify a change to the column's type; for
         SQLAlchemy types that also indicate a constraint (i.e.
         :class:`~sqlalchemy.types.Boolean`,
         :class:`~sqlalchemy.types.Enum`), the constraint is also
         generated.
        :param autoincrement: set the ``AUTO_INCREMENT`` flag of the
         column; currently understood by the MySQL dialect.
        :param existing_type: optional; a
         :class:`~sqlalchemy.types.TypeEngine` type object to specify the
         previous type.  Required for all MySQL column alter operations
         that don't otherwise specify a new type, as well as for when
         nullability is being changed on a SQL Server column.  Also used
         if the type is a SQLAlchemy "schema" type which may define a
         constraint, so that the constraint can be dropped.
        :param existing_server_default: optional; the existing default
         value of the column.  Required on MySQL if an existing default
         is not being changed; else MySQL removes the default.
        :param existing_nullable: optional; the existing nullability of
         the column.  Required on MySQL if the existing nullability is
         not being changed; else MySQL sets this to NULL.
        :param existing_autoincrement: optional; the existing
         autoincrement of the column, for MySQL's system of altering a
         column that specifies ``AUTO_INCREMENT``.
        :param schema: optional schema name to operate within; accepts a
         :class:`~sqlalchemy.sql.elements.quoted_name` construct.

        """
        return operations.invoke(cls(
            table_name, column_name, schema=schema,
            existing_type=existing_type,
            existing_server_default=existing_server_default,
            existing_nullable=existing_nullable,
            modify_name=new_column_name,
            modify_type=type_,
            modify_server_default=server_default,
            modify_nullable=nullable,
            **kw
        ))

    @classmethod
    def batch_alter_column(
            cls, operations, column_name,
            nullable=None,
            server_default=False,
            new_column_name=None,
            type_=None,
            existing_type=None,
            existing_server_default=False,
            existing_nullable=None,
            **kw
    ):
        """Issue an "alter column" instruction using the current
        batch migration context.

        The table name and schema are taken from the batch context.

        .. seealso::

            :meth:`.Operations.alter_column`

        """
        return operations.invoke(cls(
            operations.impl.table_name, column_name,
            schema=operations.impl.schema,
            existing_type=existing_type,
            existing_server_default=existing_server_default,
            existing_nullable=existing_nullable,
            modify_name=new_column_name,
            modify_type=type_,
            modify_server_default=server_default,
            modify_nullable=nullable,
            **kw
        ))
@Operations.register_operation("add_column")
@BatchOperations.register_operation("add_column", "batch_add_column")
class AddColumnOp(AlterTableOp):
    """Represent an add column operation."""

    def __init__(self, table_name, column, schema=None):
        super(AddColumnOp, self).__init__(table_name, schema=schema)
        self.column = column

    def reverse(self):
        """Return the inverse operation, i.e. a drop of this column."""
        return DropColumnOp.from_column_and_tablename(
            self.schema, self.table_name, self.column)

    def to_diff_tuple(self):
        """Render this op as an autogenerate-style diff tuple."""
        return ("add_column", self.schema, self.table_name, self.column)

    def to_column(self):
        """Return the ``Column`` object to be added."""
        return self.column

    @classmethod
    def from_column(cls, col):
        """Build this op from a ``Column`` already bound to a ``Table``."""
        return cls(col.table.name, col, schema=col.table.schema)

    @classmethod
    def from_column_and_tablename(cls, schema, tname, col):
        """Build this op from a ``Column`` plus explicit table name/schema."""
        return cls(tname, col, schema=schema)

    @classmethod
    def add_column(cls, operations, table_name, column, schema=None):
        """Issue an "add column" instruction using the current
        migration context.

        e.g.::

            from alembic import op
            from sqlalchemy import Column, String

            op.add_column('organization',
                Column('name', String())
            )

        The provided :class:`~sqlalchemy.schema.Column` object can also
        specify a :class:`~sqlalchemy.schema.ForeignKey`, referencing a
        remote table name; Alembic will automatically generate a stub
        "referenced" table and emit a second ALTER statement in order to
        add the constraint separately::

            from alembic import op
            from sqlalchemy import Column, INTEGER, ForeignKey

            op.add_column('organization',
                Column('account_id', INTEGER, ForeignKey('accounts.id'))
            )

        Note that this statement uses the
        :class:`~sqlalchemy.schema.Column` construct as is from the
        SQLAlchemy library.  In particular, default values to be created
        on the database side are specified using the ``server_default``
        parameter, and not ``default`` which only specifies Python-side
        defaults::

            from alembic import op
            from sqlalchemy import Column, TIMESTAMP, func

            # specify "DEFAULT NOW" along with the column add
            op.add_column('account',
                Column('timestamp', TIMESTAMP, server_default=func.now())
            )

        :param table_name: String name of the parent table.
        :param column: a :class:`sqlalchemy.schema.Column` object
         representing the new column.
        :param schema: optional schema name to operate within; accepts a
         :class:`~sqlalchemy.sql.elements.quoted_name` construct.

        """
        return operations.invoke(cls(table_name, column, schema=schema))

    @classmethod
    def batch_add_column(cls, operations, column):
        """Issue an "add column" instruction using the current
        batch migration context.

        .. seealso::

            :meth:`.Operations.add_column`

        """
        return operations.invoke(cls(
            operations.impl.table_name, column,
            schema=operations.impl.schema))
@Operations.register_operation("drop_column")
@BatchOperations.register_operation("drop_column", "batch_drop_column")
class DropColumnOp(AlterTableOp):
    """Represent a drop column operation."""

    def __init__(
            self, table_name, column_name, schema=None,
            _orig_column=None, **kw):
        # ``_orig_column`` is needed to make this op reversible.
        super(DropColumnOp, self).__init__(table_name, schema=schema)
        self.column_name = column_name
        self.kw = kw
        self._orig_column = _orig_column

    def to_diff_tuple(self):
        """Render this op as an autogenerate-style diff tuple."""
        return (
            "remove_column", self.schema, self.table_name, self.to_column())

    def reverse(self):
        """Return the inverse operation; requires the original column."""
        if self._orig_column is None:
            raise ValueError(
                "operation is not reversible; "
                "original column is not present")
        return AddColumnOp.from_column_and_tablename(
            self.schema, self.table_name, self._orig_column)

    @classmethod
    def from_column_and_tablename(cls, schema, tname, col):
        """Build this op from a ``Column`` plus explicit table name/schema."""
        return cls(tname, col.name, schema=schema, _orig_column=col)

    def to_column(self, migration_context=None):
        """Return a ``Column``, reusing the original if present.

        When only the name is known, a placeholder column with a null
        type is produced.
        """
        if self._orig_column is not None:
            return self._orig_column
        return schemaobj.SchemaObjects(migration_context).column(
            self.column_name, NULLTYPE)

    @classmethod
    def drop_column(
            cls, operations, table_name, column_name, schema=None, **kw):
        """Issue a "drop column" instruction using the current
        migration context.

        e.g.::

            drop_column('organization', 'account_id')

        :param table_name: name of table
        :param column_name: name of column
        :param schema: optional schema name to operate within; accepts a
         :class:`~sqlalchemy.sql.elements.quoted_name` construct.
        :param mssql_drop_check: optional boolean; when ``True``, on
         Microsoft SQL Server only, first drop the CHECK constraint on
         the column using a SQL-script-compatible block that selects
         into a @variable from sys.check_constraints, then exec's a
         separate DROP CONSTRAINT for that constraint.
        :param mssql_drop_default: optional boolean; when ``True``, on
         Microsoft SQL Server only, first drop the DEFAULT constraint on
         the column using a SQL-script-compatible block that selects
         into a @variable from sys.default_constraints, then exec's a
         separate DROP CONSTRAINT for that default.
        :param mssql_drop_foreign_key: optional boolean; when ``True``,
         on Microsoft SQL Server only, first drop a single FOREIGN KEY
         constraint on the column using a SQL-script-compatible block
         that selects into a @variable from
         sys.foreign_keys/sys.foreign_key_columns, then exec's a
         separate DROP CONSTRAINT for that default.  Only works if the
         column has exactly one FK constraint which refers to it, at the
         moment.

         .. versionadded:: 0.6.2

        """
        return operations.invoke(
            cls(table_name, column_name, schema=schema, **kw))

    @classmethod
    def batch_drop_column(cls, operations, column_name, **kw):
        """Issue a "drop column" instruction using the current
        batch migration context.

        .. seealso::

            :meth:`.Operations.drop_column`

        """
        return operations.invoke(cls(
            operations.impl.table_name, column_name,
            schema=operations.impl.schema, **kw))
@Operations.register_operation("bulk_insert")
class BulkInsertOp(MigrateOperation):
    """Represent a bulk insert operation."""

    def __init__(self, table, rows, multiinsert=True):
        self.table = table
        self.rows = rows
        self.multiinsert = multiinsert

    @classmethod
    def bulk_insert(cls, operations, table, rows, multiinsert=True):
        """Issue a "bulk insert" operation using the current
        migration context.

        This provides a means of representing an INSERT of multiple rows
        which works equally well in the context of executing on a live
        connection as well as that of generating a SQL script.  In the
        case of a SQL script, the values are rendered inline into the
        statement.

        e.g.::

            from alembic import op
            from datetime import date
            from sqlalchemy.sql import table, column
            from sqlalchemy import String, Integer, Date

            # Create an ad-hoc table to use for the insert statement.
            accounts_table = table('account',
                column('id', Integer),
                column('name', String),
                column('create_date', Date)
            )

            op.bulk_insert(accounts_table,
                [
                    {'id':1, 'name':'John Smith',
                            'create_date':date(2010, 10, 5)},
                    {'id':2, 'name':'Ed Williams',
                            'create_date':date(2007, 5, 27)},
                    {'id':3, 'name':'Wendy Jones',
                            'create_date':date(2008, 8, 15)},
                ]
            )

        When using --sql mode, some datatypes may not render inline
        automatically, such as dates and other special types.  When this
        issue is present, :meth:`.Operations.inline_literal` may be
        used::

            op.bulk_insert(accounts_table,
                [
                    {'id':1, 'name':'John Smith',
                            'create_date':op.inline_literal("2010-10-05")},
                    {'id':2, 'name':'Ed Williams',
                            'create_date':op.inline_literal("2007-05-27")},
                    {'id':3, 'name':'Wendy Jones',
                            'create_date':op.inline_literal("2008-08-15")},
                ],
                multiinsert=False
            )

        When using :meth:`.Operations.inline_literal` in conjunction
        with :meth:`.Operations.bulk_insert`, in order for the statement
        to work in "online" (e.g. non --sql) mode, the
        :paramref:`~.Operations.bulk_insert.multiinsert` flag should be
        set to ``False``, which causes individual INSERT statements to
        be emitted to the database, each with a distinct VALUES clause,
        so that the "inline" values can still be rendered rather than
        attempting to pass the values as bound parameters.

        :param table: a table object which represents the target of
         the INSERT.
        :param rows: a list of dictionaries indicating rows.
        :param multiinsert: when at its default of True and --sql mode
         is not enabled, the INSERT statement will be executed using
         "executemany()" style, where all elements in the list of
         dictionaries are passed as bound parameters in a single list.
         Setting this to False results in individual INSERT statements
         being emitted per parameter set, and is needed in those cases
         where non-literal values are present in the parameter sets.

        """
        # invoked purely for its side effect; a bulk insert has no
        # return value
        operations.invoke(cls(table, rows, multiinsert=multiinsert))
@Operations.register_operation("execute")
class ExecuteSQLOp(MigrateOperation):
    """Represent an execute SQL operation."""

    def __init__(self, sqltext, execution_options=None):
        # Raw SQL (string or SQLAlchemy executable construct) plus the
        # optional execution options forwarded to the connection.
        self.sqltext = sqltext
        self.execution_options = execution_options

    @classmethod
    def execute(cls, operations, sqltext, execution_options=None):
        """Execute the given SQL using the current migration context.

        In a SQL script ("offline") context, the statement is emitted
        directly to the output stream; there is *no* return result, as
        this function is oriented towards generating a change script.
        For full interaction with a connected database, use the bind
        available from the context::

            from alembic import op
            connection = op.get_bind()

        Parameterized statements *will not work* in offline mode -
        INSERT, UPDATE and DELETE statements which refer to literal
        values need to render inline expressions; for simple cases use
        :meth:`.inline_literal`, and for "bulk" inserts consider
        :meth:`.bulk_insert`.  A statement built from ad-hoc
        :func:`sqlalchemy.sql.expression.table` /
        :func:`sqlalchemy.sql.expression.column` constructs is equally
        compatible with both online and offline mode, though it's a
        recommended practice to keep any table definition self-contained
        within the migration script.

        :param sql: Any legal SQLAlchemy expression, including:

         * a string
         * a :func:`sqlalchemy.sql.expression.text` construct.
         * a :func:`sqlalchemy.sql.expression.insert` construct.
         * a :func:`sqlalchemy.sql.expression.update`,
           :func:`sqlalchemy.sql.expression.insert`,
           or :func:`sqlalchemy.sql.expression.delete` construct.
         * Pretty much anything that's "executable" as described
           in :ref:`sqlexpression_toplevel`.

        :param execution_options: Optional dictionary of
         execution options, will be passed to
         :meth:`sqlalchemy.engine.Connection.execution_options`.
        """
        return operations.invoke(
            cls(sqltext, execution_options=execution_options))
class OpContainer(MigrateOperation):
    """Represent a sequence of operations operation."""

    def __init__(self, ops=()):
        # Child operations; nested containers are allowed.
        self.ops = ops

    def is_empty(self):
        """Return True when this container holds no operations."""
        return not self.ops

    def as_diffs(self):
        """Flatten the operation tree into a list of diff tuples."""
        return list(OpContainer._ops_as_diffs(self))

    @classmethod
    def _ops_as_diffs(cls, migrations):
        # Depth-first traversal: anything exposing an ``.ops`` attribute
        # is treated as a nested container, everything else as a leaf
        # operation that knows how to express itself as a diff tuple.
        for child in migrations.ops:
            if hasattr(child, 'ops'):
                for diff in cls._ops_as_diffs(child):
                    yield diff
            else:
                yield child.to_diff_tuple()
class ModifyTableOps(OpContainer):
    """Contains a sequence of operations that all apply to a single Table."""

    def __init__(self, table_name, ops, schema=None):
        super(ModifyTableOps, self).__init__(ops)
        # Target table identity shared by every contained operation.
        self.table_name = table_name
        self.schema = schema

    def reverse(self):
        """Return a copy targeting the same table, with every child
        operation reversed and the overall order inverted."""
        flipped = [child.reverse() for child in self.ops]
        flipped.reverse()
        return ModifyTableOps(
            self.table_name,
            ops=flipped,
            schema=self.schema
        )
class UpgradeOps(OpContainer):
    """contains a sequence of operations that would apply to the
    'upgrade' stream of a script.

    .. seealso::

        :ref:`customizing_revision`

    """

    def __init__(self, ops=(), upgrade_token="upgrades"):
        super(UpgradeOps, self).__init__(ops=ops)
        # Template token under which these operations are rendered.
        self.upgrade_token = upgrade_token

    def reverse_into(self, downgrade_ops):
        """Fill ``downgrade_ops`` in place with the reversal of this
        container's operations, and return it."""
        flipped = [child.reverse() for child in self.ops]
        flipped.reverse()
        downgrade_ops.ops[:] = flipped
        return downgrade_ops

    def reverse(self):
        """Return a new :class:`.DowngradeOps` that undoes this one."""
        return self.reverse_into(DowngradeOps(ops=[]))
class DowngradeOps(OpContainer):
    """contains a sequence of operations that would apply to the
    'downgrade' stream of a script.

    .. seealso::

        :ref:`customizing_revision`

    """

    def __init__(self, ops=(), downgrade_token="downgrades"):
        super(DowngradeOps, self).__init__(ops=ops)
        # Template token under which these operations are rendered.
        self.downgrade_token = downgrade_token

    def reverse(self):
        """Return a new :class:`.UpgradeOps` that undoes this one."""
        flipped = [child.reverse() for child in self.ops]
        flipped.reverse()
        return UpgradeOps(ops=flipped)
class MigrationScript(MigrateOperation):
    """represents a migration script.

    E.g. when autogenerate encounters this object, this corresponds to the
    production of an actual script file.

    A normal :class:`.MigrationScript` object would contain a single
    :class:`.UpgradeOps` and a single :class:`.DowngradeOps` directive.
    These are accessible via the ``.upgrade_ops`` and ``.downgrade_ops``
    attributes.

    In the case of an autogenerate operation that runs multiple times,
    such as the multiple database example in the "multidb" template,
    the ``.upgrade_ops`` and ``.downgrade_ops`` attributes are disabled,
    and instead these objects should be accessed via the ``.upgrade_ops_list``
    and ``.downgrade_ops_list`` list-based attributes.  These latter
    attributes are always available at the very least as single-element lists.

    .. versionchanged:: 0.8.1 the ``.upgrade_ops`` and ``.downgrade_ops``
       attributes should be accessed via the ``.upgrade_ops_list``
       and ``.downgrade_ops_list`` attributes if multiple autogenerate
       passes proceed on the same :class:`.MigrationScript` object.

    .. seealso::

        :ref:`customizing_revision`

    """

    def __init__(
            self, rev_id, upgrade_ops, downgrade_ops,
            message=None,
            imports=set(), head=None, splice=None,
            branch_label=None, version_path=None, depends_on=None):
        # NOTE(review): ``imports=set()`` is a shared mutable default;
        # callers appear to treat it as read-only, but confirm before
        # ever mutating it in place.
        self.rev_id = rev_id
        self.message = message
        self.imports = imports
        self.head = head
        self.splice = splice
        self.branch_label = branch_label
        self.version_path = version_path
        self.depends_on = depends_on
        # These two assignments go through the property setters below,
        # which normalize single objects into lists.
        self.upgrade_ops = upgrade_ops
        self.downgrade_ops = downgrade_ops

    @property
    def upgrade_ops(self):
        """An instance of :class:`.UpgradeOps`.

        .. seealso::

            :attr:`.MigrationScript.upgrade_ops_list`
        """
        if len(self._upgrade_ops) > 1:
            raise ValueError(
                "This MigrationScript instance has a multiple-entry "
                "list for UpgradeOps; please use the "
                "upgrade_ops_list attribute.")
        elif not self._upgrade_ops:
            return None
        else:
            return self._upgrade_ops[0]

    @upgrade_ops.setter
    def upgrade_ops(self, upgrade_ops):
        # Accept a single UpgradeOps or a list thereof; store as a list.
        self._upgrade_ops = util.to_list(upgrade_ops)
        for elem in self._upgrade_ops:
            assert isinstance(elem, UpgradeOps)

    @property
    def downgrade_ops(self):
        """An instance of :class:`.DowngradeOps`.

        .. seealso::

            :attr:`.MigrationScript.downgrade_ops_list`
        """
        if len(self._downgrade_ops) > 1:
            raise ValueError(
                "This MigrationScript instance has a multiple-entry "
                "list for DowngradeOps; please use the "
                "downgrade_ops_list attribute.")
        elif not self._downgrade_ops:
            return None
        else:
            return self._downgrade_ops[0]

    @downgrade_ops.setter
    def downgrade_ops(self, downgrade_ops):
        # Accept a single DowngradeOps or a list thereof; store as a list.
        self._downgrade_ops = util.to_list(downgrade_ops)
        for elem in self._downgrade_ops:
            assert isinstance(elem, DowngradeOps)

    @property
    def upgrade_ops_list(self):
        """A list of :class:`.UpgradeOps` instances.

        This is used in place of the :attr:`.MigrationScript.upgrade_ops`
        attribute when dealing with a revision operation that does
        multiple autogenerate passes.

        .. versionadded:: 0.8.1

        """
        return self._upgrade_ops

    @property
    def downgrade_ops_list(self):
        """A list of :class:`.DowngradeOps` instances.

        This is used in place of the :attr:`.MigrationScript.downgrade_ops`
        attribute when dealing with a revision operation that does
        multiple autogenerate passes.

        .. versionadded:: 0.8.1

        """
        return self._downgrade_ops
| gpl-3.0 |
coopsource/taiga-back | taiga/projects/references/permissions.py | 21 | 1070 | # Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from taiga.base.api.permissions import (TaigaResourcePermission, HasProjectPerm,
IsProjectOwner, AllowAny)
class ResolverPermission(TaigaResourcePermission):
    # Permission set for the reference resolver endpoint: listing only
    # requires the 'view_project' permission on the target project.
    list_perms = HasProjectPerm('view_project')
| agpl-3.0 |
hexlism/css_platform | sleepyenv/lib/python2.7/site-packages/boto-2.38.0-py2.7.egg/boto/cloudsearch2/layer2.py | 136 | 3814 | # Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.cloudsearch2.layer1 import CloudSearchConnection
from boto.cloudsearch2.domain import Domain
from boto.compat import six
class Layer2(object):
    """High-level interface to Amazon CloudSearch (2013 API), wrapping a
    :class:`CloudSearchConnection` (Layer1) and returning
    :class:`~boto.cloudsearch2.domain.Domain` objects."""

    def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
                 is_secure=True, port=None, proxy=None, proxy_port=None,
                 host=None, debug=0, session_token=None, region=None,
                 validate_certs=True, sign_request=False):
        # Accept a region *name* string and resolve it to a region object;
        # an unknown name leaves ``region`` as the string, which the
        # Layer1 constructor must then deal with.
        if isinstance(region, six.string_types):
            import boto.cloudsearch2
            for region_info in boto.cloudsearch2.regions():
                if region_info.name == region:
                    region = region_info
                    break

        self.layer1 = CloudSearchConnection(
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            is_secure=is_secure,
            port=port,
            proxy=proxy,
            proxy_port=proxy_port,
            host=host,
            debug=debug,
            security_token=session_token,
            region=region,
            validate_certs=validate_certs,
            sign_request=sign_request)

    def list_domains(self, domain_names=None):
        """
        Return a list of objects for each domain defined in the
        current account.

        :param domain_names: optional list of domain names to restrict
            the query to; None means all domains.
        :rtype: list of :class:`boto.cloudsearch2.domain.Domain`
        """
        domain_data = self.layer1.describe_domains(domain_names)
        # Unwrap the nested response envelope down to the status list.
        domain_data = (domain_data['DescribeDomainsResponse']
                                  ['DescribeDomainsResult']
                                  ['DomainStatusList'])
        return [Domain(self.layer1, data) for data in domain_data]

    def create_domain(self, domain_name):
        """
        Create a new CloudSearch domain and return the corresponding object.

        :return: Domain object, or None if the domain isn't found
        :rtype: :class:`boto.cloudsearch2.domain.Domain`
        """
        data = self.layer1.create_domain(domain_name)
        return Domain(self.layer1, data['CreateDomainResponse']
                                       ['CreateDomainResult']
                                       ['DomainStatus'])

    def lookup(self, domain_name):
        """
        Lookup a single domain

        :param domain_name: The name of the domain to look up
        :type domain_name: str

        :return: Domain object, or None if the domain isn't found
        :rtype: :class:`boto.cloudsearch2.domain.Domain`
        """
        domains = self.list_domains(domain_names=[domain_name])
        # Implicitly returns None when no matching domain exists.
        if len(domains) > 0:
            return domains[0]
NamPNQ/rethinkengine | tests/test_connection.py | 1 | 1550 | import sys
import os
sys.path.append(os.path.abspath('..'))
try:
import unittest2 as unittest
except ImportError:
import unittest
import rethinkdb
from rethinkengine import *
import rethinkengine.connection
from rethinkengine.connection import get_connection, ConnectionError
class ConnectionTest(unittest.TestCase):
    """Integration tests for rethinkengine connection registration.

    These tests require a reachable RethinkDB server, since connect()
    opens a real :class:`rethinkdb.net.Connection`.
    """

    def tearDown(self):
        # Reset the module-level registries so each test starts with a
        # clean slate and no cached connections leak between tests.
        rethinkengine.connection._connection_settings = {}
        rethinkengine.connection._connections = {}
        rethinkengine.connection._dbs = {}

    def test_connect(self):
        """Ensure that the connect() method works properly.
        """
        connect('test')
        conn = get_connection()
        self.assertTrue(isinstance(conn, rethinkdb.net.Connection))

    def test_sharing_connections(self):
        """Ensure that connections are shared when the connection settings are exactly the same
        """
        connect('test', alias='testdb1')
        expected_connection = get_connection('testdb1')

        connect('test', alias='testdb2')
        actual_connection = get_connection('testdb2')

        self.assertEqual(expected_connection, actual_connection)

    def test_register_connection(self):
        """Ensure that connections with different aliases may be registered.
        """
        register_connection('testdb', 'rethinkenginetest2')
        # The default alias was never registered, so a plain
        # get_connection() must raise ConnectionError.
        self.assertRaises(ConnectionError, get_connection)
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, rethinkdb.net.Connection))
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
| mit |
Bismarrck/pymatgen | pymatgen/analysis/pourbaix/maker.py | 3 | 12323 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals
import logging
import numpy as np
import itertools
from scipy.spatial import ConvexHull
from pymatgen.analysis.pourbaix.entry import MultiEntry, ion_or_solid_comp_object
from pymatgen.core.periodic_table import Element
from pymatgen.core.composition import Composition
from pymatgen.analysis.reaction_calculator import Reaction, ReactionError
"""
Module containing analysis classes which compute a pourbaix diagram given a
target compound/element.
"""
from six.moves import zip
# Module metadata.
__author__ = "Sai Jayaraman"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.0"
__maintainer__ = "Sai Jayaraman"
__credits__ = "Arunima Singh, Joseph Montoya"
__email__ = "sjayaram@mit.edu"
__status__ = "Development"
__date__ = "Nov 1, 2012"

logger = logging.getLogger(__name__)

# pH prefactor in volts per pH unit: 0.0591 is 2.303*R*T/F at 298 K
# (the Nernst slope) — presumably; confirm against usage in entries.
PREFAC = 0.0591
# Chemical potential of water (eV), subtracted per H2O in the entry
# corrections below.  NOTE(review): assumed to be the formation free
# energy of H2O — confirm before relying on the value.
MU_H2O = -2.4583

# TODO: There's a lot of functionality here that diverges
# based on whether or not the pbx diagram is multielement
# or not. Could be a more elegant way to
# treat the two distinct modes.
class PourbaixDiagram(object):
    """
    Class to create a Pourbaix diagram from entries

    Args:
        entries: Entries list containing both Solids and Ions
        comp_dict: Dictionary of compositions
    """
    def __init__(self, entries, comp_dict=None):
        # Split incoming entries by phase type; anything else is an error.
        self._solid_entries = list()
        self._ion_entries = list()
        for entry in entries:
            if entry.phase_type == "Solid":
                self._solid_entries.append(entry)
            elif entry.phase_type == "Ion":
                self._ion_entries.append(entry)
            else:
                raise Exception("Incorrect Phase type - needs to be \
Pourbaix entry of phase type Ion/Solid")
        if len(self._ion_entries) == 0:
            raise Exception("No ion phase. Equilibrium between ion/solid "
                            "is required to make a Pourbaix Diagram")
        self._unprocessed_entries = self._solid_entries + self._ion_entries
        self._elt_comp = comp_dict

        if comp_dict and len(comp_dict) > 1:
            # Multi-element mode: collect every non-H/O element appearing
            # in the requested compositions.
            self._multielement = True
            pbx_elements = set()
            for comp in comp_dict.keys():
                for el in [el for el in
                           ion_or_solid_comp_object(comp).elements
                           if el not in ["H", "O"]]:
                    pbx_elements.add(el.symbol)
            self.pourbaix_elements = pbx_elements
            # Solve the linear system A x = w for the per-element
            # fractions implied by the weighted composition dict.
            w = [comp_dict[key] for key in comp_dict]
            A = []
            for comp in comp_dict:
                comp_obj = ion_or_solid_comp_object(comp)
                Ai = []
                for elt in self.pourbaix_elements:
                    Ai.append(comp_obj[elt])
                A.append(Ai)
            A = np.array(A).T.astype(float)
            w = np.array(w)
            A /= np.dot([a.sum() for a in A], w)
            x = np.linalg.solve(A, w)
            self._elt_comp = dict(zip(self.pourbaix_elements, x))
        else:
            # Single-element mode: take the first non-H/O element of the
            # first entry's composition.
            self._multielement = False
            self.pourbaix_elements = [el.symbol
                                      for el in entries[0].composition.elements
                                      if el.symbol not in ["H", "O"]]
            # TODO: document the physical meaning
            # of ternary oxide of pbx diagrams in both modes
            self._elt_comp = {self.pourbaix_elements[0]: 1.0}
        self._make_pourbaix_diagram()

    def _create_conv_hull_data(self):
        """
        Make data conducive to convex hull generator.
        """
        if self._multielement:
            self._all_entries = self._process_multielement_entries()
        else:
            self._all_entries = self._unprocessed_entries
        entries_to_process = list()
        for entry in self._all_entries:
            # Normalize each entry, then fold the water chemical
            # potential and the concentration term into its correction.
            entry.scale(entry.normalization_factor)
            entry.correction += (- MU_H2O * entry.nH2O + entry.conc_term)
            entries_to_process.append(entry)
        self._qhull_entries = entries_to_process
        return self._process_conv_hull_data(entries_to_process)

    def _process_conv_hull_data(self, entries_to_process):
        """
        From a sequence of ion+solid entries, generate the necessary data
        for generation of the convex hull.
        """
        # Each row is (npH, nPhi, g0); rows and entries are co-sorted by
        # g0 so indices into one match indices into the other.
        data = []
        for entry in entries_to_process:
            row = [entry.npH, entry.nPhi, entry.g0]
            data.append(row)
        temp = sorted(zip(data, self._qhull_entries),
                      key=lambda x: x[0][2])
        [data, self._qhull_entries] = list(zip(*temp))
        return data

    def _process_multielement_entries(self):
        """
        Create entries for multi-element Pourbaix construction.

        This works by finding all possible linear combinations
        of entries that can result in the specified composition
        from the initialized comp_dict.
        """
        N = len(self._elt_comp)  # No. of elements
        entries = self._unprocessed_entries
        dummy_prod = Composition(self._elt_comp)
        total_comp = Composition(self._elt_comp)

        # generate all possible combinations of compounds that have all elts
        entry_combos = [itertools.combinations(entries, j+1) for j in range(N)]
        entry_combos = itertools.chain.from_iterable(entry_combos)
        entry_combos = filter(lambda x: dummy_prod < MultiEntry(x).total_composition,
                              entry_combos)

        # Generate and filter entries
        processed_entries = []
        for entry_combo in entry_combos:
            processed_entry = self.process_multientry(entry_combo, total_comp)
            if processed_entry is not None:
                processed_entries.append(processed_entry)
        return processed_entries

    @staticmethod
    def process_multientry(entry_list, prod_comp):
        """
        Static method for finding a multientry based on
        a list of entries and a product composition.
        Essentially checks to see if a valid aqueous
        reaction exists between the entries and the
        product composition and returns a MultiEntry
        with weights according to the coefficients if so.

        Args:
            entry_list ([Entry]): list of entries from which to
                create a MultiEntry
            prod_comp (Composition): composition constraint for setting
                weights of MultiEntry
        """
        dummy_oh = [Composition("H"), Composition("O")]
        try:
            # Get balanced reaction coeffs, ensuring all < 0 or conc thresh
            # Note that we get reduced compositions for solids and non-reduced
            # compositions for ions because ions aren't normalized due to
            # their charge state.
            entry_comps = [e.composition if e.phase_type=='Ion'
                           else e.composition.reduced_composition
                           for e in entry_list]
            rxn = Reaction(entry_comps + dummy_oh, [prod_comp])
            thresh = np.array([pe.conc if pe.phase_type == "Ion"
                               else 1e-3 for pe in entry_list])
            coeffs = -np.array([rxn.get_coeff(comp) for comp in entry_comps])
            if (coeffs > thresh).all():
                # Weights are coefficients normalized to the first entry.
                weights = coeffs / coeffs[0]
                return MultiEntry(entry_list, weights=weights.tolist())
            else:
                return None
        except ReactionError:
            return None

    def _make_pourbaix_diagram(self):
        """
        Calculates entries on the convex hull in the dual space.
        """
        stable_entries = set()
        self._qhull_data = self._create_conv_hull_data()
        dim = len(self._qhull_data[0])
        if len(self._qhull_data) < dim:
            # TODO: might want to lift this restriction and
            # supply a warning instead, should work even if it's slow.
            raise NotImplementedError("Can only do elements with at-least "
                                      "3 entries for now")
        if len(self._qhull_data) == dim:
            # Exactly as many points as dimensions: a single facet.
            self._facets = [list(range(dim))]
        else:
            facets_hull = np.array(ConvexHull(self._qhull_data).simplices)
            self._facets = np.sort(np.array(facets_hull))
            logger.debug("Final facets are\n{}".format(self._facets))

            logger.debug("Removing vertical facets...")
            vert_facets_removed = list()
            for facet in self._facets:
                facetmatrix = np.zeros((len(facet), len(facet)))
                count = 0
                for vertex in facet:
                    facetmatrix[count] = np.array(self._qhull_data[vertex])
                    facetmatrix[count, dim - 1] = 1
                    count += 1
                # A near-zero determinant means the facet is vertical
                # (parallel to the energy axis): discard it.
                if abs(np.linalg.det(facetmatrix)) > 1e-8:
                    vert_facets_removed.append(facet)
                else:
                    logger.debug("Removing vertical facet : {}".format(facet))

            logger.debug("Removing UCH facets by eliminating normal.z >0 ...")

            # Find center of hull
            vertices = set()
            for facet in vert_facets_removed:
                for vertex in facet:
                    vertices.add(vertex)
            c = [0.0, 0.0, 0.0]
            c[0] = np.average([self._qhull_data[vertex][0]
                               for vertex in vertices])
            c[1] = np.average([self._qhull_data[vertex][1]
                               for vertex in vertices])
            c[2] = np.average([self._qhull_data[vertex][2]
                               for vertex in vertices])

            # Shift origin to c
            new_qhull_data = np.array(self._qhull_data)
            for vertex in vertices:
                new_qhull_data[vertex] -= c

            # For each facet, find normal n, find dot product with P, and
            # check if this is -ve
            final_facets = list()
            for facet in vert_facets_removed:
                a = new_qhull_data[facet[1]] - new_qhull_data[facet[0]]
                b = new_qhull_data[facet[2]] - new_qhull_data[facet[0]]
                n = np.cross(a, b)
                val = np.dot(n, new_qhull_data[facet[0]])
                if val < 0:
                    # Flip so the normal points away from the hull center.
                    n = -n
                if n[2] <= 0:
                    final_facets.append(facet)
                else:
                    logger.debug("Removing UCH facet : {}".format(facet))
            final_facets = np.array(final_facets)
            self._facets = final_facets

        stable_vertices = set()
        for facet in self._facets:
            for vertex in facet:
                stable_vertices.add(vertex)
                stable_entries.add(self._qhull_entries[vertex])
        self._stable_entries = stable_entries
        self._vertices = stable_vertices

    @property
    def facets(self):
        """
        Facets of the convex hull in the form of [[1,2,3],[4,5,6]...]
        """
        return self._facets

    @property
    def qhull_data(self):
        """
        Data used in the convex hull operation. This is essentially a matrix of
        composition data and energy per atom values created from qhull_entries.
        """
        return self._qhull_data

    @property
    def qhull_entries(self):
        """
        Return qhull entries
        """
        return self._qhull_entries

    @property
    def stable_entries(self):
        """
        Returns the stable entries in the Pourbaix diagram.
        """
        return list(self._stable_entries)

    @property
    def unstable_entries(self):
        """
        Returns all unstable entries in the Pourbaix diagram
        """
        return [e for e in self.qhull_entries if e not in self.stable_entries]

    @property
    def all_entries(self):
        """
        Return all entries
        """
        return self._all_entries

    @property
    def vertices(self):
        """
        Return vertices of the convex hull
        """
        return self._vertices

    @property
    def unprocessed_entries(self):
        """
        Return unprocessed entries
        """
        return self._unprocessed_entries
| mit |
DeBortoliWines/Bika-LIMS | bika/lims/utils/analysis.py | 1 | 11236 | # -*- coding: utf-8 -*-
import math
import zope.event
from bika.lims.utils import formatDecimalMark
from Products.Archetypes.event import ObjectInitializedEvent
from Products.CMFCore.WorkflowCore import WorkflowException
from Products.CMFPlone.utils import _createObjectByType
def create_analysis(context, service, keyword, interim_fields):
    """Create and initialise an Analysis object inside ``context``.

    The analysis is linked to ``service``, seeded with the given interim
    fields, reindexed, announced via ``ObjectInitializedEvent`` and then
    pushed through the appropriate workflow transition (sampling or
    non-sampling, depending on the site setup).

    :param context: container (e.g. an Analysis Request) the analysis
        is created in
    :param service: the Analysis Service the new analysis is based on
    :param keyword: id/keyword for the new analysis object
    :param interim_fields: interim field definitions to copy onto it
    :return: the newly created analysis object
    """
    # Determine if the sampling workflow is enabled
    workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
    # Create the analysis
    analysis = _createObjectByType("Analysis", context, keyword)
    analysis.setService(service)
    analysis.setInterimFields(interim_fields)
    analysis.setMaxTimeAllowed(service.getMaxTimeAllowed())
    analysis.unmarkCreationFlag()
    analysis.reindexObject()
    # Trigger the initialization event of the new object
    zope.event.notify(ObjectInitializedEvent(analysis))
    # Perform the appropriate workflow action
    try:
        workflow_action = 'sampling_workflow' if workflow_enabled \
            else 'no_sampling_workflow'
        context.portal_workflow.doActionFor(analysis, workflow_action)
    except WorkflowException:
        # The analysis may have been transitioned already!
        # I am leaving this code here though, to prevent regression.
        pass
    # Return the newly created analysis
    return analysis
def get_significant_digits(numeric_value):
    """
    Returns the precision for a given floatable value.
    If value is None or not floatable, returns None.
    Will return positive values if the result is below 0 and will
    return 0 or positive values if the result is above 0.

    :param numeric_value: the value to get the precision from
    :return: the numeric_value's precision (order of magnitude), or None
    """
    try:
        numeric_value = float(numeric_value)
    except (TypeError, ValueError):
        # BUGFIX: float(None) raises TypeError (not ValueError), which
        # was previously uncaught despite the documented contract that
        # None returns None.  Both exceptions mean "not floatable".
        return None
    if numeric_value == 0:
        return 0
    return int(math.floor(math.log10(abs(numeric_value))))
def format_uncertainty(analysis, result, decimalmark='.', sciformat=1):
    """
    Returns the formatted uncertainty according to the analysis, result
    and decimal mark specified following these rules:

    If the "Calculate precision from uncertainties" is enabled in the
    Analysis service, and

    a) the non-decimal number of digits of the result is at or above the
       service's ExponentialFormatPrecision, the uncertainty is
       formatted in scientific notation, using the same exponent as the
       result and rounded to the same precision as the result;

    b) otherwise the uncertainty is formatted in decimal notation,
       rounded according to the service precision.

    If "Calculate precision from Uncertainties" is disabled in the
    analysis service, the same rules apply, but the fixed length
    precision is used for rounding instead.

    For further details, visit
    https://jira.bikalabs.com/browse/LIMS-1334

    If the result is not floatable or no uncertainty is defined, an
    empty string is returned.  The default decimal mark '.' is replaced
    by the specified decimalmark.

    :param analysis: the analysis from which the uncertainty, precision
        and other additional info have to be retrieved
    :param result: result of the analysis. Used to retrieve and/or
        calculate the precision and/or uncertainty
    :param decimalmark: decimal mark to use. By default '.'
    :param sciformat: 1. The sci notation has to be formatted as aE^+b
                      2. The sci notation has to be formatted as ax10^b
                      3. As 2, but with super html entity for exp
                      4. The sci notation has to be formatted as a·10^b
                      5. As 4, but with super html entity for exp
                      By default 1
    :return: the formatted uncertainty, or '' if it cannot be computed
    """
    try:
        result = float(result)
    except (TypeError, ValueError):
        # BUGFIX: float(None) raises TypeError, which was previously
        # uncaught; both exceptions mean "not floatable".
        return ""

    service = analysis.getService()
    uncertainty = service.getUncertainty(result)
    if uncertainty is None:
        return ""

    # Threshold (order of magnitude) above which scientific notation is used
    threshold = service.getExponentialFormatPrecision()

    # Order of magnitude of the result decides the notation
    sig_digits = get_significant_digits(result)
    negative = sig_digits < 0
    sign = '-' if negative else ''
    sig_digits = abs(sig_digits)
    sci = sig_digits >= threshold and sig_digits > 0

    formatted = ''
    if sci:
        # Scientific notation: scale the uncertainty with the same
        # exponent that will be applied to the result.
        if negative:
            res = float(uncertainty) * (10 ** sig_digits)
        else:
            res = float(uncertainty) / (10 ** sig_digits)
        # Round to the result's precision; render integers without '.0'
        res = float(str("%%.%sf" % (sig_digits - 1)) % res)
        res = int(res) if res.is_integer() else res
        if sciformat in (2, 3, 4, 5):
            if sciformat == 2:
                # ax10^b or ax10^-b
                formatted = "%s%s%s%s" % (res, "x10^", sign, sig_digits)
            elif sciformat == 3:
                # ax10<sup>b</sup> or ax10<sup>-b</sup>
                formatted = "%s%s%s%s%s" % (res, "x10<sup>", sign, sig_digits, "</sup>")
            elif sciformat == 4:
                # a·10^b or a·10^-b
                formatted = "%s%s%s%s" % (res, "·10^", sign, sig_digits)
            elif sciformat == 5:
                # a·10<sup>b</sup> or a·10<sup>-b</sup>
                formatted = "%s%s%s%s%s" % (res, "·10<sup>", sign, sig_digits, "</sup>")
        else:
            # Default format: aE^+b with a zero-padded two-digit exponent
            sig_digits = "%02d" % sig_digits
            formatted = "%s%s%s%s" % (res, "e", sign, sig_digits)
    else:
        # Decimal notation, rounded to the service's precision
        prec = service.getPrecision(result)
        prec = prec if prec else ''
        formatted = str("%%.%sf" % prec) % uncertainty
    return formatDecimalMark(formatted, decimalmark)
def format_numeric_result(analysis, result, decimalmark='.', sciformat=1):
    """
    Returns the formatted number part of a results value. This is
    responsible for deciding the precision and notation of numeric
    values in accordance to the uncertainty. If a non-numeric result
    value is given, the value will be returned unchanged.

    If the "Calculate precision from uncertainties" is enabled in the
    Analysis service, and

    a) the non-decimal number of digits of the result is at or above the
       service's ExponentialFormatPrecision, the result is formatted in
       scientific notation;

    b) otherwise the result is formatted in decimal notation, rounded in
       accordance to the precision (calculated from the uncertainty).

    If "Calculate precision from Uncertainties" is disabled in the
    analysis service, the same rules apply, but the fixed length
    precision is used for rounding instead.

    For further details, visit
    https://jira.bikalabs.com/browse/LIMS-1334

    The default decimal mark '.' will be replaced by the decimalmark
    specified.

    :param analysis: the analysis from which the uncertainty, precision
        and other additional info have to be retrieved
    :param result: result to be formatted.
    :param decimalmark: decimal mark to use. By default '.'
    :param sciformat: 1. The sci notation has to be formatted as aE^+b
                      2. The sci notation has to be formatted as ax10^b
                      3. As 2, but with super html entity for exp
                      4. The sci notation has to be formatted as a·10^b
                      5. As 4, but with super html entity for exp
                      By default 1
    :return: the formatted result
    """
    try:
        result = float(result)
    except (TypeError, ValueError):
        # BUGFIX: float(None) raises TypeError, which was previously
        # uncaught; non-floatable values are returned unchanged.
        return result

    service = analysis.getService()
    # Threshold (order of magnitude) above which scientific notation is used
    threshold = service.getExponentialFormatPrecision()

    # Order of magnitude of the result decides the notation.
    sig_digits = get_significant_digits(result)
    negative = sig_digits < 0
    sign = '-' if negative else ''
    sig_digits = abs(sig_digits)
    # NOTE(review): unlike format_uncertainty, this does not require
    # sig_digits > 0 — confirm whether that asymmetry is intended.
    sci = sig_digits >= threshold

    formatted = ''
    if sci:
        # Scientific notation
        if sciformat in (2, 3, 4, 5):
            if negative:
                res = float(result) * (10 ** sig_digits)
            else:
                res = float(result) / (10 ** sig_digits)
            # Round the mantissa; render integers without '.0'
            res = float(str("%%.%sf" % (sig_digits - 1)) % res)
            res = int(res) if res.is_integer() else res
            if sciformat == 2:
                # ax10^b or ax10^-b
                formatted = "%s%s%s%s" % (res, "x10^", sign, sig_digits)
            elif sciformat == 3:
                # ax10<sup>b</sup> or ax10<sup>-b</sup>
                formatted = "%s%s%s%s%s" % (res, "x10<sup>", sign, sig_digits, "</sup>")
            elif sciformat == 4:
                # a·10^b or a·10^-b
                formatted = "%s%s%s%s" % (res, "·10^", sign, sig_digits)
            elif sciformat == 5:
                # a·10<sup>b</sup> or a·10<sup>-b</sup>
                formatted = "%s%s%s%s%s" % (res, "·10<sup>", sign, sig_digits, "</sup>")
        else:
            # Default format: aE^+b
            formatted = str("%%.%se" % sig_digits) % result
    else:
        # Decimal notation, rounded to the service's precision
        prec = service.getPrecision(result)
        prec = prec if prec else ''
        formatted = str("%%.%sf" % prec) % result
    # Strip a redundant decimal part from integral values
    formatted = str(int(float(formatted))) if float(formatted).is_integer() else formatted
    return formatDecimalMark(formatted, decimalmark)
| agpl-3.0 |
kaplun/invenio-records | requirements.py | 148 | 5904 | #!/usr/bin/env python2
#
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Generate minimal requirements from `setup.py` + `requirements-devel.txt`."""
from __future__ import print_function
import argparse
import re
import sys
import mock
import pkg_resources
import setuptools
def parse_set(string):
    """Turn a comma separated string into a set of its items.

    A blank (or whitespace-only) string yields the empty set.
    """
    stripped = string.strip()
    if not stripped:
        return set()
    return set(stripped.split(","))
def minver_error(pkg_name):
    """Print an error about a missing minimal-version pin and exit(1)."""
    message = ('ERROR: specify minimal version of "{}" using '
               '">=" or "=="').format(pkg_name)
    print(message, file=sys.stderr)
    sys.exit(1)
def parse_pip_file(path):
    """Parse a pip requirements file.

    Returns a 3-tuple ``(rdev, rnormal, stuff)``:

    * ``rdev`` maps lowercased egg names to their ``-e`` (editable/devel)
      requirement lines,
    * ``rnormal`` lists ordinary requirement lines,
    * ``stuff`` collects any other pip commands found in the file.

    Files referenced with ``-r`` are parsed recursively and merged in
    (already-present devel entries take precedence).  If *path* cannot be
    opened, a warning is printed to stderr and empty results are returned.
    """
    # requirement lines sorted by importance
    # also collect other pip commands
    rdev = dict()
    rnormal = []
    stuff = []
    try:
        with open(path) as f:
            for line in f:
                line = line.strip()
                # see https://pip.readthedocs.org/en/1.1/requirements.html
                if line.startswith('-e'):
                    # devel requirement
                    splitted = line.split('#egg=')
                    rdev[splitted[1].lower()] = line
                elif line.startswith('-r'):
                    # recursive file command
                    splitted = re.split('-r\\s+', line)
                    subrdev, subrnormal, substuff = parse_pip_file(splitted[1])
                    # .items() (not py2-only .iteritems()) works on 2 and 3
                    for k, v in subrdev.items():
                        if k not in rdev:
                            rdev[k] = v
                    rnormal.extend(subrnormal)
                    # bugfix: was ``result.extend(substuff)`` -- ``result`` is
                    # not defined in this scope, so any ``-r`` line raised a
                    # NameError (or silently mutated the module-level dict)
                    stuff.extend(substuff)
                elif line.startswith('-'):
                    # another special command we don't recognize
                    stuff.append(line)
                else:
                    # ordinary requirement, similar to those used in setup.py
                    rnormal.append(line)
    except IOError:
        print(
            # bugfix: the "{}" placeholder was never filled in
            'Warning: could not parse requirements file "{}"!'.format(path),
            file=sys.stderr
        )
    return rdev, rnormal, stuff
if __name__ == '__main__':
    # CLI entry point: computes a flattened requirements list from setup.py
    # (via a mocked setuptools.setup call) plus, for --level=dev, the
    # editable requirements from requirements-devel.txt.
    parser = argparse.ArgumentParser(
        description='Calculates requirements for different purposes',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        '-l', '--level',
        choices=['min', 'pypi', 'dev'],
        default='pypi',
        help='Specifies desired requirements level.'
        '"min" requests the minimal requirement that is specified, '
        '"pypi" requests the maximum version that satisfies the '
        'constrains and is available in PyPi. '
        '"dev" includes experimental developer versions for VCSs.'
    )
    parser.add_argument(
        '-e', '--extras',
        default='',
        help='Comma separated list of extras.',
        type=parse_set
    )
    args = parser.parse_args()
    # result: final pkg-key -> requirement line mapping; stuff: passthrough
    # pip commands printed verbatim before the requirements.
    result = dict()
    requires = []
    stuff = []
    if args.level == 'dev':
        result, requires, stuff = parse_pip_file('requirements-devel.txt')
    # Import setup.py with setuptools.setup mocked out so we can capture the
    # keyword arguments (install_requires/extras_require) without installing.
    with mock.patch.object(setuptools, 'setup') as mock_setup:
        import setup
        assert setup  # silence warning about unused imports
    # called arguments are in `mock_setup.call_args`
    mock_args, mock_kwargs = mock_setup.call_args
    # NOTE(review): this overwrites any plain requirements gathered from
    # requirements-devel.txt above (only its ``-e`` entries in ``result``
    # survive) -- presumably intentional; confirm.
    requires = mock_kwargs.get('install_requires', [])
    requires_extras = mock_kwargs.get('extras_require', {})
    for e in args.extras:
        if e in requires_extras:
            requires.extend(requires_extras[e])
    for pkg in pkg_resources.parse_requirements(requires):
        # skip things we already know
        # FIXME be smarter about merging things
        if pkg.key in result:
            continue
        specs = dict(pkg.specs)
        # Mixed inclusive/exclusive bounds on the same side are rejected.
        if (('>=' in specs) and ('>' in specs)) \
                or (('<=' in specs) and ('<' in specs)):
            print(
                'ERROR: Do not specify such weird constraints! '
                '("{}")'.format(pkg),
                file=sys.stderr
            )
            sys.exit(1)
        if '==' in specs:
            result[pkg.key] = '{}=={}'.format(pkg.project_name, specs['=='])
        elif '>=' in specs:
            # "min" pins to the declared lower bound; otherwise keep the
            # requirement object so pip resolves the newest matching version.
            if args.level == 'min':
                result[pkg.key] = '{}=={}'.format(
                    pkg.project_name,
                    specs['>=']
                )
            else:
                result[pkg.key] = pkg
        elif '>' in specs:
            if args.level == 'min':
                minver_error(pkg.project_name)
            else:
                result[pkg.key] = pkg
        else:
            if args.level == 'min':
                minver_error(pkg.project_name)
            else:
                result[pkg.key] = pkg
    for s in stuff:
        print(s)
    # NOTE(review): iterkeys() is Python 2 only, consistent with the
    # ``#!/usr/bin/env python2`` shebang of this script.
    for k in sorted(result.iterkeys()):
        print(result[k])
| gpl-2.0 |
SamiHiltunen/invenio-upgrader | invenio_upgrader/upgrades/invenio_2015_01_13_hide_holdings.py | 20 | 1898 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Upgrade for explicitly hiding the 'holdings' tab for all collections\
except 'Books' without an existing rule"""
from invenio.legacy.dbquery import run_sql
depends_on = ['invenio_release_1_1_0']
def info():
    """Return a one-line human readable description of this upgrade."""
    description = ("Updates the collectiondetailedrecordpagetabs to hide "
                   "'holdings' for every collection except 'Books' without "
                   "an existing rule.")
    return description
def do_upgrade():
    """Upgrade recipe procedure.

    Gives every collection an explicit detailed-record tab rule; only the
    'Books' collection keeps the 'holdings' tab.  Both statements use
    INSERT IGNORE, so rows with an existing rule are left untouched
    (presumably ``id_collection`` carries a unique key -- verify schema).
    """
    # Show holdings for the 'Books' collection unless there's an existing rule
    run_sql("""
        INSERT IGNORE INTO collectiondetailedrecordpagetabs (id_collection, tabs)
        SELECT id, "files;references;keywords;plots;hepdata;holdings;comments;linkbacks;citations;usage;metadata"
        FROM collection WHERE name = 'Books'
    """)
    # Insert the rest of the rules
    # ('Books' rows inserted above are skipped here by INSERT IGNORE, so
    # they keep the 'holdings' tab.)
    run_sql("""
        INSERT IGNORE INTO collectiondetailedrecordpagetabs (id_collection, tabs)
        SELECT id, "files;references;keywords;plots;hepdata;comments;linkbacks;citations;usage;metadata"
        FROM collection
    """)
def estimate():
    """Return the estimated run time of this upgrade, in seconds."""
    return 1
| gpl-2.0 |
Kami/libcloud | libcloud/test/dns/test_worldwidedns.py | 6 | 20383 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
import sys
import unittest
from libcloud.utils.py3 import httplib
from libcloud.dns.types import RecordType, ZoneDoesNotExistError
from libcloud.dns.types import RecordDoesNotExistError
from libcloud.dns.drivers.worldwidedns import WorldWideDNSDriver
from libcloud.dns.drivers.worldwidedns import WorldWideDNSError
from libcloud.common.worldwidedns import NonExistentDomain
from libcloud.common.worldwidedns import InvalidDomainName
from libcloud.test import MockHttp
from libcloud.test.file_fixtures import DNSFileFixtures
from libcloud.test.secrets import DNS_PARAMS_WORLDWIDEDNS
class WorldWideDNSTests(unittest.TestCase):
    """Unit tests for the WorldWideDNS driver.

    All HTTP traffic is served by :class:`WorldWideDNSMockHttp` canned
    fixtures; the ``WorldWideDNSMockHttp.type`` class attribute selects
    which fixture variant a test exercises (MockHttp dispatch convention).
    """
    def setUp(self):
        # Route the driver's connection class to the mock and reset the
        # fixture selector before each test.
        WorldWideDNSDriver.connectionCls.conn_class = WorldWideDNSMockHttp
        WorldWideDNSMockHttp.type = None
        self.driver = WorldWideDNSDriver(*DNS_PARAMS_WORLDWIDEDNS)
    def assertHasKeys(self, dictionary, keys):
        # Helper: assert that every key in `keys` is present in `dictionary`.
        for key in keys:
            self.assertTrue(key in dictionary, 'key "%s" not in dictionary' %
                            (key))
    def test_list_record_types(self):
        record_types = self.driver.list_record_types()
        self.assertEqual(len(record_types), 6)
        self.assertTrue(RecordType.A in record_types)
        self.assertTrue(RecordType.CNAME in record_types)
        self.assertTrue(RecordType.MX in record_types)
        self.assertTrue(RecordType.TXT in record_types)
        self.assertTrue(RecordType.SRV in record_types)
        self.assertTrue(RecordType.NS in record_types)
    def test_list_zones_success(self):
        zones = self.driver.list_zones()
        self.assertEqual(len(zones), 1)
        zone = zones[0]
        self.assertEqual(zone.id, 'niteowebsponsoredthisone.com')
        self.assertEqual(zone.type, 'master')
        self.assertEqual(zone.domain, 'niteowebsponsoredthisone.com')
        self.assertEqual(zone.ttl, '43200')
        self.assertHasKeys(zone.extra, ['HOSTMASTER', 'REFRESH', 'RETRY',
                                        'EXPIRE', 'SECURE', 'S1', 'T1', 'D1',
                                        'S2', 'T2', 'D2', 'S3', 'T3', 'D3'])
    def test_list_records_success(self):
        zone = self.driver.list_zones()[0]
        records = self.driver.list_records(zone=zone)
        self.assertEqual(len(records), 3)
        www = records[0]
        self.assertEqual(www.id, '1')
        self.assertEqual(www.name, 'www')
        self.assertEqual(www.type, RecordType.A)
        self.assertEqual(www.data, '0.0.0.0')
        self.assertEqual(www.extra, {})
    def test_list_records_zone_does_not_exist(self):
        WorldWideDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
        try:
            zone = self.driver.list_zones()[0]
            self.driver.list_records(zone=zone)
        except NonExistentDomain as e:
            # 405 is the provider's "non existent domain" error code.
            self.assertEqual(e.code, 405)
        else:
            self.fail('Exception was not thrown')
    def test_get_zone_success(self):
        zone = self.driver.get_zone(zone_id='niteowebsponsoredthisone.com')
        self.assertEqual(zone.id, 'niteowebsponsoredthisone.com')
        self.assertEqual(zone.type, 'master')
        self.assertEqual(zone.domain, 'niteowebsponsoredthisone.com')
        self.assertEqual(zone.ttl, '43200')
        self.assertHasKeys(zone.extra, ['HOSTMASTER', 'REFRESH', 'RETRY',
                                        'EXPIRE', 'SECURE', 'S1', 'T1', 'D1',
                                        'S2', 'T2', 'D2', 'S3', 'T3', 'D3'])
    def test_get_zone_does_not_exist(self):
        WorldWideDNSMockHttp.type = 'GET_ZONE_DOES_NOT_EXIST'
        try:
            self.driver.get_zone(zone_id='unexistentzone')
        except ZoneDoesNotExistError as e:
            self.assertEqual(e.zone_id, 'unexistentzone')
        else:
            self.fail('Exception was not thrown')
    def test_get_record_success(self):
        record = self.driver.get_record(zone_id='niteowebsponsoredthisone.com',
                                        record_id='1')
        self.assertEqual(record.id, '1')
        self.assertEqual(record.name, 'www')
        self.assertEqual(record.type, RecordType.A)
        self.assertEqual(record.data, '0.0.0.0')
        self.assertEqual(record.extra, {})
    def test_get_record_zone_does_not_exist(self):
        try:
            self.driver.get_record(zone_id='unexistentzone',
                                   record_id='3585100')
        except ZoneDoesNotExistError as e:
            self.assertEqual(e.zone_id, 'unexistentzone')
        else:
            self.fail('Exception was not thrown')
    def test_get_record_record_does_not_exist(self):
        try:
            self.driver.get_record(zone_id='niteowebsponsoredthisone.com',
                                   record_id='3585100')
        except RecordDoesNotExistError as e:
            self.assertEqual(e.record_id, '3585100')
        else:
            self.fail('Exception was not thrown')
    def test_create_zone_success(self):
        zone = self.driver.create_zone(domain='niteowebsponsoredthisone.com',
                                       type='master')
        self.assertEqual(zone.id, 'niteowebsponsoredthisone.com')
        self.assertEqual(zone.domain, 'niteowebsponsoredthisone.com')
        self.assertEqual(zone.ttl, '43200')
        self.assertEqual(zone.type, 'master')
    def test_create_zone_validaton_error(self):
        WorldWideDNSMockHttp.type = 'VALIDATION_ERROR'
        try:
            self.driver.create_zone(domain='foo.%.com', type='master',
                                    ttl=None, extra=None)
        except InvalidDomainName as e:
            # 410 is the provider's "invalid domain name" error code.
            self.assertEqual(e.code, 410)
        else:
            self.fail('Exception was not thrown')
    def test_update_zone_success(self):
        zone = self.driver.list_zones()[0]
        WorldWideDNSMockHttp.type = 'UPDATE_ZONE'
        updated_zone = self.driver.update_zone(zone=zone,
                                               domain='niteowebsponsoredthisone.com',  # noqa
                                               ttl=3800,
                                               extra={'HOSTMASTER':
                                                      'mail.niteowebsponsoredthisone.com'})  # noqa
        # The original zone object is left untouched by the update.
        self.assertEqual(zone.extra['HOSTMASTER'],
                         'hostmaster.niteowebsponsoredthisone.com')
        self.assertEqual(updated_zone.id, zone.id)
        self.assertEqual(updated_zone.domain, 'niteowebsponsoredthisone.com')
        self.assertEqual(updated_zone.type, zone.type)
        self.assertEqual(updated_zone.ttl, '3800')
        self.assertEqual(updated_zone.extra['HOSTMASTER'],
                         'mail.niteowebsponsoredthisone.com')
        self.assertEqual(updated_zone.extra['REFRESH'], zone.extra['REFRESH'])
        self.assertEqual(updated_zone.extra['RETRY'], zone.extra['RETRY'])
        self.assertEqual(updated_zone.extra['EXPIRE'], zone.extra['EXPIRE'])
        self.assertEqual(updated_zone.extra['SECURE'], zone.extra['SECURE'])
        self.assertEqual(updated_zone.extra['S1'], zone.extra['S1'])
        self.assertEqual(updated_zone.extra['T1'], zone.extra['T1'])
        self.assertEqual(updated_zone.extra['D1'], zone.extra['D1'])
        self.assertEqual(updated_zone.extra['S2'], zone.extra['S2'])
        self.assertEqual(updated_zone.extra['T2'], zone.extra['T2'])
        self.assertEqual(updated_zone.extra['D2'], zone.extra['D2'])
        self.assertEqual(updated_zone.extra['S3'], zone.extra['S3'])
        self.assertEqual(updated_zone.extra['T3'], zone.extra['T3'])
        self.assertEqual(updated_zone.extra['D3'], zone.extra['D3'])
    def test_create_record_success(self):
        zone = self.driver.list_zones()[0]
        WorldWideDNSMockHttp.type = 'CREATE_RECORD'
        record = self.driver.create_record(name='domain4', zone=zone,
                                           type=RecordType.A, data='0.0.0.4',
                                           extra={'entry': 4})
        self.assertEqual(record.id, '4')
        self.assertEqual(record.name, 'domain4')
        # The returned record carries a refreshed zone with the new entry.
        self.assertNotEqual(record.zone.extra.get('S4'), zone.extra.get('S4'))
        self.assertNotEqual(record.zone.extra.get('D4'), zone.extra.get('D4'))
        self.assertEqual(record.type, RecordType.A)
        self.assertEqual(record.data, '0.0.0.4')
    def test_create_record_finding_entry(self):
        # Without an explicit 'entry', the driver picks the next free slot.
        zone = self.driver.list_zones()[0]
        WorldWideDNSMockHttp.type = 'CREATE_RECORD'
        record = self.driver.create_record(name='domain4', zone=zone,
                                           type=RecordType.A, data='0.0.0.4')
        WorldWideDNSMockHttp.type = 'CREATE_SECOND_RECORD'
        zone = record.zone
        record2 = self.driver.create_record(name='domain1', zone=zone,
                                            type=RecordType.A, data='0.0.0.1')
        self.assertEqual(record.id, '4')
        self.assertEqual(record2.id, '5')
    def test_create_record_max_entry_reached(self):
        zone = self.driver.list_zones()[0]
        WorldWideDNSMockHttp.type = 'CREATE_RECORD_MAX_ENTRIES'
        record = self.driver.create_record(name='domain40', zone=zone,
                                           type=RecordType.A, data='0.0.0.40')
        WorldWideDNSMockHttp.type = 'CREATE_RECORD'
        zone = record.zone
        try:
            self.driver.create_record(
                name='domain41', zone=zone, type=RecordType.A, data='0.0.0.41')
        except WorldWideDNSError as e:
            self.assertEqual(e.value, 'All record entries are full')
        else:
            self.fail('Exception was not thrown')
    def test_create_record_max_entry_reached_give_entry(self):
        WorldWideDNSMockHttp.type = 'CREATE_RECORD_MAX_ENTRIES'
        zone = self.driver.list_zones()[0]
        record = self.driver.get_record(zone.id, '23')
        self.assertEqual(record.id, '23')
        self.assertEqual(record.name, 'domain23')
        self.assertEqual(record.type, 'A')
        self.assertEqual(record.data, '0.0.0.23')
        # No matter if we have all entries full, if we choose a specific
        # entry, the record will be replaced with the new one.
        WorldWideDNSMockHttp.type = 'CREATE_RECORD_MAX_ENTRIES_WITH_ENTRY'
        record = self.driver.create_record(name='domain23b', zone=zone,
                                           type=RecordType.A, data='0.0.0.41',
                                           extra={'entry': 23})
        zone = record.zone
        self.assertEqual(record.id, '23')
        self.assertEqual(record.name, 'domain23b')
        self.assertEqual(record.type, 'A')
        self.assertEqual(record.data, '0.0.0.41')
    def test_update_record_success(self):
        zone = self.driver.list_zones()[0]
        record = self.driver.get_record(zone.id, '1')
        WorldWideDNSMockHttp.type = 'UPDATE_RECORD'
        record = self.driver.update_record(record=record, name='domain1',
                                           type=RecordType.A, data='0.0.0.1',
                                           extra={'entry': 1})
        self.assertEqual(record.id, '1')
        self.assertEqual(record.name, 'domain1')
        self.assertNotEqual(record.zone.extra.get('S1'), zone.extra.get('S1'))
        self.assertNotEqual(record.zone.extra.get('D1'), zone.extra.get('D1'))
        self.assertEqual(record.type, RecordType.A)
        self.assertEqual(record.data, '0.0.0.1')
    def test_delete_zone_success(self):
        zone = self.driver.list_zones()[0]
        status = self.driver.delete_zone(zone=zone)
        self.assertTrue(status)
    def test_delete_zone_does_not_exist(self):
        zone = self.driver.list_zones()[0]
        WorldWideDNSMockHttp.type = 'ZONE_DOES_NOT_EXIST'
        try:
            self.driver.delete_zone(zone=zone)
        except NonExistentDomain as e:
            self.assertEqual(e.code, 405)
        else:
            self.fail('Exception was not thrown')
    def test_delete_record_success(self):
        zone = self.driver.list_zones()[0]
        records = self.driver.list_records(zone=zone)
        self.assertEqual(len(records), 3)
        record = records[1]
        WorldWideDNSMockHttp.type = 'DELETE_RECORD'
        status = self.driver.delete_record(record=record)
        self.assertTrue(status)
        zone = self.driver.list_zones()[0]
        records = self.driver.list_records(zone=zone)
        self.assertEqual(len(records), 2)
class WorldWideDNSMockHttp(MockHttp):
    """Canned HTTP responses for the WorldWideDNS API tests.

    Per the MockHttp dispatch convention, each method name encodes the
    request path (slashes and dots replaced by underscores) plus the active
    ``type`` suffix set by the test; each returns a
    (status, body, headers, reason) tuple.
    """
    fixtures = DNSFileFixtures('worldwidedns')
    def _api_dns_list_asp(self, method, url, body, headers):
        body = self.fixtures.load('api_dns_list')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_list_domain_asp(self, method, url, body, headers):
        body = self.fixtures.load('api_dns_list_domain_asp')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_list_asp_ZONE_DOES_NOT_EXIST(self, method, url, body,
                                              headers):
        # '405' body is the provider's "non existent domain" error code.
        return (httplib.OK, '405', {}, httplib.responses[httplib.OK])
    def _api_dns_list_asp_GET_ZONE_DOES_NOT_EXIST(self, method, url, body,
                                                  headers):
        return (httplib.OK, '', {}, httplib.responses[httplib.OK])
    def _api_dns_new_domain_asp(self, method, url, body, headers):
        return (httplib.OK, '200', {}, httplib.responses[httplib.OK])
    def _api_dns_new_domain_asp_VALIDATION_ERROR(self, method, url, body,
                                                 headers):
        # '410' body is the provider's "invalid domain name" error code.
        return (httplib.OK, '410', {}, httplib.responses[httplib.OK])
    def _api_dns_modify_asp(self, method, url, body, headers):
        return (httplib.OK, '211\r\n212\r\n213', {},
                httplib.responses[httplib.OK])
    def _api_dns_list_asp_UPDATE_ZONE(self, method, url, body, headers):
        body = self.fixtures.load('api_dns_list')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_modify_asp_UPDATE_ZONE(self, method, url, body, headers):
        return (httplib.OK, '211\r\n212\r\n213', {},
                httplib.responses[httplib.OK])
    def _api_dns_list_domain_asp_UPDATE_ZONE(self, method, url, body,
                                             headers):
        body = self.fixtures.load('api_dns_list_domain_asp_UPDATE_ZONE')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_list_asp_CREATE_RECORD(self, method, url, body, headers):
        body = self.fixtures.load('api_dns_list')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_list_asp_CREATE_SECOND_RECORD(self, method, url, body,
                                               headers):
        body = self.fixtures.load('api_dns_list')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_modify_asp_CREATE_RECORD(self, method, url, body, headers):
        return (httplib.OK, '211\r\n212\r\n213', {},
                httplib.responses[httplib.OK])
    def _api_dns_modify_asp_CREATE_SECOND_RECORD(self, method, url, body,
                                                 headers):
        return (httplib.OK, '211\r\n212\r\n213', {},
                httplib.responses[httplib.OK])
    def _api_dns_list_domain_asp_CREATE_RECORD(self, method, url, body,
                                               headers):
        body = self.fixtures.load('api_dns_list_domain_asp_CREATE_RECORD')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_list_domain_asp_CREATE_SECOND_RECORD(self, method, url, body,
                                                      headers):
        body = self.fixtures.load(
            'api_dns_list_domain_asp_CREATE_SECOND_RECORD')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_list_domain_asp_CREATE_RECORD_MAX_ENTRIES(self, method, url,
                                                           body, headers):
        body = self.fixtures.load(
            'api_dns_list_domain_asp_CREATE_RECORD_MAX_ENTRIES')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_modify_asp_CREATE_RECORD_MAX_ENTRIES(self, method, url, body,
                                                      headers):
        return (httplib.OK, '211\r\n212\r\n213', {},
                httplib.responses[httplib.OK])
    def _api_dns_list_asp_CREATE_RECORD_MAX_ENTRIES(self, method, url, body,
                                                    headers):
        body = self.fixtures.load('api_dns_list')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_list_domain_asp_CREATE_RECORD_MAX_ENTRIES_WITH_ENTRY(self,
                                                                      method,
                                                                      url,
                                                                      body,
                                                                      headers):
        # NOTE(review): a *list_domain* handler loading a fixture named
        # '_api_dns_modify_asp_...' looks like a copy/paste slip -- confirm
        # against the fixture files on disk before renaming.
        body = self.fixtures.load(
            '_api_dns_modify_asp_CREATE_RECORD_MAX_ENTRIES_WITH_ENTRY')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_modify_asp_CREATE_RECORD_MAX_ENTRIES_WITH_ENTRY(self, method,
                                                                 url, body,
                                                                 headers):
        return (httplib.OK, '211\r\n212\r\n213', {},
                httplib.responses[httplib.OK])
    def _api_dns_list_asp_CREATE_RECORD_MAX_ENTRIES_WITH_ENTRY(self, method,
                                                               url, body,
                                                               headers):
        body = self.fixtures.load('api_dns_list')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_list_asp_UPDATE_RECORD(self, method, url, body, headers):
        body = self.fixtures.load('api_dns_list')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_modify_asp_UPDATE_RECORD(self, method, url, body, headers):
        return (httplib.OK, '211\r\n212\r\n213', {},
                httplib.responses[httplib.OK])
    def _api_dns_list_domain_asp_UPDATE_RECORD(self, method, url, body,
                                               headers):
        body = self.fixtures.load('api_dns_list_domain_asp_UPDATE_RECORD')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_delete_domain_asp(self, method, url, body, headers):
        return (httplib.OK, '200', {}, httplib.responses[httplib.OK])
    def _api_dns_delete_domain_asp_ZONE_DOES_NOT_EXIST(self, method, url, body,
                                                       headers):
        return (httplib.OK, '405', {}, httplib.responses[httplib.OK])
    def _api_dns_list_asp_DELETE_RECORD(self, method, url, body, headers):
        body = self.fixtures.load('api_dns_list')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _api_dns_modify_asp_DELETE_RECORD(self, method, url, body, headers):
        return (httplib.OK, '200', {}, httplib.responses[httplib.OK])
    def _api_dns_list_domain_asp_DELETE_RECORD(self, method, url, body,
                                               headers):
        body = self.fixtures.load('api_dns_list_domain_asp_DELETE_RECORD')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if __name__ == '__main__':
    # Propagate the unittest result as the process exit status.
    sys.exit(unittest.main())
| apache-2.0 |
Elico-Corp/odoo_OCB | openerp/addons/base/tests/test_res_lang.py | 40 | 2036 | import unittest
import openerp.tests.common as common
class test_res_lang(common.TransactionCase):
    """Tests for the digit-grouping helper used by res.lang number
    formatting (``intersperse``)."""
    def test_00_intersperse(self):
        """Exercise intersperse(string, counts, separator).

        Per the assertions below it returns a ``(formatted, n)`` tuple where
        ``n`` is the number of separators inserted; ``counts`` gives group
        sizes right-to-left, the last count repeating while positive
        (0 repeats the previous group, negative stops grouping).
        """
        from openerp.addons.base.res.res_lang import intersperse
        # No grouping requested: input passes through unchanged.
        assert intersperse("", []) == ("", 0)
        assert intersperse("0", []) == ("0", 0)
        assert intersperse("012", []) == ("012", 0)
        assert intersperse("1", []) == ("1", 0)
        assert intersperse("12", []) == ("12", 0)
        assert intersperse("123", []) == ("123", 0)
        assert intersperse("1234", []) == ("1234", 0)
        assert intersperse("123456789", []) == ("123456789", 0)
        assert intersperse("&ab%#@1", []) == ("&ab%#@1", 0)
        # Single-digit input never gets a separator.
        assert intersperse("0", []) == ("0", 0)
        assert intersperse("0", [1]) == ("0", 0)
        assert intersperse("0", [2]) == ("0", 0)
        assert intersperse("0", [200]) == ("0", 0)
        # Grouping from the right with explicit group sizes.
        assert intersperse("12345678", [1], '.') == ('1234567.8', 1)
        assert intersperse("12345678", [1], '.') == ('1234567.8', 1)
        assert intersperse("12345678", [2], '.') == ('123456.78', 1)
        assert intersperse("12345678", [2,1], '.') == ('12345.6.78', 2)
        assert intersperse("12345678", [2,0], '.') == ('12.34.56.78', 3)
        assert intersperse("12345678", [-1,2], '.') == ('12345678', 0)
        assert intersperse("12345678", [2,-1], '.') == ('123456.78', 1)
        assert intersperse("12345678", [2,0,1], '.') == ('12.34.56.78', 3)
        assert intersperse("12345678", [2,0,0], '.') == ('12.34.56.78', 3)
        assert intersperse("12345678", [2,0,-1], '.') == ('12.34.56.78', 3)
        assert intersperse("12345678", [3,3,3,3], '.') == ('12.345.678', 2)
        # Non-digit prefixes are preserved; grouping still counts from the
        # right end of the string.
        assert intersperse("abc1234567xy", [2], '.') == ('abc1234567.xy', 1)
        assert intersperse("abc1234567xy8", [2], '.') == ('abc1234567x.y8', 1)  # ... w.r.t. here.
        assert intersperse("abc12", [3], '.') == ('abc12', 0)
        assert intersperse("abc12", [2], '.') == ('abc12', 0)
        assert intersperse("abc12", [1], '.') == ('abc1.2', 1)
| agpl-3.0 |
hmrc/wristband | config/settings/common.py | 1 | 9722 | # -*- coding: utf-8 -*-
"""
Django settings for wristband project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
import sys
import environ
import mongoengine
# Path helpers: ROOT_DIR is the repository root, APPS_DIR the Django package.
ROOT_DIR = environ.Path(__file__) - 3  # (/a/b/myfile.py - 3 = /)
APPS_DIR = ROOT_DIR.path('wristband')
# Single env reader used by all settings below.
env = environ.Env()
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
)
THIRD_PARTY_APPS = (
    'rest_framework',
    'rest_framework_swagger',
    'mongoengine.django.mongo_auth'
)
# Apps specific for this project go here.
LOCAL_APPS = (
    'wristband.apps',
    'wristband.stages',
    'wristband.providers',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES = (
    'djangosecure.middleware.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    # Project-specific replacement for Django's AuthenticationMiddleware.
    'wristband.authentication.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
MIGRATION_MODULES = {
    'sites': 'wristband.contrib.sites.migrations'
}
# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool("DJANGO_DEBUG", False)
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
# Relational DB is disabled on purpose -- all persistence goes through
# MongoDB/mongoengine (configured below).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.dummy'
    }
}
# MONGO
# -----------------------------------------------------------------------------
# NOTE(review): env var naming is inconsistent (MONGO_DB_NAME vs
# MONGODB_USER/MONGODB_PASSWORD/...) -- deployments must use these exact
# names; do not "fix" without migrating the environments.
MONGO_DB_NAME = env('MONGO_DB_NAME', default='wristband')
MONGO_USER = env('MONGODB_USER', default='')
MONGO_PASSWORD = env('MONGODB_PASSWORD', default='')
MONGO_HOST = env('MONGODB_HOST', default='localhost')
MONGO_PORT = env('MONGODB_PORT', default='27017')
# Build the "user:password@" URI fragment only when both are provided.
MONGO_CREDENTIALS = ''
if MONGO_USER and MONGO_PASSWORD:
    MONGO_CREDENTIALS = '{username}:{password}@'.format(username=MONGO_USER,
                                                        password=MONGO_PASSWORD)
# A full MONGO_URI env var overrides the individually assembled pieces.
MONGO_URI = env('MONGO_URI', default='mongodb://{credentials}{host}:{port}/{db_name}'.format(
    credentials=MONGO_CREDENTIALS,
    host=MONGO_HOST,
    db_name=MONGO_DB_NAME,
    port=MONGO_PORT
))
# Connect at import time so the connection exists before any model is used.
mongoengine.connect(MONGO_DB_NAME, host=MONGO_URI)
# SESSION
# ------------------------------------------------------------------------------
# Sessions are stored in MongoDB as well (no SQL backend available).
SESSION_ENGINE = 'mongoengine.django.sessions'
SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer'
# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'UTC'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-gb'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = False
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
    {
        # See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
        'DIRS': [],
        'OPTIONS': {
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
            'debug': DEBUG,
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
            # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',
            ],
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
                # Your stuff: custom template context processors go here
            ],
        },
    },
]
# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# MEDIA CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR('media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# URL Configuration
# ------------------------------------------------------------------------------
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# LOGGING CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOG_LEVEL = env('DJANGO_LOG_LEVEL', default='DEBUG')
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        # JSON logstash output; every record is tagged with the app name.
        'logstash': {
            '()': 'logstash_formatter.LogstashFormatter',
            'format': '{"extra":{"app": "wristband"}}'
        },
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'logstash',
            'stream': sys.stdout
        },
    },
    'loggers': {
        'django.request': {
            'handlers': ['console'],
            'level': LOG_LEVEL,
            'propagate': True
        },
        'django.security': {
            'handlers': ['console'],
            'level': LOG_LEVEL,
            'propagate': True,
        },
        'django': {
            'handlers': ['console'],
            'level': LOG_LEVEL,
            'propagate': True,
        },
        'wristband.authentication': {
            'handlers': ['console'],
            'level': LOG_LEVEL,
            'propagate': True
        },
        'wristband.provider': {
            'handlers': ['console'],
            'level': LOG_LEVEL,
            'propagate': True
        },
        'wristband.apps.providers': {
            'handlers': ['console'],
            'level': LOG_LEVEL,
            'propagate': True
        }
    }
}
# AUTHENTICATION
# -----------------------------------------------------------------------------
# LDAP-backed authentication persisted through mongoengine's user model.
AUTHENTICATION_BACKENDS = (
    'wristband.authentication.backends.MongoLDAPBackend',
)
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
# REST FRAMEWORK SETTINGS
# ------------------------------------------------------------------------------
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'wristband.authentication.backends.CustomTokenAuthentication',
        'rest_framework.authentication.SessionAuthentication',
    ),
    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
    )
}
# APP SPECIFIC SETTINGS
# -----------------------------------------------------------------------------
STAGES = env('STAGES', default='qa,staging')
PROVIDER_CONFIG = env('PROVIDER_CONFIG', default='providers.yaml')
# NOTE(review): no defaults here -- presumably these env vars are required
# and django-environ raises when they are unset; confirm before deploying.
WEBSTORE_URL = env('WEBSTORE_URL')
KIBANA_URL = env('KIBANA_URL')
| apache-2.0 |
matthappens/taskqueue | taskqueue/venv_tq/lib/python2.7/site-packages/Crypto/SelfTest/Util/test_asn1.py | 113 | 10239 | # -*- coding: utf-8 -*-
#
# SelfTest/Util/test_asn.py: Self-test for the Crypto.Util.asn1 module
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-tests for Crypto.Util.asn1"""
__revision__ = "$Id$"
import unittest
import sys
from Crypto.Util.py3compat import *
from Crypto.Util.asn1 import DerSequence, DerObject
class DerObjectTests(unittest.TestCase):
    """Unit tests for the generic DerObject DER encoder/decoder."""

    def testObjEncode1(self):
        # No payload
        der = DerObject(b('\x33'))
        self.assertEquals(der.encode(), b('\x33\x00'))
        # Small payload
        der.payload = b('\x45')
        self.assertEquals(der.encode(), b('\x33\x01\x45'))
        # Invariant: encode() must not mutate state, so a second call
        # returns the same bytes.
        self.assertEquals(der.encode(), b('\x33\x01\x45'))
        # Initialize with numerical tag
        der = DerObject(b(0x33))
        der.payload = b('\x45')
        self.assertEquals(der.encode(), b('\x33\x01\x45'))

    def testObjEncode2(self):
        # Known type names map to their standard ASN.1 tag bytes.
        der = DerObject('SEQUENCE')
        self.assertEquals(der.encode(), b('\x30\x00'))
        der = DerObject('BIT STRING')
        self.assertEquals(der.encode(), b('\x03\x00'))

    def testObjEncode3(self):
        # Long payload (>127 bytes) forces the long form of the DER
        # length octets (0x81 prefix).
        der = DerObject(b('\x34'))
        der.payload = b("0")*128
        self.assertEquals(der.encode(), b('\x34\x81\x80' + "0"*128))

    def testObjDecode1(self):
        # Decode short payload
        der = DerObject()
        der.decode(b('\x20\x02\x01\x02'))
        self.assertEquals(der.payload, b("\x01\x02"))
        self.assertEquals(der.typeTag, 0x20)

    def testObjDecode2(self):
        # Decode long payload (long-form length octets, 0x81 prefix).
        der = DerObject()
        der.decode(b('\x22\x81\x80' + "1"*128))
        self.assertEquals(der.payload, b("1")*128)
        self.assertEquals(der.typeTag, 0x22)
class DerSequenceTests(unittest.TestCase):
def testEncode1(self):
# Empty sequence
der = DerSequence()
self.assertEquals(der.encode(), b('0\x00'))
self.failIf(der.hasOnlyInts())
# One single-byte integer (zero)
der.append(0)
self.assertEquals(der.encode(), b('0\x03\x02\x01\x00'))
self.failUnless(der.hasOnlyInts())
# Invariant
self.assertEquals(der.encode(), b('0\x03\x02\x01\x00'))
def testEncode2(self):
# One single-byte integer (non-zero)
der = DerSequence()
der.append(127)
self.assertEquals(der.encode(), b('0\x03\x02\x01\x7f'))
# Indexing
der[0] = 1
self.assertEquals(len(der),1)
self.assertEquals(der[0],1)
self.assertEquals(der[-1],1)
self.assertEquals(der.encode(), b('0\x03\x02\x01\x01'))
#
der[:] = [1]
self.assertEquals(len(der),1)
self.assertEquals(der[0],1)
self.assertEquals(der.encode(), b('0\x03\x02\x01\x01'))
def testEncode3(self):
# One multi-byte integer (non-zero)
der = DerSequence()
der.append(0x180L)
self.assertEquals(der.encode(), b('0\x04\x02\x02\x01\x80'))
def testEncode4(self):
# One very long integer
der = DerSequence()
der.append(2**2048)
self.assertEquals(der.encode(), b('0\x82\x01\x05')+
b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00'))
def testEncode5(self):
# One single-byte integer (looks negative)
der = DerSequence()
der.append(0xFFL)
self.assertEquals(der.encode(), b('0\x04\x02\x02\x00\xff'))
def testEncode6(self):
# Two integers
der = DerSequence()
der.append(0x180L)
der.append(0xFFL)
self.assertEquals(der.encode(), b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff'))
self.failUnless(der.hasOnlyInts())
#
der.append(0x01)
der[1:] = [9,8]
self.assertEquals(len(der),3)
self.assertEqual(der[1:],[9,8])
self.assertEqual(der[1:-1],[9])
self.assertEquals(der.encode(), b('0\x0A\x02\x02\x01\x80\x02\x01\x09\x02\x01\x08'))
def testEncode6(self):
# One integer and another type (no matter what it is)
der = DerSequence()
der.append(0x180L)
der.append(b('\x00\x02\x00\x00'))
self.assertEquals(der.encode(), b('0\x08\x02\x02\x01\x80\x00\x02\x00\x00'))
self.failIf(der.hasOnlyInts())
####
def testDecode1(self):
# Empty sequence
der = DerSequence()
der.decode(b('0\x00'))
self.assertEquals(len(der),0)
# One single-byte integer (zero)
der.decode(b('0\x03\x02\x01\x00'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],0)
# Invariant
der.decode(b('0\x03\x02\x01\x00'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],0)
def testDecode2(self):
# One single-byte integer (non-zero)
der = DerSequence()
der.decode(b('0\x03\x02\x01\x7f'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],127)
def testDecode3(self):
# One multi-byte integer (non-zero)
der = DerSequence()
der.decode(b('0\x04\x02\x02\x01\x80'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],0x180L)
def testDecode4(self):
# One very long integer
der = DerSequence()
der.decode(b('0\x82\x01\x05')+
b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],2**2048)
def testDecode5(self):
# One single-byte integer (looks negative)
der = DerSequence()
der.decode(b('0\x04\x02\x02\x00\xff'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],0xFFL)
def testDecode6(self):
# Two integers
der = DerSequence()
der.decode(b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff'))
self.assertEquals(len(der),2)
self.assertEquals(der[0],0x180L)
self.assertEquals(der[1],0xFFL)
def testDecode7(self):
# One integer and 2 other types
der = DerSequence()
der.decode(b('0\x0A\x02\x02\x01\x80\x24\x02\xb6\x63\x12\x00'))
self.assertEquals(len(der),3)
self.assertEquals(der[0],0x180L)
self.assertEquals(der[1],b('\x24\x02\xb6\x63'))
self.assertEquals(der[2],b('\x12\x00'))
def testDecode8(self):
# Only 2 other types
der = DerSequence()
der.decode(b('0\x06\x24\x02\xb6\x63\x12\x00'))
self.assertEquals(len(der),2)
self.assertEquals(der[0],b('\x24\x02\xb6\x63'))
self.assertEquals(der[1],b('\x12\x00'))
def testErrDecode1(self):
# Not a sequence
der = DerSequence()
self.assertRaises(ValueError, der.decode, b(''))
self.assertRaises(ValueError, der.decode, b('\x00'))
self.assertRaises(ValueError, der.decode, b('\x30'))
def testErrDecode2(self):
# Wrong payload type
der = DerSequence()
self.assertRaises(ValueError, der.decode, b('\x30\x00\x00'), True)
def testErrDecode3(self):
# Wrong length format
der = DerSequence()
self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\x01\x00'))
self.assertRaises(ValueError, der.decode, b('\x30\x81\x03\x02\x01\x01'))
self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x81\x01\x01'))
def testErrDecode4(self):
# Wrong integer format
der = DerSequence()
# Multi-byte encoding for zero
#self.assertRaises(ValueError, der.decode, '\x30\x04\x02\x02\x00\x00')
# Negative integer
self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\xFF'))
def get_tests(config={}):
    """Return the list of test cases exported by this module.

    `config` is accepted for API compatibility with the other self-test
    modules; it is not used here.
    """
    from Crypto.SelfTest.st_common import list_test_cases
    return list_test_cases(DerObjectTests) + list_test_cases(DerSequenceTests)
if __name__ == '__main__':
    # Run this module's tests when executed directly.
    def suite():
        return unittest.TestSuite(get_tests())
    unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| mit |
jab/lektor | lektor/publisher.py | 1 | 1696 | import subprocess
from werkzeug import urls
from lektor.utils import portable_popen
class Publisher(object):
    """Base interface for deployment targets.

    Subclasses implement `publish`, which deploys `output_path` to the
    given target URL.
    """

    def __init__(self, env, output_path):
        # The Lektor environment and the build output directory to deploy.
        self.env = env
        self.output_path = output_path

    def publish(self, target_url):
        """Deploy the output to `target_url`; must be overridden."""
        raise NotImplementedError()
class ExternalPublisher(Publisher):
    """Publisher that shells out to an external command line tool."""

    def get_command_line(self, target_url):
        """Return the argv list used to publish to `target_url`."""
        raise NotImplementedError()

    def publish(self, target_url):
        """Run the external command, yielding its output line by line.

        The subprocess is always reaped via wait(), even if the consumer
        abandons the generator early.
        """
        proc = portable_popen(self.get_command_line(target_url),
                              stdout=subprocess.PIPE)
        try:
            # readline() returns an empty string at EOF, which stops iter().
            for raw_line in iter(proc.stdout.readline, b''):
                yield raw_line.rstrip().decode('utf-8', 'replace')
        finally:
            proc.wait()
class RsyncPublisher(ExternalPublisher):
    """Publishes the build output via rsync (optionally over ssh)."""

    def get_command_line(self, target_url):
        """Build the rsync argv for syncing the output to `target_url`.

        `target_url` is a parsed werkzeug URL (see `publish()` below).
        NOTE(review): mixes byte strings (`.encode('utf-8')`) with native
        strings, so this is Python 2 only -- confirm before porting.
        """
        argline = ['rsync', '-azv']
        target = []
        if target_url.port is not None:
            # Non-default port: route rsync through an explicit ssh command.
            argline.append('-e')
            argline.append('ssh -p ' + str(target_url.port))
        if target_url.username is not None:
            target.append(target_url.username.encode('utf-8') + '@')
        target.append(target_url.ascii_host)
        # Normalize to exactly one trailing slash on both sides so rsync
        # copies directory *contents* instead of nesting a new directory.
        target.append(':' + target_url.path.encode('utf-8').rstrip('/') + '/')
        argline.append(self.output_path.rstrip('/\\') + '/')
        argline.append(''.join(target))
        return argline
# Registry mapping URL schemes to publisher implementations.
publishers = {
    'rsync': RsyncPublisher,
}


def publish(env, target, output_path):
    """Publish `output_path` to `target` using a scheme-matched publisher.

    Returns the publisher's progress-line generator, or None when no
    publisher is registered for the target's URL scheme.
    NOTE: uses the `unicode` builtin, so this is Python 2 only.
    """
    url = urls.url_parse(unicode(target))
    publisher = publishers.get(url.scheme)
    if publisher is not None:
        return publisher(env, output_path).publish(url)
| bsd-3-clause |
vrv/tensorflow | tensorflow/contrib/linalg/python/kernel_tests/linear_operator_full_matrix_test.py | 12 | 6651 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import linalg as linalg_lib
from tensorflow.contrib.linalg.python.ops import linear_operator_test_util
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
linalg = linalg_lib
random_seed.set_random_seed(23)
class SquareLinearOperatorFullMatrixTest(
    linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
  """Most tests done in the base class LinearOperatorDerivedClassTest."""

  def _operator_and_mat_and_feed_dict(self, shape, dtype, use_placeholder):
    # Build the operator under test alongside the dense matrix it should
    # behave like; the base class compares the two.
    shape = list(shape)
    matrix = linear_operator_test_util.random_positive_definite_matrix(shape,
                                                                       dtype)

    if use_placeholder:
      matrix_ph = array_ops.placeholder(dtype=dtype)
      # Evaluate here because (i) you cannot feed a tensor, and (ii)
      # values are random and we want the same value used for both mat and
      # feed_dict.
      matrix = matrix.eval()
      operator = linalg.LinearOperatorFullMatrix(matrix_ph)
      feed_dict = {matrix_ph: matrix}
    else:
      operator = linalg.LinearOperatorFullMatrix(matrix)
      feed_dict = None

    # Convert back to Tensor.  Needed if use_placeholder, since then we have
    # already evaluated matrix to a numpy array.
    mat = ops.convert_to_tensor(matrix)

    return operator, mat, feed_dict

  def test_is_x_flags(self):
    # Matrix with two positive eigenvalues.
    matrix = [[1., 0.], [1., 11.]]
    operator = linalg.LinearOperatorFullMatrix(
        matrix,
        is_positive_definite=True,
        is_non_singular=True,
        is_self_adjoint=False)
    # Hints passed to the constructor are reflected back by the properties.
    self.assertTrue(operator.is_positive_definite)
    self.assertTrue(operator.is_non_singular)
    self.assertFalse(operator.is_self_adjoint)
class SquareLinearOperatorFullMatrixSymmetricPositiveDefiniteTest(
    linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
  """Most tests done in the base class LinearOperatorDerivedClassTest.

  In this test, the operator is constructed with hints that invoke the use of
  a Cholesky decomposition for solves/determinant.
  """

  def setUp(self):
    # Increase from 1e-6 to 1e-5.  This reduction in tolerance happens,
    # presumably, because we are taking a different code path in the operator
    # and the matrix.  The operator uses a Choleksy, the matrix uses standard
    # solve.
    # NOTE(review): does not call the base class setUp -- presumably the
    # base class only reads these tolerance dicts; confirm before changing.
    self._atol[dtypes.float32] = 1e-5
    self._rtol[dtypes.float32] = 1e-5
    self._atol[dtypes.float64] = 1e-10
    self._rtol[dtypes.float64] = 1e-10

  @property
  def _dtypes_to_test(self):
    # Restricted to real floating point types (Cholesky path).
    return [dtypes.float32, dtypes.float64]

  def _operator_and_mat_and_feed_dict(self, shape, dtype, use_placeholder):
    shape = list(shape)
    # force_well_conditioned keeps the random SPD matrix numerically stable.
    matrix = linear_operator_test_util.random_positive_definite_matrix(
        shape, dtype, force_well_conditioned=True)

    if use_placeholder:
      matrix_ph = array_ops.placeholder(dtype=dtype)
      # Evaluate here because (i) you cannot feed a tensor, and (ii)
      # values are random and we want the same value used for both mat and
      # feed_dict.
      matrix = matrix.eval()
      operator = linalg.LinearOperatorFullMatrix(
          matrix_ph, is_self_adjoint=True, is_positive_definite=True)
      feed_dict = {matrix_ph: matrix}
    else:
      operator = linalg.LinearOperatorFullMatrix(
          matrix, is_self_adjoint=True, is_positive_definite=True)
      feed_dict = None

    # Convert back to Tensor.  Needed if use_placeholder, since then we have
    # already evaluated matrix to a numpy array.
    mat = ops.convert_to_tensor(matrix)

    return operator, mat, feed_dict

  def test_is_x_flags(self):
    # Matrix with two positive eigenvalues.
    matrix = [[1., 0.], [0., 7.]]
    operator = linalg.LinearOperatorFullMatrix(
        matrix, is_positive_definite=True, is_self_adjoint=True)

    self.assertTrue(operator.is_positive_definite)
    self.assertTrue(operator.is_self_adjoint)

    # Should be auto-set
    self.assertTrue(operator.is_non_singular)
    # Checks a private attribute: the SPD (Cholesky) code path is enabled.
    self.assertTrue(operator._is_spd)
class NonSquareLinearOperatorFullMatrixTest(
    linear_operator_test_util.NonSquareLinearOperatorDerivedClassTest):
  """Most tests done in the base class LinearOperatorDerivedClassTest."""

  def _operator_and_mat_and_feed_dict(self, shape, dtype, use_placeholder):
    # Non-square case: any random matrix will do (no PD requirement).
    matrix = linear_operator_test_util.random_normal(shape, dtype=dtype)
    if use_placeholder:
      matrix_ph = array_ops.placeholder(dtype=dtype)
      # Evaluate here because (i) you cannot feed a tensor, and (ii)
      # values are random and we want the same value used for both mat and
      # feed_dict.
      matrix = matrix.eval()
      operator = linalg.LinearOperatorFullMatrix(matrix_ph)
      feed_dict = {matrix_ph: matrix}
    else:
      operator = linalg.LinearOperatorFullMatrix(matrix)
      feed_dict = None

    # Convert back to Tensor.  Needed if use_placeholder, since then we have
    # already evaluated matrix to a numpy array.
    mat = ops.convert_to_tensor(matrix)

    return operator, mat, feed_dict

  def test_is_x_flags(self):
    # Matrix with two positive eigenvalues.
    matrix = [[3., 0.], [1., 1.]]
    operator = linalg.LinearOperatorFullMatrix(
        matrix,
        is_positive_definite=True,
        is_non_singular=True,
        is_self_adjoint=False)
    self.assertTrue(operator.is_positive_definite)
    self.assertTrue(operator.is_non_singular)
    self.assertFalse(operator.is_self_adjoint)

  def test_matrix_must_have_at_least_two_dims_or_raises(self):
    # A 1-D input cannot be interpreted as a matrix.
    with self.assertRaisesRegexp(ValueError, "at least 2 dimensions"):
      linalg.LinearOperatorFullMatrix([1.])
if __name__ == "__main__":
test.main()
| apache-2.0 |
jshufelt/volatility | volatility/plugins/gui/vtypes/vista.py | 58 | 4880 | # Volatility
# Copyright (C) 2007-2013 Volatility Foundation
# Copyright (C) 2010,2011,2012 Michael Hale Ligh <michael.ligh@mnin.org>
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
import volatility.obj as obj
import volatility.plugins.gui.vtypes.win7_sp0_x64_vtypes_gui as win7_sp0_x64_vtypes_gui
import volatility.plugins.gui.constants as consts
class Vista2008x64GuiVTypes(obj.ProfileModification):
    """Install win32k GUI vtypes for 64-bit Windows Vista / Server 2008.

    Reuses the Windows 7 x64 win32k vtypes and then patches the structures
    that differ on Vista/2008 (see comments in modification()).
    """

    # Run after the base x64 GUI vtype modifications have been applied.
    before = ["XP2003x64BaseVTypes", "Win32Kx64VTypes"]

    conditions = {'os': lambda x: x == 'windows',
                  'memory_model': lambda x: x == '64bit',
                  'major': lambda x: x == 6,
                  'minor': lambda x: x == 0}

    def modification(self, profile):
        # Enough stayed the same between Vista/2008 and Windows 7,
        ## so we can re-use the Windows 7 types. This is a bit unconventional
        ## because we typically when we re-use, we do it forward (i.e. use
        ## an older OS's types for a newer OS). However since the win32k.sys
        ## vtypes were never public until Windows 7, we're re-using backward.
        profile.vtypes.update(win7_sp0_x64_vtypes_gui.win32k_types)

        # We don't want to overlay or HeEntrySize from Win7 will
        # appear to be a valid member of the Vista structure.
        profile.vtypes.update({
            'tagSHAREDINFO' : [ 0x238, {
                'psi' : [ 0x0, ['pointer64', ['tagSERVERINFO']]],
                'aheList' : [ 0x8, ['pointer64', ['_HANDLEENTRY']]],
                'ulSharedDelta' : [ 0x18, ['unsigned long long']],
            }],
        })

        # Member-level fixups on top of the Win7 definitions.
        profile.merge_overlay({
            # From Win7SP0x64
            'tagDESKTOP' : [ None, {
                'pheapDesktop' : [ 0x78, ['pointer64', ['tagWIN32HEAP']]],
                'ulHeapSize' : [ 0x80, ['unsigned long']],
            }],
            'tagTHREADINFO' : [ None, {
                'ppi' : [ 0x68, ['pointer64', ['tagPROCESSINFO']]],
                'PtiLink' : [ 0x160, ['_LIST_ENTRY']],
            }],
            'tagHOOK': [ None, {
                'flags': [ None, ['Flags', {'bitmap': consts.HOOK_FLAGS}]]
            }],
            '_HANDLEENTRY': [ None, {
                'bType': [ None, ['Enumeration', dict(target = 'unsigned char', choices = consts.HANDLE_TYPE_ENUM)]],
            }],
            'tagWINDOWSTATION' : [ None, {
                'pClipBase' : [ None, ['pointer', ['array', lambda x : x.cNumClipFormats, ['tagCLIP']]]],
            }],
            'tagCLIP': [ None, {
                'fmt' : [ 0x0, ['Enumeration', dict(target = 'unsigned long', choices = consts.CLIPBOARD_FORMAT_ENUM)]],
            }],
        })
class Vista2008x86GuiVTypes(obj.ProfileModification):
    """Install win32k GUI vtype overlays for 32-bit Vista / Server 2008."""

    # Run after the base x86 GUI vtype modifications have been applied.
    before = ["XP2003x86BaseVTypes", "Win32Kx86VTypes"]

    conditions = {'os': lambda x: x == 'windows',
                  'memory_model': lambda x: x == '32bit',
                  'major': lambda x: x == 6,
                  'minor': lambda x: x == 0}

    def modification(self, profile):
        profile.merge_overlay({
            # The size is very important since we carve from bottom up
            'tagWINDOWSTATION' : [ 0x54, {
                'pClipBase' : [ None, ['pointer', ['array', lambda x : x.cNumClipFormats, ['tagCLIP']]]],
            }],
            'tagDESKTOP' : [ None, {
                'PtiList' : [ 0x64, ['_LIST_ENTRY']],
                'hsectionDesktop' : [ 0x3c, ['pointer', ['void']]],
                'pheapDesktop' : [ 0x40, ['pointer', ['tagWIN32HEAP']]],
                'ulHeapSize' : [ 0x44, ['unsigned long']],
            }],
            'tagTHREADINFO' : [ None, { # same as win2003x86
                'PtiLink' : [ 0xB0, ['_LIST_ENTRY']],
                'fsHooks' : [ 0x9C, ['unsigned long']],
                'aphkStart' : [ 0xF8, ['array', 16, ['pointer', ['tagHOOK']]]],
            }],
            'tagSERVERINFO' : [ None, {
                'cHandleEntries' : [ 0x4, ['unsigned long']],
                'cbHandleTable' : [ 0x1c8, ['unsigned long']],
            }],
            'tagSHAREDINFO' : [ 0x11c, { # From Win7SP0x86
                'psi' : [ 0x0, ['pointer', ['tagSERVERINFO']]],
                'aheList' : [ 0x4, ['pointer', ['_HANDLEENTRY']]],
                'ulSharedDelta' : [ 0xC, ['unsigned long']],
            }],
            'tagCLIP' : [ 16, { # just a size change
            }]})
| gpl-2.0 |
wwj718/murp-edx | common/test/acceptance/pages/lms/open_response.py | 165 | 4579 | """
Open-ended response in the courseware.
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise
from .rubric import RubricPage
class OpenResponsePage(PageObject):
    """
    Open-ended response in the courseware.
    """

    url = None

    def is_browser_on_page(self):
        """Return True if an open-ended response module is on the page."""
        return self.q(css='div.xmodule_CombinedOpenEndedModule').present

    @property
    def assessment_type(self):
        """
        Return the type of assessment currently active.
        Options are "self", "ai", or "peer".

        Raises a `ValueError` if the assessment type cannot be determined.
        """
        labels = self.q(css='section#combined-open-ended-status>div.statusitem-current').text

        if len(labels) < 1:
            # BUGFIX: previously this only warned and then fell through to
            # `labels[0]`, crashing with an opaque IndexError. Fail loudly
            # with a descriptive error instead.
            self.warning("Could not find assessment type label")
            raise ValueError("Could not find assessment type label")

        # Provide some tolerance to UI changes
        label_compare = labels[0].lower().strip()

        if 'self' in label_compare:
            return 'self'
        elif 'ai' in label_compare:
            return 'ai'
        elif 'peer' in label_compare:
            return 'peer'
        else:
            raise ValueError("Unexpected assessment type: '{0}'".format(label_compare))

    @property
    def prompt(self):
        """
        Return an HTML string representing the essay prompt.
        Returns an empty string if no prompt is found.
        """
        prompt_css = "section.open-ended-child>div.prompt"
        prompts = self.q(css=prompt_css).map(lambda el: el.get_attribute('innerHTML').strip()).results

        if len(prompts) == 0:
            self.warning("Could not find essay prompt on page.")
            return ""

        elif len(prompts) > 1:
            self.warning("Multiple essay prompts found on page; using the first one.")

        return prompts[0]

    @property
    def rubric(self):
        """
        Return a `RubricPage` for a self-assessment problem.
        If no rubric is available, raises a `BrokenPromise` exception.
        """
        rubric = RubricPage(self.browser)
        rubric.wait_for_page()
        return rubric

    @property
    def written_feedback(self):
        """
        Return the written feedback from the grader (if any).
        If no feedback available, returns None.
        """
        feedback = self.q(css='div.written-feedback').text

        if len(feedback) > 0:
            return feedback[0]
        else:
            return None

    @property
    def alert_message(self):
        """
        Alert message displayed to the user; "" when there is none.
        """
        alerts = self.q(css="div.open-ended-alert").text

        if len(alerts) < 1:
            return ""
        else:
            return alerts[0]

    @property
    def grader_status(self):
        """
        Status message from the grader.
        If not present, return an empty string.
        """
        status_list = self.q(css='div.grader-status').text

        if len(status_list) < 1:
            self.warning("No grader status found")
            return ""

        elif len(status_list) > 1:
            self.warning("Multiple grader statuses found; returning the first one")

        return status_list[0]

    def set_response(self, response_str):
        """
        Input a response to the prompt.
        """
        input_css = "textarea.short-form-response"
        self.q(css=input_css).fill(response_str)

    def save_response(self):
        """
        Save the response for later submission.
        """
        self.q(css='input.save-button').first.click()
        EmptyPromise(
            lambda: 'save' in self.alert_message.lower(),
            "Status message saved"
        ).fulfill()

    def submit_response(self):
        """
        Submit a response for grading.
        """
        self.q(css='input.submit-button').first.click()

        # modal dialog confirmation
        self.q(css='button.ok-button').first.click()

        # Ensure that the submission completes
        self._wait_for_submitted(self.assessment_type)

    def _wait_for_submitted(self, assessment_type):
        """
        Wait for the submission to complete.

        `assessment_type` is either 'self', 'ai', or 'peer'.
        """
        if assessment_type == 'self':
            # Self-assessment shows the rubric immediately.
            RubricPage(self.browser).wait_for_page()
        elif assessment_type in ('ai', 'peer'):
            # AI/peer grading: wait until the problem is no longer unanswered.
            EmptyPromise(
                lambda: self.grader_status != 'Unanswered',
                "Problem status is no longer 'unanswered'"
            ).fulfill()
        else:
            self.warning("Unrecognized assessment type '{0}'".format(assessment_type))
            EmptyPromise(lambda: True, "Unrecognized assessment type").fulfill()
| agpl-3.0 |
DARKPOP/external_chromium_org | tools/telemetry/telemetry/core/platform/profiler/__init__.py | 94 | 2391 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
from telemetry.core import exceptions
class Profiler(object):
  """A sampling profiler provided by the platform.

  A profiler is started on its constructor, and should
  gather data until CollectProfile().

  The life cycle is normally tied to a single page,
  i.e., multiple profilers will be created for a page set.

  WillCloseBrowser() is called right before the browser
  is closed to allow any further cleanup.
  """

  def __init__(self, browser_backend, platform_backend, output_path, state):
    # Backends expose the browser process tree and OS-level process info;
    # output_path is the file-name prefix for the collected profiles.
    self._browser_backend = browser_backend
    self._platform_backend = platform_backend
    self._output_path = output_path
    self._state = state

  @classmethod
  def name(cls):
    """User-friendly name of this profiler."""
    raise NotImplementedError()

  @classmethod
  def is_supported(cls, browser_type):
    """True iff this profiler is currently supported by the platform."""
    raise NotImplementedError()

  @classmethod
  def CustomizeBrowserOptions(cls, browser_type, options):
    """Override to customize the Browser's options before it is created."""
    pass

  @classmethod
  def WillCloseBrowser(cls, browser_backend, platform_backend):
    """Called before the browser is stopped."""
    pass

  def _GetProcessOutputFileMap(self):
    """Returns a dict with pid: output_file."""
    # Profile the browser process itself plus all of its children.
    all_pids = ([self._browser_backend.pid] +
                self._platform_backend.GetChildPids(self._browser_backend.pid))

    process_name_counts = collections.defaultdict(int)
    process_output_file_map = {}
    for pid in all_pids:
      try:
        cmd_line = self._platform_backend.GetCommandLine(pid)
        process_name = self._browser_backend.GetProcessName(cmd_line)
        # Suffix with a per-name counter so several processes of the same
        # type (e.g. renderers) get distinct output files.
        output_file = '%s.%s%s' % (self._output_path, process_name,
                                   process_name_counts[process_name])
        process_name_counts[process_name] += 1
        process_output_file_map[pid] = output_file
      except exceptions.ProcessGoneException:
        # Ignore processes that disappeared since calling GetChildPids().
        continue
    return process_output_file_map

  def CollectProfile(self):
    """Collect the profile from the profiler."""
    raise NotImplementedError()
| bsd-3-clause |
looker/sentry | src/sentry/south_migrations/0345_add_citext.py | 3 | 120001 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from sentry.utils.db import is_postgres
class Migration(DataMigration):
    """Enable the PostgreSQL ``citext`` extension (no-op on other DBs)."""

    def forwards(self, orm):
        # South runs migrations inside a transaction; close it before
        # executing the DDL below.
        # NOTE(review): presumably so the extension creation is committed
        # independently of the migration transaction -- confirm.
        db.commit_transaction()
        try:
            self._forwards(orm)
        except Exception:
            # Re-open a transaction before propagating so South's teardown
            # finds the transaction state it expects.
            db.start_transaction()
            raise
        # Restore the open transaction South expects on return.
        db.start_transaction()

    def _forwards(self, orm):
        # citext (case-insensitive text) only exists on PostgreSQL.
        if is_postgres():
            db.execute('CREATE EXTENSION IF NOT EXISTS citext')

    def backwards(self, orm):
        "Write your backwards methods here."
        # Intentionally left as a no-op.
models = {
'sentry.activity': {
'Meta': {
'object_name': 'Activity'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.apiapplication': {
'Meta': {
'object_name': 'ApiApplication'
},
'allowed_origins':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'client_id': (
'django.db.models.fields.CharField', [], {
'default': "'4fa8a4398f674b29976d6b1d8a9c11008e0c882196504f2cadf27683c8ad950c'",
'unique': 'True',
'max_length': '64'
}
),
'client_secret': (
'sentry.db.models.fields.encrypted.EncryptedTextField', [], {
'default': "'b9d415a393e546e3b6ee522b76a799dee90fbb1e28af4de3a7dc7a294a009ab6'"
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'homepage_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'default': "'Sharing Drum'",
'max_length': '64',
'blank': 'True'
}
),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
),
'privacy_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
}),
'redirect_uris': ('django.db.models.fields.TextField', [], {}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'terms_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
})
},
'sentry.apiauthorization': {
'Meta': {
'unique_together': "(('user', 'application'),)",
'object_name': 'ApiAuthorization'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.apigrant': {
'Meta': {
'object_name': 'ApiGrant'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']"
}
),
'code': (
'django.db.models.fields.CharField', [], {
'default': "'7bd8f5ffe37b4a5d869f82023ddbc02c'",
'max_length': '64',
'db_index': 'True'
}
),
'expires_at': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 8, 22, 0, 0)',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'redirect_uri': ('django.db.models.fields.CharField', [], {
'max_length': '255'
}),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.apikey': {
'Meta': {
'object_name': 'ApiKey'
},
'allowed_origins':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32'
}),
'label': (
'django.db.models.fields.CharField', [], {
'default': "'Default'",
'max_length': '64',
'blank': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Organization']"
}
),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.apitoken': {
'Meta': {
'object_name': 'ApiToken'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'expires_at': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 9, 21, 0, 0)',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'refresh_token': (
'django.db.models.fields.CharField', [], {
'default': "'3d2f286fda4144fba0b5c2a3f6e5c91d19e875a2e67e476d9c134f9bb612bd37'",
'max_length': '64',
'unique': 'True',
'null': 'True'
}
),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'token': (
'django.db.models.fields.CharField', [], {
'default': "'15231c62a9274cc981cd7ffb66a5a21cf144fef022284bb088b5d19e9487a6cf'",
'unique': 'True',
'max_length': '64'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.auditlogentry': {
'Meta': {
'object_name': 'AuditLogEntry'
},
'actor': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_actors'",
'null': 'True',
'to': "orm['sentry.User']"
}
),
'actor_key': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiKey']",
'null': 'True',
'blank': 'True'
}
),
'actor_label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'target_object':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'target_user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_targets'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.authenticator': {
'Meta': {
'unique_together': "(('user', 'type'),)",
'object_name': 'Authenticator',
'db_table': "'auth_authenticator'"
},
'config': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}),
'created_at':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authidentity': {
'Meta': {
'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))",
'object_name': 'AuthIdentity'
},
'auth_provider': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.AuthProvider']"
}
),
'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'last_synced':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_verified':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authprovider': {
'Meta': {
'object_name': 'AuthProvider'
},
'config':
('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_global_access':
('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'default_role':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50'
}),
'default_teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']",
'unique': 'True'
}
),
'provider': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'sync_time':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
})
},
'sentry.broadcast': {
'Meta': {
'object_name': 'Broadcast'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_expires': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 8, 29, 0, 0)',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active':
('django.db.models.fields.BooleanField', [], {
'default': 'True',
'db_index': 'True'
}),
'link': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.CharField', [], {
'max_length': '256'
}),
'title': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'upstream_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'blank': 'True'
}
)
},
'sentry.broadcastseen': {
'Meta': {
'unique_together': "(('broadcast', 'user'),)",
'object_name': 'BroadcastSeen'
},
'broadcast': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Broadcast']"
}
),
'date_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.commit': {
'Meta': {
'unique_together': "(('repository_id', 'key'),)",
'object_name': 'Commit',
'index_together': "(('repository_id', 'date_added'),)"
},
'author': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.CommitAuthor']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'message': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'repository_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {
'unique_together':
"(('organization_id', 'email'), ('organization_id', 'external_id'))",
'object_name':
'CommitAuthor'
},
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'external_id':
('django.db.models.fields.CharField', [], {
'max_length': '164',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.commitfilechange': {
'Meta': {
'unique_together': "(('commit', 'filename'),)",
'object_name': 'CommitFileChange'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'filename': ('django.db.models.fields.CharField', [], {
'max_length': '255'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '1'
})
},
'sentry.counter': {
'Meta': {
'object_name': 'Counter',
'db_table': "'sentry_projectcounter'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'unique': 'True'
}
),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.deploy': {
'Meta': {
'object_name': 'Deploy'
},
'date_finished':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_started':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'environment_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'notified': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'db_index': 'True',
'blank': 'True'
}
),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
)
},
'sentry.distribution': {
'Meta': {
'unique_together': "(('release', 'name'),)",
'object_name': 'Distribution'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.dsymapp': {
'Meta': {
'unique_together': "(('project', 'platform', 'app_id'),)",
'object_name': 'DSymApp'
},
'app_id': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_synced':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'platform':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'sync_id':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
})
},
'sentry.environment': {
'Meta': {
'unique_together': "(('project_id', 'name'), ('organization_id', 'name'))",
'object_name': 'Environment'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Project']",
'through': "orm['sentry.EnvironmentProject']",
'symmetrical': 'False'
}
)
},
'sentry.environmentproject': {
'Meta': {
'unique_together': "(('project', 'environment'),)",
'object_name': 'EnvironmentProject'
},
'environment': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Environment']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.event': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'Event',
'db_table': "'sentry_message'",
'index_together': "(('group_id', 'datetime'),)"
},
'data':
('sentry.db.models.fields.node.NodeField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'db_column': "'message_id'"
}
),
'group_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'time_spent':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'null': 'True'
})
},
'sentry.eventmapping': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'EventMapping'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventprocessingissue': {
'Meta': {
'unique_together': "(('raw_event', 'processing_issue'),)",
'object_name': 'EventProcessingIssue'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'processing_issue': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ProcessingIssue']"
}
),
'raw_event': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.RawEvent']"
}
)
},
'sentry.eventtag': {
'Meta': {
'unique_together':
"(('event_id', 'key_id', 'value_id'),)",
'object_name':
'EventTag',
'index_together':
"(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))"
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {
'unique_together':
"(('project', 'ident'), ('project', 'hash'))",
'object_name':
'EventUser',
'index_together':
"(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))"
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'name':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'username':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
})
},
'sentry.featureadoption': {
'Meta': {
'unique_together': "(('organization', 'feature_id'),)",
'object_name': 'FeatureAdoption'
},
'applicable': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'complete': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_completed':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'feature_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
)
},
'sentry.file': {
'Meta': {
'object_name': 'File'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'legacy_blob'",
'null': 'True',
'to': "orm['sentry.FileBlob']"
}
),
'blobs': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.FileBlob']",
'through': "orm['sentry.FileBlobIndex']",
'symmetrical': 'False'
}
),
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '40',
'null': 'True'
}),
'headers': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.fileblob': {
'Meta': {
'object_name': 'FileBlob'
},
'checksum':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
)
},
'sentry.fileblobindex': {
'Meta': {
'unique_together': "(('file', 'blob', 'offset'),)",
'object_name': 'FileBlobIndex'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.FileBlob']"
}
),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.group': {
'Meta': {
'unique_together': "(('project', 'short_id'),)",
'object_name': 'Group',
'db_table': "'sentry_groupedmessage'",
'index_together': "(('project', 'first_release'),)"
},
'active_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']",
'null': 'True',
'on_delete': 'models.PROTECT'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_public': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'blank': 'True'
}
),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'level': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'null': 'True'
}
),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'resolved_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'short_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'time_spent_count':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'time_spent_total':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'times_seen': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '1',
'db_index': 'True'
}
)
},
'sentry.groupassignee': {
'Meta': {
'object_name': 'GroupAssignee',
'db_table': "'sentry_groupasignee'"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'unique': 'True',
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_assignee_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupbookmark': {
'Meta': {
'unique_together': "(('project', 'user', 'group'),)",
'object_name': 'GroupBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_bookmark_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupcommitresolution': {
'Meta': {
'unique_together': "(('group_id', 'commit_id'),)",
'object_name': 'GroupCommitResolution'
},
'commit_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
})
},
'sentry.groupemailthread': {
'Meta': {
'unique_together': "(('email', 'group'), ('email', 'msgid'))",
'object_name': 'GroupEmailThread'
},
'date': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'msgid': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Project']"
}
)
},
'sentry.grouphash': {
'Meta': {
'unique_together': "(('project', 'hash'),)",
'object_name': 'GroupHash'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'group_tombstone_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True',
'db_index': 'True'
}
),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'state':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
})
},
'sentry.groupmeta': {
'Meta': {
'unique_together': "(('group', 'key'),)",
'object_name': 'GroupMeta'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {
'object_name': 'GroupRedirect'
},
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'previous_group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'unique': 'True'
})
},
'sentry.grouprelease': {
'Meta': {
'unique_together': "(('group_id', 'release_id', 'environment'),)",
'object_name': 'GroupRelease'
},
'environment':
('django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64'
}),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.groupresolution': {
'Meta': {
'object_name': 'GroupResolution'
},
'actor_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'type':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
})
},
'sentry.grouprulestatus': {
'Meta': {
'unique_together': "(('rule', 'group'),)",
'object_name': 'GroupRuleStatus'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_active': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'rule': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Rule']"
}
),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
})
},
'sentry.groupseen': {
'Meta': {
'unique_together': "(('user', 'group'),)",
'object_name': 'GroupSeen'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'db_index': 'False'
}
)
},
'sentry.groupsnooze': {
'Meta': {
'object_name': 'GroupSnooze'
},
'actor_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'count':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'state': ('jsonfield.fields.JSONField', [], {
'null': 'True'
}),
'until': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'user_count':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'user_window':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'window':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
})
},
'sentry.groupsubscription': {
'Meta': {
'unique_together': "(('group', 'user'),)",
'object_name': 'GroupSubscription'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Project']"
}
),
'reason':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.grouptagkey': {
'Meta': {
'unique_together': "(('project', 'group', 'key'),)",
'object_name': 'GroupTagKey'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouptagvalue': {
'Meta': {
'unique_together': "(('group_id', 'key', 'value'),)",
'object_name': 'GroupTagValue',
'db_table': "'sentry_messagefiltervalue'",
'index_together': "(('project_id', 'key', 'value', 'last_seen'),)"
},
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.grouptombstone': {
'Meta': {
'object_name': 'GroupTombstone'
},
'actor_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'level': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '40',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'previous_group_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'unique': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.integration': {
'Meta': {
'unique_together': "(('provider', 'external_id'),)",
'object_name': 'Integration'
},
'config': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'default_auth_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True',
'db_index': 'True'
}
),
'external_id': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organizations': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'integrations'",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationIntegration']",
'to': "orm['sentry.Organization']"
}
),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'integrations'",
'symmetrical': 'False',
'through': "orm['sentry.ProjectIntegration']",
'to': "orm['sentry.Project']"
}
),
'provider': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.lostpasswordhash': {
'Meta': {
'object_name': 'LostPasswordHash'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'unique': 'True'
}
)
},
'sentry.option': {
'Meta': {
'object_name': 'Option'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'last_updated':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {
'object_name': 'Organization'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'org_memberships'",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMember']",
'to': "orm['sentry.User']"
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'slug':
('django.db.models.fields.SlugField', [], {
'unique': 'True',
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.organizationaccessrequest': {
'Meta': {
'unique_together': "(('team', 'member'),)",
'object_name': 'OrganizationAccessRequest'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'member': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationavatar': {
'Meta': {
'object_name': 'OrganizationAvatar'
},
'avatar_type':
('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']",
'unique': 'True',
'null': 'True',
'on_delete': 'models.SET_NULL'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': (
'django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32',
'db_index': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'avatar'",
'unique': 'True',
'to': "orm['sentry.Organization']"
}
)
},
'sentry.organizationintegration': {
'Meta': {
'unique_together': "(('organization', 'integration'),)",
'object_name': 'OrganizationIntegration'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'integration': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Integration']"
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
)
},
'sentry.organizationmember': {
'Meta': {
'unique_together': "(('organization', 'user'), ('organization', 'email'))",
'object_name': 'OrganizationMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': (
'django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'member_set'",
'to': "orm['sentry.Organization']"
}
),
'role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMemberTeam']",
'blank': 'True'
}
),
'token': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'unique': 'True',
'null': 'True',
'blank': 'True'
}
),
'type': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'sentry_orgmember_set'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.organizationmemberteam': {
'Meta': {
'unique_together': "(('team', 'organizationmember'),)",
'object_name': 'OrganizationMemberTeam',
'db_table': "'sentry_organizationmember_teams'"
},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'organizationmember': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationonboardingtask': {
'Meta': {
'unique_together': "(('organization', 'task'),)",
'object_name': 'OrganizationOnboardingTask'
},
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_completed':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.organizationoption': {
'Meta': {
'unique_together': "(('organization', 'key'),)",
'object_name': 'OrganizationOption',
'db_table': "'sentry_organizationoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.processingissue': {
'Meta': {
'unique_together': "(('project', 'checksum', 'type'),)",
'object_name': 'ProcessingIssue'
},
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '40',
'db_index': 'True'
}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '30'
})
},
'sentry.project': {
'Meta': {
'unique_together': "(('team', 'slug'), ('organization', 'slug'))",
'object_name': 'Project'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'first_event': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'flags':
('django.db.models.fields.BigIntegerField', [], {
'default': '0',
'null': 'True'
}),
'forced_color': (
'django.db.models.fields.CharField', [], {
'max_length': '6',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'public': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.projectbookmark': {
'Meta': {
'unique_together': "(('project_id', 'user'),)",
'object_name': 'ProjectBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.projectdsymfile': {
'Meta': {
'unique_together': "(('project', 'uuid'),)",
'object_name': 'ProjectDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'uuid': ('django.db.models.fields.CharField', [], {
'max_length': '36'
})
},
'sentry.projectintegration': {
'Meta': {
'unique_together': "(('project', 'integration'),)",
'object_name': 'ProjectIntegration'
},
'config': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'integration': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Integration']"
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.projectkey': {
'Meta': {
'object_name': 'ProjectKey'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Project']"
}
),
'public_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'rate_limit_count':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'rate_limit_window':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'roles': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'secret_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.projectoption': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'ProjectOption',
'db_table': "'sentry_projectoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.projectplatform': {
'Meta': {
'unique_together': "(('project_id', 'platform'),)",
'object_name': 'ProjectPlatform'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'platform': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.rawevent': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'RawEvent'
},
'data':
('sentry.db.models.fields.node.NodeField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.release': {
'Meta': {
'unique_together': "(('organization', 'version'),)",
'object_name': 'Release'
},
'authors': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'commit_count':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_released':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'date_started':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_commit_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'last_deploy_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'new_groups':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True',
'blank': 'True'
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'releases'",
'symmetrical': 'False',
'through': "orm['sentry.ReleaseProject']",
'to': "orm['sentry.Project']"
}
),
'ref': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'total_deploys':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.releasecommit': {
'Meta': {
'unique_together': "(('release', 'commit'), ('release', 'order'))",
'object_name': 'ReleaseCommit'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseenvironment': {
'Meta': {
'unique_together': "(('organization_id', 'release_id', 'environment_id'),)",
'object_name': 'ReleaseEnvironment',
'db_table': "'sentry_environmentrelease'"
},
'environment_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.releasefile': {
'Meta': {
'unique_together': "(('release', 'ident'),)",
'object_name': 'ReleaseFile'
},
'dist': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Distribution']",
'null': 'True'
}
),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'name': ('django.db.models.fields.TextField', [], {}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseheadcommit': {
'Meta': {
'unique_together': "(('repository_id', 'release'),)",
'object_name': 'ReleaseHeadCommit'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'repository_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.releaseproject': {
'Meta': {
'unique_together': "(('project', 'release'),)",
'object_name': 'ReleaseProject',
'db_table': "'sentry_release_project'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'new_groups': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.repository': {
'Meta': {
'unique_together':
"(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))",
'object_name':
'Repository'
},
'config': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'external_id':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'integration_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True',
'db_index': 'True'
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'provider':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'url': ('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
})
},
'sentry.reprocessingreport': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'ReprocessingReport'
},
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.rule': {
'Meta': {
'object_name': 'Rule'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.savedsearch': {
'Meta': {
'unique_together': "(('project', 'name'),)",
'object_name': 'SavedSearch'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_default': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {
'unique_together': "(('project', 'user'),)",
'object_name': 'SavedSearchUserDefault',
'db_table': "'sentry_savedsearch_userdefault'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'savedsearch': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.SavedSearch']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.scheduleddeletion': {
'Meta': {
'unique_together': "(('app_label', 'model_name', 'object_id'),)",
'object_name': 'ScheduledDeletion'
},
'aborted': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'actor_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'app_label': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_scheduled': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 9, 21, 0, 0)'
}
),
'guid': (
'django.db.models.fields.CharField', [], {
'default': "'403e9e64b9b54d0094e066a091fc8cbb'",
'unique': 'True',
'max_length': '32'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'in_progress': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'model_name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'object_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.scheduledjob': {
'Meta': {
'object_name': 'ScheduledJob'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_scheduled': ('django.db.models.fields.DateTimeField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'payload': ('jsonfield.fields.JSONField', [], {
'default': '{}'
})
},
'sentry.tagkey': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'TagKey',
'db_table': "'sentry_filterkey'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'label':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.tagvalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value'),)",
'object_name': 'TagValue',
'db_table': "'sentry_filtervalue'"
},
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.team': {
'Meta': {
'unique_together': "(('organization', 'slug'),)",
'object_name': 'Team'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.user': {
'Meta': {
'object_name': 'User',
'db_table': "'auth_user'"
},
'date_joined':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'blank': 'True'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'is_managed': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_password_expired':
('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_staff': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'last_active': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'last_login':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'db_column': "'first_name'",
'blank': 'True'
}
),
'password': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'session_nonce':
('django.db.models.fields.CharField', [], {
'max_length': '12',
'null': 'True'
}),
'username':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '128'
})
},
'sentry.useravatar': {
'Meta': {
'object_name': 'UserAvatar'
},
'avatar_type':
('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']",
'unique': 'True',
'null': 'True',
'on_delete': 'models.SET_NULL'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': (
'django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32',
'db_index': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'avatar'",
'unique': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.useremail': {
'Meta': {
'unique_together': "(('user', 'email'),)",
'object_name': 'UserEmail'
},
'date_hash_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_verified': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'emails'",
'to': "orm['sentry.User']"
}
),
'validation_hash': (
'django.db.models.fields.CharField', [], {
'default': "u'9v567thLrN7no7fHOp5vMhDiAw7rEJQF'",
'max_length': '32'
}
)
},
'sentry.useroption': {
'Meta': {
'unique_together': "(('user', 'project', 'key'), ('user', 'organization', 'key'))",
'object_name': 'UserOption'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']",
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'UserReport',
'index_together': "(('project', 'event_id'), ('project', 'date_added'))"
},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'event_user_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.versiondsymfile': {
'Meta': {
'unique_together': "(('dsym_file', 'version', 'build'),)",
'object_name': 'VersionDSymFile'
},
'build':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'dsym_app': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymApp']"
}
),
'dsym_file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ProjectDSymFile']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '32'
})
}
}
complete_apps = ['sentry']
symmetrical = True
| bsd-3-clause |
comiconomenclaturist/libretime | python_apps/media-monitor/mm2/mm2.py | 10 | 1061 | # -*- coding: utf-8 -*-
import sys
import os
from media.saas.launcher import setup_global, launch_instance, setup_logger
from media.monitor.config import MMConfig
def main(global_config, apiclient_config, log_config):
    """Run media monitor as a hosted install.

    Args:
        global_config: path to the media-monitor config file.
        apiclient_config: path to the api-client config file.  Accepted to
            match the command-line invocation in this module; not used
            directly here -- presumably launch_instance reads what it needs
            from global_config (TODO confirm).
        log_config: path to the logging config file.

    Bug fix: the function previously took only (global_config, log_config),
    but its only caller passes three arguments (--config, --apiclient,
    --log), which raised TypeError at startup.
    """
    mm_config = MMConfig(global_config)
    log = setup_logger(log_config, mm_config['media-monitor']['logpath'])
    setup_global(log)
    launch_instance('hosted_install', '/', global_config)
__doc__ = """
Usage:
mm2.py --config=<path> --apiclient=<path> --log=<path>
Options:
-h --help Show this screen
--config=<path> path to mm2 config
--apiclient=<path> path to apiclient config
--log=<path> log config at <path>
"""
if __name__ == '__main__':
    from docopt import docopt
    args = docopt(__doc__, version="mm1.99")
    # Every path argument must point at an existing file before we start.
    for k in ['--apiclient', '--config', '--log']:
        if not os.path.exists(args[k]):
            print("'%s' must exist" % args[k])
            # Bug fix: exit with a non-zero status so callers (init scripts,
            # shells) can detect the failure; the original exited with 0.
            sys.exit(1)
    print("Running mm1.99")
    main(args['--config'], args['--apiclient'], args['--log'])
| agpl-3.0 |
tjsavage/full_nonrel_starter | django/test/_doctest.py | 152 | 100621 | # This is a slightly modified version of the doctest.py that shipped with Python 2.4
# It incorporates changes that have been submitted to the Python ticket tracker
# as ticket #1521051. These changes allow for a DoctestRunner and Doctest base
# class to be specified when constructing a DoctestSuite.
# Module doctest.
# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org).
# Major enhancements and refactoring by:
# Jim Fulton
# Edward Loper
# Provided as-is; use at your own risk; no warranty; no promises; enjoy!
r"""Module doctest -- a framework for running examples in docstrings.
In simplest use, end each module M to be tested with:
def _test():
import doctest
doctest.testmod()
if __name__ == "__main__":
_test()
Then running the module as a script will cause the examples in the
docstrings to get executed and verified:
python M.py
This won't display anything unless an example fails, in which case the
failing example(s) and the cause(s) of the failure(s) are printed to stdout
(why not stderr? because stderr is a lame hack <0.2 wink>), and the final
line of output is "Test failed.".
Run it with the -v switch instead:
python M.py -v
and a detailed report of all examples tried is printed to stdout, along
with assorted summaries at the end.
You can force verbose mode by passing "verbose=True" to testmod, or prohibit
it by passing "verbose=False". In either of those cases, sys.argv is not
examined by testmod.
There are a variety of other ways to run doctests, including integration
with the unittest framework, and support for running non-Python text
files containing doctests. There are also many ways to override parts
of doctest's default behaviors. See the Library Reference Manual for
details.
"""
__docformat__ = 'reStructuredText en'
__all__ = [
# 0, Option Flags
'register_optionflag',
'DONT_ACCEPT_TRUE_FOR_1',
'DONT_ACCEPT_BLANKLINE',
'NORMALIZE_WHITESPACE',
'ELLIPSIS',
'IGNORE_EXCEPTION_DETAIL',
'COMPARISON_FLAGS',
'REPORT_UDIFF',
'REPORT_CDIFF',
'REPORT_NDIFF',
'REPORT_ONLY_FIRST_FAILURE',
'REPORTING_FLAGS',
# 1. Utility Functions
'is_private',
# 2. Example & DocTest
'Example',
'DocTest',
# 3. Doctest Parser
'DocTestParser',
# 4. Doctest Finder
'DocTestFinder',
# 5. Doctest Runner
'DocTestRunner',
'OutputChecker',
'DocTestFailure',
'UnexpectedException',
'DebugRunner',
# 6. Test Functions
'testmod',
'testfile',
'run_docstring_examples',
# 7. Tester
'Tester',
# 8. Unittest Support
'DocTestSuite',
'DocFileSuite',
'set_unittest_reportflags',
# 9. Debugging Support
'script_from_examples',
'testsource',
'debug_src',
'debug',
]
import __future__
import sys, traceback, inspect, linecache, os, re
import unittest, difflib, pdb, tempfile
import warnings
from StringIO import StringIO
if sys.platform.startswith('java'):
    # On Jython, isclass() reports some modules as classes. Patch it.
    def patch_isclass(isclass):
        # Wrap the original isclass: real classes carry a __module__
        # attribute, while the module objects Jython mis-reports do not.
        def patched_isclass(obj):
            return isclass(obj) and hasattr(obj, '__module__')
        return patched_isclass
    # Replace inspect.isclass globally with the filtered version.
    inspect.isclass = patch_isclass(inspect.isclass)
# Don't whine about the deprecated is_private function in this
# module's tests.
warnings.filterwarnings("ignore", "is_private", DeprecationWarning,
__name__, 0)
# There are 4 basic classes:
# - Example: a <source, want> pair, plus an intra-docstring line number.
# - DocTest: a collection of examples, parsed from a docstring, plus
# info about where the docstring came from (name, filename, lineno).
# - DocTestFinder: extracts DocTests from a given object's docstring and
# its contained objects' docstrings.
# - DocTestRunner: runs DocTest cases, and accumulates statistics.
#
# So the basic picture is:
#
# list of:
# +------+ +---------+ +-------+
# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results|
# +------+ +---------+ +-------+
# | Example |
# | ... |
# | Example |
# +---------+
# Option constants.
# Registry mapping option-flag names to their bit values.
OPTIONFLAGS_BY_NAME = {}

def register_optionflag(name):
    """Allocate the next free bit as the option flag for `name`, record it
    in OPTIONFLAGS_BY_NAME, and return it."""
    bit = 1 << len(OPTIONFLAGS_BY_NAME)
    OPTIONFLAGS_BY_NAME[name] = bit
    return bit
DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1')
DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE')
NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE')
ELLIPSIS = register_optionflag('ELLIPSIS')
IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL')
COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 |
DONT_ACCEPT_BLANKLINE |
NORMALIZE_WHITESPACE |
ELLIPSIS |
IGNORE_EXCEPTION_DETAIL)
REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
REPORTING_FLAGS = (REPORT_UDIFF |
REPORT_CDIFF |
REPORT_NDIFF |
REPORT_ONLY_FIRST_FAILURE)
# Special string markers for use in `want` strings:
BLANKLINE_MARKER = '<BLANKLINE>'
ELLIPSIS_MARKER = '...'
######################################################################
## Table of Contents
######################################################################
# 1. Utility Functions
# 2. Example & DocTest -- store test cases
# 3. DocTest Parser -- extracts examples from strings
# 4. DocTest Finder -- extracts test cases from objects
# 5. DocTest Runner -- runs test cases
# 6. Test Functions -- convenient wrappers for testing
# 7. Tester Class -- for backwards compatibility
# 8. Unittest Support
# 9. Debugging Support
# 10. Example Usage
######################################################################
## 1. Utility Functions
######################################################################
def is_private(prefix, base):
    """prefix, base -> true iff name prefix + "." + base is "private".

    Prefix may be an empty string, and base does not contain a period.
    Prefix is ignored (although functions you write conforming to this
    protocol may make use of it).
    Return true iff base begins with an (at least one) underscore, but
    does not both begin and end with (at least) two underscores.

    >>> is_private("a.b", "my_func")
    False
    >>> is_private("____", "_my_func")
    True
    >>> is_private("someclass", "__init__")
    False
    >>> is_private("sometypo", "__init_")
    True
    >>> is_private("x.y.z", "_")
    True
    >>> is_private("_x.y.z", "__")
    False
    >>> is_private("", "") # senseless but consistent
    False
    """
    warnings.warn("is_private is deprecated; it wasn't useful; "
                  "examine DocTestFinder.find() lists instead",
                  DeprecationWarning, stacklevel=2)
    # "Private" means a leading underscore that is not a dunder
    # (leading *and* trailing double underscore).
    if not base.startswith("_"):
        return False
    return not (base.startswith("__") and base.endswith("__"))
def _extract_future_flags(globs):
"""
Return the compiler-flags associated with the future features that
have been imported into the given namespace (globs).
"""
flags = 0
for fname in __future__.all_feature_names:
feature = globs.get(fname, None)
if feature is getattr(__future__, fname):
flags |= feature.compiler_flag
return flags
def _normalize_module(module, depth=2):
    """
    Return the module specified by `module`.  In particular:
      - If `module` is a module, then return module.
      - If `module` is a string, then import and return the
        module with that name.
      - If `module` is None, then return the calling module.
        The calling module is assumed to be the module of
        the stack frame at the given depth in the call stack.
    """
    if inspect.ismodule(module):
        return module
    elif isinstance(module, (str, unicode)):
        # NOTE: `unicode` exists on Python 2 only.
        return __import__(module, globals(), locals(), ["*"])
    elif module is None:
        # sys._getframe(depth) is the caller's frame; depth=2 skips this
        # function and its immediate caller.
        return sys.modules[sys._getframe(depth).f_globals['__name__']]
    else:
        raise TypeError("Expected a module, string, or None")
def _indent(s, indent=4):
"""
Add the given number of space characters to the beginning every
non-blank line in `s`, and return the result.
"""
# This regexp matches the start of non-blank lines:
return re.sub('(?m)^(?!$)', indent*' ', s)
def _exception_traceback(exc_info):
"""
Return a string containing a traceback message for the given
exc_info tuple (as returned by sys.exc_info()).
"""
# Get a traceback message.
excout = StringIO()
exc_type, exc_val, exc_tb = exc_info
traceback.print_exception(exc_type, exc_val, exc_tb, file=excout)
return excout.getvalue()
# Override some StringIO methods.
class _SpoofOut(StringIO):
    """A StringIO subclass used to capture a doctest example's stdout."""
    def getvalue(self):
        result = StringIO.getvalue(self)
        # If anything at all was written, make sure there's a trailing
        # newline.  There's no way for the expected output to indicate
        # that a trailing newline is missing.
        if result and not result.endswith("\n"):
            result += "\n"
        # Prevent softspace from screwing up the next test case, in
        # case they used print with a trailing comma in an example.
        # (softspace is the attribute Python 2's `print` sets on files.)
        if hasattr(self, "softspace"):
            del self.softspace
        return result

    def truncate(self, size=None):
        # Delegate, then drop any stale softspace state as above.
        StringIO.truncate(self, size)
        if hasattr(self, "softspace"):
            del self.softspace
# Worst-case linear-time ellipsis matching.
def _ellipsis_match(want, got):
    """
    Return True iff `got` matches `want`, where each occurrence of
    ELLIPSIS_MARKER ('...') in `want` matches any substring (including
    the empty string) of `got`.

    Essentially the only subtle case:
    >>> _ellipsis_match('aa...aa', 'aaa')
    False
    """
    if ELLIPSIS_MARKER not in want:
        return want == got

    # Find "the real" strings.
    ws = want.split(ELLIPSIS_MARKER)
    assert len(ws) >= 2

    # Deal with exact matches possibly needed at one or both ends.
    startpos, endpos = 0, len(got)
    w = ws[0]
    if w:   # starts with exact match
        if got.startswith(w):
            startpos = len(w)
            del ws[0]
        else:
            return False
    w = ws[-1]
    if w:   # ends with exact match
        if got.endswith(w):
            endpos -= len(w)
            del ws[-1]
        else:
            return False

    if startpos > endpos:
        # Exact end matches required more characters than we have, as in
        # _ellipsis_match('aa...aa', 'aaa')
        return False

    # For the rest, we only need to find the leftmost non-overlapping
    # match for each piece.  If there's no overall match that way alone,
    # there's no overall match period.
    for w in ws:
        # w may be '' at times, if there are consecutive ellipses, or
        # due to an ellipsis at the start or end of `want`.  That's OK.
        # Search for an empty string succeeds, and doesn't change startpos.
        startpos = got.find(w, startpos, endpos)
        if startpos < 0:
            return False
        startpos += len(w)

    return True
def _comment_line(line):
"Return a commented form of the given line"
line = line.rstrip()
if line:
return '# '+line
else:
return '#'
class _OutputRedirectingPdb(pdb.Pdb):
    """
    A specialized version of the python debugger that redirects stdout
    to a given stream when interacting with the user.  Stdout is *not*
    redirected when traced code is executed.
    """
    def __init__(self, out):
        # `out` is the stream that debugger UI output should go to.
        self.__out = out
        self.__debugger_used = False
        pdb.Pdb.__init__(self)

    def set_trace(self):
        # Remember that the debugger was explicitly entered, so
        # set_continue knows it is safe to touch sys.settrace.
        self.__debugger_used = True
        pdb.Pdb.set_trace(self)

    def set_continue(self):
        # Calling set_continue unconditionally would break unit test coverage
        # reporting, as Bdb.set_continue calls sys.settrace(None).
        if self.__debugger_used:
            pdb.Pdb.set_continue(self)

    def trace_dispatch(self, *args):
        # Redirect stdout to the given stream.
        save_stdout = sys.stdout
        sys.stdout = self.__out
        # Call Pdb's trace dispatch method.
        try:
            return pdb.Pdb.trace_dispatch(self, *args)
        finally:
            # Always restore stdout, even if dispatch raises.
            sys.stdout = save_stdout
# [XX] Normalize with respect to os.path.pardir?
def _module_relative_path(module, path):
if not inspect.ismodule(module):
raise TypeError('Expected a module: %r' % module)
if path.startswith('/'):
raise ValueError('Module-relative files may not have absolute paths')
# Find the base directory for the path.
if hasattr(module, '__file__'):
# A normal module/package
basedir = os.path.split(module.__file__)[0]
elif module.__name__ == '__main__':
# An interactive session.
if len(sys.argv)>0 and sys.argv[0] != '':
basedir = os.path.split(sys.argv[0])[0]
else:
basedir = os.curdir
else:
# A module w/o __file__ (this includes builtins)
raise ValueError("Can't resolve paths relative to the module " +
module + " (it has no __file__)")
# Combine the base directory and the path.
return os.path.join(basedir, *(path.split('/')))
######################################################################
## 2. Example & DocTest
######################################################################
## - An "example" is a <source, want> pair, where "source" is a
## fragment of source code, and "want" is the expected output for
## "source." The Example class also includes information about
## where the example was extracted from.
##
## - A "doctest" is a collection of examples, typically extracted from
## a string (such as an object's docstring). The DocTest class also
## includes information about where the string was extracted from.
class Example:
    """
    A single doctest example: a piece of source code together with its
    expected output.

    Attributes:
      source:  One Python statement; always newline-terminated (the
               constructor appends a newline if missing).
      want:    Expected output (stdout, or a traceback on exception).
               Newline-terminated unless empty.
      exc_msg: Expected exception message (compared against the output
               of `traceback.format_exception_only()`), newline-
               terminated; `None` when no exception is expected.
      lineno:  Zero-based line number of the example within the
               enclosing DocTest string.
      indent:  Number of space characters before the example's first
               prompt.
      options: Mapping from option flags to True/False, overriding the
               runner's default optionflags for this example only.
    """
    def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
                 options=None):
        # Normalize the text fields so each ends with a newline
        # (`want` may stay empty; `exc_msg` may stay None).
        self.source = source if source.endswith('\n') else source + '\n'
        if want and not want.endswith('\n'):
            want += '\n'
        self.want = want
        if exc_msg is not None and not exc_msg.endswith('\n'):
            exc_msg += '\n'
        self.exc_msg = exc_msg
        self.lineno = lineno
        self.indent = indent
        self.options = {} if options is None else options
class DocTest:
    """
    A collection of doctest examples that should be run in a single
    namespace.  Each `DocTest` defines the following attributes:
      - examples: the list of examples.
      - globs: The namespace (aka globals) that the examples should
        be run in.
      - name: A name identifying the DocTest (typically, the name of
        the object whose docstring this DocTest was extracted from).
      - filename: The name of the file that this DocTest was extracted
        from, or `None` if the filename is unknown.
      - lineno: The line number within filename where this DocTest
        begins, or `None` if the line number is unavailable.  This
        line number is zero-based, with respect to the beginning of
        the file.
      - docstring: The string that the examples were extracted from,
        or `None` if the string is unavailable.
    """
    def __init__(self, examples, globs, name, filename, lineno, docstring):
        """
        Create a new DocTest containing the given examples.  The
        DocTest's globals are initialized with a copy of `globs`.
        """
        # NOTE: `basestring` is Python 2 only.  This guards against the
        # pre-2.4 API where DocTest was constructed from a docstring.
        assert not isinstance(examples, basestring), \
               "DocTest no longer accepts str; use DocTestParser instead"
        self.examples = examples
        self.docstring = docstring
        # Copy so that running the examples can't mutate the caller's dict.
        self.globs = globs.copy()
        self.name = name
        self.filename = filename
        self.lineno = lineno

    def __repr__(self):
        if len(self.examples) == 0:
            examples = 'no examples'
        elif len(self.examples) == 1:
            examples = '1 example'
        else:
            examples = '%d examples' % len(self.examples)
        return ('<DocTest %s from %s:%s (%s)>' %
                (self.name, self.filename, self.lineno, examples))

    # This lets us sort tests by name:
    def __cmp__(self, other):
        # Python 2 three-way comparison; id() breaks ties so distinct
        # tests with identical names never compare equal.
        if not isinstance(other, DocTest):
            return -1
        return cmp((self.name, self.filename, self.lineno, id(self)),
                   (other.name, other.filename, other.lineno, id(other)))
######################################################################
## 3. DocTestParser
######################################################################
class DocTestParser:
    """
    A class used to parse strings containing doctest examples.
    """
    # This regular expression is used to find doctest examples in a
    # string.  It defines three groups: `source` is the source code
    # (including leading indentation and prompts); `indent` is the
    # indentation of the first (PS1) line of the source code; and
    # `want` is the expected output (including leading indentation).
    _EXAMPLE_RE = re.compile(r'''
        # Source consists of a PS1 line followed by zero or more PS2 lines.
        (?P<source>
            (?:^(?P<indent> [ ]*) >>>    .*)    # PS1 line
            (?:\n           [ ]*  \.\.\. .*)*)  # PS2 lines
        \n?
        # Want consists of any non-blank lines that do not start with PS1.
        (?P<want> (?:(?![ ]*$)    # Not a blank line
                     (?![ ]*>>>)  # Not a line starting with PS1
                     .*$\n?       # But any other line
                  )*)
        ''', re.MULTILINE | re.VERBOSE)

    # A regular expression for handling `want` strings that contain
    # expected exceptions.  It divides `want` into three pieces:
    #    - the traceback header line (`hdr`)
    #    - the traceback stack (`stack`)
    #    - the exception message (`msg`), as generated by
    #      traceback.format_exception_only()
    # `msg` may have multiple lines.  We assume/require that the
    # exception message is the first non-indented line starting with a word
    # character following the traceback header line.
    _EXCEPTION_RE = re.compile(r"""
        # Grab the traceback header.  Different versions of Python have
        # said different things on the first traceback line.
        ^(?P<hdr> Traceback\ \(
            (?: most\ recent\ call\ last
            |   innermost\ last
            ) \) :
        )
        \s* $                # toss trailing whitespace on the header.
        (?P<stack> .*?)      # don't blink: absorb stuff until...
        ^ (?P<msg> \w+ .*)   #     a line *starts* with alphanum.
        """, re.VERBOSE | re.MULTILINE | re.DOTALL)

    # A callable returning a true value iff its argument is a blank line
    # or contains a single comment.
    _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match

    def parse(self, string, name='<string>'):
        """
        Divide the given string into examples and intervening text,
        and return them as a list of alternating Examples and strings.
        Line numbers for the Examples are 0-based.  The optional
        argument `name` is a name identifying this string, and is only
        used for error messages.
        """
        string = string.expandtabs()
        # If all lines begin with the same indentation, then strip it.
        min_indent = self._min_indent(string)
        if min_indent > 0:
            string = '\n'.join([l[min_indent:] for l in string.split('\n')])

        output = []
        # charno/lineno track our position in `string` as we walk the
        # regexp matches in order.
        charno, lineno = 0, 0
        # Find all doctest examples in the string:
        for m in self._EXAMPLE_RE.finditer(string):
            # Add the pre-example text to `output`.
            output.append(string[charno:m.start()])
            # Update lineno (lines before this example)
            lineno += string.count('\n', charno, m.start())
            # Extract info from the regexp match.
            (source, options, want, exc_msg) = \
                     self._parse_example(m, name, lineno)
            # Create an Example, and add it to the list.
            if not self._IS_BLANK_OR_COMMENT(source):
                output.append( Example(source, want, exc_msg,
                                    lineno=lineno,
                                    indent=min_indent+len(m.group('indent')),
                                    options=options) )
            # Update lineno (lines inside this example)
            lineno += string.count('\n', m.start(), m.end())
            # Update charno.
            charno = m.end()
        # Add any remaining post-example text to `output`.
        output.append(string[charno:])
        return output

    def get_doctest(self, string, globs, name, filename, lineno):
        """
        Extract all doctest examples from the given string, and
        collect them into a `DocTest` object.

        `globs`, `name`, `filename`, and `lineno` are attributes for
        the new `DocTest` object.  See the documentation for `DocTest`
        for more information.
        """
        return DocTest(self.get_examples(string, name), globs,
                       name, filename, lineno, string)

    def get_examples(self, string, name='<string>'):
        """
        Extract all doctest examples from the given string, and return
        them as a list of `Example` objects.  Line numbers are
        0-based, because it's most common in doctests that nothing
        interesting appears on the same line as opening triple-quote,
        and so the first interesting line is called \"line 1\" then.

        The optional argument `name` is a name identifying this
        string, and is only used for error messages.
        """
        # parse() returns alternating text and Example objects; keep
        # only the Examples.
        return [x for x in self.parse(string, name)
                if isinstance(x, Example)]

    def _parse_example(self, m, name, lineno):
        """
        Given a regular expression match from `_EXAMPLE_RE` (`m`),
        return a tuple `(source, options, want, exc_msg)`, where
        `source` is the matched example's source code (with prompts and
        indentation stripped); `options` is the example's option
        overrides; `want` is the example's expected output (with
        indentation stripped); and `exc_msg` is the expected exception
        message, or None.

        `name` is the string's name, and `lineno` is the line number
        where the example starts; both are used for error messages.
        """
        # Get the example's indentation level.
        indent = len(m.group('indent'))

        # Divide source into lines; check that they're properly
        # indented; and then strip their indentation & prompts.
        source_lines = m.group('source').split('\n')
        self._check_prompt_blank(source_lines, indent, name, lineno)
        self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
        # indent+4 skips the indentation plus the 4-character prompt
        # ('>>> ' or '... ').
        source = '\n'.join([sl[indent+4:] for sl in source_lines])

        # Divide want into lines; check that it's properly indented; and
        # then strip the indentation.  Spaces before the last newline should
        # be preserved, so plain rstrip() isn't good enough.
        want = m.group('want')
        want_lines = want.split('\n')
        if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
            del want_lines[-1]  # forget final newline & spaces after it
        self._check_prefix(want_lines, ' '*indent, name,
                           lineno + len(source_lines))
        want = '\n'.join([wl[indent:] for wl in want_lines])

        # If `want` contains a traceback message, then extract it.
        m = self._EXCEPTION_RE.match(want)
        if m:
            exc_msg = m.group('msg')
        else:
            exc_msg = None

        # Extract options from the source.
        options = self._find_options(source, name, lineno)

        return source, options, want, exc_msg

    # This regular expression looks for option directives in the
    # source code of an example.  Option directives are comments
    # starting with "doctest:".  Warning: this may give false
    # positives for string-literals that contain the string
    # "#doctest:".  Eliminating these false positives would require
    # actually parsing the string; but we limit them by ignoring any
    # line containing "#doctest:" that is *followed* by a quote mark.
    _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$',
                                      re.MULTILINE)

    def _find_options(self, source, name, lineno):
        """
        Return a dictionary containing option overrides extracted from
        option directives in the given source string.

        `name` is the string's name, and `lineno` is the line number
        where the example starts; both are used for error messages.
        """
        options = {}
        # (note: with the current regexp, this will match at most once:)
        for m in self._OPTION_DIRECTIVE_RE.finditer(source):
            option_strings = m.group(1).replace(',', ' ').split()
            for option in option_strings:
                # Each directive must be '+NAME' or '-NAME' for a
                # registered flag name.
                if (option[0] not in '+-' or
                    option[1:] not in OPTIONFLAGS_BY_NAME):
                    raise ValueError('line %r of the doctest for %s '
                                     'has an invalid option: %r' %
                                     (lineno+1, name, option))
                flag = OPTIONFLAGS_BY_NAME[option[1:]]
                options[flag] = (option[0] == '+')
        if options and self._IS_BLANK_OR_COMMENT(source):
            raise ValueError('line %r of the doctest for %s has an option '
                             'directive on a line with no example: %r' %
                             (lineno, name, source))
        return options

    # This regular expression finds the indentation of every non-blank
    # line in a string.
    _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE)

    def _min_indent(self, s):
        "Return the minimum indentation of any non-blank line in `s`"
        indents = [len(indent) for indent in self._INDENT_RE.findall(s)]
        if len(indents) > 0:
            return min(indents)
        else:
            return 0

    def _check_prompt_blank(self, lines, indent, name, lineno):
        """
        Given the lines of a source string (including prompts and
        leading indentation), check to make sure that every prompt is
        followed by a space character.  If any line is not followed by
        a space character, then raise ValueError.
        """
        for i, line in enumerate(lines):
            # line[indent+3] is the character right after the 3-char
            # prompt ('>>>' or '...'); it must be a space.
            if len(line) >= indent+4 and line[indent+3] != ' ':
                raise ValueError('line %r of the docstring for %s '
                                 'lacks blank after %s: %r' %
                                 (lineno+i+1, name,
                                  line[indent:indent+3], line))

    def _check_prefix(self, lines, prefix, name, lineno):
        """
        Check that every line in the given list starts with the given
        prefix; if any line does not, then raise a ValueError.
        """
        for i, line in enumerate(lines):
            if line and not line.startswith(prefix):
                raise ValueError('line %r of the docstring for %s has '
                                 'inconsistent leading whitespace: %r' %
                                 (lineno+i+1, name, line))
######################################################################
## 4. DocTest Finder
######################################################################
class DocTestFinder:
    """
    A class used to extract the DocTests that are relevant to a given
    object, from its docstring and the docstrings of its contained
    objects. Doctests can currently be extracted from the following
    object types: modules, functions, classes, methods, staticmethods,
    classmethods, and properties.
    """
    def __init__(self, verbose=False, parser=DocTestParser(),
                 recurse=True, _namefilter=None, exclude_empty=True):
        """
        Create a new doctest finder.

        The optional argument `parser` specifies a class or
        function that should be used to create new DocTest objects (or
        objects that implement the same interface as DocTest). The
        signature for this factory function should match the signature
        of the DocTest constructor.

        If the optional argument `recurse` is false, then `find` will
        only examine the given object, and not any contained objects.

        If the optional argument `exclude_empty` is false, then `find`
        will include tests for objects with empty docstrings.
        """
        self._parser = parser
        self._verbose = verbose
        self._recurse = recurse
        self._exclude_empty = exclude_empty
        # _namefilter is undocumented, and exists only for temporary backward-
        # compatibility support of testmod's deprecated isprivate mess.
        self._namefilter = _namefilter
    def find(self, obj, name=None, module=None, globs=None,
             extraglobs=None):
        """
        Return a list of the DocTests that are defined by the given
        object's docstring, or by any of its contained objects'
        docstrings.

        The optional parameter `module` is the module that contains
        the given object. If the module is not specified or is None, then
        the test finder will attempt to automatically determine the
        correct module. The object's module is used:
        - As a default namespace, if `globs` is not specified.
        - To prevent the DocTestFinder from extracting DocTests
          from objects that are imported from other modules.
        - To find the name of the file containing the object.
        - To help find the line number of the object within its file.
        Contained objects whose module does not match `module` are ignored.

        If `module` is False, no attempt to find the module will be made.
        This is obscure, of use mostly in tests: if `module` is False, or
        is None but cannot be found automatically, then all objects are
        considered to belong to the (non-existent) module, so all contained
        objects will (recursively) be searched for doctests.

        The globals for each DocTest is formed by combining `globs`
        and `extraglobs` (bindings in `extraglobs` override bindings
        in `globs`). A new copy of the globals dictionary is created
        for each DocTest. If `globs` is not specified, then it
        defaults to the module's `__dict__`, if specified, or {}
        otherwise. If `extraglobs` is not specified, then it defaults
        to {}.
        """
        # If name was not specified, then extract it from the object.
        if name is None:
            name = getattr(obj, '__name__', None)
            if name is None:
                raise ValueError("DocTestFinder.find: name must be given "
                                 "when obj.__name__ doesn't exist: %r" %
                                 (type(obj),))
        # Find the module that contains the given object (if obj is
        # a module, then module=obj.). Note: this may fail, in which
        # case module will be None.
        if module is False:
            module = None
        elif module is None:
            module = inspect.getmodule(obj)
        # Read the module's source code. This is used by
        # DocTestFinder._find_lineno to find the line number for a
        # given object's docstring.
        try:
            file = inspect.getsourcefile(obj) or inspect.getfile(obj)
            source_lines = linecache.getlines(file)
            if not source_lines:
                source_lines = None
        except TypeError:
            # inspect.getfile raises TypeError for built-in modules/objects.
            source_lines = None
        # Initialize globals, and merge in extraglobs.
        if globs is None:
            if module is None:
                globs = {}
            else:
                globs = module.__dict__.copy()
        else:
            globs = globs.copy()
        if extraglobs is not None:
            globs.update(extraglobs)
        # Recursively explore `obj`, extracting DocTests.  The final {}
        # is the `seen` dict keyed by id(), which guards against cycles.
        tests = []
        self._find(tests, obj, name, module, source_lines, globs, {})
        return tests
    def _filter(self, obj, prefix, base):
        """
        Return true if the given object should not be examined.
        (Backed by the deprecated `_namefilter`/`isprivate` mechanism;
        with the default None filter nothing is ever skipped.)
        """
        return (self._namefilter is not None and
                self._namefilter(prefix, base))
    def _from_module(self, module, object):
        """
        Return true if the given object is defined in the given
        module.
        """
        if module is None:
            return True
        elif inspect.isfunction(object):
            # Python 2 attribute: `func_globals` (renamed `__globals__` in
            # Python 3).  Identity check ties the function to the module.
            return module.__dict__ is object.func_globals
        elif inspect.isclass(object):
            return module.__name__ == object.__module__
        elif inspect.getmodule(object) is not None:
            return module is inspect.getmodule(object)
        elif hasattr(object, '__module__'):
            return module.__name__ == object.__module__
        elif isinstance(object, property):
            return True # [XX] no way to be sure.
        else:
            raise ValueError("object must be a class or function")
    def _find(self, tests, obj, name, module, source_lines, globs, seen):
        """
        Find tests for the given object and any contained objects, and
        add them to `tests`.
        """
        if self._verbose:
            print 'Finding tests in %s' % name
        # If we've already processed this object, then ignore it.
        if id(obj) in seen:
            return
        seen[id(obj)] = 1
        # Find a test for this object, and add it to the list of tests.
        test = self._get_test(obj, name, module, globs, source_lines)
        if test is not None:
            tests.append(test)
        # Look for tests in a module's contained objects.
        if inspect.ismodule(obj) and self._recurse:
            for valname, val in obj.__dict__.items():
                # Check if this contained object should be ignored.
                if self._filter(val, name, valname):
                    continue
                valname = '%s.%s' % (name, valname)
                # Recurse to functions & classes.
                if ((inspect.isfunction(val) or inspect.isclass(val)) and
                    self._from_module(module, val)):
                    self._find(tests, val, valname, module, source_lines,
                               globs, seen)
        # Look for tests in a module's __test__ dictionary.
        if inspect.ismodule(obj) and self._recurse:
            for valname, val in getattr(obj, '__test__', {}).items():
                if not isinstance(valname, basestring):
                    raise ValueError("DocTestFinder.find: __test__ keys "
                                     "must be strings: %r" %
                                     (type(valname),))
                if not (inspect.isfunction(val) or inspect.isclass(val) or
                        inspect.ismethod(val) or inspect.ismodule(val) or
                        isinstance(val, basestring)):
                    raise ValueError("DocTestFinder.find: __test__ values "
                                     "must be strings, functions, methods, "
                                     "classes, or modules: %r" %
                                     (type(val),))
                valname = '%s.__test__.%s' % (name, valname)
                self._find(tests, val, valname, module, source_lines,
                           globs, seen)
        # Look for tests in a class's contained objects.
        if inspect.isclass(obj) and self._recurse:
            for valname, val in obj.__dict__.items():
                # Check if this contained object should be ignored.
                if self._filter(val, name, valname):
                    continue
                # Special handling for staticmethod/classmethod: unwrap
                # to the underlying callable so it can be inspected.
                if isinstance(val, staticmethod):
                    val = getattr(obj, valname)
                if isinstance(val, classmethod):
                    # Py2: bound method's underlying function is `im_func`.
                    val = getattr(obj, valname).im_func
                # Recurse to methods, properties, and nested classes.
                if ((inspect.isfunction(val) or inspect.isclass(val) or
                     isinstance(val, property)) and
                    self._from_module(module, val)):
                    valname = '%s.%s' % (name, valname)
                    self._find(tests, val, valname, module, source_lines,
                               globs, seen)
    def _get_test(self, obj, name, module, globs, source_lines):
        """
        Return a DocTest for the given object, if it defines a docstring;
        otherwise, return None.
        """
        # Extract the object's docstring. If it doesn't have one,
        # then return None (no test for this object).
        if isinstance(obj, basestring):
            docstring = obj
        else:
            try:
                if obj.__doc__ is None:
                    docstring = ''
                else:
                    docstring = obj.__doc__
                    if not isinstance(docstring, basestring):
                        docstring = str(docstring)
            except (TypeError, AttributeError):
                docstring = ''
        # Find the docstring's location in the file.
        lineno = self._find_lineno(obj, source_lines)
        # Don't bother if the docstring is empty.
        if self._exclude_empty and not docstring:
            return None
        # Return a DocTest for this object.
        if module is None:
            filename = None
        else:
            filename = getattr(module, '__file__', module.__name__)
            # Point at the .py source, not the compiled .pyc/.pyo file.
            if filename[-4:] in (".pyc", ".pyo"):
                filename = filename[:-1]
        return self._parser.get_doctest(docstring, globs, name,
                                        filename, lineno)
    def _find_lineno(self, obj, source_lines):
        """
        Return a line number of the given object's docstring. Note:
        this method assumes that the object has a docstring.
        """
        lineno = None
        # Find the line number for modules.
        if inspect.ismodule(obj):
            lineno = 0
        # Find the line number for classes.
        # Note: this could be fooled if a class is defined multiple
        # times in a single file.
        if inspect.isclass(obj):
            if source_lines is None:
                return None
            pat = re.compile(r'^\s*class\s*%s\b' %
                             getattr(obj, '__name__', '-'))
            for i, line in enumerate(source_lines):
                if pat.match(line):
                    lineno = i
                    break
        # Find the line number for functions & methods.
        # (Python 2 attributes: im_func/func_code are __func__/__code__
        # in Python 3.)
        if inspect.ismethod(obj): obj = obj.im_func
        if inspect.isfunction(obj): obj = obj.func_code
        if inspect.istraceback(obj): obj = obj.tb_frame
        if inspect.isframe(obj): obj = obj.f_code
        if inspect.iscode(obj):
            # NOTE(review): if co_firstlineno were ever absent, None - 1
            # would raise TypeError here; in practice every code object
            # has the attribute, so the default is never used.
            lineno = getattr(obj, 'co_firstlineno', None)-1
        # Find the line number where the docstring starts. Assume
        # that it's the first line that begins with a quote mark.
        # Note: this could be fooled by a multiline function
        # signature, where a continuation line begins with a quote
        # mark.
        if lineno is not None:
            if source_lines is None:
                return lineno+1
            pat = re.compile('(^|.*:)\s*\w*("|\')')
            for lineno in range(lineno, len(source_lines)):
                if pat.match(source_lines[lineno]):
                    return lineno
        # We couldn't find the line number.
        return None
######################################################################
## 5. DocTest Runner
######################################################################
class DocTestRunner:
    """
    A class used to run DocTest test cases, and accumulate statistics.
    The `run` method is used to process a single DocTest case. It
    returns a tuple `(f, t)`, where `t` is the number of test cases
    tried, and `f` is the number of test cases that failed.
    >>> tests = DocTestFinder().find(_TestClass)
    >>> runner = DocTestRunner(verbose=False)
    >>> for test in tests:
    ...     print runner.run(test)
    (0, 2)
    (0, 1)
    (0, 2)
    (0, 2)
    The `summarize` method prints a summary of all the test cases that
    have been run by the runner, and returns an aggregated `(f, t)`
    tuple:
    >>> runner.summarize(verbose=1)
    4 items passed all tests:
       2 tests in _TestClass
       2 tests in _TestClass.__init__
       2 tests in _TestClass.get
       1 tests in _TestClass.square
    7 tests in 4 items.
    7 passed and 0 failed.
    Test passed.
    (0, 7)
    The aggregated number of tried examples and failed examples is
    also available via the `tries` and `failures` attributes:
    >>> runner.tries
    7
    >>> runner.failures
    0
    The comparison between expected outputs and actual outputs is done
    by an `OutputChecker`. This comparison may be customized with a
    number of option flags; see the documentation for `testmod` for
    more information. If the option flags are insufficient, then the
    comparison may also be customized by passing a subclass of
    `OutputChecker` to the constructor.
    The test runner's display output can be controlled in two ways.
    First, an output function (`out) can be passed to
    `TestRunner.run`; this function will be called with strings that
    should be displayed. It defaults to `sys.stdout.write`. If
    capturing the output is not sufficient, then the display output
    can be also customized by subclassing DocTestRunner, and
    overriding the methods `report_start`, `report_success`,
    `report_unexpected_exception`, and `report_failure`.
    """
    # This divider string is used to separate failure messages, and to
    # separate sections of the summary.
    DIVIDER = "*" * 70
    def __init__(self, checker=None, verbose=None, optionflags=0):
        """
        Create a new test runner.
        Optional keyword arg `checker` is the `OutputChecker` that
        should be used to compare the expected outputs and actual
        outputs of doctest examples.
        Optional keyword arg 'verbose' prints lots of stuff if true,
        only failures if false; by default, it's true iff '-v' is in
        sys.argv.
        Optional argument `optionflags` can be used to control how the
        test runner compares expected output to actual output, and how
        it displays failures. See the documentation for `testmod` for
        more information.
        """
        self._checker = checker or OutputChecker()
        if verbose is None:
            verbose = '-v' in sys.argv
        self._verbose = verbose
        self.optionflags = optionflags
        self.original_optionflags = optionflags
        # Keep track of the examples we've run.
        self.tries = 0
        self.failures = 0
        # Maps test name -> (failures, tries), aggregated across runs.
        self._name2ft = {}
        # Create a fake output target for capturing doctest output.
        self._fakeout = _SpoofOut()
    #/////////////////////////////////////////////////////////////////
    # Reporting methods
    #/////////////////////////////////////////////////////////////////
    def report_start(self, out, test, example):
        """
        Report that the test runner is about to process the given
        example. (Only displays a message if verbose=True)
        """
        if self._verbose:
            if example.want:
                out('Trying:\n' + _indent(example.source) +
                    'Expecting:\n' + _indent(example.want))
            else:
                out('Trying:\n' + _indent(example.source) +
                    'Expecting nothing\n')
    def report_success(self, out, test, example, got):
        """
        Report that the given example ran successfully. (Only
        displays a message if verbose=True)
        """
        if self._verbose:
            out("ok\n")
    def report_failure(self, out, test, example, got):
        """
        Report that the given example failed.
        """
        out(self._failure_header(test, example) +
            self._checker.output_difference(example, got, self.optionflags))
    def report_unexpected_exception(self, out, test, example, exc_info):
        """
        Report that the given example raised an unexpected exception.
        """
        out(self._failure_header(test, example) +
            'Exception raised:\n' + _indent(_exception_traceback(exc_info)))
    def _failure_header(self, test, example):
        """
        Return the divider-plus-location header printed before any
        failure report, identifying the file, line, and failing source.
        """
        out = [self.DIVIDER]
        if test.filename:
            if test.lineno is not None and example.lineno is not None:
                lineno = test.lineno + example.lineno + 1
            else:
                lineno = '?'
            out.append('File "%s", line %s, in %s' %
                       (test.filename, lineno, test.name))
        else:
            out.append('Line %s, in %s' % (example.lineno+1, test.name))
        out.append('Failed example:')
        source = example.source
        out.append(_indent(source))
        return '\n'.join(out)
    #/////////////////////////////////////////////////////////////////
    # DocTest Running
    #/////////////////////////////////////////////////////////////////
    def __run(self, test, compileflags, out):
        """
        Run the examples in `test`. Write the outcome of each example
        with one of the `DocTestRunner.report_*` methods, using the
        writer function `out`. `compileflags` is the set of compiler
        flags that should be used to execute examples. Return a tuple
        `(f, t)`, where `t` is the number of examples tried, and `f`
        is the number of examples that failed. The examples are run
        in the namespace `test.globs`.

        Only called from `run()`, which has already installed
        `self.debugger` and redirected stdout to `self._fakeout`.
        """
        # Keep track of the number of failures and tries.
        failures = tries = 0
        # Save the option flags (since option directives can be used
        # to modify them).
        original_optionflags = self.optionflags
        SUCCESS, FAILURE, BOOM = range(3) # `outcome` state
        check = self._checker.check_output
        # Process each example.
        for examplenum, example in enumerate(test.examples):
            # If REPORT_ONLY_FIRST_FAILURE is set, then suppress
            # reporting after the first failure.
            quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
                     failures > 0)
            # Merge in the example's options: reset to the originals,
            # then apply this example's per-example directives.
            self.optionflags = original_optionflags
            if example.options:
                for (optionflag, val) in example.options.items():
                    if val:
                        self.optionflags |= optionflag
                    else:
                        self.optionflags &= ~optionflag
            # Record that we started this example.
            tries += 1
            if not quiet:
                self.report_start(out, test, example)
            # Use a special filename for compile(), so we can retrieve
            # the source code during interactive debugging (see
            # __patched_linecache_getlines).
            filename = '<doctest %s[%d]>' % (test.name, examplenum)
            # Run the example in the given context (globs), and record
            # any exception that gets raised. (But don't intercept
            # keyboard interrupts.)
            try:
                # Don't blink! This is where the user's code gets run.
                # (Python 2 `exec ... in ...` statement.)
                exec compile(example.source, filename, "single",
                             compileflags, 1) in test.globs
                self.debugger.set_continue() # ==== Example Finished ====
                exception = None
            except KeyboardInterrupt:
                raise
            except:
                exception = sys.exc_info()
                self.debugger.set_continue() # ==== Example Finished ====
            got = self._fakeout.getvalue() # the actual output
            self._fakeout.truncate(0)
            outcome = FAILURE # guilty until proved innocent or insane
            # If the example executed without raising any exceptions,
            # verify its output.
            if exception is None:
                if check(example.want, got, self.optionflags):
                    outcome = SUCCESS
            # The example raised an exception: check if it was expected.
            else:
                # NOTE(review): this re-reads the same exception already
                # captured in `exception` above (still current in Py2).
                exc_info = sys.exc_info()
                exc_msg = traceback.format_exception_only(*exc_info[:2])[-1]
                if not quiet:
                    got += _exception_traceback(exc_info)
                # If `example.exc_msg` is None, then we weren't expecting
                # an exception.
                if example.exc_msg is None:
                    outcome = BOOM
                # We expected an exception: see whether it matches.
                elif check(example.exc_msg, exc_msg, self.optionflags):
                    outcome = SUCCESS
                # Another chance if they didn't care about the detail.
                elif self.optionflags & IGNORE_EXCEPTION_DETAIL:
                    # Compare only the exception class part (up to ':').
                    m1 = re.match(r'[^:]*:', example.exc_msg)
                    m2 = re.match(r'[^:]*:', exc_msg)
                    if m1 and m2 and check(m1.group(0), m2.group(0),
                                           self.optionflags):
                        outcome = SUCCESS
            # Report the outcome.
            if outcome is SUCCESS:
                if not quiet:
                    self.report_success(out, test, example, got)
            elif outcome is FAILURE:
                if not quiet:
                    self.report_failure(out, test, example, got)
                failures += 1
            elif outcome is BOOM:
                if not quiet:
                    self.report_unexpected_exception(out, test, example,
                                                     exc_info)
                failures += 1
            else:
                assert False, ("unknown outcome", outcome)
        # Restore the option flags (in case they were modified)
        self.optionflags = original_optionflags
        # Record and return the number of failures and tries.
        self.__record_outcome(test, failures, tries)
        return failures, tries
    def __record_outcome(self, test, f, t):
        """
        Record the fact that the given DocTest (`test`) generated `f`
        failures out of `t` tried examples.
        """
        f2, t2 = self._name2ft.get(test.name, (0,0))
        self._name2ft[test.name] = (f+f2, t+t2)
        self.failures += f
        self.tries += t
    __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
                                         r'(?P<name>[\w\.]+)'
                                         r'\[(?P<examplenum>\d+)\]>$')
    def __patched_linecache_getlines(self, filename, module_globals=None):
        # Serve the example's own source when the debugger asks for one
        # of our synthetic '<doctest ...>' filenames; defer to the real
        # linecache.getlines otherwise.
        m = self.__LINECACHE_FILENAME_RE.match(filename)
        if m and m.group('name') == self.test.name:
            example = self.test.examples[int(m.group('examplenum'))]
            return example.source.splitlines(True)
        else:
            # linecache.getlines grew the `module_globals` parameter in
            # Python 2.5; call the saved original with matching arity.
            if sys.version_info < (2, 5, 0):
                return self.save_linecache_getlines(filename)
            else:
                return self.save_linecache_getlines(filename, module_globals)
    def run(self, test, compileflags=None, out=None, clear_globs=True):
        """
        Run the examples in `test`, and display the results using the
        writer function `out`.
        The examples are run in the namespace `test.globs`. If
        `clear_globs` is true (the default), then this namespace will
        be cleared after the test runs, to help with garbage
        collection. If you would like to examine the namespace after
        the test completes, then use `clear_globs=False`.
        `compileflags` gives the set of flags that should be used by
        the Python compiler when running the examples. If not
        specified, then it will default to the set of future-import
        flags that apply to `globs`.
        The output of each example is checked using
        `DocTestRunner.check_output`, and the results are formatted by
        the `DocTestRunner.report_*` methods.
        """
        self.test = test
        if compileflags is None:
            compileflags = _extract_future_flags(test.globs)
        save_stdout = sys.stdout
        if out is None:
            out = save_stdout.write
        sys.stdout = self._fakeout
        # Patch pdb.set_trace to restore sys.stdout during interactive
        # debugging (so it's not still redirected to self._fakeout).
        # Note that the interactive output will go to *our*
        # save_stdout, even if that's not the real sys.stdout; this
        # allows us to write test cases for the set_trace behavior.
        save_set_trace = pdb.set_trace
        self.debugger = _OutputRedirectingPdb(save_stdout)
        self.debugger.reset()
        pdb.set_trace = self.debugger.set_trace
        # Patch linecache.getlines, so we can see the example's source
        # when we're inside the debugger.
        self.save_linecache_getlines = linecache.getlines
        linecache.getlines = self.__patched_linecache_getlines
        try:
            return self.__run(test, compileflags, out)
        finally:
            # Undo all the monkey-patching, even on failure, then
            # optionally release the test's globals.
            sys.stdout = save_stdout
            pdb.set_trace = save_set_trace
            linecache.getlines = self.save_linecache_getlines
            if clear_globs:
                test.globs.clear()
    #/////////////////////////////////////////////////////////////////
    # Summarization
    #/////////////////////////////////////////////////////////////////
    def summarize(self, verbose=None):
        """
        Print a summary of all the test cases that have been run by
        this DocTestRunner, and return a tuple `(f, t)`, where `f` is
        the total number of failed examples, and `t` is the total
        number of tried examples.
        The optional `verbose` argument controls how detailed the
        summary is. If the verbosity is not specified, then the
        DocTestRunner's verbosity is used.
        """
        if verbose is None:
            verbose = self._verbose
        notests = []
        passed = []
        failed = []
        totalt = totalf = 0
        # Bucket every recorded test by its outcome.
        for x in self._name2ft.items():
            name, (f, t) = x
            assert f <= t
            totalt += t
            totalf += f
            if t == 0:
                notests.append(name)
            elif f == 0:
                passed.append( (name, t) )
            else:
                failed.append(x)
        if verbose:
            if notests:
                print len(notests), "items had no tests:"
                notests.sort()
                for thing in notests:
                    print " ", thing
            if passed:
                print len(passed), "items passed all tests:"
                passed.sort()
                for thing, count in passed:
                    print " %3d tests in %s" % (count, thing)
        # Failures are always reported, regardless of verbosity.
        if failed:
            print self.DIVIDER
            print len(failed), "items had failures:"
            failed.sort()
            for thing, (f, t) in failed:
                print " %3d of %3d in %s" % (f, t, thing)
        if verbose:
            print totalt, "tests in", len(self._name2ft), "items."
            print totalt - totalf, "passed and", totalf, "failed."
        if totalf:
            print "***Test Failed***", totalf, "failures."
        elif verbose:
            print "Test passed."
        return totalf, totalt
    #/////////////////////////////////////////////////////////////////
    # Backward compatibility cruft to maintain doctest.master.
    #/////////////////////////////////////////////////////////////////
    def merge(self, other):
        # Fold another runner's per-test tallies into this runner,
        # summing (failures, tries) when a test name appears in both.
        d = self._name2ft
        for name, (f, t) in other._name2ft.items():
            if name in d:
                print "*** DocTestRunner.merge: '" + name + "' in both" \
                    " testers; summing outcomes."
                f2, t2 = d[name]
                f = f + f2
                t = t + t2
            d[name] = f, t
class OutputChecker:
    """
    A class used to check whether the actual output from a doctest
    example matches the expected output. `OutputChecker` defines two
    methods: `check_output`, which compares a given pair of outputs,
    and returns true if they match; and `output_difference`, which
    returns a string describing the differences between two outputs.
    """
    def check_output(self, want, got, optionflags):
        """
        Return True iff the actual output from an example (`got`)
        matches the expected output (`want`). These strings are
        always considered to match if they are identical; but
        depending on what option flags the test runner is using,
        several non-exact match types are also possible. See the
        documentation for `TestRunner` for more information about
        option flags.
        """
        # Handle the common case first, for efficiency:
        # if they're string-identical, always return true.
        if got == want:
            return True
        # The values True and False replaced 1 and 0 as the return
        # value for boolean comparisons in Python 2.3.
        if not (optionflags & DONT_ACCEPT_TRUE_FOR_1):
            if (got,want) == ("True\n", "1\n"):
                return True
            if (got,want) == ("False\n", "0\n"):
                return True
        # <BLANKLINE> can be used as a special sequence to signify a
        # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
        # NOTE: the rewrites below are cumulative -- later checks see
        # the transformed `want`/`got` -- so the order of these flag
        # blocks is significant.
        if not (optionflags & DONT_ACCEPT_BLANKLINE):
            # Replace <BLANKLINE> in want with a blank line.
            want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER),
                          '', want)
            # If a line in got contains only spaces, then remove the
            # spaces.
            got = re.sub('(?m)^\s*?$', '', got)
            if got == want:
                return True
        # This flag causes doctest to ignore any differences in the
        # contents of whitespace strings. Note that this can be used
        # in conjunction with the ELLIPSIS flag.
        if optionflags & NORMALIZE_WHITESPACE:
            got = ' '.join(got.split())
            want = ' '.join(want.split())
            if got == want:
                return True
        # The ELLIPSIS flag says to let the sequence "..." in `want`
        # match any substring in `got`.
        if optionflags & ELLIPSIS:
            if _ellipsis_match(want, got):
                return True
        # We didn't find any match; return false.
        return False
    # Should we do a fancy diff?
    def _do_a_fancy_diff(self, want, got, optionflags):
        """
        Return True if a difflib-based diff should be used to present
        the mismatch between `want` and `got`, given `optionflags`.
        """
        # Not unless they asked for a fancy diff.
        if not optionflags & (REPORT_UDIFF |
                              REPORT_CDIFF |
                              REPORT_NDIFF):
            return False
        # If expected output uses ellipsis, a meaningful fancy diff is
        # too hard ... or maybe not. In two real-life failures Tim saw,
        # a diff was a major help anyway, so this is commented out.
        # [todo] _ellipsis_match() knows which pieces do and don't match,
        # and could be the basis for a kick-ass diff in this case.
        ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want:
        ##    return False
        # ndiff does intraline difference marking, so can be useful even
        # for 1-line differences.
        if optionflags & REPORT_NDIFF:
            return True
        # The other diff types need at least a few lines to be helpful.
        return want.count('\n') > 2 and got.count('\n') > 2
    def output_difference(self, example, got, optionflags):
        """
        Return a string describing the differences between the
        expected output for a given example (`example`) and the actual
        output (`got`). `optionflags` is the set of option flags used
        to compare `want` and `got`.
        """
        want = example.want
        # If <BLANKLINE>s are being used, then replace blank lines
        # with <BLANKLINE> in the actual output string, so the two
        # sides line up visually.
        if not (optionflags & DONT_ACCEPT_BLANKLINE):
            got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got)
        # Check if we should use diff.
        if self._do_a_fancy_diff(want, got, optionflags):
            # Split want & got into lines.
            want_lines = want.splitlines(True) # True == keep line ends
            got_lines = got.splitlines(True)
            # Use difflib to find their differences.
            if optionflags & REPORT_UDIFF:
                diff = difflib.unified_diff(want_lines, got_lines, n=2)
                diff = list(diff)[2:] # strip the diff header
                kind = 'unified diff with -expected +actual'
            elif optionflags & REPORT_CDIFF:
                diff = difflib.context_diff(want_lines, got_lines, n=2)
                diff = list(diff)[2:] # strip the diff header
                kind = 'context diff with expected followed by actual'
            elif optionflags & REPORT_NDIFF:
                engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)
                diff = list(engine.compare(want_lines, got_lines))
                kind = 'ndiff with -expected +actual'
            else:
                assert 0, 'Bad diff option'
            # Remove trailing whitespace on diff output.
            diff = [line.rstrip() + '\n' for line in diff]
            return 'Differences (%s):\n' % kind + _indent(''.join(diff))
        # If we're not using diff, then simply list the expected
        # output followed by the actual output.
        if want and got:
            return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got))
        elif want:
            return 'Expected:\n%sGot nothing\n' % _indent(want)
        elif got:
            return 'Expected nothing\nGot:\n%s' % _indent(got)
        else:
            return 'Expected nothing\nGot nothing\n'
class DocTestFailure(Exception):
    """Raised (in debugging mode) when a doctest example's actual output
    does not match its expected output.

    Instance attributes:
        test    -- the DocTest object being run
        example -- the Example object that failed
        got     -- the actual output
    """
    def __init__(self, test, example, got):
        self.test, self.example, self.got = test, example, got

    def __str__(self):
        return str(self.test)
class UnexpectedException(Exception):
    """Raised (in debugging mode) when a doctest example raises an
    exception that was not expected.

    Instance attributes:
        test     -- the DocTest object being run
        example  -- the Example object that failed
        exc_info -- the exception info tuple, as from sys.exc_info()
    """
    def __init__(self, test, example, exc_info):
        self.test, self.example, self.exc_info = test, example, exc_info

    def __str__(self):
        return str(self.test)
class DebugRunner(DocTestRunner):
    r"""Run doc tests but raise an exception as soon as there is a failure.
    If an unexpected exception occurs, an UnexpectedException is raised.
    It contains the test, the example, and the original exception:
    >>> runner = DebugRunner(verbose=False)
    >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
    ...                                    {}, 'foo', 'foo.py', 0)
    >>> try:
    ...     runner.run(test)
    ... except UnexpectedException, failure:
    ...     pass
    >>> failure.test is test
    True
    >>> failure.example.want
    '42\n'
    >>> exc_info = failure.exc_info
    >>> raise exc_info[0], exc_info[1], exc_info[2]
    Traceback (most recent call last):
    ...
    KeyError
    We wrap the original exception to give the calling application
    access to the test and example information.
    If the output doesn't match, then a DocTestFailure is raised:
    >>> test = DocTestParser().get_doctest('''
    ...      >>> x = 1
    ...      >>> x
    ...      2
    ...      ''', {}, 'foo', 'foo.py', 0)
    >>> try:
    ...    runner.run(test)
    ... except DocTestFailure, failure:
    ...    pass
    DocTestFailure objects provide access to the test:
    >>> failure.test is test
    True
    As well as to the example:
    >>> failure.example.want
    '2\n'
    and the actual output:
    >>> failure.got
    '1\n'
    If a failure or error occurs, the globals are left intact:
    >>> del test.globs['__builtins__']
    >>> test.globs
    {'x': 1}
    >>> test = DocTestParser().get_doctest('''
    ...      >>> x = 2
    ...      >>> raise KeyError
    ...      ''', {}, 'foo', 'foo.py', 0)
    >>> runner.run(test)
    Traceback (most recent call last):
    ...
    UnexpectedException: <DocTest foo from foo.py:0 (2 examples)>
    >>> del test.globs['__builtins__']
    >>> test.globs
    {'x': 2}
    But the globals are cleared if there is no error:
    >>> test = DocTestParser().get_doctest('''
    ...      >>> x = 2
    ...      ''', {}, 'foo', 'foo.py', 0)
    >>> runner.run(test)
    (0, 1)
    >>> test.globs
    {}
    """
    def run(self, test, compileflags=None, out=None, clear_globs=True):
        # Always pass clear_globs=False to the base class so the test's
        # globals survive into any DocTestFailure/UnexpectedException
        # raised by the report_* hooks; clear them only on a clean return.
        r = DocTestRunner.run(self, test, compileflags, out, False)
        if clear_globs:
            test.globs.clear()
        return r
    def report_unexpected_exception(self, out, test, example, exc_info):
        # Abort the run immediately, surfacing the original exc_info.
        raise UnexpectedException(test, example, exc_info)
    def report_failure(self, out, test, example, got):
        # Abort the run immediately on the first output mismatch.
        raise DocTestFailure(test, example, got)
######################################################################
## 6. Test Functions
######################################################################
# These should be backwards compatible.
# For backward compatibility, a global instance of a DocTestRunner
# class, updated by testmod.  It accumulates (failures, tries) tallies
# across multiple testmod() calls so that master.summarize() can
# report a grand total.
master = None
def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None,
            report=True, optionflags=0, extraglobs=None,
            raise_on_error=False, exclude_empty=False):
    """m=None, name=None, globs=None, verbose=None, isprivate=None,
    report=True, optionflags=0, extraglobs=None, raise_on_error=False,
    exclude_empty=False

    Run the doctest examples found in docstrings reachable from module
    `m` (or from the __main__ module when `m` is omitted), starting
    with m.__doc__, and return a `(#failures, #tests)` pair.

    Entries of an `m.__test__` dict are also tested: it maps names to
    functions, classes and strings; function and class docstrings are
    tested even if the name is private, and strings are treated
    directly as docstrings.

    Keyword arguments:
      name           -- name for the module; defaults to m.__name__.
      globs          -- dict used as the globals for the examples; a
                        fresh copy is made for each docstring.
                        Defaults to m.__dict__.
      extraglobs     -- dict merged into the globals of each run
                        (new in 2.4).
      verbose        -- print everything if true, only failures if
                        false; defaults to ('-v' in sys.argv).
      report         -- print a summary at the end when true; in
                        verbose mode the summary is detailed.
      optionflags    -- or'ed together option constants such as
                        ELLIPSIS, NORMALIZE_WHITESPACE, REPORT_UDIFF,
                        REPORT_ONLY_FIRST_FAILURE (new in 2.3).
      raise_on_error -- raise on the first failure or unexpected
                        exception, enabling post-mortem debugging.
      isprivate      -- deprecated (Python 2.4) name filter; examine
                        DocTestFinder.find() lists instead.
      exclude_empty  -- skip objects whose docstrings are empty.

    Results are merged into the module-global `master` runner;
    doctest.master.summarize(verbose) can be invoked later for an
    aggregate report (useful together with report=0).
    """
    global master

    if isprivate is not None:
        warnings.warn("the isprivate argument is deprecated; "
                      "examine DocTestFinder.find() lists instead",
                      DeprecationWarning)

    # Default to the __main__ module (the common "script run" case);
    # m stays None when not invoked from the command line, and the
    # ismodule check below turns that into a TypeError.
    if m is None:
        m = sys.modules.get('__main__')
    if not inspect.ismodule(m):
        raise TypeError("testmod: module required; %r" % (m,))
    if name is None:
        name = m.__name__

    # Collect every doctest reachable from the module, then run each
    # one with the appropriate runner flavour.
    test_finder = DocTestFinder(_namefilter=isprivate,
                                exclude_empty=exclude_empty)
    if raise_on_error:
        test_runner = DebugRunner(verbose=verbose, optionflags=optionflags)
    else:
        test_runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
    for a_test in test_finder.find(m, name, globs=globs,
                                   extraglobs=extraglobs):
        test_runner.run(a_test)

    if report:
        test_runner.summarize()

    # Fold this run's tallies into the backward-compatible global.
    if master is None:
        master = test_runner
    else:
        master.merge(test_runner)

    return test_runner.failures, test_runner.tries
def testfile(filename, module_relative=True, name=None, package=None,
             globs=None, verbose=None, report=True, optionflags=0,
             extraglobs=None, raise_on_error=False, parser=DocTestParser()):
    """
    Test examples in the given file. Return (#failures, #tests).
    Optional keyword arg "module_relative" specifies how filenames
    should be interpreted:
      - If "module_relative" is True (the default), then "filename"
        specifies a module-relative path. By default, this path is
        relative to the calling module's directory; but if the
        "package" argument is specified, then it is relative to that
        package. To ensure os-independence, "filename" should use
        "/" characters to separate path segments, and should not
        be an absolute path (i.e., it may not begin with "/").
      - If "module_relative" is False, then "filename" specifies an
        os-specific path. The path may be absolute or relative (to
        the current working directory).
    Optional keyword arg "name" gives the name of the test; by default
    use the file's basename.
    Optional keyword argument "package" is a Python package or the
    name of a Python package whose directory should be used as the
    base directory for a module relative filename. If no package is
    specified, then the calling module's directory is used as the base
    directory for module relative filenames. It is an error to
    specify "package" if "module_relative" is False.
    Optional keyword arg "globs" gives a dict to be used as the globals
    when executing examples; by default, use {}. A copy of this dict
    is actually used for each docstring, so that each docstring's
    examples start with a clean slate.
    Optional keyword arg "extraglobs" gives a dictionary that should be
    merged into the globals that are used to execute examples. By
    default, no extra globals are used.
    Optional keyword arg "verbose" prints lots of stuff if true, prints
    only failures if false; by default, it's true iff "-v" is in sys.argv.
    Optional keyword arg "report" prints a summary at the end when true,
    else prints nothing at the end. In verbose mode, the summary is
    detailed, else very brief (in fact, empty if all tests passed).
    Optional keyword arg "optionflags" or's together module constants,
    and defaults to 0. Possible values (see the docs for details):
        DONT_ACCEPT_TRUE_FOR_1
        DONT_ACCEPT_BLANKLINE
        NORMALIZE_WHITESPACE
        ELLIPSIS
        IGNORE_EXCEPTION_DETAIL
        REPORT_UDIFF
        REPORT_CDIFF
        REPORT_NDIFF
        REPORT_ONLY_FIRST_FAILURE
    Optional keyword arg "raise_on_error" raises an exception on the
    first unexpected exception or failure. This allows failures to be
    post-mortem debugged.
    Optional keyword arg "parser" specifies a DocTestParser (or
    subclass) that should be used to extract tests from the files.
    Advanced tomfoolery: testmod runs methods of a local instance of
    class doctest.Tester, then merges the results into (or creates)
    global Tester instance doctest.master. Methods of doctest.master
    can be called directly too, if you want to do something unusual.
    Passing report=0 to testmod is especially useful then, to delay
    displaying a summary. Invoke doctest.master.summarize(verbose)
    when you're done fiddling.
    """
    global master
    if package and not module_relative:
        raise ValueError("Package may only be specified for module-"
                         "relative paths.")
    # Relativize the path
    if module_relative:
        package = _normalize_module(package)
        filename = _module_relative_path(package, filename)
    # If no name was given, then use the file's name.
    if name is None:
        name = os.path.basename(filename)
    # Assemble the globals.
    if globs is None:
        globs = {}
    else:
        globs = globs.copy()
    if extraglobs is not None:
        globs.update(extraglobs)
    if raise_on_error:
        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
    else:
        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
    # Read the file, convert it to a test, and run it.  Close the file
    # handle explicitly instead of relying on garbage collection, so the
    # descriptor is not leaked on long runs or alternative interpreters.
    f = open(filename)
    try:
        s = f.read()
    finally:
        f.close()
    test = parser.get_doctest(s, globs, name, filename, 0)
    runner.run(test)
    if report:
        runner.summarize()
    if master is None:
        master = runner
    else:
        master.merge(runner)
    return runner.failures, runner.tries
def run_docstring_examples(f, globs, verbose=False, name="NoName",
                           compileflags=None, optionflags=0):
    """Run the doctest examples attached directly to object `f`.

    `globs` supplies the globals used to execute the examples, and
    `name` labels the tests in failure messages.  When `verbose` is
    true, output is produced even if every example passes.
    `compileflags` gives the compiler flags for running the examples
    (defaulting to the future-import flags that apply to `globs`), and
    `optionflags` configures matching/reporting as for `testmod`.
    """
    # Only examine `f` itself -- recursion into contained objects is off.
    finder = DocTestFinder(verbose=verbose, recurse=False)
    runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
    for doctest_case in finder.find(f, name, globs=globs):
        runner.run(doctest_case, compileflags=compileflags)
######################################################################
## 7. Tester
######################################################################
# This is provided only for backwards compatibility. It's not
# actually used in any way.
class Tester:
    """Deprecated pre-2.4 facade over DocTestFinder/DocTestRunner.

    Kept only for backward compatibility; new code should use
    DocTestRunner (and DocTestFinder) directly.
    """
    def __init__(self, mod=None, globs=None, verbose=None,
                 isprivate=None, optionflags=0):
        # Warn at construction time: the whole class is deprecated.
        warnings.warn("class Tester is deprecated; "
                      "use class doctest.DocTestRunner instead",
                      DeprecationWarning, stacklevel=2)
        # At least one source of globals (a module or an explicit dict)
        # is required.
        if mod is None and globs is None:
            raise TypeError("Tester.__init__: must specify mod or globs")
        if mod is not None and not inspect.ismodule(mod):
            raise TypeError("Tester.__init__: mod must be a module; %r" %
                            (mod,))
        if globs is None:
            globs = mod.__dict__
        self.globs = globs
        self.verbose = verbose
        self.isprivate = isprivate
        self.optionflags = optionflags
        # A single finder/runner pair is shared by all run* methods so
        # results accumulate in self.testrunner.
        self.testfinder = DocTestFinder(_namefilter=isprivate)
        self.testrunner = DocTestRunner(verbose=verbose,
                                        optionflags=optionflags)
    def runstring(self, s, name):
        # Run the examples found in string `s`; returns (#failures, #tries).
        test = DocTestParser().get_doctest(s, self.globs, name, None, None)
        if self.verbose:
            print "Running string", name
        (f,t) = self.testrunner.run(test)
        if self.verbose:
            print f, "of", t, "examples failed in string", name
        return (f,t)
    def rundoc(self, object, name=None, module=None):
        # Run every doctest found in `object`, summing the per-test
        # (failures, tries) counts into a single pair.
        f = t = 0
        tests = self.testfinder.find(object, name, module=module,
                                     globs=self.globs)
        for test in tests:
            (f2, t2) = self.testrunner.run(test)
            (f,t) = (f+f2, t+t2)
        return (f,t)
    def rundict(self, d, name, module=None):
        # Wrap the dict's contents in a synthetic module so the finder
        # can traverse it like a real one.
        import new
        m = new.module(name)
        m.__dict__.update(d)
        if module is None:
            # NOTE(review): False (rather than None) appears to tell the
            # finder to skip its object-belongs-to-module check -- verify
            # against DocTestFinder.find.
            module = False
        return self.rundoc(m, name, module)
    def run__test__(self, d, name):
        # Run the tests declared in a __test__-style dictionary `d`.
        import new
        m = new.module(name)
        m.__test__ = d
        return self.rundoc(m, name)
    def summarize(self, verbose=None):
        # Delegate to the shared runner's summary.
        return self.testrunner.summarize(verbose)
    def merge(self, other):
        # Fold another Tester's accumulated results into this one.
        self.testrunner.merge(other.testrunner)
######################################################################
## 8. Unittest Support
######################################################################
# Reporting flags merged into a DocTestCase's option flags when the case
# itself specifies no reporting flags; changed via set_unittest_reportflags().
_unittest_reportflags = 0
def set_unittest_reportflags(flags):
    """Sets the unittest option flags.
    The old flag is returned so that a runner could restore the old
    value if it wished to:
    >>> old = _unittest_reportflags
    >>> set_unittest_reportflags(REPORT_NDIFF |
    ...                          REPORT_ONLY_FIRST_FAILURE) == old
    True
    >>> import doctest
    >>> doctest._unittest_reportflags == (REPORT_NDIFF |
    ...                                   REPORT_ONLY_FIRST_FAILURE)
    True
    Only reporting flags can be set:
    >>> set_unittest_reportflags(ELLIPSIS)
    Traceback (most recent call last):
    ...
    ValueError: ('Only reporting flags allowed', 8)
    >>> set_unittest_reportflags(old) == (REPORT_NDIFF |
    ...                                   REPORT_ONLY_FIRST_FAILURE)
    True
    """
    global _unittest_reportflags
    # Reject any bit outside the reporting mask so comparison-changing
    # flags (e.g. ELLIPSIS) cannot be smuggled in here.
    if (flags & REPORTING_FLAGS) != flags:
        raise ValueError("Only reporting flags allowed", flags)
    old = _unittest_reportflags
    _unittest_reportflags = flags
    return old
class DocTestCase(unittest.TestCase):
    """A unittest.TestCase that runs a single DocTest object."""
    def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
                 checker=None, runner=DocTestRunner):
        unittest.TestCase.__init__(self)
        # Attributes use a _dt_ prefix to avoid colliding with
        # unittest.TestCase's own namespace.
        self._dt_optionflags = optionflags
        self._dt_checker = checker
        self._dt_test = test
        self._dt_setUp = setUp
        self._dt_tearDown = tearDown
        self._dt_runner = runner
    def setUp(self):
        # Invoke the user-supplied setUp hook (if any), passing it the
        # DocTest so it can prepare test.globs.
        test = self._dt_test
        if self._dt_setUp is not None:
            self._dt_setUp(test)
    def tearDown(self):
        test = self._dt_test
        if self._dt_tearDown is not None:
            self._dt_tearDown(test)
        # Drop the test's globals so fixtures are released promptly.
        test.globs.clear()
    def runTest(self):
        # Run the doctest, raising failureException on any failure.
        test = self._dt_test
        old = sys.stdout
        new = StringIO()
        optionflags = self._dt_optionflags
        if not (optionflags & REPORTING_FLAGS):
            # The option flags don't include any reporting flags,
            # so add the default reporting flags
            optionflags |= _unittest_reportflags
        runner = self._dt_runner(optionflags=optionflags,
                                 checker=self._dt_checker, verbose=False)
        try:
            runner.DIVIDER = "-"*70
            # Failure output is captured via out=new.write; sys.stdout is
            # restored afterwards in case an example rebound it.
            failures, tries = runner.run(
                test, out=new.write, clear_globs=False)
        finally:
            sys.stdout = old
        if failures:
            raise self.failureException(self.format_failure(new.getvalue()))
    def format_failure(self, err):
        # Render a failure report in the style of a traceback header.
        test = self._dt_test
        if test.lineno is None:
            lineno = 'unknown line number'
        else:
            lineno = '%s' % test.lineno
        # Last component of the dotted test name.
        lname = '.'.join(test.name.split('.')[-1:])
        return ('Failed doctest test for %s\n'
                ' File "%s", line %s, in %s\n\n%s'
                % (test.name, test.filename, lineno, lname, err)
                )
    def debug(self):
        r"""Run the test case without results and without catching exceptions
        The unit test framework includes a debug method on test cases
        and test suites to support post-mortem debugging. The test code
        is run in such a way that errors are not caught. This way a
        caller can catch the errors and initiate post-mortem debugging.
        The DocTestCase provides a debug method that raises
        UnexpectedException errors if there is an unexepcted
        exception:
        >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
        ...                {}, 'foo', 'foo.py', 0)
        >>> case = DocTestCase(test)
        >>> try:
        ...     case.debug()
        ... except UnexpectedException, failure:
        ...     pass
        The UnexpectedException contains the test, the example, and
        the original exception:
        >>> failure.test is test
        True
        >>> failure.example.want
        '42\n'
        >>> exc_info = failure.exc_info
        >>> raise exc_info[0], exc_info[1], exc_info[2]
        Traceback (most recent call last):
        ...
        KeyError
        If the output doesn't match, then a DocTestFailure is raised:
        >>> test = DocTestParser().get_doctest('''
        ...      >>> x = 1
        ...      >>> x
        ...      2
        ...      ''', {}, 'foo', 'foo.py', 0)
        >>> case = DocTestCase(test)
        >>> try:
        ...    case.debug()
        ... except DocTestFailure, failure:
        ...    pass
        DocTestFailure objects provide access to the test:
        >>> failure.test is test
        True
        As well as to the example:
        >>> failure.example.want
        '2\n'
        and the actual output:
        >>> failure.got
        '1\n'
        """
        self.setUp()
        # DebugRunner propagates failures/exceptions instead of recording
        # them, so the caller (or pdb) sees the original error.
        runner = DebugRunner(optionflags=self._dt_optionflags,
                             checker=self._dt_checker, verbose=False)
        runner.run(self._dt_test)
        self.tearDown()
    def id(self):
        # Use the dotted doctest name as the unittest id.
        return self._dt_test.name
    def __repr__(self):
        name = self._dt_test.name.split('.')
        return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
    __str__ = __repr__
    def shortDescription(self):
        return "Doctest: " + self._dt_test.name
def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
                 test_class=DocTestCase, **options):
    """Build a unittest.TestSuite from the doctests in `module`.

    `module` may be a module object, the (dotted) name of a module, or
    None, in which case the calling module is used.  Every docstring in
    the module that contains doctest examples becomes one test case of
    class `test_class`; if any example in a docstring fails, that case
    fails with a message naming the file and (approximate) line.

    `globs`/`extraglobs` seed the globals used to execute the examples,
    `test_finder` may supply a pre-configured DocTestFinder, and the
    remaining keyword `options` (setUp, tearDown, optionflags, ...) are
    forwarded to each test case.

    Raises ValueError if the module contains no doctests at all, since
    an empty suite usually indicates a collection bug.
    """
    if test_finder is None:
        test_finder = DocTestFinder()
    module = _normalize_module(module)
    tests = test_finder.find(module, globs=globs, extraglobs=extraglobs)
    if globs is None:
        globs = module.__dict__
    if not tests:
        # Why do we want to do this? Because it reveals a bug that might
        # otherwise be hidden.
        raise ValueError(module, "has no tests")
    tests.sort()
    suite = unittest.TestSuite()
    for test in tests:
        if not test.examples:
            # Docstring with no actual examples -- nothing to run.
            continue
        if not test.filename:
            # Point the report at the .py source, not the compiled file.
            source_file = module.__file__
            if source_file[-4:] in (".pyc", ".pyo"):
                source_file = source_file[:-1]
            test.filename = source_file
        suite.addTest(test_class(test, **options))
    return suite
class DocFileCase(DocTestCase):
    """DocTestCase variant for tests loaded from standalone text files."""
    def id(self):
        # Flatten the dotted test name into a single identifier.
        return self._dt_test.name.replace('.', '_')
    def __repr__(self):
        return self._dt_test.filename
    __str__ = __repr__
    def format_failure(self, err):
        # File-based tests have no meaningful line offset, so report line 0.
        test = self._dt_test
        return ('Failed doctest test for %s\n File "%s", line 0\n\n%s'
                % (test.name, test.filename, err)
                )
def DocFileTest(path, module_relative=True, package=None,
                globs=None, parser=DocTestParser(), **options):
    """Create a DocFileCase for the doctest file at `path`.

    `path` is interpreted as an os-independent, module-relative path
    unless `module_relative` is False (then it is an os-specific path).
    `package` optionally names the package that module-relative paths
    are resolved against; it may only be given with module-relative
    paths.  `globs` supplies the initial globals for the test (a fresh
    dict by default), `parser` extracts the examples from the file, and
    any extra keyword `options` are forwarded to DocFileCase.
    """
    if globs is None:
        globs = {}
    if package and not module_relative:
        raise ValueError("Package may only be specified for module-"
                         "relative paths.")
    # Relativize the path.
    if module_relative:
        package = _normalize_module(package)
        path = _module_relative_path(package, path)
    # Find the file and read it.  Close the handle explicitly rather
    # than relying on garbage collection, so the descriptor isn't leaked.
    name = os.path.basename(path)
    f = open(path)
    try:
        doc = f.read()
    finally:
        f.close()
    # Convert it to a test, and wrap it in a DocFileCase.
    test = parser.get_doctest(doc, globs, name, path, 0)
    return DocFileCase(test, **options)
def DocFileSuite(*paths, **kw):
    """Build a unittest suite running the doctest file at each of `paths`.

    Supported keyword arguments:
      module_relative -- when True (the default), each path is an
        os-independent, module-relative path ("/"-separated, never
        absolute), resolved against the calling module's directory or
        against `package` when given.  When False, each path is an
        os-specific path, absolute or relative to the current working
        directory.
      package -- a Python package (or its name) whose directory anchors
        module-relative paths; it is an error to pass `package` when
        `module_relative` is False.
      setUp, tearDown -- callables invoked before/after each file's
        tests; each receives the DocTest object and may use its `globs`.
      globs -- dictionary of initial global variables for the tests.
      optionflags -- doctest option flags, or'ed together.
      parser -- DocTestParser (or subclass) used to extract the tests.
    """
    suite = unittest.TestSuite()
    # Resolve the package here so that _normalize_module inspects *this*
    # function's caller; deferring to DocFileTest would make it guess
    # the wrong stack frame.
    if kw.get('module_relative', True):
        kw['package'] = _normalize_module(kw.get('package'))
    for doc_path in paths:
        suite.addTest(DocFileTest(doc_path, **kw))
    return suite
######################################################################
## 9. Debugging Support
######################################################################
def script_from_examples(s):
    r"""Extract script from text with examples.
    Converts text with examples to a Python script. Example input is
    converted to regular code. Example output and all other words
    are converted to comments:
    >>> text = '''
    ... Here are examples of simple math.
    ...
    ... Python has super accurate integer addition
    ...
    ... >>> 2 + 2
    ... 5
    ...
    ... And very friendly error messages:
    ...
    ... >>> 1/0
    ... To Infinity
    ... And
    ... Beyond
    ...
    ... You can use logic if you want:
    ...
    ... >>> if 0:
    ... ...    blah
    ... ...    blah
    ... ...
    ...
    ... Ho hum
    ... '''
    >>> print script_from_examples(text)
    # Here are examples of simple math.
    #
    # Python has super accurate integer addition
    #
    2 + 2
    # Expected:
    ## 5
    #
    # And very friendly error messages:
    #
    1/0
    # Expected:
    ## To Infinity
    ## And
    ## Beyond
    #
    # You can use logic if you want:
    #
    if 0:
       blah
       blah
    #
    # Ho hum
    """
    output = []
    # parse() yields a mix of Example objects and plain-text strings.
    for piece in DocTestParser().parse(s):
        if isinstance(piece, Example):
            # Add the example's source code (strip trailing NL)
            output.append(piece.source[:-1])
            # Add the expected output:
            want = piece.want
            if want:
                output.append('# Expected:')
                # '## ' marks expected-output lines, distinguishing them
                # from ordinary commented prose ('# ').
                output += ['## '+l for l in want.split('\n')[:-1]]
        else:
            # Add non-example text.
            output += [_comment_line(l)
                       for l in piece.split('\n')[:-1]]
    # Trim junk on both ends.
    while output and output[-1] == '#':
        output.pop()
    while output and output[0] == '#':
        output.pop(0)
    # Combine the output, and return it.
    return '\n'.join(output)
def testsource(module, name):
    """Return the doctests of one object as a runnable script.

    `module` is the module (or its dotted name) containing the test to
    be debugged; `name` is the dotted name, within that module, of the
    object whose docstring examples should be extracted.
    """
    module = _normalize_module(module)
    all_tests = DocTestFinder().find(module)
    matches = [t for t in all_tests if t.name == name]
    if not matches:
        raise ValueError(name, "not found in tests")
    # Convert the first (only) matching docstring into script form.
    return script_from_examples(matches[0].docstring)
def debug_src(src, pm=False, globs=None):
    """Debug a single doctest docstring, given as the string `src`.

    `pm` selects post-mortem debugging; `globs` gives the globals the
    extracted script runs in (see debug_script).
    """
    testsrc = script_from_examples(src)
    debug_script(testsrc, pm, globs)
def debug_script(src, pm=False, globs=None):
    "Debug a test script. `src` is the script, as a string."
    import pdb
    # Note that tempfile.NamedTemporaryFile() cannot be used. As the
    # docs say, a file so created cannot be opened by name a second time
    # on modern Windows boxes, and execfile() needs to open it.
    srcfilename = tempfile.mktemp(".py", "doctestdebug")
    f = open(srcfilename, 'w')
    f.write(src)
    f.close()
    try:
        if globs:
            # Copy so the debugged script cannot mutate the caller's dict.
            globs = globs.copy()
        else:
            globs = {}
        if pm:
            # Post-mortem: run to the failure, then enter pdb at that frame.
            try:
                execfile(srcfilename, globs, globs)
            except:
                print sys.exc_info()[1]
                pdb.post_mortem(sys.exc_info()[2])
        else:
            # Note that %r is vital here. '%s' instead can, e.g., cause
            # backslashes to get treated as metacharacters on Windows.
            pdb.run("execfile(%r)" % srcfilename, globs, globs)
    finally:
        # Always delete the temporary script file.
        os.remove(srcfilename)
def debug(module, name, pm=False):
    """Debug the doctests of one object under pdb.

    `module` is the module (or its dotted name) containing the test;
    `name` is the dotted name of the object whose docstring examples
    should be debugged.  `pm` selects post-mortem debugging.
    """
    module = _normalize_module(module)
    # Run the extracted script with the module's namespace as globals.
    debug_script(testsource(module, name), pm, module.__dict__)
######################################################################
## 10. Example Usage
######################################################################
class _TestClass:
    """
    A pointless class, for sanity-checking of docstring testing.
    Methods:
        square()
        get()
    >>> _TestClass(13).get() + _TestClass(-12).get()
    1
    >>> hex(_TestClass(13).square().get())
    '0xa9'
    """
    # NOTE: the docstrings in this class are themselves doctests that
    # _test() executes -- do not edit their examples casually.
    def __init__(self, val):
        """val -> _TestClass object with associated value val.
        >>> t = _TestClass(123)
        >>> print t.get()
        123
        """
        self.val = val
    def square(self):
        """square() -> square TestClass's associated value
        >>> _TestClass(13).square().get()
        169
        """
        self.val = self.val ** 2
        # Returning self allows chaining, as in the class docstring.
        return self
    def get(self):
        """get() -> return TestClass's associated value.
        >>> x = _TestClass(-42)
        >>> print x.get()
        -42
        """
        return self.val
__test__ = {"_TestClass": _TestClass,
"string": r"""
Example of a string object, searched as-is.
>>> x = 1; y = 2
>>> x + y, x * y
(3, 2)
""",
"bool-int equivalence": r"""
In 2.2, boolean expressions displayed
0 or 1. By default, we still accept
them. This can be disabled by passing
DONT_ACCEPT_TRUE_FOR_1 to the new
optionflags argument.
>>> 4 == 4
1
>>> 4 == 4
True
>>> 4 > 4
0
>>> 4 > 4
False
""",
"blank lines": r"""
Blank lines can be marked with <BLANKLINE>:
>>> print 'foo\n\nbar\n'
foo
<BLANKLINE>
bar
<BLANKLINE>
""",
"ellipsis": r"""
If the ellipsis flag is used, then '...' can be used to
elide substrings in the desired output:
>>> print range(1000) #doctest: +ELLIPSIS
[0, 1, 2, ..., 999]
""",
"whitespace normalization": r"""
If the whitespace normalization flag is used, then
differences in whitespace are ignored.
>>> print range(30) #doctest: +NORMALIZE_WHITESPACE
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
27, 28, 29]
""",
}
def _test():
    """Self-test: run this module's own doctests under unittest."""
    unittest.TextTestRunner().run(DocTestSuite())

if __name__ == "__main__":
    _test()
| bsd-3-clause |
kiddhustle/wiardfmblog | django/template/__init__.py | 561 | 3247 | """
This is the Django template system.
How it works:
The Lexer.tokenize() function converts a template string (i.e., a string containing
markup with custom template tags) to tokens, which can be either plain text
(TOKEN_TEXT), variables (TOKEN_VAR) or block statements (TOKEN_BLOCK).
The Parser() class takes a list of tokens in its constructor, and its parse()
method returns a compiled template -- which is, under the hood, a list of
Node objects.
Each Node is responsible for creating some sort of output -- e.g. simple text
(TextNode), variable values in a given context (VariableNode), results of basic
logic (IfNode), results of looping (ForNode), or anything else. The core Node
types are TextNode, VariableNode, IfNode and ForNode, but plugin modules can
define their own custom node types.
Each Node has a render() method, which takes a Context and returns a string of
the rendered node. For example, the render() method of a Variable Node returns
the variable's value as a string. The render() method of an IfNode returns the
rendered output of whatever was inside the loop, recursively.
The Template class is a convenient wrapper that takes care of template
compilation and rendering.
Usage:
The only thing you should ever use directly in this file is the Template class.
Create a compiled template object with a template_string, then call render()
with a context. In the compilation stage, the TemplateSyntaxError exception
will be raised if the template doesn't have proper syntax.
Sample code:
>>> from django import template
>>> s = u'<html>{% if test %}<h1>{{ varvalue }}</h1>{% endif %}</html>'
>>> t = template.Template(s)
(t is now a compiled template, and its render() method can be called multiple
times with multiple contexts)
>>> c = template.Context({'test':True, 'varvalue': 'Hello'})
>>> t.render(c)
u'<html><h1>Hello</h1></html>'
>>> c = template.Context({'test':False, 'varvalue': 'Hello'})
>>> t.render(c)
u'<html></html>'
"""
# Template lexing symbols
from django.template.base import (ALLOWED_VARIABLE_CHARS, BLOCK_TAG_END,
BLOCK_TAG_START, COMMENT_TAG_END, COMMENT_TAG_START,
FILTER_ARGUMENT_SEPARATOR, FILTER_SEPARATOR, SINGLE_BRACE_END,
SINGLE_BRACE_START, TOKEN_BLOCK, TOKEN_COMMENT, TOKEN_TEXT, TOKEN_VAR,
TRANSLATOR_COMMENT_MARK, UNKNOWN_SOURCE, VARIABLE_ATTRIBUTE_SEPARATOR,
VARIABLE_TAG_END, VARIABLE_TAG_START, filter_re, tag_re)
# Exceptions
from django.template.base import (ContextPopException, InvalidTemplateLibrary,
TemplateDoesNotExist, TemplateEncodingError, TemplateSyntaxError,
VariableDoesNotExist)
# Template parts
from django.template.base import (Context, FilterExpression, Lexer, Node,
NodeList, Parser, RequestContext, Origin, StringOrigin, Template,
TextNode, Token, TokenParser, Variable, VariableNode, constant_string,
filter_raw_string)
# Compiling templates
from django.template.base import (compile_string, resolve_variable,
unescape_string_literal, generic_tag_compiler)
# Library management
from django.template.base import (Library, add_to_builtins, builtins,
get_library, get_templatetags_modules, get_text_list, import_library,
libraries)
__all__ = ('Template', 'Context', 'RequestContext', 'compile_string')
| bsd-3-clause |
MonamAgarwal/final | GTG/gtk/plugins.py | 1 | 12625 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
""" Dialog for loading plugins """
from gi.repository import Gtk, Pango
from GTG import _
from GTG import info
from GTG.core.plugins import GnomeConfig
from GTG.core.plugins.engine import PluginEngine
from GTG.gtk import ViewConfig
# columns in PluginsDialog.plugin_store
PLUGINS_COL_ID = 0
PLUGINS_COL_ENABLED = 1
PLUGINS_COL_NAME = 2
PLUGINS_COL_SHORT_DESC = 3
PLUGINS_COL_ACTIVATABLE = 4
def plugin_icon(column, cell, store, iterator, data):
    """Cell-data callback drawing the plugin icon.

    Shows the shared 'gtg-plugin' icon, greyed out when the plugin
    cannot be activated.  See PluginsDialog._init_plugin_tree().
    """
    activatable = store.get_value(iterator, PLUGINS_COL_ACTIVATABLE)
    cell.set_property('icon-name', 'gtg-plugin')
    cell.set_property('sensitive', activatable)
def plugin_error_short_text(plugin):
    """Compact markup describing a plugin's missing dependencies.

    Returns an empty string when the plugin loaded cleanly (or when the
    error carries no dependency details); otherwise a short message
    listing the missing Python modules and/or D-Bus interfaces, for
    embedding in the plugin-list row markup.
    """
    if not plugin.error:
        return ""
    missing_modules = plugin.missing_modules
    missing_dbus = plugin.missing_dbus
    # Render each dependency list as small bold markup.
    if missing_modules:
        missing_modules = "<small><b>%s</b></small>" % ', '.join(missing_modules)
    if missing_dbus:
        ifaces = ["%s:%s" % (a, b) for (a, b) in missing_dbus]
        missing_dbus = "<small><b>%s</b></small>" % ', '.join(ifaces)
    if missing_modules and missing_dbus:
        return '\n'.join((GnomeConfig.bmiss2, missing_modules, missing_dbus))
    if missing_modules:
        return '\n'.join((GnomeConfig.miss2, missing_modules))
    if missing_dbus:
        return '\n'.join((GnomeConfig.dmiss2, missing_dbus))
    return ""
def plugin_error_text(plugin):
    """Full markup explaining whether/why a plugin cannot be loaded.

    Returns GnomeConfig.CANLOAD when the plugin has no error; otherwise
    a "cannot load" header followed by the missing Python modules
    and/or D-Bus interfaces (or an "unknown reason" note when neither
    list is populated).
    """
    if not plugin.error:
        return GnomeConfig.CANLOAD
    # describe missing dependencies
    text = "<b>%s</b>. \n" % GnomeConfig.CANNOTLOAD
    missing_modules = plugin.missing_modules
    missing_dbus = plugin.missing_dbus
    # Render each dependency list as small bold markup.
    if missing_modules:
        missing_modules = "<small><b>%s</b></small>" % ', '.join(missing_modules)
    if missing_dbus:
        ifaces = ["%s:%s" % (a, b) for (a, b) in missing_dbus]
        missing_dbus = "<small><b>%s</b></small>" % ', '.join(ifaces)
    if missing_modules and missing_dbus:
        text += '\n'.join((GnomeConfig.MODULANDDBUS, missing_modules,
                           missing_dbus))
    elif missing_modules:
        text += '\n'.join((GnomeConfig.MODULEMISSING, missing_modules))
    elif missing_dbus:
        text += '\n'.join((GnomeConfig.DBUSMISSING, missing_dbus))
    else:
        text += GnomeConfig.UNKNOWN
    return text
def plugin_markup(column, cell, store, iterator, self):
    """Cell-data callback rendering the plugin name/description markup.

    Shows the bold name and short description, plus an italic note of
    missing dependencies when the plugin is broken; greys the row out
    when the plugin cannot be activated.  See
    PluginsDialog._init_plugin_tree().
    """
    name = store.get_value(iterator, PLUGINS_COL_NAME)
    short_desc = store.get_value(iterator, PLUGINS_COL_SHORT_DESC)
    plugin = self.pengine.get_plugin(store.get_value(iterator, PLUGINS_COL_ID))
    error_text = plugin_error_short_text(plugin)
    if error_text:
        markup = "<b>%s</b>\n%s\n<i>%s</i>" % (name, short_desc, error_text)
    else:
        markup = "<b>%s</b>\n%s" % (name, short_desc)
    cell.set_property('markup', markup)
    cell.set_property('sensitive',
                      store.get_value(iterator, PLUGINS_COL_ACTIVATABLE))
class PluginsDialog:
""" Dialog for Plugins configuration """
    def __init__(self, config_obj):
        """Build the dialog from its GtkBuilder UI file.

        config_obj is the application configuration object; its
        "plugins" sub-configuration stores the enabled/disabled
        plugin name lists.
        """
        self.config_obj = config_obj
        self.config = self.config_obj.get_subconfig("plugins")
        builder = Gtk.Builder()
        builder.add_from_file(ViewConfig.PLUGINS_UI_FILE)
        # Widgets looked up from the UI definition.
        self.dialog = builder.get_object("PluginsDialog")
        self.dialog.set_title(_("Plugins - %s" % info.NAME))
        self.plugin_tree = builder.get_object("PluginTree")
        self.plugin_configure = builder.get_object("plugin_configure")
        self.plugin_about = builder.get_object("PluginAboutDialog")
        self.plugin_depends = builder.get_object('PluginDepends')
        self.pengine = PluginEngine()
        # Seed the config's enabled/disabled lists from the engine's
        # current plugin state (only when any plugins were discovered).
        if self.pengine.get_plugins():
            self.config.set("disabled",
                [p.module_name for p in self.pengine.get_plugins("disabled")])
            self.config.set("enabled",
                [p.module_name for p in self.pengine.get_plugins("enabled")])
        # see constants PLUGINS_COL_* for column meanings
        self.plugin_store = Gtk.ListStore(str, bool, str, str, bool)
        builder.connect_signals({
            'on_plugins_help':
            self.on_help,
            'on_plugins_close':
            self.on_close,
            'on_PluginsDialog_delete_event':
            self.on_close,
            'on_PluginTree_cursor_changed':
            self.on_plugin_select,
            'on_plugin_about':
            self.on_plugin_about,
            'on_plugin_configure':
            self.on_plugin_configure,
            'on_PluginAboutDialog_close':
            self.on_plugin_about_close,
        })
    def _init_plugin_tree(self):
        """ Initialize the PluginTree Gtk.TreeView.
        The format is modelled after the one used in gedit; see
        http://git.gnome.org/browse/gedit/tree/gedit/gedit-plugin-manager.c
        """
        # force creation of the Gtk.ListStore so we can reference it
        self._refresh_plugin_store()
        # renderer for the toggle column
        renderer = Gtk.CellRendererToggle()
        renderer.set_property('xpad', 6)
        renderer.connect('toggled', self.on_plugin_toggle)
        # toggle column: checkbox bound to the enabled flag, greyed out
        # for plugins that cannot be activated
        column = Gtk.TreeViewColumn(None, renderer, active=PLUGINS_COL_ENABLED,
                                    activatable=PLUGINS_COL_ACTIVATABLE,
                                    sensitive=PLUGINS_COL_ACTIVATABLE)
        self.plugin_tree.append_column(column)
        # plugin name column
        column = Gtk.TreeViewColumn()
        column.set_spacing(6)
        # icon renderer for the plugin name column
        icon_renderer = Gtk.CellRendererPixbuf()
        icon_renderer.set_property('stock-size', Gtk.IconSize.SMALL_TOOLBAR)
        icon_renderer.set_property('xpad', 3)
        column.pack_start(icon_renderer, False)
        column.set_cell_data_func(icon_renderer, plugin_icon)
        # text renderer for the plugin name column
        name_renderer = Gtk.CellRendererText()
        name_renderer.set_property('ellipsize', Pango.EllipsizeMode.END)
        column.pack_start(name_renderer, True)
        column.set_cell_data_func(name_renderer, plugin_markup, self)
        self.plugin_tree.append_column(column)
        # finish setup
        self.plugin_tree.set_model(self.plugin_store)
        # typeahead search matches against column 2 (the plugin name)
        self.plugin_tree.set_search_column(2)
def _refresh_plugin_store(self):
""" Refresh status of plugins and put it in a Gtk.ListStore """
self.plugin_store.clear()
self.pengine.recheck_plugin_errors(True)
for name, plugin in self.pengine.plugins.items():
# activateable if there is no error
self.plugin_store.append((name, plugin.enabled, plugin.full_name,
plugin.short_description,
not plugin.error))
def activate(self):
""" Refresh status of plugins and show the dialog """
if len(self.plugin_tree.get_columns()) == 0:
self._init_plugin_tree()
else:
self._refresh_plugin_store()
self.dialog.show_all()
def on_close(self, widget, data=None):
""" Close the plugins dialog."""
self.dialog.hide()
return True
    @classmethod
    def on_help(cls, widget):
        """ In future, this will open help for plugins """
        # Placeholder; True marks the event as handled.
        return True
def on_plugin_toggle(self, widget, path):
"""Toggle a plugin enabled/disabled."""
iterator = self.plugin_store.get_iter(path)
plugin_id = self.plugin_store.get_value(iterator, PLUGINS_COL_ID)
plugin = self.pengine.get_plugin(plugin_id)
plugin.enabled = not self.plugin_store.get_value(iterator,
PLUGINS_COL_ENABLED)
plugins_enabled = self.config.get("enabled")
plugins_disabled = self.config.get("disabled")
if plugin.enabled:
self.pengine.activate_plugins([plugin])
plugins_enabled.append(plugin.module_name)
if plugin.module_name in plugins_disabled:
plugins_disabled.remove(plugin.module_name)
else:
self.pengine.deactivate_plugins([plugin])
plugins_disabled.append(plugin.module_name)
if plugin.module_name in plugins_enabled:
plugins_enabled.remove(plugin.module_name)
self.config.set("enabled", plugins_enabled)
self.config.set("disabled", plugins_disabled)
self.plugin_store.set_value(iterator, PLUGINS_COL_ENABLED,
plugin.enabled)
self._update_plugin_configure(plugin)
self.config_obj.save()
    def on_plugin_select(self, plugin_tree):
        """ Callback when user select/unselect a plugin
        Update the button "Configure plugin" sensitivity """
        model, iterator = plugin_tree.get_selection().get_selected()
        # iterator is None when the selection was cleared; in that case
        # the button sensitivity is left untouched.
        if iterator is not None:
            plugin_id = model.get_value(iterator, PLUGINS_COL_ID)
            plugin = self.pengine.get_plugin(plugin_id)
            self._update_plugin_configure(plugin)
def _update_plugin_configure(self, plugin):
""" Enable the button "Configure Plugin" appropriate. """
configurable = plugin.active and plugin.is_configurable()
self.plugin_configure.set_property('sensitive', configurable)
def on_plugin_configure(self, widget):
""" Show the dialog for plugin configuration """
_, iterator = self.plugin_tree.get_selection().get_selected()
if iterator is None:
return
plugin_id = self.plugin_store.get_value(iterator, PLUGINS_COL_ID)
plugin = self.pengine.get_plugin(plugin_id)
plugin.instance.configure_dialog(self.dialog)
    def on_plugin_about(self, widget):
        """ Display information about a plugin. """
        _, iterator = self.plugin_tree.get_selection().get_selected()
        if iterator is None:
            return
        plugin_id = self.plugin_store.get_value(iterator, PLUGINS_COL_ID)
        plugin = self.pengine.get_plugin(plugin_id)
        #FIXME About plugin dialog looks much more different than
        #it is in the current trunk
        #FIXME repair it!
        #FIXME Author is not usually set and is preserved from
        #previous plugin... :/
        self.plugin_about.set_program_name(plugin.full_name)
        self.plugin_about.set_version(plugin.version)
        authors = plugin.authors
        # The about dialog expects a list of author names; a plugin may
        # declare them as one comma-separated string instead.
        if isinstance(authors, str):
            authors = "\n".join(author.strip()
                                for author in authors.split(','))
            authors = [authors, ]
        self.plugin_about.set_authors(authors)
        # Plugin descriptions store literal backslash-n sequences; turn
        # them into real newlines for display.
        description = plugin.description.replace(r'\n', "\n")
        self.plugin_about.set_comments(description)
        self.plugin_depends.set_label(plugin_error_text(plugin))
        self.plugin_about.show_all()
    def on_plugin_about_close(self, widget, data=None):
        """ Close the PluginAboutDialog. """
        self.plugin_about.hide()
        # True marks the event handled so the dialog is only hidden,
        # not destroyed, and can be reused.
        return True
| gpl-3.0 |
gpoesia/servo | tests/wpt/css-tests/tools/pytest/_pytest/assertion/reinterpret.py | 176 | 15212 | """
Find intermediate evaluation results in assert statements through builtin AST.
"""
import ast
import sys
import _pytest._code
import py
from _pytest.assertion import util
u = py.builtin._totext
class AssertionError(util.BuiltinAssertionError):
    """Replacement AssertionError that reconstructs a helpful message.

    When raised without arguments it inspects the caller's frame,
    re-parses the failing ``assert`` statement and re-evaluates it to
    build an explanation of the intermediate values.
    """
    def __init__(self, *args):
        util.BuiltinAssertionError.__init__(self, *args)
        if args:
            # on Python2.6 we get len(args)==2 for: assert 0, (x,y)
            # on Python2.7 and above we always get len(args) == 1
            # with args[0] being the (x,y) tuple.
            if len(args) > 1:
                toprint = args
            else:
                toprint = args[0]
            try:
                self.msg = u(toprint)
            except Exception:
                # The message object has a broken __repr__/__unicode__;
                # fall back to a placeholder instead of failing again.
                self.msg = u(
                    "<[broken __repr__] %s at %0xd>"
                    % (toprint.__class__, id(toprint)))
        else:
            # No explicit message: reconstruct one from the source of
            # the assert statement in the calling frame.
            f = _pytest._code.Frame(sys._getframe(1))
            try:
                source = f.code.fullsource
                if source is not None:
                    try:
                        source = source.getstatement(f.lineno, assertion=True)
                    except IndexError:
                        source = None
                    else:
                        source = str(source.deindent()).strip()
            except py.error.ENOENT:
                source = None
                # this can also occur during reinterpretation, when the
                # co_filename is set to "<run>".
            if source:
                self.msg = reinterpret(source, f, should_fail=True)
            else:
                self.msg = "<could not determine information>"
        if not self.args:
            self.args = (self.msg,)
# On Python 3 the builtin AssertionError lives in 'builtins'; mirror that
# so tracebacks show the familiar module name for our replacement class.
if sys.version_info > (3, 0):
    AssertionError.__module__ = "builtins"
if sys.platform.startswith("java"):
    # See http://bugs.jython.org/issue1497
    # Jython's ast nodes don't share the expr/stmt base classes, so the
    # node kinds are enumerated by name instead.
    _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
              "ListComp", "GeneratorExp", "Yield", "Compare", "Call",
              "Repr", "Num", "Str", "Attribute", "Subscript", "Name",
              "List", "Tuple")
    _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
              "AugAssign", "Print", "For", "While", "If", "With", "Raise",
              "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
              "Exec", "Global", "Expr", "Pass", "Break", "Continue")
    _expr_nodes = set(getattr(ast, name) for name in _exprs)
    _stmt_nodes = set(getattr(ast, name) for name in _stmts)
    def _is_ast_expr(node):
        return node.__class__ in _expr_nodes
    def _is_ast_stmt(node):
        return node.__class__ in _stmt_nodes
else:
    # CPython: the ast module provides proper base classes.
    def _is_ast_expr(node):
        return isinstance(node, ast.expr)
    def _is_ast_stmt(node):
        return isinstance(node, ast.stmt)
try:
    _Starred = ast.Starred
except AttributeError:
    # Python 2. Define a dummy class so isinstance() will always be False.
    class _Starred(object): pass
class Failure(Exception):
    """Raised when re-interpreting an assertion's AST fails.

    Captures the exception currently being handled (``sys.exc_info()``)
    together with a human-readable explanation of the sub-expression
    that was being evaluated.
    """
    def __init__(self, explanation=""):
        self.explanation = explanation
        # Snapshot the in-flight exception, if any; the reporter later
        # folds it into the failure text.
        self.cause = sys.exc_info()
def reinterpret(source, frame, should_fail=False):
    """Re-evaluate *source* inside *frame* and explain any failure.

    Returns an explanation string when the interpretation fails, a
    diagnostic when *should_fail* is set but the assertion unexpectedly
    passed on re-run, and None otherwise.
    """
    tree = ast.parse(source)
    interpreter = DebugInterpreter(frame)
    try:
        interpreter.visit(tree)
    except Failure:
        return getfailure(sys.exc_info()[1])
    if not should_fail:
        return None
    return ("(assertion failed, but when it was re-run for "
            "printing intermediate values, it did not fail. Suggestions: "
            "compute assert expression before the assert or use --assert=plain)")
def run(offending_line, frame=None):
    """Reinterpret *offending_line*, defaulting to the caller's frame."""
    # sys._getframe(1) is the frame of whoever called run().
    caller = frame if frame is not None else _pytest._code.Frame(sys._getframe(1))
    return reinterpret(offending_line, caller)
def getfailure(e):
    """Format a Failure *e* into the final assertion-failure text."""
    explanation = util.format_explanation(e.explanation)
    exc_type, exc_value = e.cause[0], e.cause[1]
    if str(exc_value):
        # Append the underlying exception's message to the first line
        # of the explanation.
        first, sep, rest = explanation.partition('\n')
        first += " << %s" % (exc_value,)
        explanation = first + sep + rest
    text = "%s: %s" % (exc_type.__name__, explanation)
    # Drop the redundant "AssertionError: " prefix (16 characters) when
    # the explanation already starts with the assert source.
    if text.startswith('AssertionError: assert '):
        text = text[16:]
    return text
# Map AST operator node classes to their Python source symbols, used to
# rebuild a textual form of the expression being explained.
operator_map = {
    ast.BitOr : "|",
    ast.BitXor : "^",
    ast.BitAnd : "&",
    ast.LShift : "<<",
    ast.RShift : ">>",
    ast.Add : "+",
    ast.Sub : "-",
    ast.Mult : "*",
    ast.Div : "/",
    ast.FloorDiv : "//",
    ast.Mod : "%",
    ast.Eq : "==",
    ast.NotEq : "!=",
    ast.Lt : "<",
    ast.LtE : "<=",
    ast.Gt : ">",
    ast.GtE : ">=",
    ast.Pow : "**",
    ast.Is : "is",
    ast.IsNot : "is not",
    ast.In : "in",
    ast.NotIn : "not in"
}
# printf-style templates for unary operators; %s receives the operand.
unary_map = {
    ast.Not : "not %s",
    ast.Invert : "~%s",
    ast.USub : "-%s",
    ast.UAdd : "+%s"
}
class DebugInterpreter(ast.NodeVisitor):
    """Interpret AST nodes to glean useful debugging information.

    Each ``visit_*`` method returns an ``(explanation, result)`` pair:
    a human-readable explanation of the sub-expression and the value it
    evaluated to inside ``self.frame``.  A Failure is raised when the
    re-evaluation itself raises.
    """
    def __init__(self, frame):
        # Frame in which sub-expressions are re-evaluated.
        self.frame = frame
    def generic_visit(self, node):
        # Fallback when we don't have a special implementation.
        if _is_ast_expr(node):
            mod = ast.Expression(node)
            co = self._compile(mod)
            try:
                result = self.frame.eval(co)
            except Exception:
                raise Failure()
            explanation = self.frame.repr(result)
            return explanation, result
        elif _is_ast_stmt(node):
            mod = ast.Module([node])
            co = self._compile(mod, "exec")
            try:
                self.frame.exec_(co)
            except Exception:
                raise Failure()
            return None, None
        else:
            raise AssertionError("can't handle %s" %(node,))
    def _compile(self, source, mode="eval"):
        return compile(source, "<assertion interpretation>", mode)
    def visit_Expr(self, expr):
        return self.visit(expr.value)
    def visit_Module(self, mod):
        for stmt in mod.body:
            self.visit(stmt)
    def visit_Name(self, name):
        explanation, result = self.generic_visit(name)
        # See if the name is local.
        source = "%r in locals() is not globals()" % (name.id,)
        co = self._compile(source)
        try:
            local = self.frame.eval(co)
        except Exception:
            # have to assume it isn't
            local = None
        if local is None or not self.frame.is_true(local):
            return name.id, result
        return explanation, result
    def visit_Compare(self, comp):
        left = comp.left
        left_explanation, left_result = self.visit(left)
        # Chained comparisons (a < b < c) are evaluated pairwise,
        # short-circuiting on the first pair that is not true.
        for op, next_op in zip(comp.ops, comp.comparators):
            next_explanation, next_result = self.visit(next_op)
            op_symbol = operator_map[op.__class__]
            explanation = "%s %s %s" % (left_explanation, op_symbol,
                                        next_explanation)
            source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
            co = self._compile(source)
            try:
                result = self.frame.eval(co, __exprinfo_left=left_result,
                                         __exprinfo_right=next_result)
            except Exception:
                raise Failure(explanation)
            try:
                if not self.frame.is_true(result):
                    break
            except KeyboardInterrupt:
                raise
            except:
                break
            left_explanation, left_result = next_explanation, next_result
        if util._reprcompare is not None:
            res = util._reprcompare(op_symbol, left_result, next_result)
            if res:
                explanation = res
        return explanation, result
    def visit_BoolOp(self, boolop):
        is_or = isinstance(boolop.op, ast.Or)
        explanations = []
        for operand in boolop.values:
            explanation, result = self.visit(operand)
            explanations.append(explanation)
            # Short-circuit exactly like the real and/or would.
            if result == is_or:
                break
        name = is_or and " or " or " and "
        explanation = "(" + name.join(explanations) + ")"
        return explanation, result
    def visit_UnaryOp(self, unary):
        pattern = unary_map[unary.op.__class__]
        operand_explanation, operand_result = self.visit(unary.operand)
        explanation = pattern % (operand_explanation,)
        co = self._compile(pattern % ("__exprinfo_expr",))
        try:
            result = self.frame.eval(co, __exprinfo_expr=operand_result)
        except Exception:
            raise Failure(explanation)
        return explanation, result
    def visit_BinOp(self, binop):
        left_explanation, left_result = self.visit(binop.left)
        right_explanation, right_result = self.visit(binop.right)
        symbol = operator_map[binop.op.__class__]
        explanation = "(%s %s %s)" % (left_explanation, symbol,
                                      right_explanation)
        source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
        co = self._compile(source)
        try:
            result = self.frame.eval(co, __exprinfo_left=left_result,
                                     __exprinfo_right=right_result)
        except Exception:
            raise Failure(explanation)
        return explanation, result
    def visit_Call(self, call):
        # Re-build the call with every argument value bound to a
        # __exprinfo_* name so it can be re-executed in the frame.
        func_explanation, func = self.visit(call.func)
        arg_explanations = []
        ns = {"__exprinfo_func" : func}
        arguments = []
        for arg in call.args:
            arg_explanation, arg_result = self.visit(arg)
            if isinstance(arg, _Starred):
                arg_name = "__exprinfo_star"
                ns[arg_name] = arg_result
                arguments.append("*%s" % (arg_name,))
                arg_explanations.append("*%s" % (arg_explanation,))
            else:
                arg_name = "__exprinfo_%s" % (len(ns),)
                ns[arg_name] = arg_result
                arguments.append(arg_name)
                arg_explanations.append(arg_explanation)
        for keyword in call.keywords:
            arg_explanation, arg_result = self.visit(keyword.value)
            if keyword.arg:
                arg_name = "__exprinfo_%s" % (len(ns),)
                keyword_source = "%s=%%s" % (keyword.arg)
                arguments.append(keyword_source % (arg_name,))
                arg_explanations.append(keyword_source % (arg_explanation,))
            else:
                # keyword.arg is None for a **kwargs argument (py3 AST).
                arg_name = "__exprinfo_kwds"
                arguments.append("**%s" % (arg_name,))
                arg_explanations.append("**%s" % (arg_explanation,))
            ns[arg_name] = arg_result
        if getattr(call, 'starargs', None):
            arg_explanation, arg_result = self.visit(call.starargs)
            arg_name = "__exprinfo_star"
            ns[arg_name] = arg_result
            arguments.append("*%s" % (arg_name,))
            arg_explanations.append("*%s" % (arg_explanation,))
        if getattr(call, 'kwargs', None):
            arg_explanation, arg_result = self.visit(call.kwargs)
            arg_name = "__exprinfo_kwds"
            ns[arg_name] = arg_result
            arguments.append("**%s" % (arg_name,))
            arg_explanations.append("**%s" % (arg_explanation,))
        args_explained = ", ".join(arg_explanations)
        explanation = "%s(%s)" % (func_explanation, args_explained)
        args = ", ".join(arguments)
        source = "__exprinfo_func(%s)" % (args,)
        co = self._compile(source)
        try:
            result = self.frame.eval(co, **ns)
        except Exception:
            raise Failure(explanation)
        pattern = "%s\n{%s = %s\n}"
        rep = self.frame.repr(result)
        explanation = pattern % (rep, rep, explanation)
        return explanation, result
    def _is_builtin_name(self, name):
        pattern = "%r not in globals() and %r not in locals()"
        source = pattern % (name.id, name.id)
        co = self._compile(source)
        try:
            return self.frame.eval(co)
        except Exception:
            return False
    def visit_Attribute(self, attr):
        if not isinstance(attr.ctx, ast.Load):
            return self.generic_visit(attr)
        source_explanation, source_result = self.visit(attr.value)
        explanation = "%s.%s" % (source_explanation, attr.attr)
        source = "__exprinfo_expr.%s" % (attr.attr,)
        co = self._compile(source)
        try:
            try:
                result = self.frame.eval(co, __exprinfo_expr=source_result)
            except AttributeError:
                # Maybe the attribute name needs to be mangled?
                # Private names (__x, not dunders) are stored on the
                # instance as _ClassName__x.
                if not attr.attr.startswith("__") or attr.attr.endswith("__"):
                    raise
                source = "getattr(__exprinfo_expr.__class__, '__name__', '')"
                co = self._compile(source)
                class_name = self.frame.eval(co, __exprinfo_expr=source_result)
                mangled_attr = "_" + class_name + attr.attr
                source = "__exprinfo_expr.%s" % (mangled_attr,)
                co = self._compile(source)
                result = self.frame.eval(co, __exprinfo_expr=source_result)
        except Exception:
            raise Failure(explanation)
        explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
                                              self.frame.repr(result),
                                              source_explanation, attr.attr)
        # Check if the attr is from an instance.
        source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
        source = source % (attr.attr,)
        co = self._compile(source)
        try:
            from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
        except Exception:
            from_instance = None
        if from_instance is None or self.frame.is_true(from_instance):
            rep = self.frame.repr(result)
            pattern = "%s\n{%s = %s\n}"
            explanation = pattern % (rep, rep, explanation)
        return explanation, result
    def visit_Assert(self, assrt):
        test_explanation, test_result = self.visit(assrt.test)
        explanation = "assert %s" % (test_explanation,)
        if not self.frame.is_true(test_result):
            try:
                raise util.BuiltinAssertionError
            except Exception:
                raise Failure(explanation)
        return explanation, test_result
    def visit_Assign(self, assign):
        value_explanation, value_result = self.visit(assign.value)
        explanation = "... = %s" % (value_explanation,)
        # Execute the assignment with the already-computed value bound
        # to a temporary name, so targets are assigned exactly once.
        name = ast.Name("__exprinfo_expr", ast.Load(),
                        lineno=assign.value.lineno,
                        col_offset=assign.value.col_offset)
        new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
                                col_offset=assign.col_offset)
        mod = ast.Module([new_assign])
        co = self._compile(mod, "exec")
        try:
            self.frame.exec_(co, __exprinfo_expr=value_result)
        except Exception:
            raise Failure(explanation)
        return explanation, value_result
| mpl-2.0 |
hnakamur/ansible | lib/ansible/compat/tests/__init__.py | 339 | 1268 | # (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
This module contains things that are only needed for compat in the testsuites,
not in ansible itself. If you are not installing the test suite, you can
safely remove this subdirectory.
'''
#
# Compat for python2.7
#
# One unittest needs to import builtins via __import__() so we need to have
# the string that represents it
# Determine the importable name of the builtins module so tests can pass
# it to __import__() / patching helpers.
try:
    import __builtin__          # Python 2 name
except ImportError:
    # Python 3: the module is called 'builtins'.
    BUILTINS = 'builtins'
else:
    BUILTINS = '__builtin__'
| gpl-3.0 |
svagionitis/youtube-dl | youtube_dl/extractor/normalboots.py | 19 | 2263 | # encoding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
unified_strdate,
)
class NormalbootsIE(InfoExtractor):
    # Matches video pages like http://normalboots.com/video/<slug>/ and
    # captures the slug as the video id.
    _VALID_URL = r'http://(?:www\.)?normalboots\.com/video/(?P<videoid>[0-9a-z-]*)/?$'
    _TEST = {
        'url': 'http://normalboots.com/video/home-alone-games-jontron/',
        'md5': '8bf6de238915dd501105b44ef5f1e0f6',
        'info_dict': {
            'id': 'home-alone-games-jontron',
            'ext': 'mp4',
            'title': 'Home Alone Games - JonTron - NormalBoots',
            'description': 'Jon is late for Christmas. Typical. Thanks to: Paul Ritchey for Co-Writing/Filming: http://www.youtube.com/user/ContinueShow Michael Azzi for Christmas Intro Animation: http://michafrar.tumblr.com/ Jerrod Waters for Christmas Intro Music: http://www.youtube.com/user/xXJerryTerryXx Casey Ormond for ‘Tense Battle Theme’:\xa0http://www.youtube.com/Kiamet/',
            'uploader': 'JonTron',
            'upload_date': '20140125',
        }
    }
    def _real_extract(self, url):
        """Scrape a NormalBoots video page and return the info dict."""
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('videoid')
        webpage = self._download_webpage(url, video_id)
        # Uploader and date are scraped from the page markup; these
        # patterns are tightly coupled to the site's HTML at the time
        # of writing.
        video_uploader = self._html_search_regex(r'Posted\sby\s<a\shref="[A-Za-z0-9/]*">(?P<uploader>[A-Za-z]*)\s</a>',
                                                 webpage, 'uploader')
        raw_upload_date = self._html_search_regex('<span style="text-transform:uppercase; font-size:inherit;">[A-Za-z]+, (?P<date>.*)</span>',
                                                  webpage, 'date')
        video_upload_date = unified_strdate(raw_upload_date)
        # The actual media URL lives inside an embedded player iframe.
        player_url = self._html_search_regex(r'<iframe\swidth="[0-9]+"\sheight="[0-9]+"\ssrc="(?P<url>[\S]+)"', webpage, 'url')
        player_page = self._download_webpage(player_url, video_id)
        video_url = self._html_search_regex(r"file:\s'(?P<file>[^']+\.mp4)'", player_page, 'file')
        return {
            'id': video_id,
            'url': video_url,
            'title': self._og_search_title(webpage),
            'description': self._og_search_description(webpage),
            'thumbnail': self._og_search_thumbnail(webpage),
            'uploader': video_uploader,
            'upload_date': video_upload_date,
        }
| unlicense |
HyperBaton/ansible | lib/ansible/modules/network/check_point/cp_mgmt_wildcard_facts.py | 20 | 3954 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage Check Point Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: cp_mgmt_wildcard_facts
short_description: Get wildcard objects facts on Check Point over Web Services API
description:
- Get wildcard objects facts on Check Point devices.
- All operations are performed over Web Services API.
- This module handles both operations, get a specific object and get several objects,
For getting a specific object use the parameter 'name'.
version_added: "2.9"
author: "Or Soffer (@chkp-orso)"
options:
name:
description:
- Object name.
This parameter is relevant only for getting a specific object.
type: str
details_level:
description:
- The level of detail for some of the fields in the response can vary from showing only the UID value of the object to a fully detailed
representation of the object.
type: str
choices: ['uid', 'standard', 'full']
limit:
description:
- No more than that many results will be returned.
This parameter is relevant only for getting few objects.
type: int
offset:
description:
- Skip that many results before beginning to return them.
This parameter is relevant only for getting few objects.
type: int
order:
description:
- Sorts results by the given field. By default the results are sorted in the ascending order by name.
This parameter is relevant only for getting few objects.
type: list
suboptions:
ASC:
description:
- Sorts results by the given field in ascending order.
type: str
choices: ['name']
DESC:
description:
- Sorts results by the given field in descending order.
type: str
choices: ['name']
extends_documentation_fragment: checkpoint_facts
"""
EXAMPLES = """
- name: show-wildcard
cp_mgmt_wildcard_facts:
name: New Wildcard 1
- name: show-wildcards
cp_mgmt_wildcard_facts:
details_level: standard
limit: 50
offset: 0
"""
RETURN = """
ansible_facts:
description: The checkpoint object facts.
returned: always.
type: dict
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.checkpoint.checkpoint import checkpoint_argument_spec_for_facts, api_call_facts
def main():
    """Entry point: declare the module arguments and run the facts call."""
    # Sorting sub-options are identical for ascending and descending.
    order_options = dict(
        ASC=dict(type='str', choices=['name']),
        DESC=dict(type='str', choices=['name'])
    )
    argument_spec = dict(
        name=dict(type='str'),
        details_level=dict(type='str', choices=['uid', 'standard', 'full']),
        limit=dict(type='int'),
        offset=dict(type='int'),
        order=dict(type='list', options=order_options),
    )
    # Merge in the arguments shared by all Check Point facts modules.
    argument_spec.update(checkpoint_argument_spec_for_facts)
    module = AnsibleModule(argument_spec=argument_spec)
    # "wildcard" fetches one object by name, "wildcards" lists them.
    result = api_call_facts(module, "wildcard", "wildcards")
    module.exit_json(ansible_facts=result)


if __name__ == '__main__':
    main()
feroda/odoo | addons/share/wizard/__init__.py | 448 | 1067 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import share_wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
kawamon/hue | desktop/core/ext-py/Babel-2.5.1/tests/messages/test_extract.py | 3 | 20944 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2011 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
import codecs
import sys
import unittest
from babel.messages import extract
from babel._compat import BytesIO, StringIO
class ExtractPythonTestCase(unittest.TestCase):
def test_nested_calls(self):
buf = BytesIO(b"""\
msg1 = _(i18n_arg.replace(r'\"', '"'))
msg2 = ungettext(i18n_arg.replace(r'\"', '"'), multi_arg.replace(r'\"', '"'), 2)
msg3 = ungettext("Babel", multi_arg.replace(r'\"', '"'), 2)
msg4 = ungettext(i18n_arg.replace(r'\"', '"'), "Babels", 2)
msg5 = ungettext('bunny', 'bunnies', random.randint(1, 2))
msg6 = ungettext(arg0, 'bunnies', random.randint(1, 2))
msg7 = _(hello.there)
msg8 = gettext('Rabbit')
msg9 = dgettext('wiki', model.addPage())
msg10 = dngettext(getDomain(), 'Page', 'Pages', 3)
""")
messages = list(extract.extract_python(buf,
extract.DEFAULT_KEYWORDS.keys(),
[], {}))
self.assertEqual([
(1, '_', None, []),
(2, 'ungettext', (None, None, None), []),
(3, 'ungettext', (u'Babel', None, None), []),
(4, 'ungettext', (None, u'Babels', None), []),
(5, 'ungettext', (u'bunny', u'bunnies', None), []),
(6, 'ungettext', (None, u'bunnies', None), []),
(7, '_', None, []),
(8, 'gettext', u'Rabbit', []),
(9, 'dgettext', (u'wiki', None), []),
(10, 'dngettext', (None, u'Page', u'Pages', None), [])],
messages)
def test_extract_default_encoding_ascii(self):
buf = BytesIO(b'_("a")')
messages = list(extract.extract_python(
buf, list(extract.DEFAULT_KEYWORDS), [], {},
))
# Should work great in both py2 and py3
self.assertEqual([(1, '_', 'a', [])], messages)
def test_extract_default_encoding_utf8(self):
buf = BytesIO(u'_("☃")'.encode('UTF-8'))
messages = list(extract.extract_python(
buf, list(extract.DEFAULT_KEYWORDS), [], {},
))
self.assertEqual([(1, '_', u'☃', [])], messages)
def test_nested_comments(self):
buf = BytesIO(b"""\
msg = ngettext('pylon', # TRANSLATORS: shouldn't be
'pylons', # TRANSLATORS: seeing this
count)
""")
messages = list(extract.extract_python(buf, ('ngettext',),
['TRANSLATORS:'], {}))
self.assertEqual([(1, 'ngettext', (u'pylon', u'pylons', None), [])],
messages)
def test_comments_with_calls_that_spawn_multiple_lines(self):
buf = BytesIO(b"""\
# NOTE: This Comment SHOULD Be Extracted
add_notice(req, ngettext("Catalog deleted.",
"Catalogs deleted.", len(selected)))
# NOTE: This Comment SHOULD Be Extracted
add_notice(req, _("Locale deleted."))
# NOTE: This Comment SHOULD Be Extracted
add_notice(req, ngettext("Foo deleted.", "Foos deleted.", len(selected)))
# NOTE: This Comment SHOULD Be Extracted
# NOTE: And This One Too
add_notice(req, ngettext("Bar deleted.",
"Bars deleted.", len(selected)))
""")
messages = list(extract.extract_python(buf, ('ngettext', '_'), ['NOTE:'],
{'strip_comment_tags': False}))
self.assertEqual((6, '_', 'Locale deleted.',
[u'NOTE: This Comment SHOULD Be Extracted']),
messages[1])
self.assertEqual((10, 'ngettext', (u'Foo deleted.', u'Foos deleted.',
None),
[u'NOTE: This Comment SHOULD Be Extracted']),
messages[2])
self.assertEqual((3, 'ngettext',
(u'Catalog deleted.',
u'Catalogs deleted.', None),
[u'NOTE: This Comment SHOULD Be Extracted']),
messages[0])
self.assertEqual((15, 'ngettext', (u'Bar deleted.', u'Bars deleted.',
None),
[u'NOTE: This Comment SHOULD Be Extracted',
u'NOTE: And This One Too']),
messages[3])
def test_declarations(self):
buf = BytesIO(b"""\
class gettext(object):
pass
def render_body(context,x,y=_('Page arg 1'),z=_('Page arg 2'),**pageargs):
pass
def ngettext(y='arg 1',z='arg 2',**pageargs):
pass
class Meta:
verbose_name = _('log entry')
""")
messages = list(extract.extract_python(buf,
extract.DEFAULT_KEYWORDS.keys(),
[], {}))
self.assertEqual([(3, '_', u'Page arg 1', []),
(3, '_', u'Page arg 2', []),
(8, '_', u'log entry', [])],
messages)
def test_multiline(self):
buf = BytesIO(b"""\
msg1 = ngettext('pylon',
'pylons', count)
msg2 = ngettext('elvis',
'elvises',
count)
""")
messages = list(extract.extract_python(buf, ('ngettext',), [], {}))
self.assertEqual([(1, 'ngettext', (u'pylon', u'pylons', None), []),
(3, 'ngettext', (u'elvis', u'elvises', None), [])],
messages)
def test_npgettext(self):
buf = BytesIO(b"""\
msg1 = npgettext('Strings','pylon',
'pylons', count)
msg2 = npgettext('Strings','elvis',
'elvises',
count)
""")
messages = list(extract.extract_python(buf, ('npgettext',), [], {}))
self.assertEqual([(1, 'npgettext', (u'Strings', u'pylon', u'pylons', None), []),
(3, 'npgettext', (u'Strings', u'elvis', u'elvises', None), [])],
messages)
buf = BytesIO(b"""\
msg = npgettext('Strings', 'pylon', # TRANSLATORS: shouldn't be
'pylons', # TRANSLATORS: seeing this
count)
""")
messages = list(extract.extract_python(buf, ('npgettext',),
['TRANSLATORS:'], {}))
self.assertEqual([(1, 'npgettext', (u'Strings', u'pylon', u'pylons', None), [])],
messages)
def test_triple_quoted_strings(self):
buf = BytesIO(b"""\
msg1 = _('''pylons''')
msg2 = ngettext(r'''elvis''', \"\"\"elvises\"\"\", count)
msg2 = ngettext(\"\"\"elvis\"\"\", 'elvises', count)
""")
messages = list(extract.extract_python(buf,
extract.DEFAULT_KEYWORDS.keys(),
[], {}))
self.assertEqual([(1, '_', (u'pylons'), []),
(2, 'ngettext', (u'elvis', u'elvises', None), []),
(3, 'ngettext', (u'elvis', u'elvises', None), [])],
messages)
def test_multiline_strings(self):
buf = BytesIO(b"""\
_('''This module provides internationalization and localization
support for your Python programs by providing an interface to the GNU
gettext message catalog library.''')
""")
messages = list(extract.extract_python(buf,
extract.DEFAULT_KEYWORDS.keys(),
[], {}))
self.assertEqual(
[(1, '_',
u'This module provides internationalization and localization\n'
'support for your Python programs by providing an interface to '
'the GNU\ngettext message catalog library.', [])],
messages)
def test_concatenated_strings(self):
buf = BytesIO(b"""\
foobar = _('foo' 'bar')
""")
messages = list(extract.extract_python(buf,
extract.DEFAULT_KEYWORDS.keys(),
[], {}))
self.assertEqual(u'foobar', messages[0][2])
    def test_unicode_string_arg(self):
        # A u'' literal must be extracted the same as a plain string.
        buf = BytesIO(b"msg = _(u'Foo Bar')")
        messages = list(extract.extract_python(buf, ('_',), [], {}))
        self.assertEqual(u'Foo Bar', messages[0][2])
def test_comment_tag(self):
buf = BytesIO(b"""
# NOTE: A translation comment
msg = _(u'Foo Bar')
""")
messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
self.assertEqual(u'Foo Bar', messages[0][2])
self.assertEqual([u'NOTE: A translation comment'], messages[0][3])
def test_comment_tag_multiline(self):
buf = BytesIO(b"""
# NOTE: A translation comment
# with a second line
msg = _(u'Foo Bar')
""")
messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
self.assertEqual(u'Foo Bar', messages[0][2])
self.assertEqual([u'NOTE: A translation comment', u'with a second line'],
messages[0][3])
def test_translator_comments_with_previous_non_translator_comments(self):
buf = BytesIO(b"""
# This shouldn't be in the output
# because it didn't start with a comment tag
# NOTE: A translation comment
# with a second line
msg = _(u'Foo Bar')
""")
messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
self.assertEqual(u'Foo Bar', messages[0][2])
self.assertEqual([u'NOTE: A translation comment', u'with a second line'],
messages[0][3])
def test_comment_tags_not_on_start_of_comment(self):
buf = BytesIO(b"""
# This shouldn't be in the output
# because it didn't start with a comment tag
# do NOTE: this will not be a translation comment
# NOTE: This one will be
msg = _(u'Foo Bar')
""")
messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
self.assertEqual(u'Foo Bar', messages[0][2])
self.assertEqual([u'NOTE: This one will be'], messages[0][3])
def test_multiple_comment_tags(self):
buf = BytesIO(b"""
# NOTE1: A translation comment for tag1
# with a second line
msg = _(u'Foo Bar1')
# NOTE2: A translation comment for tag2
msg = _(u'Foo Bar2')
""")
messages = list(extract.extract_python(buf, ('_',),
['NOTE1:', 'NOTE2:'], {}))
self.assertEqual(u'Foo Bar1', messages[0][2])
self.assertEqual([u'NOTE1: A translation comment for tag1',
u'with a second line'], messages[0][3])
self.assertEqual(u'Foo Bar2', messages[1][2])
self.assertEqual([u'NOTE2: A translation comment for tag2'], messages[1][3])
def test_two_succeeding_comments(self):
buf = BytesIO(b"""
# NOTE: one
# NOTE: two
msg = _(u'Foo Bar')
""")
messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
self.assertEqual(u'Foo Bar', messages[0][2])
self.assertEqual([u'NOTE: one', u'NOTE: two'], messages[0][3])
def test_invalid_translator_comments(self):
buf = BytesIO(b"""
# NOTE: this shouldn't apply to any messages
hello = 'there'
msg = _(u'Foo Bar')
""")
messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
self.assertEqual(u'Foo Bar', messages[0][2])
self.assertEqual([], messages[0][3])
def test_invalid_translator_comments2(self):
    """Comments only attach to the immediately following message; stale
    or mid-line tags never leak into later messages."""
    buf = BytesIO(b"""
# NOTE: Hi!
hithere = _('Hi there!')

# NOTE: you should not be seeing this in the .po
rows = [[v for v in range(0,10)] for row in range(0,10)]

# this (NOTE:) should not show up either
hello = _('Hello')
""")
    messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
    self.assertEqual(u'Hi there!', messages[0][2])
    self.assertEqual([u'NOTE: Hi!'], messages[0][3])
    self.assertEqual(u'Hello', messages[1][2])
    self.assertEqual([], messages[1][3])
def test_invalid_translator_comments3(self):
    """A continuation line at a different indent level breaks the comment
    block, so no translator comment is attached at all."""
    buf = BytesIO(b"""
# NOTE: Hi,

# there!
hithere = _('Hi there!')
""")
    messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
    self.assertEqual(u'Hi there!', messages[0][2])
    self.assertEqual([], messages[0][3])
def test_comment_tag_with_leading_space(self):
    """A bare ':' tag works even when followed by leading spaces."""
    buf = BytesIO(b"""
  #: A translation comment
  #: with leading spaces
msg = _(u'Foo Bar')
""")
    messages = list(extract.extract_python(buf, ('_',), [':'], {}))
    self.assertEqual(u'Foo Bar', messages[0][2])
    self.assertEqual([u': A translation comment', u': with leading spaces'],
                     messages[0][3])
def test_different_signatures(self):
    """Extraction records positional arguments only: keyword args and
    missing positions show up as None, and a single argument is returned
    as a plain string rather than a 1-tuple."""
    buf = BytesIO(b"""
foo = _('foo', 'bar')
n = ngettext('hello', 'there', n=3)
n = ngettext(n=3, 'hello', 'there')
n = ngettext(n=3, *messages)
n = ngettext()
n = ngettext('foo')
""")
    messages = list(extract.extract_python(buf, ('_', 'ngettext'), [], {}))
    self.assertEqual((u'foo', u'bar'), messages[0][2])
    self.assertEqual((u'hello', u'there', None), messages[1][2])
    self.assertEqual((None, u'hello', u'there'), messages[2][2])
    self.assertEqual((None, None), messages[3][2])
    self.assertEqual(None, messages[4][2])
    # Originally written as assertEqual(('foo'), ...): the parentheses are
    # a no-op and misleadingly suggested a tuple.  A single argument is
    # extracted as a plain string.
    self.assertEqual(u'foo', messages[5][2])
def test_utf8_message(self):
    """An explicit 'encoding' option decodes non-ASCII messages."""
    buf = BytesIO(u"""
# NOTE: hello
msg = _('Bonjour à tous')
""".encode('utf-8'))
    messages = list(extract.extract_python(buf, ('_',), ['NOTE:'],
                                           {'encoding': 'utf-8'}))
    self.assertEqual(u'Bonjour à tous', messages[0][2])
    self.assertEqual([u'NOTE: hello'], messages[0][3])
def test_utf8_message_with_magic_comment(self):
    """A PEP 263 coding magic comment is honored without an explicit
    'encoding' option."""
    buf = BytesIO(u"""# -*- coding: utf-8 -*-
# NOTE: hello
msg = _('Bonjour à tous')
""".encode('utf-8'))
    messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
    self.assertEqual(u'Bonjour à tous', messages[0][2])
    self.assertEqual([u'NOTE: hello'], messages[0][3])
def test_utf8_message_with_utf8_bom(self):
    """A UTF-8 byte-order mark alone selects UTF-8 decoding."""
    buf = BytesIO(codecs.BOM_UTF8 + u"""
# NOTE: hello
msg = _('Bonjour à tous')
""".encode('utf-8'))
    messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
    self.assertEqual(u'Bonjour à tous', messages[0][2])
    self.assertEqual([u'NOTE: hello'], messages[0][3])
def test_utf8_message_with_utf8_bom_and_magic_comment(self):
    """A UTF-8 BOM combined with a matching magic comment is accepted."""
    buf = BytesIO(codecs.BOM_UTF8 + u"""# -*- coding: utf-8 -*-
# NOTE: hello
msg = _('Bonjour à tous')
""".encode('utf-8'))
    messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
    self.assertEqual(u'Bonjour à tous', messages[0][2])
    self.assertEqual([u'NOTE: hello'], messages[0][3])
def test_utf8_bom_with_latin_magic_comment_fails(self):
    """A UTF-8 BOM contradicting a latin-1 magic comment is a SyntaxError,
    matching CPython's own behavior for such files."""
    buf = BytesIO(codecs.BOM_UTF8 + u"""# -*- coding: latin-1 -*-
# NOTE: hello
msg = _('Bonjour à tous')
""".encode('utf-8'))
    self.assertRaises(SyntaxError, list,
                      extract.extract_python(buf, ('_',), ['NOTE:'], {}))
def test_utf8_raw_strings_match_unicode_strings(self):
    """Byte-string and u'' literals decode to the same message text."""
    buf = BytesIO(codecs.BOM_UTF8 + u"""
msg = _('Bonjour à tous')
msgu = _(u'Bonjour à tous')
""".encode('utf-8'))
    messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
    self.assertEqual(u'Bonjour à tous', messages[0][2])
    self.assertEqual(messages[0][2], messages[1][2])
def test_extract_strip_comment_tags(self):
    """With strip_comment_tags=True the tag prefix (and following space)
    is removed from each attached comment line."""
    buf = BytesIO(b"""\
#: This is a comment with a very simple
#: prefix specified
_('Servus')

# NOTE: This is a multiline comment with
# a prefix too
_('Babatschi')""")
    messages = list(extract.extract('python', buf, comment_tags=['NOTE:', ':'],
                                    strip_comment_tags=True))
    self.assertEqual(u'Servus', messages[0][1])
    self.assertEqual([u'This is a comment with a very simple',
                      u'prefix specified'], messages[0][2])
    self.assertEqual(u'Babatschi', messages[1][1])
    self.assertEqual([u'This is a multiline comment with',
                      u'a prefix too'], messages[1][2])
def test_nested_messages(self):
    """Gettext calls nested inside another call's arguments are extracted
    as separate messages; the translator comment attaches only to the
    outermost call, and nested ones get an empty comment list."""
    buf = BytesIO(b"""
# NOTE: First
_(u'Hello, {name}!', name=_(u'Foo Bar'))
# NOTE: Second
_(u'Hello, {name1} and {name2}!', name1=_(u'Heungsub'),
  name2=_(u'Armin'))
# NOTE: Third
_(u'Hello, {0} and {1}!', _(u'Heungsub'),
  _(u'Armin'))
""")
    messages = list(extract.extract_python(buf, ('_',), ['NOTE:'], {}))
    # Outer call: keyword args show up as None in the positional tuple.
    self.assertEqual((u'Hello, {name}!', None), messages[0][2])
    self.assertEqual([u'NOTE: First'], messages[0][3])
    self.assertEqual(u'Foo Bar', messages[1][2])
    self.assertEqual([], messages[1][3])
    self.assertEqual((u'Hello, {name1} and {name2}!', None), messages[2][2])
    self.assertEqual([u'NOTE: Second'], messages[2][3])
    self.assertEqual(u'Heungsub', messages[3][2])
    self.assertEqual([], messages[3][3])
    self.assertEqual(u'Armin', messages[4][2])
    self.assertEqual([], messages[4][3])
    self.assertEqual((u'Hello, {0} and {1}!', None), messages[5][2])
    self.assertEqual([u'NOTE: Third'], messages[5][3])
    self.assertEqual(u'Heungsub', messages[6][2])
    self.assertEqual([], messages[6][3])
    self.assertEqual(u'Armin', messages[7][2])
    self.assertEqual([], messages[7][3])
class ExtractTestCase(unittest.TestCase):
    """Tests for the generic ``extract.extract()`` front-end, which
    dispatches to a named or callable extraction method."""

    def test_invalid_filter(self):
        """Calls whose arguments are runtime expressions (method calls,
        bare names) are skipped; only literal-argument calls survive."""
        buf = BytesIO(b"""\
msg1 = _(i18n_arg.replace(r'\"', '"'))
msg2 = ungettext(i18n_arg.replace(r'\"', '"'), multi_arg.replace(r'\"', '"'), 2)
msg3 = ungettext("Babel", multi_arg.replace(r'\"', '"'), 2)
msg4 = ungettext(i18n_arg.replace(r'\"', '"'), "Babels", 2)
msg5 = ungettext('bunny', 'bunnies', random.randint(1, 2))
msg6 = ungettext(arg0, 'bunnies', random.randint(1, 2))
msg7 = _(hello.there)
msg8 = gettext('Rabbit')
msg9 = dgettext('wiki', model.addPage())
msg10 = dngettext(domain, 'Page', 'Pages', 3)
""")
        messages = \
            list(extract.extract('python', buf, extract.DEFAULT_KEYWORDS, [],
                                 {}))
        self.assertEqual([(5, (u'bunny', u'bunnies'), [], None),
                          (8, u'Rabbit', [], None),
                          (10, (u'Page', u'Pages'), [], None)], messages)

    def test_invalid_extract_method(self):
        """An unknown extraction method name raises ValueError."""
        buf = BytesIO(b'')
        self.assertRaises(ValueError, list, extract.extract('spam', buf))

    def test_different_signatures(self):
        """The front-end drops messages with missing required arguments,
        unlike the raw extract_python pass (compare the test of the same
        name above), keeping only the fully specified calls."""
        buf = BytesIO(b"""
foo = _('foo', 'bar')
n = ngettext('hello', 'there', n=3)
n = ngettext(n=3, 'hello', 'there')
n = ngettext(n=3, *messages)
n = ngettext()
n = ngettext('foo')
""")
        messages = \
            list(extract.extract('python', buf, extract.DEFAULT_KEYWORDS, [],
                                 {}))
        self.assertEqual(len(messages), 2)
        self.assertEqual(u'foo', messages[0][1])
        self.assertEqual((u'hello', u'there'), messages[1][1])

    def test_empty_string_msgid(self):
        """An empty msgid is dropped with a warning on stderr (an empty
        msgid is reserved for the PO file header)."""
        buf = BytesIO(b"""\
msg = _('')
""")
        stderr = sys.stderr
        sys.stderr = StringIO()
        try:
            messages = \
                list(extract.extract('python', buf, extract.DEFAULT_KEYWORDS,
                                     [], {}))
            self.assertEqual([], messages)
            assert 'warning: Empty msgid.' in sys.stderr.getvalue()
        finally:
            # Always restore the real stderr, even if assertions fail.
            sys.stderr = stderr

    def test_warn_if_empty_string_msgid_found_in_context_aware_extraction_method(self):
        """The empty-msgid warning also fires for context-aware keywords
        such as pgettext."""
        buf = BytesIO(b"\nmsg = pgettext('ctxt', '')\n")
        stderr = sys.stderr
        sys.stderr = StringIO()
        try:
            messages = extract.extract('python', buf)
            self.assertEqual([], list(messages))
            assert 'warning: Empty msgid.' in sys.stderr.getvalue()
        finally:
            sys.stderr = stderr

    def test_extract_allows_callable(self):
        """A callable may be passed directly instead of a method name."""
        def arbitrary_extractor(fileobj, keywords, comment_tags, options):
            return [(1, None, (), ())]
        for x in extract.extract(arbitrary_extractor, BytesIO(b"")):
            assert x[0] == 1

    def test_future(self):
        """``from __future__ import unicode_literals`` makes plain string
        literals unicode, so escapes like \\xa0 decode correctly."""
        buf = BytesIO(br"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
nbsp = _('\xa0')
""")
        messages = list(extract.extract('python', buf,
                                        extract.DEFAULT_KEYWORDS, [], {}))
        assert messages[0][1] == u'\xa0'
| apache-2.0 |
laszlocsomor/tensorflow | tensorflow/python/kernel_tests/constant_op_test.py | 3 | 33411 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ConstantOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from google.protobuf import text_format
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import tensor_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import logging_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.util import compat
class ConstantTest(test.TestCase):
  """Tests tf.constant / convert_to_tensor round-trips for every dtype,
  plus shape inference and error cases."""

  def _testCpu(self, x):
    """Round-trips `x` through convert_to_tensor on CPU and compares."""
    np_ans = np.array(x)
    with self.test_session(use_gpu=False):
      tf_ans = ops.convert_to_tensor(x).eval()
    # Floating/complex results are compared with tolerance, exact otherwise.
    if np_ans.dtype in [np.float32, np.float64, np.complex64, np.complex128]:
      self.assertAllClose(np_ans, tf_ans)
    else:
      self.assertAllEqual(np_ans, tf_ans)

  def _testGpu(self, x):
    """Round-trips `x` through convert_to_tensor on GPU (if available)."""
    np_ans = np.array(x)
    with self.test_session(use_gpu=True):
      tf_ans = ops.convert_to_tensor(x).eval()
    if np_ans.dtype in [np.float32, np.float64, np.complex64, np.complex128]:
      self.assertAllClose(np_ans, tf_ans)
    else:
      self.assertAllEqual(np_ans, tf_ans)

  def _testAll(self, x):
    """Runs both the CPU and GPU round-trip checks."""
    self._testCpu(x)
    self._testGpu(x)

  def testFloat(self):
    self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32))
    self._testAll(
        np.random.normal(size=30).reshape([2, 3, 5]).astype(np.float32))
    self._testAll(np.empty((2, 0, 5)).astype(np.float32))

  def testDouble(self):
    self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float64))
    self._testAll(
        np.random.normal(size=30).reshape([2, 3, 5]).astype(np.float64))
    self._testAll(np.empty((2, 0, 5)).astype(np.float64))

  def testInt32(self):
    self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.int32))
    self._testAll((100 * np.random.normal(size=30)).reshape([2, 3, 5]).astype(
        np.int32))
    self._testAll(np.empty((2, 0, 5)).astype(np.int32))

  def testInt64(self):
    self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.int64))
    self._testAll((100 * np.random.normal(size=30)).reshape([2, 3, 5]).astype(
        np.int64))
    self._testAll(np.empty((2, 0, 5)).astype(np.int64))

  def testComplex64(self):
    # NOTE: uses the builtin complex(); the original np.complex was only an
    # alias for it and was removed in NumPy 1.24.
    self._testAll(
        complex(1, 2) *
        np.arange(-15, 15).reshape([2, 3, 5]).astype(np.complex64))
    self._testAll(
        complex(1, 2) *
        np.random.normal(size=30).reshape([2, 3, 5]).astype(np.complex64))
    self._testAll(np.empty((2, 0, 5)).astype(np.complex64))

  def testComplex128(self):
    # NOTE: builtin complex() replaces the removed np.complex alias.
    self._testAll(
        complex(1, 2) *
        np.arange(-15, 15).reshape([2, 3, 5]).astype(np.complex128))
    self._testAll(
        complex(1, 2) *
        np.random.normal(size=30).reshape([2, 3, 5]).astype(np.complex128))
    self._testAll(np.empty((2, 0, 5)).astype(np.complex128))

  def testString(self):
    # Strings are CPU-only: no GPU kernel for DT_STRING.
    self._testCpu(
        np.array([compat.as_bytes(str(x)) for x in np.arange(-15, 15)]).reshape(
            [2, 3, 5]))
    self._testCpu(np.empty((2, 0, 5)).astype(np.str_))

  def testVariant(self):
    # TODO(ebrevdo): Re-enable use_gpu=True once non-DMA Variant
    # copying between CPU and GPU is supported.
    with self.test_session(use_gpu=False):
      variant_tensor = tensor_pb2.TensorProto(
          dtype=dtypes_lib.variant.as_datatype_enum,
          tensor_shape=tensor_shape.TensorShape([]).as_proto(),
          variant_val=[
              tensor_pb2.VariantTensorDataProto(
                  # Match registration in variant_op_registry.cc
                  type_name=b"int",
                  metadata=np.array(1, dtype=np.int32).tobytes())
          ])
      const = constant_op.constant(variant_tensor)
      const_value = const.op.get_attr("value")

      # Ensure we stored the tensor proto properly.
      self.assertProtoEquals(variant_tensor, const_value)

      # Smoke test -- ensure this executes without trouble.
      # Right now, non-numpy-compatible objects cannot be returned from a
      # session.run call; similarly, objects that can't be converted to
      # native numpy types cannot be passed to ops.convert_to_tensor.
      # TODO(ebrevdo): Add registration mechanism for
      # ops.convert_to_tensor and for session.run output.
      logging_const_op = logging_ops.Print(
          const, [const],
          message="Variant storing an int, decoded const value:").op
      logging_const_op.run()

  def testStringWithNulls(self):
    """Embedded NUL bytes must survive the round trip intact."""
    with self.test_session():
      val = ops.convert_to_tensor(b"\0\0\0\0").eval()
    self.assertEqual(len(val), 4)
    self.assertEqual(val, b"\0\0\0\0")

    with self.test_session():
      val = ops.convert_to_tensor(b"xx\0xx").eval()
    self.assertEqual(len(val), 5)
    self.assertAllEqual(val, b"xx\0xx")

    nested = [[b"\0\0\0\0", b"xx\0xx"], [b"\0_\0_\0_\0", b"\0"]]
    with self.test_session():
      val = ops.convert_to_tensor(nested).eval()
    # NOTE(mrry): Do not use assertAllEqual, because it converts nested to a
    # numpy array, which loses the null terminators.
    self.assertEqual(val.tolist(), nested)

  def testExplicitShapeNumPy(self):
    with ops.Graph().as_default():
      c = constant_op.constant(
          np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32),
          shape=[2, 3, 5])
    self.assertEqual(c.get_shape(), [2, 3, 5])

  def testImplicitShapeNumPy(self):
    with ops.Graph().as_default():
      c = constant_op.constant(
          np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32))
    self.assertEqual(c.get_shape(), [2, 3, 5])

  def testExplicitShapeList(self):
    with ops.Graph().as_default():
      c = constant_op.constant([1, 2, 3, 4, 5, 6, 7], shape=[7])
    self.assertEqual(c.get_shape(), [7])

  def testImplicitShapeList(self):
    with ops.Graph().as_default():
      c = constant_op.constant([1, 2, 3, 4, 5, 6, 7])
    self.assertEqual(c.get_shape(), [7])

  def testExplicitShapeNumber(self):
    with ops.Graph().as_default():
      c = constant_op.constant(1, shape=[1])
    self.assertEqual(c.get_shape(), [1])

  def testImplicitShapeNumber(self):
    with ops.Graph().as_default():
      c = constant_op.constant(1)
    self.assertEqual(c.get_shape(), [])

  def testShapeInconsistent(self):
    # Fewer elements than the shape requires is allowed; the last value
    # is used to fill the remainder.
    with ops.Graph().as_default():
      c = constant_op.constant([1, 2, 3, 4, 5, 6, 7], shape=[10])
    self.assertEqual(c.get_shape(), [10])

  # pylint: disable=g-long-lambda
  def testShapeWrong(self):
    # Too many elements for the requested shape is an error.
    with ops.Graph().as_default():
      with self.assertRaisesWithPredicateMatch(
          ValueError,
          lambda e: ("Too many elements provided. Needed at most 5, "
                     "but received 7" == str(e))):
        constant_op.constant([1, 2, 3, 4, 5, 6, 7], shape=[5])
  # pylint: enable=g-long-lambda

  # TODO(b/35396543): Temporarily disable: suspicion that
  # this is causing test timeouts.
  def _testTooLargeConstant(self):
    with ops.Graph().as_default():
      large_array = np.zeros((512, 1024, 1024), dtype=np.float32)
      with self.assertRaisesRegexp(
          ValueError,
          "Cannot create a tensor proto whose content is larger than 2GB."):
        c = constant_op.constant(large_array)

  # TODO(b/35396543): Temporarily disable: suspicion that
  # this is causing test timeouts.
  def _testTooLargeGraph(self):
    with ops.Graph().as_default() as g:
      large_array = np.zeros((256, 1024, 1024), dtype=np.float32)
      c = constant_op.constant(large_array)
      d = constant_op.constant(large_array)
      with self.assertRaisesRegexp(ValueError,
                                   "GraphDef cannot be larger than 2GB."):
        g.as_graph_def()

  def testSparseValuesRaiseErrors(self):
    """Ragged nested lists are rejected rather than silently padded."""
    with self.assertRaisesRegexp(ValueError,
                                 "setting an array element with a sequence"):
      c = constant_op.constant([[1, 2], [3]], dtype=dtypes_lib.int32)

    with self.assertRaisesRegexp(ValueError, "must be a dense"):
      c = constant_op.constant([[1, 2], [3]])

    with self.assertRaisesRegexp(ValueError, "must be a dense"):
      c = constant_op.constant([[1, 2], [3], [4, 5]])
class AsTensorTest(test.TestCase):
  """Tests convert_to_tensor for Tensor, Python-value, TensorShape and
  Dimension inputs."""

  def testAsTensorForTensorInput(self):
    """A Tensor input is returned unchanged (same object)."""
    with ops.Graph().as_default():
      t = constant_op.constant(10.0)
      x = ops.convert_to_tensor(t)
    self.assertIs(t, x)

  def testAsTensorForNonTensorInput(self):
    """A plain Python value is wrapped into a Tensor."""
    with ops.Graph().as_default():
      x = ops.convert_to_tensor(10.0)
    self.assertTrue(isinstance(x, ops.Tensor))

  def testAsTensorForShapeInput(self):
    """Fully-defined TensorShapes convert to int32 vectors by default,
    promoting to int64 when a dimension exceeds the int32 range."""
    with self.test_session():
      x = ops.convert_to_tensor(tensor_shape.TensorShape([]))
      self.assertEqual(dtypes_lib.int32, x.dtype)
      self.assertAllEqual([], x.eval())

      x = ops.convert_to_tensor(tensor_shape.TensorShape([1, 2, 3]))
      self.assertEqual(dtypes_lib.int32, x.dtype)
      self.assertAllEqual([1, 2, 3], x.eval())

      # 2**31-1 is the largest dimension that still fits in int32.
      x = ops.convert_to_tensor(tensor_shape.TensorShape([2**31-1, 2, 3]))
      self.assertEqual(dtypes_lib.int32, x.dtype)
      self.assertAllEqual([2**31-1, 2, 3], x.eval())

      x = ops.convert_to_tensor(tensor_shape.TensorShape([2**31-1, 2, 3]),
                                dtype=dtypes_lib.int32)
      self.assertEqual(dtypes_lib.int32, x.dtype)
      self.assertAllEqual([2**31-1, 2, 3], x.eval())

      # 2**31 overflows int32, so the default dtype becomes int64.
      x = ops.convert_to_tensor(tensor_shape.TensorShape([2**31, 2, 3]))
      self.assertEqual(dtypes_lib.int64, x.dtype)
      self.assertAllEqual([2**31, 2, 3], x.eval())

      x = ops.convert_to_tensor(tensor_shape.TensorShape([2**31, 2, 3]),
                                dtype=dtypes_lib.int64)
      self.assertEqual(dtypes_lib.int64, x.dtype)
      self.assertAllEqual([2**31, 2, 3], x.eval())

      # Requesting int32 explicitly for an overflowing dimension fails.
      with self.assertRaisesRegexp(
          ValueError, "a dimension is too large .2147483648."):
        x = ops.convert_to_tensor(tensor_shape.TensorShape([2**31, 2, 3]),
                                  dtype=dtypes_lib.int32)

      x = ops.convert_to_tensor(
          tensor_shape.TensorShape([1, 2, 3]), dtype=dtypes_lib.int64)
      self.assertEqual(dtypes_lib.int64, x.dtype)
      self.assertAllEqual([1, 2, 3], x.eval())

      # A TensorShape is also usable directly as a reshape target.
      x = array_ops.reshape(
          array_ops.zeros([6]), tensor_shape.TensorShape([2, 3]))
      self.assertAllEqual([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], x.eval())

    # Partially-known shapes cannot be converted.
    with self.assertRaisesRegexp(ValueError, "partially known"):
      ops.convert_to_tensor(tensor_shape.TensorShape(None))

    with self.assertRaisesRegexp(ValueError, "partially known"):
      ops.convert_to_tensor(tensor_shape.TensorShape([1, None, 64]))

    # Shapes only convert to integer dtypes.
    with self.assertRaises(TypeError):
      ops.convert_to_tensor(
          tensor_shape.TensorShape([1, 2, 3]), dtype=dtypes_lib.float32)

  def testAsTensorForDimensionInput(self):
    """A single known Dimension converts to an int32 (or int64) scalar;
    unknown dimensions and float dtypes are rejected."""
    with self.test_session():
      x = ops.convert_to_tensor(tensor_shape.TensorShape([1, 2, 3])[1])
      self.assertEqual(dtypes_lib.int32, x.dtype)
      self.assertAllEqual(2, x.eval())

      x = ops.convert_to_tensor(
          tensor_shape.TensorShape([1, 2, 3])[1], dtype=dtypes_lib.int64)
      self.assertEqual(dtypes_lib.int64, x.dtype)
      self.assertAllEqual(2, x.eval())

    with self.assertRaisesRegexp(ValueError, "unknown Dimension"):
      ops.convert_to_tensor(tensor_shape.TensorShape(None)[1])

    with self.assertRaisesRegexp(ValueError, "unknown Dimension"):
      ops.convert_to_tensor(tensor_shape.TensorShape([1, None, 64])[1])

    with self.assertRaises(TypeError):
      ops.convert_to_tensor(
          tensor_shape.TensorShape([1, 2, 3])[1], dtype=dtypes_lib.float32)
class IdentityOpTest(test.TestCase):
  """Checks the NodeDef emitted for tf.identity."""

  def testIdTensor(self):
    with ops.Graph().as_default():
      x = constant_op.constant(2.0, shape=[6], name="input")
      id_op = array_ops.identity(x, name="id")
    self.assertTrue(isinstance(id_op.op.inputs[0], ops.Tensor))
    # The identity op should wire straight through to its input and carry
    # only the dtype attribute.
    self.assertProtoEquals("name: 'id' op: 'Identity' input: 'input' "
                           "attr { key: 'T' value { type: DT_FLOAT } }",
                           id_op.op.node_def)
class ZerosTest(test.TestCase):
  """Tests tf.zeros with constant, scalar and dynamic shapes."""

  def _Zeros(self, shape):
    """Evaluates tf.zeros(shape), first checking the static shape."""
    with self.test_session():
      ret = array_ops.zeros(shape)
      self.assertEqual(shape, ret.get_shape())
      return ret.eval()

  def testConst(self):
    self.assertTrue(
        np.array_equal(self._Zeros([2, 3]), np.array([[0] * 3] * 2)))

  def testScalar(self):
    # [], () and an empty int32 shape tensor all denote a scalar.
    self.assertEqual(0, self._Zeros([]))
    self.assertEqual(0, self._Zeros(()))
    with self.test_session():
      scalar = array_ops.zeros(constant_op.constant([], dtype=dtypes_lib.int32))
      self.assertEqual(0, scalar.eval())

  def testDynamicSizes(self):
    np_ans = np.array([[0] * 3] * 2)
    with self.test_session():
      # Creates a tensor of 2 x 3.
      d = array_ops.fill([2, 3], 12., name="fill")
      # Constructs a tensor of zeros of the same dimensions as "d".
      z = array_ops.zeros(array_ops.shape(d))
      out = z.eval()
    self.assertAllEqual(np_ans, out)
    self.assertShapeEqual(np_ans, d)
    self.assertShapeEqual(np_ans, z)

  def testDtype(self):
    with self.test_session():
      d = array_ops.fill([2, 3], 12., name="fill")
      self.assertEqual(d.get_shape(), [2, 3])
      # Test default type for both constant size and dynamic size
      z = array_ops.zeros([2, 3])
      self.assertEqual(z.dtype, dtypes_lib.float32)
      self.assertEqual([2, 3], z.get_shape())
      self.assertAllEqual(z.eval(), np.zeros([2, 3]))
      z = array_ops.zeros(array_ops.shape(d))
      self.assertEqual(z.dtype, dtypes_lib.float32)
      self.assertEqual([2, 3], z.get_shape())
      self.assertAllEqual(z.eval(), np.zeros([2, 3]))
      # Test explicit type control
      for dtype in [
          dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int32,
          dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.int8,
          dtypes_lib.complex64, dtypes_lib.complex128, dtypes_lib.int64,
          dtypes_lib.bool, dtypes_lib.string
      ]:
        z = array_ops.zeros([2, 3], dtype=dtype)
        self.assertEqual(z.dtype, dtype)
        self.assertEqual([2, 3], z.get_shape())
        z_value = z.eval()
        # "Zero" for every dtype must be falsy (works for string b"" too).
        self.assertFalse(np.any(z_value))
        self.assertEqual((2, 3), z_value.shape)
        z = array_ops.zeros(array_ops.shape(d), dtype=dtype)
        self.assertEqual(z.dtype, dtype)
        self.assertEqual([2, 3], z.get_shape())
        z_value = z.eval()
        self.assertFalse(np.any(z_value))
        self.assertEqual((2, 3), z_value.shape)
class ZerosLikeTest(test.TestCase):
  """Tests tf.zeros_like over dtypes, partial shapes, and variants."""

  def _compareZeros(self, dtype, fully_defined_shape, use_gpu):
    """Checks zeros_like matches the dtype/shape of a 2x3 ones input.

    Args:
      dtype: the TF dtype under test.
      fully_defined_shape: if True, use a constant input (static shape);
        otherwise use a placeholder and feed the value at run time.
      use_gpu: whether to place ops on GPU.
    """
    with self.test_session(use_gpu=use_gpu):
      # Creates a tensor of non-zero values with shape 2 x 3.
      # NOTE(kearnes): The default numpy dtype associated with tf.string is
      # np.object (and can't be changed without breaking a lot things), which
      # causes a TypeError in constant_op.constant below. Here we catch the
      # special case of tf.string and set the numpy dtype appropriately.
      if dtype == dtypes_lib.string:
        # np.bytes_ is the canonical name; the original np.string_ alias was
        # removed in NumPy 2.0.
        numpy_dtype = np.bytes_
      else:
        numpy_dtype = dtype.as_numpy_dtype
      if fully_defined_shape:
        d = constant_op.constant(
            np.ones((2, 3), dtype=numpy_dtype), dtype=dtype)
      else:
        d = array_ops.placeholder(dtype=dtype)
      # Constructs a tensor of zeros of the same dimensions and type as "d".
      z_var = array_ops.zeros_like(d)
      # Test that the type is correct
      self.assertEqual(z_var.dtype, dtype)
      # Test that the shape is correct
      if fully_defined_shape:
        self.assertEqual([2, 3], z_var.get_shape())

      # Test that the value is correct
      feed_dict = {}
      if not fully_defined_shape:
        feed_dict[d] = np.ones((2, 3), dtype=numpy_dtype)
      z_value = z_var.eval(feed_dict=feed_dict)
      self.assertFalse(np.any(z_value))
      self.assertEqual((2, 3), z_value.shape)

  def testZerosLikeCPU(self):
    for dtype in [
        dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int8,
        dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.uint16, dtypes_lib.int32,
        dtypes_lib.int64, dtypes_lib.bool, dtypes_lib.complex64,
        dtypes_lib.complex128, dtypes_lib.string
    ]:
      self._compareZeros(dtype, fully_defined_shape=False, use_gpu=False)
      self._compareZeros(dtype, fully_defined_shape=True, use_gpu=False)

  def testZerosLikeGPU(self):
    # Only the dtypes with registered GPU kernels are tested here.
    for dtype in [
        dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int32,
        dtypes_lib.bool, dtypes_lib.int64, dtypes_lib.string
    ]:
      self._compareZeros(dtype, fully_defined_shape=False, use_gpu=True)
      self._compareZeros(dtype, fully_defined_shape=True, use_gpu=True)

  def testZerosLikePartialShape(self):
    """zeros_like preserves a partially-known static shape."""
    d = array_ops.placeholder(dtypes_lib.float32, shape=[None, 4, None])
    z = array_ops.zeros_like(d)
    self.assertEqual(d.get_shape().as_list(), z.get_shape().as_list())

  def testZerosLikeDtype(self):
    # Make sure zeros_like works even for dtypes that cannot be cast between
    with self.test_session():
      shape = (3, 5)
      dtypes = np.float32, np.complex64
      for in_type in dtypes:
        x = np.arange(15).astype(in_type).reshape(*shape)
        for out_type in dtypes:
          y = array_ops.zeros_like(x, dtype=out_type).eval()
          self.assertEqual(y.dtype, out_type)
          self.assertEqual(y.shape, shape)
          self.assertAllEqual(y, np.zeros(shape, dtype=out_type))

  def testZerosLikeVariant(self):
    # TODO(ebrevdo): Re-enable use_gpu=True once non-DMA Variant
    # copying between CPU and GPU is supported AND we register a
    # ZerosLike callback for GPU for Variant storing primitive types
    # in variant_op_registry.cc.
    with self.test_session(use_gpu=False):
      variant_tensor = tensor_pb2.TensorProto(
          dtype=dtypes_lib.variant.as_datatype_enum,
          tensor_shape=tensor_shape.TensorShape([]).as_proto(),
          variant_val=[
              tensor_pb2.VariantTensorDataProto(
                  # Match registration in variant_op_registry.cc
                  type_name=b"int",
                  metadata=np.array(1, dtype=np.int32).tobytes())
          ])
      const_variant = constant_op.constant(variant_tensor)
      zeros_like = array_ops.zeros_like(const_variant)
      zeros_like_op = logging_ops.Print(
          zeros_like, [const_variant, zeros_like],
          message="Variant storing an int, input and output of zeros_like:").op

      # Smoke test -- ensure this executes without trouble.
      # Right now, non-numpy-compatible objects cannot be returned from a
      # session.run call; similarly, objects that can't be converted to
      # native numpy types cannot be passed to ops.convert_to_tensor.
      # TODO(ebrevdo): Add registration mechanism for
      # ops.convert_to_tensor and for session.run output.
      zeros_like_op.run()
class OnesTest(test.TestCase):
  """Tests tf.ones with constant, scalar, dynamic and packed shapes."""

  def _Ones(self, shape):
    """Evaluates tf.ones(shape), first checking the static shape."""
    with self.test_session():
      ret = array_ops.ones(shape)
      self.assertEqual(shape, ret.get_shape())
      return ret.eval()

  def testConst(self):
    self.assertTrue(np.array_equal(self._Ones([2, 3]), np.array([[1] * 3] * 2)))

  def testScalar(self):
    # [], () and an empty int32 shape tensor all denote a scalar.
    self.assertEqual(1, self._Ones([]))
    self.assertEqual(1, self._Ones(()))
    with self.test_session():
      scalar = array_ops.ones(constant_op.constant([], dtype=dtypes_lib.int32))
      self.assertEqual(1, scalar.eval())

  def testDynamicSizes(self):
    np_ans = np.array([[1] * 3] * 2)
    with self.test_session():
      # Creates a tensor of 2 x 3.
      d = array_ops.fill([2, 3], 12., name="fill")
      # Constructs a tensor of ones of the same dimensions as "d".
      z = array_ops.ones(array_ops.shape(d))
      out = z.eval()
    self.assertAllEqual(np_ans, out)
    self.assertShapeEqual(np_ans, d)
    self.assertShapeEqual(np_ans, z)

  def testAutoPack(self):
    # A shape given as a list of scalar tensors is auto-packed into a
    # shape vector at run time.
    with self.test_session():
      h = array_ops.placeholder(dtypes_lib.int32, shape=[])
      w = array_ops.placeholder(dtypes_lib.int32, shape=[])
      z = array_ops.ones([h, w])
      out = z.eval(feed_dict={h: 4, w: 16})
    self.assertAllEqual(out, np.array([[1] * 16] * 4))

  def testDtype(self):
    with self.test_session():
      d = array_ops.fill([2, 3], 12., name="fill")
      self.assertEqual(d.get_shape(), [2, 3])
      # Test default type for both constant size and dynamic size
      z = array_ops.ones([2, 3])
      self.assertEqual(z.dtype, dtypes_lib.float32)
      self.assertEqual([2, 3], z.get_shape())
      self.assertAllEqual(z.eval(), np.ones([2, 3]))
      z = array_ops.ones(array_ops.shape(d))
      self.assertEqual(z.dtype, dtypes_lib.float32)
      self.assertEqual([2, 3], z.get_shape())
      self.assertAllEqual(z.eval(), np.ones([2, 3]))
      # Test explicit type control
      for dtype in (dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int32,
                    dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.int8,
                    dtypes_lib.complex64, dtypes_lib.complex128,
                    dtypes_lib.int64, dtypes_lib.bool):
        z = array_ops.ones([2, 3], dtype=dtype)
        self.assertEqual(z.dtype, dtype)
        self.assertEqual([2, 3], z.get_shape())
        self.assertAllEqual(z.eval(), np.ones([2, 3]))
        z = array_ops.ones(array_ops.shape(d), dtype=dtype)
        self.assertEqual(z.dtype, dtype)
        self.assertEqual([2, 3], z.get_shape())
        self.assertAllEqual(z.eval(), np.ones([2, 3]))
class OnesLikeTest(test.TestCase):
  """Tests tf.ones_like across numeric/bool/complex dtypes."""

  def testOnesLike(self):
    for dtype in [
        dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int8,
        dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.uint16, dtypes_lib.int32,
        dtypes_lib.int64, dtypes_lib.bool, dtypes_lib.complex64,
        dtypes_lib.complex128
    ]:
      numpy_dtype = dtype.as_numpy_dtype
      with self.test_session():
        # Creates a tensor of non-zero values with shape 2 x 3.
        d = constant_op.constant(
            np.ones(
                (2, 3), dtype=numpy_dtype), dtype=dtype)
        # Constructs a tensor of zeros of the same dimensions and type as "d".
        z_var = array_ops.ones_like(d)
        # Test that the type is correct
        self.assertEqual(z_var.dtype, dtype)
        z_value = z_var.eval()

      # Test that the value is correct
      self.assertTrue(np.array_equal(z_value, np.array([[1] * 3] * 2)))
      self.assertEqual([2, 3], z_var.get_shape())

  def testOnesLikePartialShape(self):
    """ones_like preserves a partially-known static shape."""
    d = array_ops.placeholder(dtypes_lib.float32, shape=[None, 4, None])
    z = array_ops.ones_like(d)
    self.assertEqual(d.get_shape().as_list(), z.get_shape().as_list())
class FillTest(test.TestCase):
  """Tests tf.fill for each dtype, plus error and gradient cases."""

  def _compare(self, dims, val, np_ans, use_gpu):
    """Evaluates fill(dims, val) and compares to the NumPy answer."""
    with self.test_session(use_gpu=use_gpu):
      tf_ans = array_ops.fill(dims, val, name="fill")
      out = tf_ans.eval()
    self.assertAllClose(np_ans, out)
    # Fill does not set the shape.
    # self.assertShapeEqual(np_ans, tf_ans)

  def _compareAll(self, dims, val, np_ans):
    """Runs the comparison on both CPU and GPU."""
    self._compare(dims, val, np_ans, False)
    self._compare(dims, val, np_ans, True)

  def testFillFloat(self):
    np_ans = np.array([[3.1415] * 3] * 2).astype(np.float32)
    self._compareAll([2, 3], np_ans[0][0], np_ans)

  def testFillDouble(self):
    np_ans = np.array([[3.1415] * 3] * 2).astype(np.float64)
    self._compareAll([2, 3], np_ans[0][0], np_ans)

  def testFillInt32(self):
    np_ans = np.array([[42] * 3] * 2).astype(np.int32)
    self._compareAll([2, 3], np_ans[0][0], np_ans)

  def testFillInt64(self):
    np_ans = np.array([[-42] * 3] * 2).astype(np.int64)
    self._compareAll([2, 3], np_ans[0][0], np_ans)

  def testFillComplex64(self):
    # Complex fill has no GPU kernel here, so CPU only.
    np_ans = np.array([[0.15] * 3] * 2).astype(np.complex64)
    self._compare([2, 3], np_ans[0][0], np_ans, use_gpu=False)

  def testFillComplex128(self):
    np_ans = np.array([[0.15] * 3] * 2).astype(np.complex128)
    self._compare([2, 3], np_ans[0][0], np_ans, use_gpu=False)

  def testFillString(self):
    np_ans = np.array([[b"yolo"] * 3] * 2)
    with self.test_session(use_gpu=False):
      tf_ans = array_ops.fill([2, 3], np_ans[0][0], name="fill").eval()
    self.assertAllEqual(np_ans, tf_ans)

  def testFillNegative(self):
    """Negative dimensions are rejected both statically and at run time."""
    with self.test_session():
      # BUGFIX: the last two entries were written as (-2), (-3) -- plain
      # ints, not tuples -- so the loop was testing scalar dims instead of
      # the intended 1-D shapes with a negative dimension.
      for shape in (-1,), (2, -1), (-1, 2), (-2,), (-3,):
        with self.assertRaises(ValueError):
          array_ops.fill(shape, 7)

      # Using a placeholder so this won't be caught in static analysis.
      dims = array_ops.placeholder(dtypes_lib.int32)
      fill_t = array_ops.fill(dims, 3.0)
      for shape in (-1,), (2, -1), (-1, 2), (-2,), (-3,):
        with self.assertRaises(errors_impl.InvalidArgumentError):
          fill_t.eval({dims: shape})

  def testShapeFunctionEdgeCases(self):
    # Non-vector dimensions.
    with self.assertRaises(ValueError):
      array_ops.fill([[0, 1], [2, 3]], 1.0)

    # Non-scalar value.
    with self.assertRaises(ValueError):
      array_ops.fill([3, 2], [1.0, 2.0])

    # Partial dimension information.
    f = array_ops.fill(array_ops.placeholder(dtypes_lib.int32, shape=(4,)), 3.0)
    self.assertEqual([None, None, None, None], f.get_shape().as_list())

    f = array_ops.fill(
        [array_ops.placeholder(
            dtypes_lib.int32, shape=()), 17], 1.0)
    self.assertEqual([None, 17], f.get_shape().as_list())

  def testGradient(self):
    """The gradient of fill w.r.t. its value should be numerically sound."""
    with self.test_session():
      in_v = constant_op.constant(5.0)
      out_shape = [3, 2]
      out_filled = array_ops.fill(out_shape, in_v)
      err = gradient_checker.compute_gradient_error(in_v, [], out_filled,
                                                    out_shape)
    self.assertLess(err, 1e-3)
class PlaceholderTest(test.TestCase):
  """Tests tf.placeholder feeding, shape checking and error messages."""

  def testDtype(self):
    """Feeding works; evaluating without a feed mentions name and dtype."""
    with self.test_session():
      p = array_ops.placeholder(dtypes_lib.float32, shape=(10, 10), name="p")
      p_identity = array_ops.identity(p)
      feed_array = np.random.rand(10, 10)
      self.assertAllClose(
          p_identity.eval(feed_dict={p: feed_array}), feed_array)

      with self.assertRaisesOpError(
          "must feed a value for placeholder tensor 'p' with dtype float"):
        p_identity.eval()
  def testShape(self):
    """A fully-specified shape is enforced on feed and reported in the
    unfed-placeholder error."""
    with self.test_session():
      p = array_ops.placeholder(dtypes_lib.float32, shape=(10, 10), name="p")
      p_identity = array_ops.identity(p)
      feed_array = np.random.rand(10, 10)
      self.assertAllClose(
          p_identity.eval(feed_dict={p: feed_array}), feed_array)

      with self.assertRaisesOpError(
          "must feed a value for placeholder tensor 'p' with dtype float and "
          r"shape \[10,10\]"):
        p_identity.eval()

      with self.assertRaisesWithPredicateMatch(
          ValueError, lambda e: "Cannot feed value of shape" in str(e)):
        p_identity.eval(feed_dict={p: feed_array[:5, :5]})
def testUnknownShape(self):
with self.test_session():
p = array_ops.placeholder(dtypes_lib.float32, shape=None, name="p")
p_identity = array_ops.identity(p)
# can feed anything
feed_array = np.random.rand(10, 3)
self.assertAllClose(
p_identity.eval(feed_dict={p: feed_array}), feed_array)
feed_array = np.random.rand(4, 2, 5)
self.assertAllClose(
p_identity.eval(feed_dict={p: feed_array}), feed_array)
def testScalarShape(self):
with self.test_session():
p = array_ops.placeholder(dtypes_lib.float32, shape=[], name="p")
p_identity = array_ops.identity(p)
self.assertAllClose(p_identity.eval(feed_dict={p: 5}), 5)
def testPartialShape(self):
with self.test_session():
p = array_ops.placeholder(dtypes_lib.float32, shape=[None, 3], name="p")
p_identity = array_ops.identity(p)
feed_array = np.random.rand(10, 3)
self.assertAllClose(
p_identity.eval(feed_dict={p: feed_array}), feed_array)
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Cannot feed value of shape" in str(e)):
p_identity.eval(feed_dict={p: feed_array[:5, :2]})
def testPartialShapeWhenNotFed(self):
with self.test_session():
p = array_ops.placeholder(dtypes_lib.float32, shape=[None, 3], name="p")
p_identity = array_ops.identity(p)
# Should trigger an operator error, not a shape error.
with self.assertRaisesOpError(
"must feed a value for placeholder tensor 'p' with dtype float"):
p_identity.eval()
def testControlDependency(self):
with self.test_session():
p = array_ops.placeholder(dtypes_lib.int32, shape=[], name="p")
with ops.control_dependencies([p]):
c = constant_op.constant(5, dtypes_lib.int32)
d = math_ops.multiply(p, c)
val = np.array(2).astype(np.int)
self.assertEqual(10, d.eval(feed_dict={p: val}))
def testBadShape(self):
with self.assertRaises(ValueError):
array_ops.placeholder(dtypes_lib.float32, shape=(-1, 10))
def testTensorStr(self):
a = array_ops.placeholder(dtypes_lib.float32, shape=None, name="a")
self.assertEqual("<tf.Tensor 'a:0' shape=<unknown> dtype=float32>", repr(a))
b = array_ops.placeholder(dtypes_lib.int32, shape=(32, 40), name="b")
self.assertEqual("<tf.Tensor 'b:0' shape=(32, 40) dtype=int32>", repr(b))
c = array_ops.placeholder(dtypes_lib.qint32, shape=(32, None, 2), name="c")
self.assertEqual("<tf.Tensor 'c:0' shape=(32, ?, 2) dtype=qint32>", repr(c))
def testOldGraph(self):
# Load graph generated from earlier version of TF where
# placeholder shape was not set.
#
# a = tf.placeholder(tf.float32)
# b = a + 1.0
#
# Older graph's default shape is 'shape {}', not 'shape {
# unknown_rank: true }'
graph = """
node {
name: "Placeholder"
op: "Placeholder"
attr {
key: "dtype"
value {
type: DT_FLOAT
}
}
attr {
key: "shape"
value {
shape {
}
}
}
}
node {
name: "add/y"
op: "Const"
attr {
key: "dtype"
value {
type: DT_FLOAT
}
}
attr {
key: "value"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
}
float_val: 1.0
}
}
}
}
node {
name: "add"
op: "Add"
input: "Placeholder"
input: "add/y"
attr {
key: "T"
value {
type: DT_FLOAT
}
}
}
versions {
producer: 21
}
"""
gdef = graph_pb2.GraphDef()
text_format.Merge(graph, gdef)
with self.test_session():
p, ret = importer.import_graph_def(
gdef, return_elements=["Placeholder:0", "add:0"])
# Feed in a vector of two elements. Since the producer version
# of 21, a shape of {} is interpreted as "any shape". If
# producer version were 22, then we'd get a shape mismatch
# error.
self.assertAllEqual([2.0, 3.0], ret.eval(feed_dict={p: [1.0, 2.0]}))
class PlaceholderWithDefaultTest(test.TestCase):
  """Tests tf.placeholder_with_default: default value used when not fed."""

  def testFullShape(self):
    with self.test_session():
      p = array_ops.placeholder_with_default([[2, 2], [2, 2]], shape=[2, 2])
      a = array_ops.identity(p)
      # Without a feed, the default value is produced.
      self.assertAllEqual([[2, 2], [2, 2]], a.eval())
      # A feed overrides the default.
      self.assertAllEqual(
          [[3, 3], [3, 3]], a.eval(feed_dict={p: [[3, 3], [3, 3]]}))

      # A feed that violates the declared full shape is rejected.
      with self.assertRaises(ValueError):
        a.eval(feed_dict={p: [[6, 6, 6], [6, 6, 6]]})

  def testPartialShape(self):
    with self.test_session():
      p = array_ops.placeholder_with_default([1, 2, 3], shape=[None])
      a = array_ops.identity(p)
      self.assertAllEqual([1, 2, 3], a.eval())
      self.assertAllEqual([3, 37], a.eval(feed_dict={p: [3, 37]}))

      # Rank must still match the partial shape [None].
      with self.assertRaises(ValueError):
        a.eval(feed_dict={p: [[2, 2], [2, 2]]})

  def testNoShape(self):
    with self.test_session():
      p = array_ops.placeholder_with_default([17], shape=None)
      a = array_ops.identity(p)
      self.assertAllEqual([17], a.eval())
      # shape=None allows feeds of any rank.
      self.assertAllEqual([3, 37], a.eval(feed_dict={p: [3, 37]}))
      self.assertAllEqual(
          [[3, 3], [3, 3]], a.eval(feed_dict={p: [[3, 3], [3, 3]]}))

  def testGradient(self):
    # Gradients flow through the default/fed value unchanged.
    with self.test_session():
      x = array_ops.placeholder(dtypes_lib.float32, [5, 7])
      y = array_ops.placeholder_with_default(x, None)
      err = gradient_checker.compute_gradient_error(x, [5, 7], y, [5, 7])
      self.assertLess(err, 1e-3)
if __name__ == "__main__":
  # Run all test cases in this module via the TensorFlow test runner.
  test.main()
| apache-2.0 |
sequana/sequana | sequana/assembly.py | 1 | 4934 | # -*- coding: utf-8 -*-
#
# This file is part of Sequana software
#
# Copyright (c) 2018 - Sequana Development Team
#
# File author(s):
# Thomas Cokelaer <thomas.cokelaer@pasteur.fr>
#
# Distributed under the terms of the 3-clause BSD license.
# The full license is in the LICENSE file, distributed with this software.
#
# website: https://github.com/sequana/sequana
# documentation: http://sequana.readthedocs.io
#
##############################################################################
from sequana.lazy import pylab
from sequana.lazy import pandas as pd
import colorlog
logger = colorlog.getLogger(__name__)
__all__ = ["BUSCO"]
class BUSCO(object):
    """Wrapper around the BUSCO "full table" report.

    BUSCO quantifies the completeness of a genome assembly, gene set or
    transcriptome from the presence of near-universal single-copy
    orthologs selected from OrthoDB (see http://busco.ezlab.org/).

    The parsed report is held in the dataframe :attr:`df`; the overall
    completeness score (percentage in the range 0-100) is available
    through :attr:`score`.
    """

    def __init__(self, filename="full_table_testbusco.tsv"):
        """.. rubric:: constructor

        :filename: a valid BUSCO full-table file; the first four header
            lines are skipped. See example in sequana code source
            (testing).
        """
        self.df = pd.read_csv(filename, sep="\t", skiprows=4)

    def pie_plot(self, filename=None, hold=False):
        """Plot a pie chart of the status (complete/fragmented/missing).

        .. plot::
            :include-source:

            from sequana import BUSCO, sequana_data
            b = BUSCO(sequana_data("test_busco_full_table.tsv"))
            b.pie_plot()

        """
        if hold is False:
            pylab.clf()
        status_counts = self.df.groupby('Status').count()['# Busco id']
        status_counts.plot(kind="pie")
        pylab.ylabel("")
        if filename:
            pylab.savefig(filename)

    def scatter_plot(self, filename=None, hold=False):
        """Scatter plot of the score versus length of each ortholog.

        .. plot::
            :include-source:

            from sequana import BUSCO, sequana_data
            b = BUSCO(sequana_data("test_busco_full_table.tsv"))
            b.scatter_plot()

        Missing orthologs are not shown (they carry no contig data).
        """
        if hold is False:
            pylab.clf()
        # One (colour, marker) pair per displayable status.
        styles = [("Complete", "green", "o"),
                  ("Fragmented", "orange", "s"),
                  ("Duplicated", "red", "x")]
        for status, color, marker in styles:
            subset = self.df[self.df.Status == status]
            if len(subset) > 0:
                subset.plot(x="Length", y="Score", kind="scatter",
                            color=color, ax=pylab.gca(),
                            marker=marker, label=status)
        pylab.legend()
        pylab.grid()
        if filename:
            pylab.savefig(filename)

    def summary(self):
        """Return counts and percentages of missing, complete and
        fragmented orthologs as a dictionary."""
        # Duplicated entries share a Busco id; count each id only once.
        unique = self.df.drop_duplicates(subset=["# Busco id"])
        data = {}
        data['S'] = sum(unique.Status == "Complete")
        data['F'] = sum(unique.Status == "Fragmented")
        data['D'] = sum(unique.Status == "Duplicated")
        data['C'] = data['S'] + data['D']
        data['M'] = sum(unique.Status == "Missing")
        data['total'] = len(unique)
        for key in "CDSMF":
            data[key + '_pc'] = data[key] * 100. / data['total']
        return data

    def get_summary_string(self):
        """Return the canonical one-line BUSCO summary string."""
        data = self.summary()
        template = "C:{:.1f}%[S:{:.1f}%,D:{:.1f}%],F:{:.1f}%,M:{:.1f}%,n:{}"
        return template.format(data['C_pc'], data['S_pc'], data['D_pc'],
                               data['F_pc'], data['M_pc'], data['total'])

    @property
    def score(self):
        """Completeness percentage (complete + duplicated)."""
        return self.summary()["C_pc"]

    def __str__(self):
        data = self.summary()
        string = """# BUSCO diagnostic

{}

   {} Complete BUSCOs (C)
   {} Complete and single-copy BUSCOs (S)
   {} Complete and duplicated BUSCOs (D)
   {} Fragmented BUSCOs (F)
   {} Missing BUSCOs (M)
   {} Total BUSCO groups searched
"""
        return string.format(self.get_summary_string(), data['C'], data['S'],
                             data['D'], data['F'], data['M'], data['total'])
| bsd-3-clause |
cyandterry/Python-Study | Interviews/Real_Life/FiveStars.py | 1 | 3303 | import datetime
yellow_player = None
black_player = None
black_player_score = 0
yellow_player_score = 0
goals = []
matches = []
players = {}
"""
players
{
black_players
}
data
{
request_type: register_player/goal,
player_color: yellow/black,
player_id: 12345,
}
"""
def handle_request(data):
    """Dispatch a single request against the module-level game state.

    Supported ``data["request_type"]`` values:

    * ``"register_player"`` -- record ``data["player_id"]`` as the yellow
      or black seat (per ``data["player_color"]``) and create its
      win/loss entry in ``players``.
    * ``"goal"`` -- credit one goal to ``data["player_color"]``; when a
      side reaches 5 goals the match is archived into ``matches`` and
      the win/loss tallies are updated.
    """
    # validation(data) check for validation
    global yellow_player
    global black_player
    global black_player_score
    global yellow_player_score
    global goals
    global matches
    global players

    request_type = data["request_type"]

    if request_type == "register_player":
        color = data["player_color"]
        # Assign the seat; any other colour value still gets a stats entry.
        if color == "yellow":
            yellow_player = data["player_id"]
        elif color == "black":
            black_player = data["player_id"]
        players[data["player_id"]] = dict(wins=0, losses=0)
        # TODO: registration should also clean up goals/matches/scores.

    if request_type == "goal":
        # TODO: 1. check that both players are registered.
        #       2. check that the current game is not already finished.
        color = data["player_color"]
        if color == "yellow":
            yellow_player_score += 1
            goals.append((yellow_player, black_player,
                          datetime.datetime.now()))
        elif color == "black":
            black_player_score += 1
            goals.append((black_player, yellow_player,
                          datetime.datetime.now()))

        # Archive the match the moment either side reaches five goals.
        if black_player_score == 5:
            matches.append((black_player, 5, yellow_player,
                            yellow_player_score, datetime.datetime.now()))
            players[black_player]["wins"] += 1
            players[yellow_player]["losses"] += 1
        elif yellow_player_score == 5:
            matches.append((yellow_player, 5, black_player,
                            black_player_score, datetime.datetime.now()))
            players[yellow_player]["wins"] += 1
            players[black_player]["losses"] += 1
if __name__ == "__main__":
    # Smoke test: register both seats, score five yellow goals (which
    # completes a match), then dump the resulting module state.
    handle_request(dict(request_type = 'register_player',
                        player_color = 'yellow',
                        player_id = '00001',
                        ))
    handle_request(dict(request_type = 'register_player',
                        player_color = 'black',
                        player_id = '00002',
                        ))
    handle_request(dict(request_type = 'goal',
                        player_color = 'yellow',
                        ))
    handle_request(dict(request_type = 'goal',
                        player_color = 'yellow',
                        ))
    handle_request(dict(request_type = 'goal',
                        player_color = 'yellow',
                        ))
    handle_request(dict(request_type = 'goal',
                        player_color = 'yellow',
                        ))
    handle_request(dict(request_type = 'goal',
                        player_color = 'yellow',
                        ))
    # Dump the final state for manual inspection (Python 2 prints).
    print 'yellow_player = ', yellow_player
    print 'black_player = ', black_player
    print 'black_player_score = ', black_player_score
    print 'yellow_player_score = ', yellow_player_score
    print 'goals = ', goals
    print 'matches = ', matches
    print 'players = ', players
| mit |
hgl888/chromium-crosswalk | components/cloud_devices/tools/prototype/prototype.py | 65 | 30901 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prototype of cloud device with support of local API.
This prototype has tons of flaws, not the least of which being that it
occasionally will block while waiting for commands to finish. However, this is
a quick sketch.
Script requires following components:
sudo apt-get install python-tornado
sudo apt-get install python-pip
sudo pip install google-api-python-client
sudo pip install ecdsa
"""
import atexit
import base64
import datetime
import json
import os
import random
import subprocess
import time
import traceback
from apiclient.discovery import build_from_document
from apiclient.errors import HttpError
import httplib2
from oauth2client.client import AccessTokenRefreshError
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.file import Storage
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
_OAUTH_SCOPE = 'https://www.googleapis.com/auth/clouddevices'
_CONFIG_FILE = 'config.json'
_API_DISCOVERY_FILE = 'discovery.json'
_DEVICE_STATE_FILE = 'device_state.json'
_DEVICE_SETUP_SSID = 'GCD Prototype %02d..Bcamprv'
_DEVICE_NAME = 'GCD Prototype'
_DEVICE_TYPE = 'vendor'
_DEVICE_PORT = 8080
DEVICE_DRAFT = {
'systemName': 'LEDFlasher',
'deviceKind': 'vendor',
'displayName': _DEVICE_NAME,
'channel': {
'supportedType': 'xmpp'
},
'commandDefs': {
'base': {
# TODO(vitalybuka): find new format for custom commands.
# 'vendorCommands': [{
# 'name': 'flashLED',
# 'parameter': [{
# 'name': 'times',
# 'type': 'string'
# }]
# }]
}
}
}
wpa_supplicant_cmd = 'wpa_supplicant -Dwext -i%s -cwpa_supplicant.conf'
ifconfig_cmd = 'ifconfig %s 192.168.0.3'
hostapd_cmd = 'hostapd hostapd-min.conf'
dhclient_release = 'dhclient -r %s'
dhclient_renew = 'dhclient %s'
dhcpd_cmd = 'udhcpd -f udhcpd.conf'
wpa_supplicant_conf = 'wpa_supplicant.conf'
wpa_supplicant_template = """
network={
ssid="%s"
scan_ssid=1
proto=WPA RSN
key_mgmt=WPA-PSK
pairwise=CCMP TKIP
group=CCMP TKIP
psk="%s"
}"""
hostapd_conf = 'hostapd-min.conf'
hostapd_template = """
interface=%s
driver=nl80211
ssid=%s
channel=1
"""
udhcpd_conf = 'udhcpd.conf'
udhcpd_template = """
start 192.168.0.20
end 192.168.0.254
interface %s
"""
class DeviceUnregisteredError(Exception):
  """Raised when the cloud service no longer knows about this device."""
def ignore_errors(func):
  """Decorator: run func, swallow (but log) any exception, return None.

  Used for best-effort hardware operations where a failure must not
  bring the device loop down.
  """
  def inner(*args, **kwargs):
    try:
      func(*args, **kwargs)
    except Exception:  # pylint: disable=broad-except
      # Log and continue; the caller never sees the failure.
      print('Got error in unsafe function:')
      traceback.print_exc()
  return inner
class CommandWrapperReal(object):
  """Command wrapper that executes shell commands."""

  def __init__(self, cmd):
    # Accept either a command-line string or an argv list.
    if type(cmd) in [str, unicode]:
      cmd = cmd.split()
    self.cmd = cmd
    self.cmd_str = ' '.join(cmd)
    self.process = None

  def start(self):
    """Launch the command; a still-running instance is terminated first."""
    print 'Start: ', self.cmd_str
    if self.process:
      self.end()
    self.process = subprocess.Popen(self.cmd)

  def wait(self):
    """Block until the launched process exits."""
    print 'Wait: ', self.cmd_str
    self.process.wait()

  def end(self):
    """Terminate the process if it was ever started."""
    print 'End: ', self.cmd_str
    if self.process:
      self.process.terminate()
class CommandWrapperFake(object):
  """Command wrapper that just prints shell commands (for dry runs)."""

  def __init__(self, cmd):
    self.cmd_str = ' '.join(cmd)

  def start(self):
    print 'Fake start: ', self.cmd_str

  def wait(self):
    print 'Fake wait: ', self.cmd_str

  def end(self):
    print 'Fake end: ', self.cmd_str
class CloudCommandHandlerFake(object):
  """Prints device commands without executing them."""

  def __init__(self, ioloop):
    # The fake handler needs no event loop; accepted for interface parity.
    pass

  def handle_command(self, command_name, args):
    """Log a 'flashLED' command; other commands are silently ignored."""
    if command_name == 'flashLED':
      times = int(args.get('times', 1))
      print('Flashing LED %d times' % times)
class CloudCommandHandlerReal(object):
  """Executes device commands against a real sysfs LED."""

  def __init__(self, ioloop, led_path):
    self.ioloop = ioloop
    # Sysfs directory (e.g. /sys/class/leds/<name>/) with a 'brightness'
    # file -- TODO confirm the expected layout with the target hardware.
    self.led_path = led_path

  def handle_command(self, command_name, args):
    """Dispatch one cloud command; only 'flashLED' is supported."""
    if command_name == 'flashLED':
      times = 1
      if 'times' in args:
        times = int(args['times'])
      print 'Really flashing LED %d times' % times
      self.flash_led(times)

  @ignore_errors
  def flash_led(self, times):
    # Each flash is an on+off pair, hence times*2 toggles.
    self.set_led(times*2, True)

  def set_led(self, times, value):
    """Set led value."""
    if not times:
      return

    file_trigger = open(os.path.join(self.led_path, 'brightness'), 'w')
    if value:
      file_trigger.write('1')
    else:
      file_trigger.write('0')

    file_trigger.close()
    # Schedule the inverted toggle 500ms later until the countdown ends.
    self.ioloop.add_timeout(datetime.timedelta(milliseconds=500),
                            lambda: self.set_led(times - 1, not value))
class WifiHandler(object):
  """Abstract base class for wifi handlers.

  Subclasses decide how the device gets onto a network (real interface
  management vs. passthrough) and report back through the Delegate.
  """

  class Delegate(object):
    """Callback interface notified when the device is on wifi."""

    def on_wifi_connected(self, unused_token):
      """Token is optional, and all delegates should support it being None."""
      raise Exception('Unhandled condition: WiFi connected')

  def __init__(self, ioloop, state, config, setup_ssid, delegate):
    self.ioloop = ioloop
    self.state = state
    self.setup_ssid = setup_ssid
    self.delegate = delegate
    # Wireless interface name comes straight from the configuration.
    self.interface = config['wireless_interface']

  def start(self):
    raise Exception('Start not implemented!')

  def get_ssid(self):
    raise Exception('Get SSID not implemented!')
class WifiHandlerReal(WifiHandler):
  """Real wifi handler.

  Note that by using CommandWrapperFake, you can run WifiHandlerReal on fake
  devices for testing the wifi-specific logic.
  """

  def __init__(self, ioloop, state, config, setup_ssid, delegate):
    super(WifiHandlerReal, self).__init__(ioloop, state, config,
                                          setup_ssid, delegate)

    # 'simulate_commands' swaps in the printing wrapper for dry runs.
    if config['simulate_commands']:
      self.command_wrapper = CommandWrapperFake
    else:
      self.command_wrapper = CommandWrapperReal
    # Long-lived daemons managed by this handler.
    self.hostapd = self.command_wrapper(hostapd_cmd)
    self.wpa_supplicant = self.command_wrapper(
        wpa_supplicant_cmd % self.interface)
    self.dhcpd = self.command_wrapper(dhcpd_cmd)

  def start(self):
    # Rejoin the stored network if we have one; otherwise open the
    # setup access point and wait for provisioning.
    if self.state.has_wifi():
      self.switch_to_wifi(self.state.ssid(), self.state.password(), None)
    else:
      self.start_hostapd()

  def start_hostapd(self):
    """Bring up the setup AP (hostapd) plus a DHCP server for clients."""
    hostapd_config = open(hostapd_conf, 'w')
    hostapd_config.write(hostapd_template % (self.interface, self.setup_ssid))
    hostapd_config.close()

    self.hostapd.start()
    # Give hostapd a moment to claim the interface before configuring it.
    time.sleep(3)
    self.run_command(ifconfig_cmd % self.interface)
    self.dhcpd.start()

  def switch_to_wifi(self, ssid, passwd, token):
    """Tear down the setup AP and join the given network as a client."""
    try:
      udhcpd_config = open(udhcpd_conf, 'w')
      udhcpd_config.write(udhcpd_template % self.interface)
      udhcpd_config.close()

      wpa_config = open(wpa_supplicant_conf, 'w')
      wpa_config.write(wpa_supplicant_template % (ssid, passwd))
      wpa_config.close()

      self.hostapd.end()
      self.dhcpd.end()
      self.wpa_supplicant.start()
      # Drop any stale lease, then request a fresh one on the new network.
      self.run_command(dhclient_release % self.interface)
      self.run_command(dhclient_renew % self.interface)
      self.state.set_wifi(ssid, passwd)
      self.delegate.on_wifi_connected(token)
    except DeviceUnregisteredError:
      # Cloud no longer knows us: wipe state and fall back to setup mode.
      self.state.reset()
      self.wpa_supplicant.end()
      self.start_hostapd()

  def stop(self):
    self.hostapd.end()
    self.wpa_supplicant.end()
    self.dhcpd.end()

  def get_ssid(self):
    return self.state.get_ssid()

  def run_command(self, cmd):
    """Run a one-shot shell command synchronously."""
    wrapper = self.command_wrapper(cmd)
    wrapper.start()
    wrapper.wait()
class WifiHandlerPassthrough(WifiHandler):
  """Wifi handler for hosts whose network is already configured."""

  def __init__(self, ioloop, state, config, setup_ssid, delegate):
    super(WifiHandlerPassthrough, self).__init__(ioloop, state, config,
                                                 setup_ssid, delegate)

  def start(self):
    # Nothing to configure; report the connection immediately.
    self.delegate.on_wifi_connected(None)

  def switch_to_wifi(self, unused_ssid, unused_passwd, unused_token):
    # Provisioning never happens in passthrough mode.
    raise Exception('Should not be reached')

  def stop(self):
    pass

  def get_ssid(self):
    # Placeholder value; the real SSID is not tracked in this mode.
    return 'dummy'
class State(object):
  """Device state persisted across restarts.

  Covers the robot-account OAuth credentials, the registered device id,
  and the provisioned wifi network.  JSON state lives in
  _DEVICE_STATE_FILE; credentials go through oauth2client Storage.
  """

  def __init__(self):
    self.oauth_storage_ = Storage('oauth_creds')
    self.clear()

  def clear(self):
    """Reset all in-memory fields (does not touch the files)."""
    self.credentials_ = None
    self.has_credentials_ = False
    self.has_wifi_ = False
    self.ssid_ = ''
    self.password_ = ''
    self.device_id_ = ''

  def reset(self):
    """Clear the state and persist the cleared version."""
    self.clear()
    self.dump()

  def dump(self):
    """Saves device state to file."""
    json_obj = {
        'has_credentials': self.has_credentials_,
        'has_wifi': self.has_wifi_,
        'ssid': self.ssid_,
        'password': self.password_,
        'device_id': self.device_id_
    }
    statefile = open(_DEVICE_STATE_FILE, 'w')
    json.dump(json_obj, statefile)
    statefile.close()

    # Credentials are stored separately via oauth2client.
    if self.has_credentials_:
      self.oauth_storage_.put(self.credentials_)

  def load(self):
    """Restore state from disk, if a state file exists."""
    if os.path.exists(_DEVICE_STATE_FILE):
      statefile = open(_DEVICE_STATE_FILE, 'r')
      json_obj = json.load(statefile)
      statefile.close()

      self.has_credentials_ = json_obj['has_credentials']
      self.has_wifi_ = json_obj['has_wifi']
      self.ssid_ = json_obj['ssid']
      self.password_ = json_obj['password']
      self.device_id_ = json_obj['device_id']

      if self.has_credentials_:
        self.credentials_ = self.oauth_storage_.get()

  def set_credentials(self, credentials, device_id):
    """Record registration results and persist them."""
    self.device_id_ = device_id
    self.credentials_ = credentials
    self.has_credentials_ = True
    self.dump()

  def set_wifi(self, ssid, password):
    """Record the provisioned network and persist it."""
    self.ssid_ = ssid
    self.password_ = password
    self.has_wifi_ = True
    self.dump()

  def has_wifi(self):
    return self.has_wifi_

  def has_credentials(self):
    return self.has_credentials_

  def credentials(self):
    return self.credentials_

  def ssid(self):
    return self.ssid_

  def password(self):
    return self.password_

  def device_id(self):
    return self.device_id_
class Config(object):
  """Configuration parameters (should not change).

  Loads config.json from the working directory.  When the file is
  missing, a skeleton config.json.sample is written out and an
  exception is raised so the user can fill in real values.
  """

  def __init__(self):
    if not os.path.isfile(_CONFIG_FILE):
      config = {
          'oauth_client_id': '',
          'oauth_secret': '',
          'api_key': '',
          'wireless_interface': ''
      }
      config_f = open(_CONFIG_FILE + '.sample', 'w')
      # BUG FIX: this previously dumped the undefined name 'credentials',
      # raising NameError instead of writing the sample skeleton.
      config_f.write(json.dumps(config, sort_keys=True,
                                indent=2, separators=(',', ': ')))
      config_f.close()
      raise Exception('Missing ' + _CONFIG_FILE)

    config_f = open(_CONFIG_FILE)
    config = json.load(config_f)
    config_f.close()
    self.config = config

  def __getitem__(self, item):
    """Dict-style access; unknown keys yield None rather than KeyError."""
    if item in self.config:
      return self.config[item]
    return None
class MDnsWrapper(object):
  """Advertises the device over mDNS (Privet) via avahi-publish."""

  def __init__(self, command_wrapper):
    self.command_wrapper = command_wrapper
    self.avahi_wrapper = None     # running avahi-publish process, if any
    self.setup_name = None        # setup SSID advertised while provisioning
    self.device_id = ''
    self.started = False

  def start(self):
    self.started = True
    self.run_command()

  def get_command(self):
    """Return the command to run mDNS daemon."""
    cmd = [
        'avahi-publish',
        '-s', '--subtype=_%s._sub._privet._tcp' % _DEVICE_TYPE,
        _DEVICE_NAME, '_privet._tcp', '%s' % _DEVICE_PORT,
        'txtvers=3',
        'type=%s' % _DEVICE_TYPE,
        'ty=%s' % _DEVICE_NAME,
        'id=%s' % self.device_id
    ]
    if self.setup_name:
      cmd.append('setup_ssid=' + self.setup_name)
    return cmd

  def run_command(self):
    # Restart the advertiser so TXT records reflect current state.
    if self.avahi_wrapper:
      self.avahi_wrapper.end()
      self.avahi_wrapper.wait()

    self.avahi_wrapper = self.command_wrapper(self.get_command())
    self.avahi_wrapper.start()

  def set_id(self, device_id):
    """Update the advertised device id (re-advertises if running)."""
    self.device_id = device_id
    if self.started:
      self.run_command()

  def set_setup_name(self, setup_name):
    """Update the advertised setup SSID (re-advertises if running)."""
    self.setup_name = setup_name
    if self.started:
      self.run_command()
class CloudDevice(object):
  """Handles device registration and cloud command polling."""

  class Delegate(object):
    """Callbacks for device lifecycle events."""

    def on_device_started(self):
      raise Exception('Not implemented: Device started')

    def on_device_stopped(self):
      raise Exception('Not implemented: Device stopped')

  def __init__(self, ioloop, state, config, command_wrapper, delegate):
    self.state = state
    self.http = httplib2.Http()
    self.oauth_client_id = config['oauth_client_id']
    self.oauth_secret = config['oauth_secret']
    self.api_key = config['api_key']

    # The GCD client is built from a locally stored discovery document.
    if not os.path.isfile(_API_DISCOVERY_FILE):
      raise Exception('Download https://developers.google.com/'
                      'cloud-devices/v1/discovery.json')
    f = open(_API_DISCOVERY_FILE)
    discovery = f.read()
    f.close()
    self.gcd = build_from_document(discovery, developerKey=self.api_key,
                                   http=self.http)
    self.ioloop = ioloop
    self.active = True
    self.device_id = None
    self.credentials = None
    self.delegate = delegate
    self.command_handler = command_wrapper

  def try_start(self, token):
    """Tries start or register device."""
    if self.state.has_credentials():
      # Already registered: reuse stored credentials.
      self.credentials = self.state.credentials()
      self.device_id = self.state.device_id()
      self.run_device()
    elif token:
      self.register(token)
    else:
      print 'Device not registered and has no credentials.'
      print 'Waiting for registration.'

  def register(self, token):
    """Register device."""
    resource = {
        'deviceDraft': DEVICE_DRAFT,
        'oauthClientId': self.oauth_client_id
    }
    self.gcd.registrationTickets().patch(registrationTicketId=token,
                                         body=resource).execute()
    final_ticket = self.gcd.registrationTickets().finalize(
        registrationTicketId=token).execute()

    # Exchange the robot-account authorization code for credentials.
    authorization_code = final_ticket['robotAccountAuthorizationCode']
    flow = OAuth2WebServerFlow(self.oauth_client_id, self.oauth_secret,
                               _OAUTH_SCOPE, redirect_uri='oob')
    self.credentials = flow.step2_exchange(authorization_code)
    self.device_id = final_ticket['deviceDraft']['id']
    self.state.set_credentials(self.credentials, self.device_id)
    print 'Registered with device_id ', self.device_id
    self.run_device()

  def run_device(self):
    """Runs device."""
    self.credentials.authorize(self.http)
    try:
      self.gcd.devices().get(deviceId=self.device_id).execute()
    except HttpError, e:
      # Pretty good indication the device was deleted
      if e.resp.status == 404:
        raise DeviceUnregisteredError()
    except AccessTokenRefreshError:
      raise DeviceUnregisteredError()

    self.check_commands()
    self.delegate.on_device_started()

  def check_commands(self):
    """Checks device commands."""
    if not self.active:
      return

    print 'Checking commands...'
    commands = self.gcd.commands().list(deviceId=self.device_id,
                                        state='queued').execute()
    if 'commands' in commands:
      print 'Found ', len(commands['commands']), ' commands'
      vendor_command_name = None

      for command in commands['commands']:
        try:
          # Vendor commands arrive namespaced as 'base._<name>'.
          if command['name'].startswith('base._'):
            vendor_command_name = command['name'][len('base._'):]
            if 'parameters' in command:
              parameters = command['parameters']
            else:
              parameters = {}
          else:
            vendor_command_name = None
        except KeyError:
          print 'Could not parse vendor command ',
          print repr(command)
          vendor_command_name = None

        if vendor_command_name:
          self.command_handler.handle_command(vendor_command_name, parameters)

        # Every fetched command is acknowledged, parsed or not.
        self.gcd.commands().patch(commandId=command['id'],
                                  body={'state': 'done'}).execute()
    else:
      print 'Found no commands'

    # Poll again in one second.
    self.ioloop.add_timeout(datetime.timedelta(milliseconds=1000),
                            self.check_commands)

  def stop(self):
    self.active = False

  def get_device_id(self):
    return self.device_id
def get_only(f):
  """Handler decorator: reject any request whose HTTP method is not GET."""
  def inner(self, request, response_func, *args):
    if request.method == 'GET':
      return f(self, request, response_func, *args)
    return False
  return inner
def post_only(f):
  """Handler decorator intended to reject non-POST requests.

  NOTE(review): the method check is currently disabled (commented out),
  so every request is passed straight through to the handler.
  """
  def inner(self, request, response_func, *args):
    # Intentionally disabled for now:
    # if request.method != 'POST':
    #   return False
    return f(self, request, response_func, *args)
  return inner
def wifi_provisioning(f):
  """Handler decorator: only valid while NOT yet on wifi (setup mode)."""
  def inner(self, request, response_func, *args):
    if not self.on_wifi:
      return f(self, request, response_func, *args)
    return False
  return inner
def post_provisioning(f):
  """Handler decorator: only valid once the device IS on wifi."""
  def inner(self, request, response_func, *args):
    if self.on_wifi:
      return f(self, request, response_func, *args)
    return False
  return inner
class WebRequestHandler(WifiHandler.Delegate, CloudDevice.Delegate):
"""Handles HTTP requests."""
  class InvalidStepError(Exception):
    # Raised when a session handshake step number is out of sequence.
    pass
  class InvalidPackageError(Exception):
    # Raised when a handshake package payload cannot be processed.
    pass
  class EncryptionError(Exception):
    # Raised when a session payload cannot be decrypted.
    pass
class CancelableClosure(object):
"""Allows to cancel callbacks."""
def __init__(self, function):
self.function = function
def __call__(self):
if self.function:
return self.function
return None
def cancel(self):
self.function = None
class DummySession(object):
"""Handles sessions."""
def __init__(self, session_id):
self.session_id = session_id
self.key = None
def do_step(self, step, package):
if step != 0:
raise self.InvalidStepError()
self.key = package
return self.key
def decrypt(self, cyphertext):
return json.loads(cyphertext[len(self.key):])
def encrypt(self, plain_data):
return self.key + json.dumps(plain_data)
def get_session_id(self):
return self.session_id
def get_stype(self):
return 'dummy'
def get_status(self):
return 'complete'
class EmptySession(object):
"""Handles sessions."""
def __init__(self, session_id):
self.session_id = session_id
self.key = None
def do_step(self, step, package):
if step != 0 or package != '':
raise self.InvalidStepError()
return ''
def decrypt(self, cyphertext):
return json.loads(cyphertext)
def encrypt(self, plain_data):
return json.dumps(plain_data)
def get_session_id(self):
return self.session_id
def get_stype(self):
return 'empty'
def get_status(self):
return 'complete'
  def __init__(self, ioloop, state):
    self.config = Config()

    # Pick real vs. passthrough wifi handling from the configuration.
    # NOTE(review): both branches use CommandWrapperReal for mDNS; the
    # fake branch presumably intended CommandWrapperFake -- confirm.
    if self.config['on_real_device']:
      mdns_wrappers = CommandWrapperReal
      wifi_handler = WifiHandlerReal
    else:
      mdns_wrappers = CommandWrapperReal
      wifi_handler = WifiHandlerPassthrough

    # With a configured LED sysfs path, commands drive real hardware.
    if self.config['led_path']:
      cloud_wrapper = CloudCommandHandlerReal(ioloop,
                                              self.config['led_path'])
      self.setup_real(self.config['led_path'])
    else:
      cloud_wrapper = CloudCommandHandlerFake(ioloop)
      self.setup_fake()

    # Randomized so multiple prototype devices can coexist nearby.
    self.setup_ssid = _DEVICE_SETUP_SSID % random.randint(0,99)
    self.cloud_device = CloudDevice(ioloop, state, self.config,
                                    cloud_wrapper, self)
    self.wifi_handler = wifi_handler(ioloop, state, self.config,
                                     self.setup_ssid, self)
    self.mdns_wrapper = MDnsWrapper(mdns_wrappers)
    self.on_wifi = False
    self.registered = False
    self.in_session = False
    self.ioloop = ioloop

    # URL -> handler dispatch table for the HTTP front end.
    self.handlers = {
        '/internal/ping': self.do_ping,
        '/privet/info': self.do_info,
        '/deprecated/wifi/switch': self.do_wifi_switch,
        '/privet/v3/session/handshake': self.do_session_handshake,
        '/privet/v3/session/cancel': self.do_session_cancel,
        '/privet/v3/session/request': self.do_session_call,
        '/privet/v3/setup/start':
            self.get_insecure_api_handler(self.do_secure_setup_start),
        '/privet/v3/setup/cancel':
            self.get_insecure_api_handler(self.do_secure_setup_cancel),
        '/privet/v3/setup/status':
            self.get_insecure_api_handler(self.do_secure_status),
    }

    # At most one key-exchange session is active at a time.
    self.current_session = None
    self.session_cancel_callback = None
    self.session_handlers = {
        'dummy': self.DummySession,
        'empty': self.EmptySession
    }

    # APIs reachable inside an established session.
    self.secure_handlers = {
        '/privet/v3/setup/start': self.do_secure_setup_start,
        '/privet/v3/setup/cancel': self.do_secure_setup_cancel,
        '/privet/v3/setup/status': self.do_secure_status
    }
  @staticmethod
  def setup_fake():
    # No hardware to prepare in fake mode.
    print 'Skipping device setup'
  @staticmethod
  def setup_real(led_path):
    # Disable any kernel trigger so the LED is under manual control.
    file_trigger = open(os.path.join(led_path, 'trigger'), 'w')
    file_trigger.write('none')
    file_trigger.close()
  def start(self):
    # Bring up wifi first, then advertise the setup SSID over mDNS.
    self.wifi_handler.start()
    self.mdns_wrapper.set_setup_name(self.setup_ssid)
    self.mdns_wrapper.start()
  @get_only
  def do_ping(self, unused_request, response_func):
    """Liveness check: GET /internal/ping -> {'pong': True}."""
    response_func(200, {'pong': True})
    return True
  @get_only
  def do_public_info(self, unused_request, response_func):
    # Common info plus the supported session (key-exchange) types.
    # NOTE(review): this handler is not referenced in self.handlers and
    # (unlike the others) does not return True -- confirm intent.
    info = dict(self.get_common_info().items() + {
        'stype': self.session_handlers.keys()}.items())
    response_func(200, info)
  @get_only
  def do_info(self, unused_request, response_func):
    """GET /privet/info: device description plus the list of APIs."""
    specific_info = {
        'x-privet-token': 'sample',
        'api': sorted(self.handlers.keys())
    }

    info = dict(self.get_common_info().items() + specific_info.items())
    response_func(200, info)
    return True
  @post_only
  @wifi_provisioning
  def do_wifi_switch(self, request, response_func):
    """Handles /deprecated/wifi/switch requests."""
    data = json.loads(request.body)
    try:
      ssid = data['ssid']
      passw = data['passw']
    except KeyError:
      print 'Malformed content: ' + repr(data)
      response_func(400, {'error': 'invalidParams'})
      traceback.print_exc()
      return True

    # Respond before switching: the AP goes down during the switch.
    response_func(200, {'ssid': ssid})
    self.wifi_handler.switch_to_wifi(ssid, passw, None)
    # TODO(noamsml): Return to normal wifi after timeout (cancelable)
    return True
  @post_only
  def do_session_handshake(self, request, response_func):
    """Handles /privet/v3/session/handshake requests."""
    data = json.loads(request.body)
    try:
      stype = data['keyExchangeType']
      step = data['step']
      package = base64.b64decode(data['package'])
      if 'sessionID' in data:
        session_id = data['sessionID']
      else:
        session_id = "dummy"
    except (KeyError, TypeError):
      traceback.print_exc()
      print 'Malformed content: ' + repr(data)
      response_func(400, {'error': 'invalidParams'})
      return True

    if self.current_session:
      # Only one concurrent session is supported; resumed steps must
      # match the existing session's id and key-exchange type.
      if session_id != self.current_session.get_session_id():
        response_func(400, {'error': 'maxSessionsExceeded'})
        return True
      if stype != self.current_session.get_stype():
        response_func(400, {'error': 'unsupportedKeyExchangeType'})
        return True
    else:
      if stype not in self.session_handlers:
        response_func(400, {'error': 'unsupportedKeyExchangeType'})
        return True
      self.current_session = self.session_handlers[stype](session_id)

    try:
      output_package = self.current_session.do_step(step, package)
    except self.InvalidStepError:
      response_func(400, {'error': 'invalidStep'})
      return True
    except self.InvalidPackageError:
      response_func(400, {'error': 'invalidPackage'})
      return True

    return_obj = {
        'status': self.current_session.get_status(),
        'step': step,
        'package': base64.b64encode(output_package),
        'sessionID': session_id
    }
    response_func(200, return_obj)
    # (Re)arm the idle-session expiry timer.
    self.post_session_cancel()
    return True
@post_only
def do_session_cancel(self, request, response_func):
  """Handles /privet/v3/session/cancel requests.

  Tears down the current session if the supplied sessionID matches it,
  also cancelling the pending inactivity-timeout callback.
  """
  data = json.loads(request.body)
  try:
    session_id = data['sessionID']
  except KeyError:
    response_func(400, {'error': 'invalidParams'})
    return True

  if self.current_session and session_id == self.current_session.session_id:
    self.current_session = None
    if self.session_cancel_callback:
      self.session_cancel_callback.cancel()
    response_func(200, {'status': 'cancelled', 'sessionID': session_id})
  else:
    response_func(400, {'error': 'unknownSession'})
  return True
@post_only
def do_session_call(self, request, response_func):
  """Handles /privet/v3/session/call requests.

  Decrypts the body with the current session, dispatches the named
  secure API, and encrypts the handler's response before sending.
  """
  try:
    session_id = request.headers['X-Privet-SessionID']
  except KeyError:
    response_func(400, {'error': 'unknownSession'})
    return True

  if (not self.current_session or
      session_id != self.current_session.session_id):
    response_func(400, {'error': 'unknownSession'})
    return True

  try:
    decrypted = self.current_session.decrypt(request.body)
  except self.EncryptionError:
    response_func(400, {'error': 'encryptionError'})
    return True

  def encrypted_response_func(code, data):
    # Echo the called api name back; errors are merged into the payload
    # (Python 2: dict.items() returns lists, so '+' concatenates them).
    if 'error' in data:
      self.encrypted_send_response(request, code, dict(data.items() + {
          'api': decrypted['api']
      }.items()))
    else:
      self.encrypted_send_response(request, code, {
          'api': decrypted['api'],
          'output': data
      })

  if ('api' not in decrypted or 'input' not in decrypted or
      type(decrypted['input']) != dict):
    print 'Invalid params in API stage'
    # Transport succeeded, so HTTP 200; the error travels in the payload.
    encrypted_response_func(200, {'error': 'invalidParams'})
    return True

  if decrypted['api'] in self.secure_handlers:
    self.secure_handlers[decrypted['api']](request,
                                           encrypted_response_func,
                                           decrypted['input'])
  else:
    encrypted_response_func(200, {'error': 'unknownApi'})

  self.post_session_cancel()
  return True
def get_insecure_api_handler(self, handler):
  """Return a (request, response_func) callable that forwards to
  insecure_api_handler with *handler* pre-bound."""
  def bound_handler(request, response_func):
    return self.insecure_api_handler(request, response_func, handler)
  return bound_handler
@post_only
def insecure_api_handler(self, request, response_func, handler):
  """Parses the JSON request body (empty body -> {}) and invokes *handler*."""
  real_params = json.loads(request.body) if request.body else {}
  handler(request, response_func, real_params)
  return True
def do_secure_status(self, unused_request, response_func, unused_params):
  """Handles /privet/v3/setup/status requests.

  Reports whether wifi and cloud registration are 'complete' or still
  'available' (i.e. not done yet); both are always marked required.
  """
  wifi_info = {'required': True}
  registration_info = {'required': True}

  if self.on_wifi:
    wifi_info['status'] = 'complete'
    wifi_info['ssid'] = ''  # TODO(noamsml): Add SSID to status
  else:
    wifi_info['status'] = 'available'

  if self.cloud_device.get_device_id():
    registration_info['status'] = 'complete'
    registration_info['id'] = self.cloud_device.get_device_id()
  else:
    registration_info['status'] = 'available'

  response_func(200, {'registration': registration_info, 'wifi': wifi_info})
def do_secure_setup_start(self, unused_request, response_func, params):
  """Handles /privet/v3/setup/start requests.

  Starts wifi provisioning and/or cloud registration depending on which
  sections are present in *params*, then replies with the setup status.
  """
  has_wifi = False
  token = None

  try:
    if 'wifi' in params:
      has_wifi = True
      ssid = params['wifi']['ssid']
      passw = params['wifi']['passphrase']
    if 'registration' in params:
      token = params['registration']['ticketID']
  except KeyError:
    print 'Invalid params in bootstrap stage'
    response_func(400, {'error': 'invalidParams'})
    return

  try:
    if has_wifi:
      # Wifi first; the registration token rides along if present.
      self.wifi_handler.switch_to_wifi(ssid, passw, token)
    elif token:
      self.cloud_device.register(token)
    else:
      # Neither wifi nor registration requested.
      response_func(400, {'error': 'invalidParams'})
      return
  except HttpError as e:
    print e  # TODO(noamsml): store error message in this case

  # Reply with the same payload /setup/status would produce.
  self.do_secure_status(unused_request, response_func, params)
def do_secure_setup_cancel(self, request, response_func, params):
  """Handles /privet/v3/setup/cancel requests (currently a no-op)."""
  pass
def handle_request(self, request):
  """Routes *request* to the handler registered for its path.

  Handlers return a truthy value once they have responded; anything
  unhandled gets a 404.
  """
  def response_func(code, data):
    self.real_send_response(request, code, data)

  handled = False
  print '[INFO] %s %s' % (request.method, request.path)
  if request.path in self.handlers:
    handled = self.handlers[request.path](request, response_func)
  if not handled:
    self.real_send_response(request, 404, {'error': 'notFound'})
def encrypted_send_response(self, request, code, data):
  """Encrypt *data* with the active session and write it to *request*."""
  ciphertext = self.current_session.encrypt(data)
  self.raw_send_response(request, code, ciphertext)
def real_send_response(self, request, code, data):
  """Serialize *data* as pretty-printed, key-sorted JSON and send it."""
  body = json.dumps(data, sort_keys=True, indent=2, separators=(',', ': '))
  # Trailing newline keeps command-line clients tidy.
  self.raw_send_response(request, code, body + '\n')
def raw_send_response(self, request, code, data):
  """Writes a minimal HTTP response with *data* as the body and finishes
  the request.

  NOTE(review): HTTP/1.1 specifies CRLF ('\\r\\n') line terminators; the
  bare '\\n' here relies on lenient clients -- confirm before reusing.
  """
  request.write('HTTP/1.1 %d Maybe OK\n' % code)
  request.write('Content-Type: application/json\n')
  request.write('Content-Length: %s\n\n' % len(data))
  request.write(data)
  request.finish()
def device_state(self):
  """Current privet device state; this sample device is always idle."""
  return 'idle'
def get_common_info(self):
  """Fields shared by all /info style responses."""
  info = {
      'version': '3.0',
      'name': 'Sample Device',
  }
  info['device_state'] = self.device_state()
  return info
def post_session_cancel(self):
  """(Re)arms a two-minute inactivity timeout that cancels the session.

  Called after each successful session interaction; any previously
  scheduled cancel is cancelled first so only one timer is pending.
  """
  if self.session_cancel_callback:
    self.session_cancel_callback.cancel()
  self.session_cancel_callback = self.CancelableClosure(self.session_cancel)
  self.ioloop.add_timeout(datetime.timedelta(minutes=2),
                          self.session_cancel_callback)
def session_cancel(self):
  """Timeout callback: drop whatever session is currently active."""
  self.current_session = None
# WifiHandler.Delegate implementation
def on_wifi_connected(self, token):
  """Called by the wifi handler once a network is joined.

  Clears the mDNS setup name, starts the cloud device (passing along the
  registration token, which may be None), and records that wifi is up.
  """
  self.mdns_wrapper.set_setup_name(None)
  self.cloud_device.try_start(token)
  self.on_wifi = True
def on_device_started(self):
  """Cloud-device callback: advertise the assigned device id over mDNS."""
  self.mdns_wrapper.set_id(self.cloud_device.get_device_id())
def on_device_stopped(self):
  """Cloud-device callback: nothing to do on stop."""
  pass
def stop(self):
  """Shut down the wifi handler, then the cloud device connection."""
  for component in (self.wifi_handler, self.cloud_device):
    component.stop()
def main():
  """Loads persisted state, starts the privet web handler, and serves
  HTTP on _DEVICE_PORT until the IOLoop is stopped."""
  state = State()
  state.load()
  ioloop = IOLoop.instance()
  handler = WebRequestHandler(ioloop, state)
  handler.start()

  # Ensure handler resources are released on interpreter exit.
  def logic_stop():
    handler.stop()
  atexit.register(logic_stop)

  server = HTTPServer(handler.handle_request)
  server.listen(_DEVICE_PORT)
  ioloop.start()


if __name__ == '__main__':
  main()
| bsd-3-clause |
ya-yura/snapboard | snapboard/sampledata.py | 4 | 4553 | #
# Sample data for testing
#
leadins = """To characterize a linguistic level L,
On the other hand,
This suggests that
It appears that
Furthermore,
We will bring evidence in favor of the following thesis:
To provide a constituent structure for T(Z,K),
From C1, it follows that
For any transformation which is sufficiently diversified in application to be of any interest,
Analogously,
Clearly,
Note that
Of course,
Suppose, for instance, that
Thus
With this clarification,
Conversely,
We have already seen that
By combining adjunctions and certain deformations,
I suggested that these results would follow from the assumption that
If the position of the trace in (99c) were only relatively inaccessible to movement,
However, this assumption is not correct, since
Comparing these examples with their parasitic gap counterparts in (96) and (97), we see that
In the discussion of resumptive pronouns following (81),
So far,
Nevertheless,
For one thing,
Summarizing, then, we assume that
A consequence of the approach just outlined is that
Presumably,
On our assumptions,
It may be, then, that
It must be emphasized, once again, that
Let us continue to suppose that
Notice, incidentally, that """
# List of LEADINs to buy time.
subjects = """ the notion of level of grammaticalness
a case of semigrammaticalness of a different sort
most of the methodological work in modern linguistics
a subset of English sentences interesting on quite independent grounds
the natural general principle that will subsume this case
an important property of these three types of EC
any associated supporting element
the appearance of parasitic gaps in domains relatively inaccessible to ordinary extraction
the speaker-hearer's linguistic intuition
the descriptive power of the base component
the earlier discussion of deviance
this analysis of a formative as a pair of sets of features
this selectionally introduced contextual feature
a descriptively adequate grammar
the fundamental error of regarding functional notions as categorial
relational information
the systematic use of complex symbols
the theory of syntactic features developed earlier"""
# List of SUBJECTs chosen for maximum professorial macho.
verbs = """can be defined in such a way as to impose
delimits
suffices to account for
cannot be arbitrary in
is not subject to
does not readily tolerate
raises serious doubts about
is not quite equivalent to
does not affect the structure of
may remedy and, at the same time, eliminate
is not to be considered in determining
is to be regarded as
is unspecified with respect to
is, apparently, determined by
is necessary to impose an interpretation on
appears to correlate rather closely with
is rather different from"""
#List of VERBs chosen for autorecursive obfuscation.
objects = """ problems of phonemic and morphological analysis.
a corpus of utterance tokens upon which conformity has been defined by the paired utterance test.
the traditional practice of grammarians.
the levels of acceptability from fairly high (e.g. (99a)) to virtual gibberish (e.g. (98d)).
a stipulation to place the constructions into these various categories.
a descriptive fact.
a parasitic gap construction.
the extended c-command discussed in connection with (34).
the ultimate standard that determines the accuracy of any proposed grammar.
the system of base rules exclusive of the lexicon.
irrelevant intervening contexts in selectional rules.
nondistinctness in the sense of distinctive feature theory.
a general convention regarding the forms of the grammar.
an abstract underlying order.
an important distinction in language use.
the requirement that branching is not tolerated within the dominance scope of a complex symbol.
the strong generative capacity of the theory."""
# List of OBJECTs selected for profound sententiousness.
import textwrap, random
from itertools import chain, islice, izip
def sample_data(times=1, line_length=72):
    """Return *times* sentences of Chomsky-style filler text, built by
    pairing shuffled leadin/subject/verb/object phrases.

    Note: line_length is currently unused (no wrapping is applied).
    """
    parts = []
    for part in (leadins, subjects, verbs, objects):
        # Python 2: map() returns a list, as random.shuffle requires.
        phraselist = map(str.strip, part.splitlines())
        random.shuffle(phraselist)
        parts.append(phraselist)
    # izip(*parts) yields one (leadin, subject, verb, object) tuple per
    # sentence; islice keeps the first *times*; chain flattens them.
    output = chain(*islice(izip(*parts), 0, times))
    return ' '.join(output)
# vim: ai ts=4 sts=4 et sw=4
| bsd-3-clause |
Broadcom/linux-rdma-nxt | tools/perf/scripts/python/check-perf-trace.py | 1997 | 2539 | # perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
    # Called by perf once before any event is processed.
    print "trace_begin"
    pass
def trace_end():
    # Called by perf after the last event: dump the unhandled-event summary.
    print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        common_callchain, vec):
    # Handler for irq:softirq_entry events.
    print_header(event_name, common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)

    print_uncommon(context)

    # symbol_str() maps the numeric softirq vector to its name (e.g. TIMER).
    print "vec=%s\n" % \
        (symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        common_callchain, call_site, ptr, bytes_req, bytes_alloc,
        gfp_flags):
    # Handler for kmem:kmalloc events.
    print_header(event_name, common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)

    print_uncommon(context)

    # flag_str() decodes gfp_flags into symbolic names (e.g. GFP_KERNEL).
    print "call_site=%u, ptr=%u, bytes_req=%u, " \
        "bytes_alloc=%u, gfp_flags=%s\n" % \
        (call_site, ptr, bytes_req, bytes_alloc,
        flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
    # Count events with no dedicated handler.  'unhandled' is an autodict:
    # the first access yields a nested autodict, so '+= 1' raises TypeError
    # and the except branch seeds the counter at 1.
    try:
        unhandled[event_name] += 1
    except TypeError:
        unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
    # Common prefix printed for every event record (trailing comma keeps
    # the per-event handler output on the same line).
    print "%-20s %5u %05u.%09u %8u %-20s " % \
        (event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
    # These fields come from calling back into perf via the context object.
    print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
        % (common_pc(context), trace_flag_str(common_flags(context)), \
        common_lock_depth(context))
def print_unhandled():
    # Print a name/count table of events that had no specific handler.
    keys = unhandled.keys()
    if not keys:
        return

    print "\nunhandled events:\n\n",

    print "%-40s %10s\n" % ("event", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
        "-----------"),

    for event_name in keys:
        print "%-40s %10d\n" % (event_name, unhandled[event_name])
| gpl-2.0 |
shijiaxing/DeepLearningTutorials | code/lstm.py | 23 | 22627 | '''
Build a tweet sentiment analyzer
'''
from collections import OrderedDict
import cPickle as pkl
import sys
import time
import numpy
import theano
from theano import config
import theano.tensor as tensor
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
import imdb
datasets = {'imdb': (imdb.load_data, imdb.prepare_data)}
# Set the random number generators' seeds for consistency
SEED = 123
numpy.random.seed(SEED)
def numpy_floatX(data):
    """Cast *data* to a numpy array of Theano's configured float dtype."""
    target_dtype = config.floatX
    return numpy.asarray(data, dtype=target_dtype)
def get_minibatches_idx(n, minibatch_size, shuffle=False):
    """Partition indices 0..n-1 into minibatches.

    Returns (batch_number, index_array) pairs; the last batch holds the
    remainder when minibatch_size does not divide n.  With shuffle=True
    the index order is randomized first (used once per epoch).
    """
    order = numpy.arange(n, dtype="int32")

    if shuffle:
        numpy.random.shuffle(order)

    full = n // minibatch_size
    batches = [order[i * minibatch_size:(i + 1) * minibatch_size]
               for i in range(full)]
    if full * minibatch_size != n:
        # Leftover indices form a final, smaller minibatch.
        batches.append(order[full * minibatch_size:])

    return zip(range(len(batches)), batches)
def get_dataset(name):
    """Look up the (load_data, prepare_data) pair registered for *name*."""
    entry = datasets[name]
    return entry[0], entry[1]
def zipp(params, tparams):
    """Push each numpy value in *params* into the matching Theano shared
    variable in *tparams* (used when reloading a model onto the GPU)."""
    for name, value in params.iteritems():
        tparams[name].set_value(value)
def unzip(zipped):
    """Pull every Theano shared variable in *zipped* back into a plain
    OrderedDict of numpy values (used when pickling a model)."""
    return OrderedDict((name, shared_var.get_value())
                       for name, shared_var in zipped.iteritems())
def dropout_layer(state_before, use_noise, trng):
    """Symbolic dropout: when use_noise is 1 (training) multiply by a
    random binary mask with keep-probability 0.5; otherwise (testing)
    scale by 0.5 so expected activations match."""
    proj = tensor.switch(use_noise,
                         (state_before *
                          trng.binomial(state_before.shape,
                                        p=0.5, n=1,
                                        dtype=state_before.dtype)),
                         state_before * 0.5)
    return proj
def _p(pp, name):
return '%s_%s' % (pp, name)
def init_params(options):
    """
    Global (not LSTM) parameter. For the embeding and the classifier.
    """
    params = OrderedDict()
    # embedding: n_words x dim_proj, small random values
    randn = numpy.random.rand(options['n_words'],
                              options['dim_proj'])
    params['Wemb'] = (0.01 * randn).astype(config.floatX)
    # Delegate encoder parameter init (param_init_lstm for 'lstm').
    params = get_layer(options['encoder'])[0](options,
                                              params,
                                              prefix=options['encoder'])
    # classifier: softmax weights U and bias b
    params['U'] = 0.01 * numpy.random.randn(options['dim_proj'],
                                            options['ydim']).astype(config.floatX)
    params['b'] = numpy.zeros((options['ydim'],)).astype(config.floatX)

    return params
def load_params(path, params):
    """Overwrite every entry of *params* with the matching array from the
    .npz archive at *path*; raises Warning when a key is missing."""
    archive = numpy.load(path)
    for name, unused_value in params.iteritems():
        if name not in archive:
            raise Warning('%s is not in the archive' % name)
        params[name] = archive[name]
    return params
def init_tparams(params):
    """Wrap each numpy parameter in a Theano shared variable of the same name."""
    tparams = OrderedDict()
    for kk, pp in params.iteritems():
        tparams[kk] = theano.shared(params[kk], name=kk)
    return tparams
def get_layer(name):
    """Return the (param_init_fn, layer_fn) pair registered under *name*."""
    return layers[name]
def ortho_weight(ndim):
    """Random orthogonal ndim x ndim matrix: the U factor of the SVD of a
    Gaussian random matrix, cast to Theano's float dtype."""
    gaussian = numpy.random.randn(ndim, ndim)
    u = numpy.linalg.svd(gaussian)[0]
    return u.astype(config.floatX)
def param_init_lstm(options, params, prefix='lstm'):
    """
    Init the LSTM parameter:

    :see: init_params
    """
    # W: input-to-gate projections for input, forget, output gates and the
    # cell candidate, stacked side by side; _slice() splits them back out.
    W = numpy.concatenate([ortho_weight(options['dim_proj']),
                           ortho_weight(options['dim_proj']),
                           ortho_weight(options['dim_proj']),
                           ortho_weight(options['dim_proj'])], axis=1)
    params[_p(prefix, 'W')] = W
    # U: recurrent (hidden-to-gate) projections, same 4-way layout.
    U = numpy.concatenate([ortho_weight(options['dim_proj']),
                           ortho_weight(options['dim_proj']),
                           ortho_weight(options['dim_proj']),
                           ortho_weight(options['dim_proj'])], axis=1)
    params[_p(prefix, 'U')] = U
    # b: one bias per gate/candidate unit, initialized to zero.
    b = numpy.zeros((4 * options['dim_proj'],))
    params[_p(prefix, 'b')] = b.astype(config.floatX)

    return params
def lstm_layer(tparams, state_below, options, prefix='lstm', mask=None):
    """Build the symbolic LSTM recurrence over *state_below*
    (time x samples x dim embeddings) and return the hidden states.

    *mask* (time x samples) marks real timesteps vs padding; padded
    positions carry the previous h/c forward unchanged.
    """
    nsteps = state_below.shape[0]
    if state_below.ndim == 3:
        n_samples = state_below.shape[1]
    else:
        n_samples = 1

    assert mask is not None

    def _slice(_x, n, dim):
        # Pull the n-th dim-wide column block out of the stacked
        # (4*dim) pre-activation (see param_init_lstm's layout).
        if _x.ndim == 3:
            return _x[:, :, n * dim:(n + 1) * dim]
        return _x[:, n * dim:(n + 1) * dim]

    def _step(m_, x_, h_, c_):
        # One timestep: x_ already holds W*x + b; add the recurrent term.
        preact = tensor.dot(h_, tparams[_p(prefix, 'U')])
        preact += x_

        # Gates (i, f, o) and candidate cell state c.
        i = tensor.nnet.sigmoid(_slice(preact, 0, options['dim_proj']))
        f = tensor.nnet.sigmoid(_slice(preact, 1, options['dim_proj']))
        o = tensor.nnet.sigmoid(_slice(preact, 2, options['dim_proj']))
        c = tensor.tanh(_slice(preact, 3, options['dim_proj']))

        c = f * c_ + i * c
        # Where mask is 0 (padding), keep the previous cell state.
        c = m_[:, None] * c + (1. - m_)[:, None] * c_

        h = o * tensor.tanh(c)
        h = m_[:, None] * h + (1. - m_)[:, None] * h_

        return h, c

    # Precompute the input contribution for all timesteps at once.
    state_below = (tensor.dot(state_below, tparams[_p(prefix, 'W')]) +
                   tparams[_p(prefix, 'b')])

    dim_proj = options['dim_proj']
    rval, updates = theano.scan(_step,
                                sequences=[mask, state_below],
                                outputs_info=[tensor.alloc(numpy_floatX(0.),
                                                           n_samples,
                                                           dim_proj),
                                              tensor.alloc(numpy_floatX(0.),
                                                           n_samples,
                                                           dim_proj)],
                                name=_p(prefix, '_layers'),
                                n_steps=nsteps)
    # rval is [hidden states, cell states]; only h is used downstream.
    return rval[0]
# ff: Feed Forward (normal neural net), only useful to put after lstm
# before the classifier.
layers = {'lstm': (param_init_lstm, lstm_layer)}
def sgd(lr, tparams, grads, x, mask, y, cost):
    """ Stochastic Gradient Descent

    :note: A more complicated version of sgd then needed.  This is
        done like that for adadelta and rmsprop.

    Returns (f_grad_shared, f_update): the first computes the cost and
    stashes the gradients, the second applies them with learning rate lr.
    """
    # New set of shared variable that will contain the gradient
    # for a mini-batch.
    gshared = [theano.shared(p.get_value() * 0., name='%s_grad' % k)
               for k, p in tparams.iteritems()]
    gsup = [(gs, g) for gs, g in zip(gshared, grads)]

    # Function that computes gradients for a mini-batch, but do not
    # updates the weights.
    f_grad_shared = theano.function([x, mask, y], cost, updates=gsup,
                                    name='sgd_f_grad_shared')

    # p <- p - lr * g for every parameter.
    pup = [(p, p - lr * g) for p, g in zip(tparams.values(), gshared)]

    # Function that updates the weights from the previously computed
    # gradient.
    f_update = theano.function([lr], [], updates=pup,
                               name='sgd_f_update')

    return f_grad_shared, f_update
def adadelta(lr, tparams, grads, x, mask, y, cost):
    """
    An adaptive learning rate optimizer

    Parameters
    ----------
    lr : Theano SharedVariable
        Initial learning rate
    tpramas: Theano SharedVariable
        Model parameters
    grads: Theano variable
        Gradients of cost w.r.t to parameres
    x: Theano variable
        Model inputs
    mask: Theano variable
        Sequence mask
    y: Theano variable
        Targets
    cost: Theano variable
        Objective fucntion to minimize

    Notes
    -----
    For more information, see [ADADELTA]_.

    .. [ADADELTA] Matthew D. Zeiler, *ADADELTA: An Adaptive Learning
       Rate Method*, arXiv:1212.5701.
    """
    # Per-parameter accumulators: raw gradients, running average of
    # squared updates, and running average of squared gradients.
    zipped_grads = [theano.shared(p.get_value() * numpy_floatX(0.),
                                  name='%s_grad' % k)
                    for k, p in tparams.iteritems()]
    running_up2 = [theano.shared(p.get_value() * numpy_floatX(0.),
                                 name='%s_rup2' % k)
                   for k, p in tparams.iteritems()]
    running_grads2 = [theano.shared(p.get_value() * numpy_floatX(0.),
                                    name='%s_rgrad2' % k)
                      for k, p in tparams.iteritems()]

    zgup = [(zg, g) for zg, g in zip(zipped_grads, grads)]
    # E[g^2] <- 0.95 E[g^2] + 0.05 g^2
    rg2up = [(rg2, 0.95 * rg2 + 0.05 * (g ** 2))
             for rg2, g in zip(running_grads2, grads)]

    f_grad_shared = theano.function([x, mask, y], cost, updates=zgup + rg2up,
                                    name='adadelta_f_grad_shared')

    # Update direction: -RMS[dx] / RMS[g] * g (epsilon 1e-6 for stability).
    updir = [-tensor.sqrt(ru2 + 1e-6) / tensor.sqrt(rg2 + 1e-6) * zg
             for zg, ru2, rg2 in zip(zipped_grads,
                                     running_up2,
                                     running_grads2)]
    ru2up = [(ru2, 0.95 * ru2 + 0.05 * (ud ** 2))
             for ru2, ud in zip(running_up2, updir)]
    param_up = [(p, p + ud) for p, ud in zip(tparams.values(), updir)]

    # lr is accepted but unused (kept for a uniform optimizer interface).
    f_update = theano.function([lr], [], updates=ru2up + param_up,
                               on_unused_input='ignore',
                               name='adadelta_f_update')

    return f_grad_shared, f_update
def rmsprop(lr, tparams, grads, x, mask, y, cost):
    """
    A variant of  SGD that scales the step size by running average of the
    recent step norms.

    Parameters
    ----------
    lr : Theano SharedVariable
        Initial learning rate
    tpramas: Theano SharedVariable
        Model parameters
    grads: Theano variable
        Gradients of cost w.r.t to parameres
    x: Theano variable
        Model inputs
    mask: Theano variable
        Sequence mask
    y: Theano variable
        Targets
    cost: Theano variable
        Objective fucntion to minimize

    Notes
    -----
    For more information, see [Hint2014]_.

    .. [Hint2014] Geoff Hinton, *Neural Networks for Machine Learning*,
       lecture 6a,
       http://cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf
    """
    # Accumulators: raw gradients, running mean of g, running mean of g^2.
    zipped_grads = [theano.shared(p.get_value() * numpy_floatX(0.),
                                  name='%s_grad' % k)
                    for k, p in tparams.iteritems()]
    running_grads = [theano.shared(p.get_value() * numpy_floatX(0.),
                                   name='%s_rgrad' % k)
                     for k, p in tparams.iteritems()]
    running_grads2 = [theano.shared(p.get_value() * numpy_floatX(0.),
                                    name='%s_rgrad2' % k)
                      for k, p in tparams.iteritems()]

    zgup = [(zg, g) for zg, g in zip(zipped_grads, grads)]
    rgup = [(rg, 0.95 * rg + 0.05 * g) for rg, g in zip(running_grads, grads)]
    rg2up = [(rg2, 0.95 * rg2 + 0.05 * (g ** 2))
             for rg2, g in zip(running_grads2, grads)]

    f_grad_shared = theano.function([x, mask, y], cost,
                                    updates=zgup + rgup + rg2up,
                                    name='rmsprop_f_grad_shared')

    # Momentum-style direction, normalized by the centered RMS of g
    # (E[g^2] - E[g]^2, plus 1e-4 for numerical stability).
    updir = [theano.shared(p.get_value() * numpy_floatX(0.),
                           name='%s_updir' % k)
             for k, p in tparams.iteritems()]
    updir_new = [(ud, 0.9 * ud - 1e-4 * zg / tensor.sqrt(rg2 - rg ** 2 + 1e-4))
                 for ud, zg, rg, rg2 in zip(updir, zipped_grads, running_grads,
                                            running_grads2)]
    param_up = [(p, p + udn[1])
                for p, udn in zip(tparams.values(), updir_new)]
    # lr is accepted but unused (kept for a uniform optimizer interface).
    f_update = theano.function([lr], [], updates=updir_new + param_up,
                               on_unused_input='ignore',
                               name='rmsprop_f_update')

    return f_grad_shared, f_update
def build_model(tparams, options):
    """Assemble the full symbolic graph: embedding -> encoder -> mean
    pooling -> (dropout) -> softmax classifier.

    Returns (use_noise, x, mask, y, f_pred_prob, f_pred, cost).
    """
    trng = RandomStreams(SEED)

    # Used for dropout.
    use_noise = theano.shared(numpy_floatX(0.))

    # x: word indices (time x samples); mask: 1 for real tokens, 0 for pad.
    x = tensor.matrix('x', dtype='int64')
    mask = tensor.matrix('mask', dtype=config.floatX)
    y = tensor.vector('y', dtype='int64')

    n_timesteps = x.shape[0]
    n_samples = x.shape[1]

    emb = tparams['Wemb'][x.flatten()].reshape([n_timesteps,
                                                n_samples,
                                                options['dim_proj']])
    proj = get_layer(options['encoder'])[1](tparams, emb, options,
                                            prefix=options['encoder'],
                                            mask=mask)
    if options['encoder'] == 'lstm':
        # Mean-pool the hidden states over the real (unmasked) timesteps.
        proj = (proj * mask[:, :, None]).sum(axis=0)
        proj = proj / mask.sum(axis=0)[:, None]
    if options['use_dropout']:
        proj = dropout_layer(proj, use_noise, trng)

    pred = tensor.nnet.softmax(tensor.dot(proj, tparams['U']) + tparams['b'])

    f_pred_prob = theano.function([x, mask], pred, name='f_pred_prob')
    f_pred = theano.function([x, mask], pred.argmax(axis=1), name='f_pred')

    # Small offset keeps log() away from zero (larger for float16).
    off = 1e-8
    if pred.dtype == 'float16':
        off = 1e-6

    # Negative log-likelihood of the correct class, averaged over the batch.
    cost = -tensor.log(pred[tensor.arange(n_samples), y] + off).mean()

    return use_noise, x, mask, y, f_pred_prob, f_pred, cost
def pred_probs(f_pred_prob, prepare_data, data, iterator, verbose=False):
    """ If you want to use a trained model, this is useful to compute
    the probabilities of new examples.

    Returns an (n_samples, 2) array -- the output size assumes binary
    classification (matches the imdb sentiment task).
    """
    n_samples = len(data[0])
    probs = numpy.zeros((n_samples, 2)).astype(config.floatX)

    n_done = 0

    for _, valid_index in iterator:
        x, mask, y = prepare_data([data[0][t] for t in valid_index],
                                  numpy.array(data[1])[valid_index],
                                  maxlen=None)
        pred_probs = f_pred_prob(x, mask)
        # Scatter the batch's probabilities back to their original rows.
        probs[valid_index, :] = pred_probs

        n_done += len(valid_index)
        if verbose:
            print '%d/%d samples classified' % (n_done, n_samples)

    return probs
def pred_error(f_pred, prepare_data, data, iterator, verbose=False):
    """
    Just compute the error

    f_pred: Theano fct computing the prediction
    prepare_data: usual prepare_data for that dataset.

    Returns the error rate (1 - accuracy) over every index in *iterator*.
    """
    correct = 0
    for _, valid_index in iterator:
        batch_x = [data[0][t] for t in valid_index]
        batch_targets = numpy.array(data[1])[valid_index]
        x, mask, y = prepare_data(batch_x, batch_targets, maxlen=None)
        predictions = f_pred(x, mask)
        correct += (predictions == numpy.array(data[1])[valid_index]).sum()
    return 1. - numpy_floatX(correct) / len(data[0])
def train_lstm(
    dim_proj=128,  # word embeding dimension and LSTM number of hidden units.
    patience=10,  # Number of epoch to wait before early stop if no progress
    max_epochs=5000,  # The maximum number of epoch to run
    dispFreq=10,  # Display to stdout the training progress every N updates
    decay_c=0.,  # Weight decay for the classifier applied to the U weights.
    lrate=0.0001,  # Learning rate for sgd (not used for adadelta and rmsprop)
    n_words=10000,  # Vocabulary size
    optimizer=adadelta,  # sgd, adadelta and rmsprop available, sgd very hard to use, not recommanded (probably need momentum and decaying learning rate).
    encoder='lstm',  # TODO: can be removed must be lstm.
    saveto='lstm_model.npz',  # The best model will be saved there
    validFreq=370,  # Compute the validation error after this number of update.
    saveFreq=1110,  # Save the parameters after every saveFreq updates
    maxlen=100,  # Sequence longer then this get ignored
    batch_size=16,  # The batch size during training.
    valid_batch_size=64,  # The batch size used for validation/test set.
    dataset='imdb',

    # Parameter for extra option
    noise_std=0.,
    use_dropout=True,  # if False slightly faster, but worst test error
                       # This frequently need a bigger model.
    reload_model=None,  # Path to a saved model we want to start from.
    test_size=-1,  # If >0, we keep only this number of test example.
):
    """Train the LSTM sentiment model end to end and return the final
    (train_err, valid_err, test_err) triple."""
    # Model options
    model_options = locals().copy()
    print "model options", model_options

    load_data, prepare_data = get_dataset(dataset)

    print 'Loading data'
    train, valid, test = load_data(n_words=n_words, valid_portion=0.05,
                                   maxlen=maxlen)
    if test_size > 0:
        # The test set is sorted by size, but we want to keep random
        # size example.  So we must select a random selection of the
        # examples.
        idx = numpy.arange(len(test[0]))
        numpy.random.shuffle(idx)
        idx = idx[:test_size]
        test = ([test[0][n] for n in idx], [test[1][n] for n in idx])

    # Number of target classes, inferred from the training labels.
    ydim = numpy.max(train[1]) + 1

    model_options['ydim'] = ydim

    print 'Building model'
    # This create the initial parameters as numpy ndarrays.
    # Dict name (string) -> numpy ndarray
    params = init_params(model_options)

    if reload_model:
        load_params('lstm_model.npz', params)

    # This create Theano Shared Variable from the parameters.
    # Dict name (string) -> Theano Tensor Shared Variable
    # params and tparams have different copy of the weights.
    tparams = init_tparams(params)

    # use_noise is for dropout
    (use_noise, x, mask,
     y, f_pred_prob, f_pred, cost) = build_model(tparams, model_options)

    if decay_c > 0.:
        # L2 penalty on the classifier weights U only.
        decay_c = theano.shared(numpy_floatX(decay_c), name='decay_c')
        weight_decay = 0.
        weight_decay += (tparams['U'] ** 2).sum()
        weight_decay *= decay_c
        cost += weight_decay

    f_cost = theano.function([x, mask, y], cost, name='f_cost')

    grads = tensor.grad(cost, wrt=tparams.values())
    f_grad = theano.function([x, mask, y], grads, name='f_grad')

    lr = tensor.scalar(name='lr')
    f_grad_shared, f_update = optimizer(lr, tparams, grads,
                                        x, mask, y, cost)

    print 'Optimization'

    kf_valid = get_minibatches_idx(len(valid[0]), valid_batch_size)
    kf_test = get_minibatches_idx(len(test[0]), valid_batch_size)

    print "%d train examples" % len(train[0])
    print "%d valid examples" % len(valid[0])
    print "%d test examples" % len(test[0])

    history_errs = []
    best_p = None
    # NOTE(review): 'bad_count' is never read; the loop below tracks
    # 'bad_counter' (first assigned on the first validation pass).
    bad_count = 0

    if validFreq == -1:
        validFreq = len(train[0]) / batch_size
    if saveFreq == -1:
        saveFreq = len(train[0]) / batch_size

    uidx = 0  # the number of update done
    estop = False  # early stop
    start_time = time.time()
    try:
        for eidx in xrange(max_epochs):
            n_samples = 0

            # Get new shuffled index for the training set.
            kf = get_minibatches_idx(len(train[0]), batch_size, shuffle=True)

            for _, train_index in kf:
                uidx += 1
                # Enable dropout noise for training updates.
                use_noise.set_value(1.)

                # Select the random examples for this minibatch
                y = [train[1][t] for t in train_index]
                x = [train[0][t] for t in train_index]

                # Get the data in numpy.ndarray format
                # This swap the axis!
                # Return something of shape (minibatch maxlen, n samples)
                x, mask, y = prepare_data(x, y)
                n_samples += x.shape[1]

                cost = f_grad_shared(x, mask, y)
                f_update(lrate)

                if numpy.isnan(cost) or numpy.isinf(cost):
                    print 'NaN detected'
                    return 1., 1., 1.

                if numpy.mod(uidx, dispFreq) == 0:
                    print 'Epoch ', eidx, 'Update ', uidx, 'Cost ', cost

                if saveto and numpy.mod(uidx, saveFreq) == 0:
                    print 'Saving...',

                    # Save the best parameters seen so far, or the current
                    # ones before the first validation.
                    if best_p is not None:
                        params = best_p
                    else:
                        params = unzip(tparams)
                    numpy.savez(saveto, history_errs=history_errs, **params)
                    pkl.dump(model_options, open('%s.pkl' % saveto, 'wb'), -1)
                    print 'Done'

                if numpy.mod(uidx, validFreq) == 0:
                    # Disable dropout noise while evaluating.
                    use_noise.set_value(0.)
                    train_err = pred_error(f_pred, prepare_data, train, kf)
                    valid_err = pred_error(f_pred, prepare_data, valid,
                                           kf_valid)
                    test_err = pred_error(f_pred, prepare_data, test, kf_test)

                    history_errs.append([valid_err, test_err])

                    if (uidx == 0 or
                        valid_err <= numpy.array(history_errs)[:,
                                                               0].min()):

                        best_p = unzip(tparams)
                        bad_counter = 0

                    print ('Train ', train_err, 'Valid ', valid_err,
                           'Test ', test_err)

                    # Early stopping: no improvement over the best error
                    # seen at least 'patience' validations ago.
                    if (len(history_errs) > patience and
                        valid_err >= numpy.array(history_errs)[:-patience,
                                                               0].min()):
                        bad_counter += 1
                        if bad_counter > patience:
                            print 'Early Stop!'
                            estop = True
                            break

            print 'Seen %d samples' % n_samples

            if estop:
                break

    except KeyboardInterrupt:
        print "Training interupted"

    end_time = time.time()
    if best_p is not None:
        # Restore the best parameters into the shared variables.
        zipp(best_p, tparams)
    else:
        best_p = unzip(tparams)

    use_noise.set_value(0.)
    kf_train_sorted = get_minibatches_idx(len(train[0]), batch_size)
    train_err = pred_error(f_pred, prepare_data, train, kf_train_sorted)
    valid_err = pred_error(f_pred, prepare_data, valid, kf_valid)
    test_err = pred_error(f_pred, prepare_data, test, kf_test)

    print 'Train ', train_err, 'Valid ', valid_err, 'Test ', test_err

    if saveto:
        numpy.savez(saveto, train_err=train_err,
                    valid_err=valid_err, test_err=test_err,
                    history_errs=history_errs, **best_p)
    print 'The code run for %d epochs, with %f sec/epochs' % (
        (eidx + 1), (end_time - start_time) / (1. * (eidx + 1)))
    print >> sys.stderr, ('Training took %.1fs' %
                          (end_time - start_time))
    return train_err, valid_err, test_err
if __name__ == '__main__':
    # See function train for all possible parameter and there definition.
    # Small max_epochs/test_size make the default run finish quickly.
    train_lstm(
        max_epochs=100,
        test_size=500,
    )
| bsd-3-clause |
matthiaskramm/corepy | corepy/arch/vmx/isa/vmx_fields.py | 1 | 4550 | # Copyright (c) 2006-2009 The Trustees of Indiana University.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# - Neither the Indiana University nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Field types for VMX
from corepy.spre.spe import Register, InstructionOperand, Variable
bit_mask = [
0x0, 0x1, 0x3, 0x7, 0xF, 0x1F, 0x3F, 0x7F, 0xFF,
0x1FF, 0x3FF, 0x7FF, 0xFFF, 0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF,
0x1FFFF, 0x3FFFF, 0x7FFFF, 0xFFFFF, 0x1FFFFF, 0x3FFFFF, 0x7FFFFF, 0xFFFFFF,
0x1FFFFFF, 0x3FFFFFF, 0x7FFFFFF, 0xFFFFFFF, 0x1FFFFFFF, 0x3FFFFFFF, 0x7FFFFFFF]
class VMXField(InstructionOperand):
  """One bit-field of a 32-bit VMX instruction word.

  *position* uses PowerPC big-endian bit numbering (bit 0 is the MSB);
  a single int denotes a one-bit field.
  """

  def __init__(self, name, position, default = None):
    if type(position) == int:
      # Normalize a single bit to a (start, end) pair.
      position = (position, position)
    start, end = position[0], position[1]
    # Left-shift needed to place the field from the little-endian side.
    self.shift = 31 - end
    self.width = (end - start) + 1
    self.position = position
    self.bit_mask = bit_mask[self.width]
    InstructionOperand.__init__(self, name, default)

  def render(self, value):
    """Mask *value* to the field width and shift it into position."""
    return (long(value) & self.bit_mask) << self.shift
class RegisterField(VMXField):
    """Field holding a register operand: a Register, a Variable, or a raw int."""
    # TODO - AWF - really still want ints as registers?
    def check(self, value):
        if isinstance(value, (Register, Variable)):
            return True
        return isinstance(value, int) and 0 <= value < 32

    def render(self, value):
        # Unwrap to the underlying register number, then place it in the field.
        if isinstance(value, Register):
            reg = value.reg
        elif isinstance(value, Variable):
            reg = value.reg.reg
        else:
            reg = value
        return (long(reg) & self.bit_mask) << self.shift
class ImmediateField(VMXField):
    """Field holding an immediate constant restricted to a half-open range."""
    def __init__(self, name, position, range, default = None):
        self.range = range
        VMXField.__init__(self, name, position, default = default)

    def check(self, value):
        # Accept py2 int/long within [range[0], range[1]).
        if not isinstance(value, (int, long)):
            return False
        lo, hi = self.range
        return lo <= value < hi
# Primary opcode and the extended-opcode field of each instruction form.
OPCD = VMXField("OPCD", (0, 5))
V_XO = VMXField("VA_XO", (26, 31))    # NOTE(review): label "VA_XO" != variable name V_XO -- confirm intended
VX_XO = VMXField("VX_XO", (21, 31))
X_XO = VMXField("X_XO", (21, 30))
VXR_XO = VMXField("VXX_XO", (22, 31)) # NOTE(review): label "VXX_XO" != variable name VXR_XO -- confirm intended
# Vector register operands.
vA = RegisterField("vA", (11,15))
vB = RegisterField("vB", (16,20))
vC = RegisterField("vC", (21,25))
vD = RegisterField("vD", (6,10))
# Register operands A and B.
A = RegisterField("A", (11,15))
B = RegisterField("B", (16,20))
# Immediate operands: shift amount, unsigned/signed immediates, stream id,
# and single-bit T/Rc flags (ranges are half-open).
SH = ImmediateField("SH", (22,25), (0, 16))
UIMM = ImmediateField("UIMM", (11,15), (0, 32))
SIMM = ImmediateField("SIMM", (11,15), (-16, 16))
STRM = ImmediateField("STRM", (9, 10), (0, 4))
T = ImmediateField("T", (6, 6), (0, 2))
Rc = ImmediateField("RC", (21, 21), (0, 2))   # NOTE(review): label "RC" != variable name Rc
| bsd-3-clause |
ashwinsawant/codejam-commandline | lib/zip_utils.py | 20 | 6568 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains functions used to zip or unzip data."""
import StringIO
import gzip
import os
import zipfile
from lib import error
def UnzipData(zipped_data):
  """Unzip the specified data in memory using the gzip library.

  Args:
    zipped_data: A byte array with the gzip-compressed data.

  Returns:
    A byte array with the unzipped data.

  Raises:
    error.InternalError: If any I/O or Unicode error occurs while unzipping
      the data.
  """
  # Uncompress the zipped data using an in-memory file (no temp file needed).
  try:
    zipped_memfile = StringIO.StringIO(zipped_data)
    unzipped_file = gzip.GzipFile(fileobj=zipped_memfile, mode='rb')
    unzipped_data = unzipped_file.read()
    unzipped_file.close()
    zipped_memfile.close()
    return unzipped_data
  except UnicodeError as e:
    raise error.InternalError('Unicode error while parsing zipped data: '
                              '{0}.\n'.format(e))
  except IOError as e:
    # Fixed message: this path decompresses *zipped* data; the original text
    # ("decompressing unzipped data") was copy-pasted from ZipData.
    raise error.InternalError('I/O error while decompressing zipped data: '
                              '{0}.\n'.format(e))
def ZipData(unzipped_data):
  """Zip the specified data in memory using the gzip library.

  Args:
    unzipped_data: A byte array with the uncompressed data.

  Returns:
    A byte array with the zipped data.

  Raises:
    error.InternalError: If any I/O or Unicode error occurs while zipping the
      data.
  """
  try:
    # Compress the unzipped data into an in-memory file.
    zipped_memfile = StringIO.StringIO()
    zipped_file = gzip.GzipFile(fileobj=zipped_memfile, mode='wb')
    zipped_file.write(unzipped_data)
    zipped_file.close()
    # Extract the zipped data from the memory-mapped file, release it and
    # return the zipped data.
    zipped_data = zipped_memfile.getvalue()
    zipped_memfile.close()
    return zipped_data
  except UnicodeError as e:
    raise error.InternalError('Unicode error while parsing zipped data: '
                              '{0}.\n'.format(e))
  except IOError as e:
    # Fixed message: this function *compresses*; the original text said
    # "decompressing unzipped data".
    raise error.InternalError('I/O error while compressing unzipped data: '
                              '{0}.\n'.format(e))
def MakeZipFile(source_files, output_file, ignore_exts=None):
  """Create a zip file with the specified source files.

  The source_files sequence can include directories, which will be traversed
  and added recursively to the output file, ignoring those with banned
  extensions.

  Args:
    source_files: A collection with all source files or directories to zip.
    output_file: Name or file-like object where the zip file must be generated.
    ignore_exts: A collection with all the extensions to ignore.

  Returns:
    A set with all the ignored files during the zip file creation.

  Raises:
    error.InternalError: If any I/O or OS error occurs while zipping the data.
  """
  if ignore_exts is None:
    ignore_exts = []
  try:
    # Open the destination zip file and initialize the ignored files set.
    zip_file = zipfile.ZipFile(output_file, 'w', zipfile.ZIP_DEFLATED)
    ignored_files = set()
    # Put all specified sources in the zip file, ignoring files with the
    # specified extensions.
    for source_filename in source_files:
      # If the source is a directory, walk over it, adding each file inside it.
      if os.path.isdir(source_filename):
        # Walk over the specified directory.
        for dirpath, dirnames, filenames in os.walk(source_filename):
          # Create the directory inside the zip file and process all
          # files in the current directory.
          zip_file.write(dirpath)
          for filename in filenames:
            # Create the base filename and check if its extension is not in
            # the extensions ignore list. Otherwise, add it to the ignored
            # files set.
            base_filename = os.path.join(dirpath, filename)
            if os.path.splitext(filename)[1] not in ignore_exts:
              zip_file.write(base_filename)
            else:
              ignored_files.add(base_filename)
      else:
        # Add a file to the zip if and only if its extension is not in the
        # ignore list. Otherwise, add it to the ignored files set.
        if os.path.splitext(source_filename)[1] not in ignore_exts:
          zip_file.write(source_filename)
        else:
          ignored_files.add(source_filename)
    # Close the zip file and return the ignored files set.
    zip_file.close()
    return ignored_files
  except IOError as e:
    raise error.InternalError('I/O error while creating zip file: '
                              '{0}.\n'.format(e))
  except OSError as e:
    raise error.InternalError('OS error while creating zip file: '
                              '{0}.\n'.format(e))
def MakeZipFileInMemory(source_files, ignore_exts=None):
  """Create a zip file from the specified source files entirely in memory.

  Directories in source_files are traversed recursively; files whose
  extension is listed in ignore_exts are skipped (see MakeZipFile).

  Args:
    source_files: A collection with all source files or directories to zip.
    ignore_exts: A collection with all the extensions to ignore.

  Returns:
    A (zip_output, ignored_files) tuple, where zip_output are the contents of
    the generated zip file and ignored_files are the files skipped while
    building it.

  Raises:
    error.InternalError: If any Unicode error occurs while zipping the data.
  """
  try:
    # Build the archive into an in-memory file, then hand back its bytes.
    memfile = StringIO.StringIO()
    skipped_files = MakeZipFile(source_files, memfile, ignore_exts)
    zip_contents = memfile.getvalue()
    memfile.close()
    return zip_contents, skipped_files
  except UnicodeError as e:
    raise error.InternalError('Unicode error while parsing zipped data: '
                              '{0}.\n'.format(e))
arnaudsj/mdp-toolkit | mdp/test/test_process_schedule.py | 2 | 4242 | from __future__ import with_statement
from _tools import *
import mdp.parallel as parallel
n = numx
def test_process_scheduler_shutdown():
    """Test that we can properly shutdown the subprocesses"""
    # A single worker process, no callable caching; shutdown() must not hang.
    scheduler = parallel.ProcessScheduler(verbose=False,
                                          n_processes=1,
                                          source_paths=None,
                                          cache_callable=False)
    scheduler.shutdown()
def test_process_scheduler_order():
    """Test the correct result order in process scheduler."""
    scheduler = parallel.ProcessScheduler(verbose=False,
                                          n_processes=3,
                                          source_paths=None)
    max_i = 8
    # Later tasks get shorter sleep times ((max_i-1-i)/4), so they tend to
    # finish first; results must nevertheless come back in submission order.
    for i in xrange(max_i):
        scheduler.add_task((n.arange(0,i+1), (max_i-1-i)*1.0/4),
                           parallel.SleepSqrTestCallable())
    results = scheduler.get_results()
    scheduler.shutdown()
    # check result: squared ranges, concatenated in submission order
    results = n.concatenate(results)
    assert n.all(results ==
                 n.concatenate([n.arange(0,i+1)**2
                                for i in xrange(max_i)]))
def test_process_scheduler_no_cache():
    """Test process scheduler with caching turned off."""
    scheduler = parallel.ProcessScheduler(verbose=False,
                                          n_processes=2,
                                          source_paths=None,
                                          cache_callable=False)
    # Each task squares its index.
    for i in xrange(8):
        scheduler.add_task(i, parallel.SqrTestCallable())
    results = scheduler.get_results()
    scheduler.shutdown()
    # check result
    results = n.array(results)
    assert n.all(results == n.array([0,1,4,9,16,25,36,49]))
def test_process_scheduler_manager():
    """Test process scheduler with context manager interface."""
    # Leaving the with-block must shut the worker processes down.
    with parallel.ProcessScheduler(n_processes=2,
                                   source_paths=None) as scheduler:
        for i in xrange(8):
            scheduler.add_task(i, parallel.SqrTestCallable())
        results = scheduler.get_results()
    # check result
    results = n.array(results)
    assert n.all(results == n.array([0,1,4,9,16,25,36,49]))
def test_process_scheduler_flow():
    """Test process scheduler with real Nodes."""
    precision = 6
    node1 = mdp.nodes.PCANode(output_dim=20)
    node2 = mdp.nodes.PolynomialExpansionNode(degree=1)
    node3 = mdp.nodes.SFANode(output_dim=10)
    flow = mdp.parallel.ParallelFlow([node1, node2, node3])
    # Independent copy of the same flow, trained through the scheduler.
    parallel_flow = mdp.parallel.ParallelFlow(flow.copy()[:])
    input_dim = 30
    # Scale each input dimension differently so PCA has structure to find.
    scales = n.linspace(1, 100, num=input_dim)
    scale_matrix = mdp.numx.diag(scales)
    train_iterables = [n.dot(mdp.numx_rand.random((5, 100, input_dim)),
                             scale_matrix)
                       for _ in xrange(3)]
    x = mdp.numx.random.random((10, input_dim))
    with parallel.ProcessScheduler(verbose=False,
                                   n_processes=3,
                                   source_paths=None) as scheduler:
        parallel_flow.train(train_iterables, scheduler=scheduler)
        # test that parallel execution works as well
        # note that we need more chunks than processes to test caching
        parallel_flow.execute([x for _ in xrange(8)], scheduler=scheduler)
    # compare to normal flow
    flow.train(train_iterables)
    assert parallel_flow[0].tlen == flow[0].tlen
    y1 = flow.execute(x)
    y2 = parallel_flow.execute(x)
    # abs() because component signs are arbitrary between the two runs.
    assert_array_almost_equal(abs(y1), abs(y2), precision)
def test_process_scheduler_mdp_version():
    """Test that we are running the same mdp in subprocesses"""
    scheduler = parallel.ProcessScheduler(verbose=False,
                                          n_processes=2,
                                          source_paths=None,
                                          cache_callable=False)
    # Each task reports the MDP version info seen inside its subprocess.
    for i in xrange(2):
        scheduler.add_task(i, parallel.MDPVersionCallable())
    out = scheduler.get_results()
    scheduler.shutdown()
    # check that we get 2 identical dictionaries
    assert out[0] == out[1], 'Subprocesses did not run '\
           'the same MDP as the parent:\n%s\n--\n%s'%(out[0], out[1])
| bsd-3-clause |
lanen/youtube-dl | youtube_dl/extractor/jeuxvideo.py | 85 | 1990 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
class JeuxVideoIE(InfoExtractor):
    """Extractor for videos hosted on jeuxvideo.com."""
    _VALID_URL = r'http://.*?\.jeuxvideo\.com/.*/(.*?)\.htm'

    _TESTS = [{
        'url': 'http://www.jeuxvideo.com/reportages-videos-jeux/0004/00046170/tearaway-playstation-vita-gc-2013-tearaway-nous-presente-ses-papiers-d-identite-00115182.htm',
        'md5': '046e491afb32a8aaac1f44dd4ddd54ee',
        'info_dict': {
            'id': '114765',
            'ext': 'mp4',
            'title': 'Tearaway : GC 2013 : Tearaway nous présente ses papiers d\'identité',
            'description': 'Lorsque les développeurs de LittleBigPlanet proposent un nouveau titre, on ne peut que s\'attendre à un résultat original et fort attrayant.',
        },
    }, {
        'url': 'http://www.jeuxvideo.com/videos/chroniques/434220/l-histoire-du-jeu-video-la-saturn.htm',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        # URL slug, only used as the download label until the real title is found.
        title = mobj.group(1)
        webpage = self._download_webpage(url, title)
        # The display title comes from the page's meta "name" tag.
        title = self._html_search_meta('name', webpage)
        config_url = self._html_search_regex(
            r'data-src="(/contenu/medias/video.php.*?)"',
            webpage, 'config URL')
        config_url = 'http://www.jeuxvideo.com' + config_url

        # The numeric video id is embedded in the config URL's query string.
        video_id = self._search_regex(
            r'id=(\d+)',
            config_url, 'video ID')

        config = self._download_json(
            config_url, title, 'Downloading JSON config')
        # reversed() -- presumably so higher-quality sources come first;
        # confirm against the site's JSON ordering.
        formats = [{
            'url': source['file'],
            'format_id': source['label'],
            'resolution': source['label'],
        } for source in reversed(config['sources'])]

        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'description': self._og_search_description(webpage),
            'thumbnail': config.get('image'),
        }
| unlicense |
johngian/remo | vendor-local/lib/python/unidecode/x0c2.py | 253 | 4710 | data = (
'syon', # 0x00
'syonj', # 0x01
'syonh', # 0x02
'syod', # 0x03
'syol', # 0x04
'syolg', # 0x05
'syolm', # 0x06
'syolb', # 0x07
'syols', # 0x08
'syolt', # 0x09
'syolp', # 0x0a
'syolh', # 0x0b
'syom', # 0x0c
'syob', # 0x0d
'syobs', # 0x0e
'syos', # 0x0f
'syoss', # 0x10
'syong', # 0x11
'syoj', # 0x12
'syoc', # 0x13
'syok', # 0x14
'syot', # 0x15
'syop', # 0x16
'syoh', # 0x17
'su', # 0x18
'sug', # 0x19
'sugg', # 0x1a
'sugs', # 0x1b
'sun', # 0x1c
'sunj', # 0x1d
'sunh', # 0x1e
'sud', # 0x1f
'sul', # 0x20
'sulg', # 0x21
'sulm', # 0x22
'sulb', # 0x23
'suls', # 0x24
'sult', # 0x25
'sulp', # 0x26
'sulh', # 0x27
'sum', # 0x28
'sub', # 0x29
'subs', # 0x2a
'sus', # 0x2b
'suss', # 0x2c
'sung', # 0x2d
'suj', # 0x2e
'suc', # 0x2f
'suk', # 0x30
'sut', # 0x31
'sup', # 0x32
'suh', # 0x33
'sweo', # 0x34
'sweog', # 0x35
'sweogg', # 0x36
'sweogs', # 0x37
'sweon', # 0x38
'sweonj', # 0x39
'sweonh', # 0x3a
'sweod', # 0x3b
'sweol', # 0x3c
'sweolg', # 0x3d
'sweolm', # 0x3e
'sweolb', # 0x3f
'sweols', # 0x40
'sweolt', # 0x41
'sweolp', # 0x42
'sweolh', # 0x43
'sweom', # 0x44
'sweob', # 0x45
'sweobs', # 0x46
'sweos', # 0x47
'sweoss', # 0x48
'sweong', # 0x49
'sweoj', # 0x4a
'sweoc', # 0x4b
'sweok', # 0x4c
'sweot', # 0x4d
'sweop', # 0x4e
'sweoh', # 0x4f
'swe', # 0x50
'sweg', # 0x51
'swegg', # 0x52
'swegs', # 0x53
'swen', # 0x54
'swenj', # 0x55
'swenh', # 0x56
'swed', # 0x57
'swel', # 0x58
'swelg', # 0x59
'swelm', # 0x5a
'swelb', # 0x5b
'swels', # 0x5c
'swelt', # 0x5d
'swelp', # 0x5e
'swelh', # 0x5f
'swem', # 0x60
'sweb', # 0x61
'swebs', # 0x62
'swes', # 0x63
'swess', # 0x64
'sweng', # 0x65
'swej', # 0x66
'swec', # 0x67
'swek', # 0x68
'swet', # 0x69
'swep', # 0x6a
'sweh', # 0x6b
'swi', # 0x6c
'swig', # 0x6d
'swigg', # 0x6e
'swigs', # 0x6f
'swin', # 0x70
'swinj', # 0x71
'swinh', # 0x72
'swid', # 0x73
'swil', # 0x74
'swilg', # 0x75
'swilm', # 0x76
'swilb', # 0x77
'swils', # 0x78
'swilt', # 0x79
'swilp', # 0x7a
'swilh', # 0x7b
'swim', # 0x7c
'swib', # 0x7d
'swibs', # 0x7e
'swis', # 0x7f
'swiss', # 0x80
'swing', # 0x81
'swij', # 0x82
'swic', # 0x83
'swik', # 0x84
'swit', # 0x85
'swip', # 0x86
'swih', # 0x87
'syu', # 0x88
'syug', # 0x89
'syugg', # 0x8a
'syugs', # 0x8b
'syun', # 0x8c
'syunj', # 0x8d
'syunh', # 0x8e
'syud', # 0x8f
'syul', # 0x90
'syulg', # 0x91
'syulm', # 0x92
'syulb', # 0x93
'syuls', # 0x94
'syult', # 0x95
'syulp', # 0x96
'syulh', # 0x97
'syum', # 0x98
'syub', # 0x99
'syubs', # 0x9a
'syus', # 0x9b
'syuss', # 0x9c
'syung', # 0x9d
'syuj', # 0x9e
'syuc', # 0x9f
'syuk', # 0xa0
'syut', # 0xa1
'syup', # 0xa2
'syuh', # 0xa3
'seu', # 0xa4
'seug', # 0xa5
'seugg', # 0xa6
'seugs', # 0xa7
'seun', # 0xa8
'seunj', # 0xa9
'seunh', # 0xaa
'seud', # 0xab
'seul', # 0xac
'seulg', # 0xad
'seulm', # 0xae
'seulb', # 0xaf
'seuls', # 0xb0
'seult', # 0xb1
'seulp', # 0xb2
'seulh', # 0xb3
'seum', # 0xb4
'seub', # 0xb5
'seubs', # 0xb6
'seus', # 0xb7
'seuss', # 0xb8
'seung', # 0xb9
'seuj', # 0xba
'seuc', # 0xbb
'seuk', # 0xbc
'seut', # 0xbd
'seup', # 0xbe
'seuh', # 0xbf
'syi', # 0xc0
'syig', # 0xc1
'syigg', # 0xc2
'syigs', # 0xc3
'syin', # 0xc4
'syinj', # 0xc5
'syinh', # 0xc6
'syid', # 0xc7
'syil', # 0xc8
'syilg', # 0xc9
'syilm', # 0xca
'syilb', # 0xcb
'syils', # 0xcc
'syilt', # 0xcd
'syilp', # 0xce
'syilh', # 0xcf
'syim', # 0xd0
'syib', # 0xd1
'syibs', # 0xd2
'syis', # 0xd3
'syiss', # 0xd4
'sying', # 0xd5
'syij', # 0xd6
'syic', # 0xd7
'syik', # 0xd8
'syit', # 0xd9
'syip', # 0xda
'syih', # 0xdb
'si', # 0xdc
'sig', # 0xdd
'sigg', # 0xde
'sigs', # 0xdf
'sin', # 0xe0
'sinj', # 0xe1
'sinh', # 0xe2
'sid', # 0xe3
'sil', # 0xe4
'silg', # 0xe5
'silm', # 0xe6
'silb', # 0xe7
'sils', # 0xe8
'silt', # 0xe9
'silp', # 0xea
'silh', # 0xeb
'sim', # 0xec
'sib', # 0xed
'sibs', # 0xee
'sis', # 0xef
'siss', # 0xf0
'sing', # 0xf1
'sij', # 0xf2
'sic', # 0xf3
'sik', # 0xf4
'sit', # 0xf5
'sip', # 0xf6
'sih', # 0xf7
'ssa', # 0xf8
'ssag', # 0xf9
'ssagg', # 0xfa
'ssags', # 0xfb
'ssan', # 0xfc
'ssanj', # 0xfd
'ssanh', # 0xfe
'ssad', # 0xff
)
| bsd-3-clause |
jamadorfi18/pulpopaul | pulpopaul/models.py | 1 | 2545 | from flask_sqlalchemy import SQLAlchemy, orm
db = SQLAlchemy()
class Model:
    """Mixin adding simple persistence helpers on top of the shared session."""

    def save(self):
        """Add this instance to the session and commit immediately."""
        session = db.session
        session.add(self)
        session.commit()

    def delete(self):
        """Remove this instance from the session and commit immediately."""
        session = db.session
        session.delete(self)
        session.commit()
class Tournament(db.Model, Model):
    """
    Tournament model: a named competition with a start and an end date.
    """
    id = db.Column(db.Integer(), primary_key=True)
    name = db.Column(db.String())
    short_name = db.Column(db.String())   # optional abbreviation
    start_date = db.Column(db.DateTime())
    end_date = db.Column(db.DateTime())
    def __init__(self, name, start_date, end_date, short_name=None):
        self.name = name
        self.short_name = short_name
        self.start_date = start_date
        self.end_date = end_date
    def __repr__(self):
        return "<Tournament '{}'>".format(self.name)
class Match(db.Model, Model):
    """A match between a local team and a visiting team.

    team_local / team_visitor expose the resolved Team objects; they are
    loaded in __init__ for new instances and in init_on_query for instances
    loaded by the ORM (which bypasses __init__).
    """
    id = db.Column(db.Integer(), primary_key=True)
    team_local_id = db.Column(db.Integer(), db.ForeignKey('team.id'), index=True)
    # NOTE(review): unlike team_local_id, this column has no
    # ForeignKey('team.id'). Adding one would change the schema, so it is
    # only flagged here -- confirm whether the omission is intentional.
    team_visitor_id = db.Column(db.Integer(), index=True)
    kickoff_at = db.Column(db.DateTime())
    score_local = db.Column(db.Integer())
    score_visitor = db.Column(db.Integer())

    def __init__(self, team_local_id, team_visitor_id, kickoff_at, score_visitor=None, score_local=None):
        self.team_local_id = team_local_id
        self.team_visitor_id = team_visitor_id
        self.kickoff_at = kickoff_at
        self.score_local = score_local
        self.score_visitor = score_visitor
        self.init_on_query()

    # http://docs.sqlalchemy.org/en/latest/orm/constructors.html
    @orm.reconstructor
    def init_on_query(self):
        # Also runs on ORM load, where __init__ is skipped.
        self._team_local = Team.query.get(self.team_local_id)
        self._team_visitor = Team.query.get(self.team_visitor_id)

    @property
    def team_local(self):
        return self._team_local

    # BUG FIX: a property setter must be defined under the *same* name as the
    # property. The originals were named set_team_local/set_team_visitor,
    # which left team_local/team_visitor read-only (assignment raised
    # AttributeError) and published stray set_team_local/set_team_visitor
    # properties instead.
    @team_local.setter
    def team_local(self, value):
        self._team_local = value

    @property
    def team_visitor(self):
        return self._team_visitor

    @team_visitor.setter
    def team_visitor(self, value):
        self._team_visitor = value

    def __repr__(self):
        return "<Match {} vs {}>".format(self.team_local.name, self.team_visitor.name)
class Team(db.Model, Model):
    """A team; ``matches`` is a lazily-evaluated query of related Match rows."""
    id = db.Column(db.Integer(), primary_key=True)
    name = db.Column(db.Unicode(255))
    # NOTE(review): the join presumably goes through Match.team_local_id,
    # the only Match column with ForeignKey('team.id') -- confirm.
    matches = db.relationship(
        'Match',
        backref='team',
        lazy='dynamic'
    )
    def __init__(self, name):
        self.name = name
    def __repr__(self):
        return "<Team {}>".format(self.name)
| agpl-3.0 |
waditu/tushare | test/fund_test.py | 19 | 1232 | # -*- coding:utf-8 -*-
import unittest
import tushare.stock.fundamental as fd
class Test(unittest.TestCase):
    """Smoke tests for tushare.stock.fundamental (most are disabled below)."""
    def set_data(self):
        # Shared fixture values. NOTE: plain helper, not setUp(), so tests
        # must call it explicitly; test_get_stock_basics does not need it.
        self.code = '600848'
        self.start = '2015-01-03'
        self.end = '2015-04-07'
        self.year = 2014
        self.quarter = 4

    def test_get_stock_basics(self):
        # Live network call against the remote data source; prints rather
        # than asserts, so this is a smoke test only.
        print(fd.get_stock_basics())

    # The tests below are intentionally disabled (slow remote endpoints);
    # kept for reference.
    # def test_get_report_data(self):
    #     self.set_data()
    #     print(fd.get_report_data(self.year, self.quarter))
    #
    # def test_get_profit_data(self):
    #     self.set_data()
    #     print(fd.get_profit_data(self.year, self.quarter))
    #
    # def test_get_operation_data(self):
    #     self.set_data()
    #     print(fd.get_operation_data(self.year, self.quarter))
    #
    # def test_get_growth_data(self):
    #     self.set_data()
    #     print(fd.get_growth_data(self.year, self.quarter))
    #
    # def test_get_debtpaying_data(self):
    #     self.set_data()
    #     print(fd.get_debtpaying_data(self.year, self.quarter))
    #
    # def test_get_cashflow_data(self):
    #     self.set_data()
    #     print(fd.get_cashflow_data(self.year, self.quarter))
if __name__ == '__main__':
unittest.main() | bsd-3-clause |
WZeke/m2_kernel | tools/perf/scripts/python/futex-contention.py | 11261 | 1486 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
# Per-thread state while a thread is blocked in futex(FUTEX_WAIT).
thread_thislock = {}   # tid -> futex address currently waited on
thread_blocktime = {}  # tid -> timestamp (ns) at which the wait began
lock_waits = {}  # long-lived stats on (tid,lock) blockage elapsed time
process_names = {}  # long-lived pid-to-execname mapping
# (The original initialized process_names twice; the first, dead assignment
# has been removed.)
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
        nr, uaddr, op, val, utime, uaddr2, val3):
    """Record the start of a FUTEX_WAIT for this thread (perf tracepoint hook)."""
    cmd = op & FUTEX_CMD_MASK
    if cmd != FUTEX_WAIT:
        return # we don't care about originators of WAKE events

    # Remember who is waiting, on which futex, and since when (ns).
    process_names[tid] = comm
    thread_thislock[tid] = uaddr
    thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
        nr, ret):
    """On return from futex(), credit the elapsed block time, if any."""
    # Only threads that entered via FUTEX_WAIT have recorded state
    # (dict.has_key is Python 2 only).
    if thread_blocktime.has_key(tid):
        elapsed = nsecs(s, ns) - thread_blocktime[tid]
        add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
        del thread_blocktime[tid]
        del thread_thislock[tid]
def trace_begin():
    # perf-script entry hook: runs once before events are processed.
    print "Press control+C to stop and show the summary"
def trace_end():
    """Print per-(thread, lock) contention statistics collected so far."""
    for (tid, lock) in lock_waits:
        # NOTE: min/max shadow the builtins here; only avg and count are used.
        min, max, avg, count = lock_waits[tid, lock]
        print "%s[%d] lock %x contended %d times, %d avg ns" % \
            (process_names[tid], tid, lock, count, avg)
| gpl-2.0 |
brownsr/Cinnamon | files/usr/share/cinnamon/cinnamon-looking-glass/page_log.py | 21 | 5224 | import datetime
from pageutils import *
from gi.repository import Gio, Gtk, GObject, Gdk, Pango, GLib
class LogEntry():
    """One message from the error stack, pre-rendered for display."""

    def __init__(self, category, time, message):
        self.category = category
        self.time = int(time)
        # NOTE(review): local time is rendered with a literal 'Z' suffix,
        # which normally denotes UTC -- confirm whether UTC was intended.
        stamp = datetime.datetime.fromtimestamp(self.time)
        self.timestr = stamp.strftime("%Y-%m-%dT%H:%M:%SZ")
        self.message = message
        self.formattedText = "%s t=%s %s\n" % (category, self.timestr, message)
class LogView(Gtk.ScrolledWindow):
    """Scrollable text view showing the error stack, colour-coded per
    category, with per-category visibility controlled via invisible tags."""
    def __init__(self):
        Gtk.ScrolledWindow.__init__(self)
        self.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
        self.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        self.textview = Gtk.TextView()
        self.textview.set_editable(False)
        self.add(self.textview)
        self.textbuffer = self.textview.get_buffer()
        # All LogEntry objects ever appended, in arrival order.
        self.log = []
        # Number of proxy entries already rendered into the buffer.
        self.addedMessages = 0
        self.firstMessageTime = None
        # Which categories are currently visible.
        self.enabledTypes = {'info': True, 'warning': True, 'error': True, 'trace': False }
        # One text tag per category: colour plus an "invisible" flag used to
        # hide that category without rebuilding the buffer.
        self.typeTags = {
            'info': self.textbuffer.create_tag("info", foreground="#1a6f18", invisible=self.enabledTypes["info"] != True, invisible_set=True),
            'warning': self.textbuffer.create_tag("warning", foreground="#c8bf33", invisible=self.enabledTypes["warning"] != True, invisible_set=True),
            'error': self.textbuffer.create_tag("error", foreground="#9f1313", invisible=self.enabledTypes["error"] != True, invisible_set=True),
            'trace': self.textbuffer.create_tag("trace", foreground="#18186f", invisible=self.enabledTypes["trace"] != True, invisible_set=True)
        }

        #todo: load all enabled types from gsettings
        #self.enabledTypes = {'info': False, 'warning': False, 'error': False, 'trace': False }
        #for key in data:
        #    self.enabledTypes[key] = True

        self.getUpdates()
        lookingGlassProxy.connect("LogUpdate", self.getUpdates)
        lookingGlassProxy.addStatusChangeCallback(self.onStatusChange)

    def append(self, category, time, message):
        """Create a LogEntry, remember it, and return it (does not render it)."""
        entry = LogEntry(category, time, message)
        self.log.append(entry)
        return entry

    def onButtonToggled(self, button, data):
        """Toggle visibility of one category (``data``) via its text tag."""
        self.textview.hide()
        active = button.get_active()
        self.enabledTypes[data] = active
        self.typeTags[data].props.invisible = active != True
        self.textbuffer.set_modified(True)
        #print self.textview.get_preferred_height()
        adj = self.get_vadjustment()
        #adj.set_upper(self.textview.get_allocated_height())
        self.textview.show()

    def onStatusChange(self, online):
        """Log the DBus connect/disconnect transition, then resync fully."""
        iter = self.textbuffer.get_end_iter()  # NOTE: shadows builtin iter
        if online:
            entry = self.append("info", 0, "================ DBus connection established ===============")
        else:
            entry = self.append("warning", 0, "================ DBus connection lost ===============")
        self.textbuffer.insert_with_tags(iter, entry.formattedText, self.typeTags[entry.category])
        self.getUpdates(True)

    def getUpdates(self, reread = False):
        """Fetch the error stack from the proxy and append any new entries.

        When reread is True (or the stack looks like a different log), the
        buffer is cleared and repopulated from the start.
        """
        success, data = lookingGlassProxy.GetErrorStack()
        if success:
            try:
                dataSize = len(data)
                if dataSize > 0:
                    # If this is a completely new log, start reading at the beginning
                    firstMessageTime = data[0]["timestamp"]
                    if self.addedMessages > dataSize or self.firstMessageTime != firstMessageTime or reread:
                        self.firstMessageTime = firstMessageTime
                        self.addedMessages = 0
                        if reread:
                            start, end = self.textbuffer.get_bounds()
                            self.textbuffer.delete(start, end)
                    # Hide the view while inserting to avoid incremental redraws.
                    self.textview.hide()
                    iter = self.textbuffer.get_end_iter()
                    for item in data[self.addedMessages:]:
                        # Timestamps appear to be in milliseconds (scaled by
                        # 0.001 to the seconds LogEntry expects) -- confirm.
                        entry = self.append(item["category"], float(item["timestamp"])*0.001, item["message"])
                        self.textbuffer.insert_with_tags(iter, entry.formattedText, self.typeTags[entry.category])
                        self.addedMessages += 1
                    self.textview.show()
            except Exception as e:
                print e
class ModulePage(WindowAndActionBars):
    """Looking-glass page showing the log, with one filter toggle per level."""

    def __init__(self, parent):
        self.view = LogView()
        WindowAndActionBars.__init__(self, self.view)
        self.parent = parent
        # One toggle per log level, created in display order.
        for log_type, icon, tip in (
                ("info", "dialog-information", "Show/Hide Messages tagged as 'info'"),
                ("warning", "dialog-warning", "Show/Hide Messages tagged as 'warning'"),
                ("error", "dialog-error", "Show/Hide Messages tagged as 'error'"),
                ("trace", "dialog-question", "Show/Hide Messages tagged as 'trace'")):
            self.addToggleButton(log_type, icon, tip)

    def addToggleButton(self, logType, icon, tooltip):
        """Add a left-bar toggle wired to the view's per-category filter."""
        button = ImageToggleButton(icon)
        button.connect("toggled", self.view.onButtonToggled, logType)
        button.set_active(self.view.enabledTypes[logType])
        button.set_tooltip_text(tooltip)
        self.addToLeftBar(button, 1)
| gpl-2.0 |
contactless/wirenboard | contrib/deb/mosquitto/mosquitto-1.3.4/lib/python/build/lib.linux-armv7l-2.6/mosquitto.py | 34 | 80748 | # Copyright (c) 2012,2013 Roger Light <roger@atchoo.org>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of mosquitto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
#
# This product includes software developed by the OpenSSL Project for use in
# the OpenSSL Toolkit. (http://www.openssl.org/)
# This product includes cryptographic software written by Eric Young
# (eay@cryptsoft.com)
# This product includes software written by Tim Hudson (tjh@cryptsoft.com)
"""
This is an MQTT v3.1 client module. MQTT is a lightweight pub/sub messaging
protocol that is easy to implement and suitable for low powered devices.
"""
import errno
import platform
import random
import select
import socket
import ssl
import struct
import sys
import threading
import time
# Library version: major.minor.revision, plus a single comparable integer
# (major*1000000 + minor*1000 + revision).
MOSQUITTO_MAJOR=1
MOSQUITTO_MINOR=2
MOSQUITTO_REVISION=90
MOSQUITTO_VERSION_NUMBER=(MOSQUITTO_MAJOR*1000000+MOSQUITTO_MINOR*1000+MOSQUITTO_REVISION)

# Winsock reports "would block" with its own error code.
if platform.system() == 'Windows':
    EAGAIN = errno.WSAEWOULDBLOCK
else:
    EAGAIN = errno.EAGAIN

# MQTT v3.1 protocol name; bytes on Python 3 (presumably for packet
# assembly -- confirm against the packing code).
if sys.version_info[0] < 3:
    PROTOCOL_NAME = "MQIsdp"
else:
    PROTOCOL_NAME = b"MQIsdp"
PROTOCOL_VERSION = 3
# Message types (MQTT control packet type, high nibble of the fixed header)
CONNECT = 0x10
CONNACK = 0x20
PUBLISH = 0x30
PUBACK = 0x40
PUBREC = 0x50
PUBREL = 0x60
PUBCOMP = 0x70
SUBSCRIBE = 0x80
SUBACK = 0x90
UNSUBSCRIBE = 0xA0
UNSUBACK = 0xB0
PINGREQ = 0xC0
PINGRESP = 0xD0
DISCONNECT = 0xE0
# Log levels (bit flags)
MOSQ_LOG_INFO = 0x01
MOSQ_LOG_NOTICE = 0x02
MOSQ_LOG_WARNING = 0x04
MOSQ_LOG_ERR = 0x08
MOSQ_LOG_DEBUG = 0x10
# CONNACK codes
CONNACK_ACCEPTED = 0
CONNACK_REFUSED_PROTOCOL_VERSION = 1
CONNACK_REFUSED_IDENTIFIER_REJECTED = 2
CONNACK_REFUSED_SERVER_UNAVAILABLE = 3
CONNACK_REFUSED_BAD_USERNAME_PASSWORD = 4
CONNACK_REFUSED_NOT_AUTHORIZED = 5
# Connection state
mosq_cs_new = 0
mosq_cs_connected = 1
mosq_cs_disconnecting = 2
mosq_cs_connect_async = 3
# Message direction
mosq_md_invalid = 0
mosq_md_in = 1
mosq_md_out = 2
# Message state
# BUG FIX: the original read "mosq_ms_invalid = 0," -- the stray trailing
# comma made this the tuple (0,) instead of the integer 0, so equality
# comparisons against the other (integer) states could never match it.
mosq_ms_invalid = 0
mosq_ms_wait_puback = 1
mosq_ms_wait_pubrec = 2
mosq_ms_wait_pubrel = 3
mosq_ms_wait_pubcomp = 4
# Error values
MOSQ_ERR_AGAIN = -1
MOSQ_ERR_SUCCESS = 0
MOSQ_ERR_NOMEM = 1
MOSQ_ERR_PROTOCOL = 2
MOSQ_ERR_INVAL = 3
MOSQ_ERR_NO_CONN = 4
MOSQ_ERR_CONN_REFUSED = 5
MOSQ_ERR_NOT_FOUND = 6
MOSQ_ERR_CONN_LOST = 7
MOSQ_ERR_TLS = 8
MOSQ_ERR_PAYLOAD_SIZE = 9
MOSQ_ERR_NOT_SUPPORTED = 10
MOSQ_ERR_AUTH = 11
MOSQ_ERR_ACL_DENIED = 12
MOSQ_ERR_UNKNOWN = 13
MOSQ_ERR_ERRNO = 14
def error_string(mosq_errno):
    """Return the error string associated with a mosquitto error number."""
    # MOSQ_ERR_AGAIN deliberately has no entry: like any unrecognised code,
    # it falls through to the generic message, matching the original chain.
    descriptions = {
        MOSQ_ERR_SUCCESS: "No error.",
        MOSQ_ERR_NOMEM: "Out of memory.",
        MOSQ_ERR_PROTOCOL: "A network protocol error occurred when communicating with the broker.",
        MOSQ_ERR_INVAL: "Invalid function arguments provided.",
        MOSQ_ERR_NO_CONN: "The client is not currently connected.",
        MOSQ_ERR_CONN_REFUSED: "The connection was refused.",
        MOSQ_ERR_NOT_FOUND: "Message not found (internal error).",
        MOSQ_ERR_CONN_LOST: "The connection was lost.",
        MOSQ_ERR_TLS: "A TLS error occurred.",
        MOSQ_ERR_PAYLOAD_SIZE: "Payload too large.",
        MOSQ_ERR_NOT_SUPPORTED: "This feature is not supported.",
        MOSQ_ERR_AUTH: "Authorisation failed.",
        MOSQ_ERR_ACL_DENIED: "Access denied by ACL.",
        MOSQ_ERR_UNKNOWN: "Unknown error.",
        MOSQ_ERR_ERRNO: "Error defined by errno.",
    }
    return descriptions.get(mosq_errno, "Unknown error.")
def connack_string(connack_code):
    """Return the string associated with a CONNACK result."""
    reasons = {
        0: "Connection Accepted.",
        1: "Connection Refused: unacceptable protocol version.",
        2: "Connection Refused: identifier rejected.",
        3: "Connection Refused: broker unavailable.",
        4: "Connection Refused: bad user name or password.",
        5: "Connection Refused: not authorised.",
    }
    return reasons.get(connack_code, "Connection Refused: unknown reason.")
def topic_matches_sub(sub, topic):
    """Check whether a topic matches a subscription.

    For example:

    foo/bar would match the subscription foo/# or +/bar
    non/matching would not match the subscription non/+/+
    """
    result = True
    multilevel_wildcard = False
    # spos/tpos walk the subscription and topic strings character by character.
    slen = len(sub)
    tlen = len(topic)
    spos = 0
    tpos = 0
    while spos < slen and tpos < tlen:
        if sub[spos] == topic[tpos]:
            # Literal character match: advance both cursors.
            spos += 1
            tpos += 1
            # Topic exhausted with one sub char left: a trailing '+' matches
            # an empty final level (e.g. "foo/" vs "foo/+").
            if tpos == tlen and spos == slen-1 and sub[spos] == '+':
                spos += 1
                result = True
                break
        else:
            if sub[spos] == '+':
                # '+' consumes one whole topic level: skip to the next '/'.
                spos += 1
                while tpos < tlen and topic[tpos] != '/':
                    tpos += 1
                if tpos == tlen and spos == slen:
                    result = True
                    break
            elif sub[spos] == '#':
                # '#' matches everything, but only as the last sub character.
                multilevel_wildcard = True
                if spos+1 != slen:
                    result = False
                    break
                else:
                    result = True
                    break
            else:
                # Plain mismatch.
                result = False
                break
        if tpos == tlen-1:
            # Check for e.g. foo matching foo/#
            if spos == slen-3 and sub[spos+1] == '/' and sub[spos+2] == '#':
                result = True
                multilevel_wildcard = True
                break
    # Without a multi-level wildcard, both strings must be fully consumed.
    if not multilevel_wildcard and (tpos < tlen or spos < slen):
        result = False

    return result
class MosquittoMessage:
    """Describes an incoming MQTT message.

    An instance is passed to the on_message callback as the message
    parameter.
    Members of interest to users:
    topic : String. Topic that the message was published on.
    payload : String/bytes. The message payload.
    qos : Integer. The message Quality of Service, 0, 1 or 2.
    retain : Boolean. If true, the message is a retained message and not fresh.
    mid : Integer. The message id.
    """
    def __init__(self):
        # User-visible message fields.
        self.topic = ""
        self.payload = None
        self.qos = 0
        self.retain = False
        self.mid = 0
        # Internal bookkeeping used by the client's QoS message flows.
        self.timestamp = 0
        self.direction = mosq_md_invalid
        self.state = mosq_ms_invalid
        self.dup = False
class MosquittoInPacket:
    """Internal datatype: assembly state for one incoming MQTT packet."""
    def __init__(self):
        self._reset()

    def cleanup(self):
        """Return the buffer to its pristine state, ready for the next packet."""
        self._reset()

    def _reset(self):
        # Fresh parse state: no command byte seen, no remaining-length bytes
        # consumed, empty payload buffer, nothing left to process.
        self.command = 0
        self.have_remaining = 0
        self.remaining_count = []
        self.remaining_mult = 1
        self.remaining_length = 0
        self.packet = b""
        self.to_process = 0
        self.pos = 0
class MosquittoPacket:
    """Internal datatype: one outgoing packet queued for transmission."""
    def __init__(self, command, packet, mid, qos):
        # Identity of the packet within the protocol flow.
        self.command = command
        self.mid = mid
        self.qos = qos
        # Raw bytes plus a write cursor: nothing sent yet, the whole
        # buffer is still pending.
        self.packet = packet
        self.pos = 0
        self.to_process = len(packet)
class Mosquitto:
"""MQTT version 3.1 client class.
This is the main class for use communicating with an MQTT broker.
General usage flow:
* Use connect()/connect_async() to connect to a broker
* Call loop() frequently to maintain network traffic flow with the broker
* Or use loop_start() to set a thread running to call loop() for you.
* Or use loop_forever() to handle calling loop() for you in a blocking
* function.
* Use subscribe() to subscribe to a topic and receive messages
* Use publish() to send messages
* Use disconnect() to disconnect from the broker
Data returned from the broker is made available with the use of callback
functions as described below.
Callbacks
=========
A number of callback functions are available to receive data back from the
broker. To use a callback, define a function and then assign it to the
client:
def on_connect(mosq, userdata, rc):
print("Connection returned " + str(rc))
client.on_connect = on_connect
All of the callbacks as described below have a "mosq" and an "userdata"
argument. "mosq" is the Mosquitto instance that is calling the callback.
"userdata" is user data of any type and can be set when creating a new
client instance or with user_data_set(userdata).
The callbacks:
on_connect(mosq, userdata, rc): called when the broker responds to our
connection request. The value of rc determines success or not:
0: Connection successful
1: Connection refused - incorrect protocol version
2: Connection refused - invalid client identifier
3: Connection refused - server unavailable
4: Connection refused - bad username or password
5: Connection refused - not authorised
6-255: Currently unused.
on_disconnect(mosq, userdata, rc): called when the client disconnects from
the broker. The rc parameter indicates the disconnection state. If
MOSQ_ERR_SUCCESS (0), the callback was called in response to a
disconnect() call. If any other value the disconnection was unexpected,
such as might be caused by a network error.
on_message(mosq, userdata, message): called when a message has been
received on a topic that the client subscribes to. The message variable
is a MosquittoMessage that describes all of the message parameters.
on_publish(mosq, userdata, mid): called when a message that was to be sent
using the publish() call has completed transmission to the broker. For
messages with QoS levels 1 and 2, this means that the appropriate
handshakes have completed. For QoS 0, this simply means that the message
has left the client. The mid variable matches the mid variable returned
from the corresponding publish() call, to allow outgoing messages to be
tracked. This callback is important because even if the publish() call
returns success, it does not always mean that the message has been sent.
on_subscribe(mosq, userdata, mid, granted_qos): called when the broker
responds to a subscribe request. The mid variable matches the mid
variable returned from the corresponding subscribe() call. The
granted_qos variable is a list of integers that give the QoS level the
broker has granted for each of the different subscription requests.
on_unsubscribe(mosq, userdata, mid): called when the broker responds to an
unsubscribe request. The mid variable matches the mid variable returned
from the corresponding unsubscribe() call.
on_log(mosq, userdata, level, buf): called when the client has log
information. Define to allow debugging. The level variable gives the
severity of the message and will be one of MOSQ_LOG_INFO,
MOSQ_LOG_NOTICE, MOSQ_LOG_WARNING, MOSQ_LOG_ERR, and MOSQ_LOG_DEBUG. The
message itself is in buf.
"""
def __init__(self, client_id="", clean_session=True, userdata=None):
"""client_id is the unique client id string used when connecting to the
broker. If client_id is zero length or None, then one will be randomly
generated. In this case, clean_session must be True. If this is not the
case a ValueError will be raised.
clean_session is a boolean that determines the client type. If True,
the broker will remove all information about this client when it
disconnects. If False, the client is a persistent client and
subscription information and queued messages will be retained when the
client disconnects.
Note that a client will never discard its own outgoing messages on
disconnect. Calling connect() or reconnect() will cause the messages to
be resent. Use reinitialise() to reset a client to its original state.
userdata is user defined data of any type that is passed as the
"userdata" parameter to callbacks. It may be updated at a later point
with the user_data_set() function.
"""
if not clean_session and (client_id == "" or client_id is None):
raise ValueError('A client id must be provided if clean session is False.')
self._userdata = userdata
self._sock = None
self._keepalive = 60
self._message_retry = 20
self._last_retry_check = 0
self._clean_session = clean_session
if client_id == "" or client_id is None:
self._client_id = "mosq/" + "".join(random.choice("0123456789ADCDEF") for x in range(23-5))
else:
self._client_id = client_id
self._username = ""
self._password = ""
self._in_packet = MosquittoInPacket()
self._out_packet = []
self._current_out_packet = None
self._last_msg_in = time.time()
self._last_msg_out = time.time()
self._ping_t = 0
self._last_mid = 0
self._state = mosq_cs_new
self._messages = []
self._max_inflight_messages = 20
self._inflight_messages = 0
self._will = False
self._will_topic = ""
self._will_payload = None
self._will_qos = 0
self._will_retain = False
self.on_disconnect = None
self.on_connect = None
self.on_publish = None
self.on_message = None
self.on_subscribe = None
self.on_unsubscribe = None
self.on_log = None
self._host = ""
self._port = 1883
self._bind_address = ""
self._in_callback = False
self._strict_protocol = False
self._callback_mutex = threading.Lock()
self._state_mutex = threading.Lock()
self._out_packet_mutex = threading.Lock()
self._current_out_packet_mutex = threading.Lock()
self._msgtime_mutex = threading.Lock()
self._message_mutex = threading.Lock()
self._thread = None
self._thread_terminate = False
self._ssl = None
self._tls_certfile = None
self._tls_keyfile = None
self._tls_ca_certs = None
self._tls_cert_reqs = None
self._tls_ciphers = None
self._tls_insecure = False
self._reconnect_delay = 1
self._reconnect_delay_max = 1
self._reconnect_exponential_backoff = False
    def __del__(self):
        # No special teardown is required: sockets are closed explicitly by
        # disconnect()/reinitialise() rather than relying on the finalizer.
        pass
def reinitialise(self, client_id="", clean_session=True, userdata=None):
if self._ssl:
self._ssl.close()
self._ssl = None
self._sock = None
elif self._sock:
self._sock.close()
self._sock = None
self.__init__(client_id, clean_session, userdata)
def tls_set(self, ca_certs, certfile=None, keyfile=None, cert_reqs=ssl.CERT_REQUIRED, tls_version=ssl.PROTOCOL_TLSv1, ciphers=None):
"""Configure network encryption and authentication options. Enables SSL/TLS support.
ca_certs : a string path to the Certificate Authority certificate files
that are to be treated as trusted by this client. If this is the only
option given then the client will operate in a similar manner to a web
browser. That is to say it will require the broker to have a
certificate signed by the Certificate Authorities in ca_certs and will
communicate using TLS v1, but will not attempt any form of
authentication. This provides basic network encryption but may not be
sufficient depending on how the broker is configured.
certfile and keyfile are strings pointing to the PEM encoded client
certificate and private keys respectively. If these arguments are not
None then they will be used as client information for TLS based
authentication. Support for this feature is broker dependent. Note
that if either of these files in encrypted and needs a password to
decrypt it, Python will ask for the password at the command line. It is
not currently possible to define a callback to provide the password.
cert_reqs allows the certificate requirements that the client imposes
on the broker to be changed. By default this is ssl.CERT_REQUIRED,
which means that the broker must provide a certificate. See the ssl
pydoc for more information on this parameter.
tls_version allows the version of the SSL/TLS protocol used to be
specified. By default TLS v1 is used. Previous versions (all versions
beginning with SSL) are possible but not recommended due to possible
security problems.
ciphers is a string specifying which encryption ciphers are allowable
for this connection, or None to use the defaults. See the ssl pydoc for
more information.
Must be called before connect() or connect_async()."""
if sys.version < '2.7':
raise ValueError('Python 2.7 is the minimum supported version for TLS.')
if ca_certs is None:
raise ValueError('ca_certs must not be None.')
try:
f = open(ca_certs, "r")
except IOError as err:
raise IOError(ca_certs+": "+err.strerror)
else:
f.close()
if certfile is not None:
try:
f = open(certfile, "r")
except IOError as err:
raise IOError(certfile+": "+err.strerror)
else:
f.close()
if keyfile is not None:
try:
f = open(keyfile, "r")
except IOError as err:
raise IOError(keyfile+": "+err.strerror)
else:
f.close()
self._tls_ca_certs = ca_certs
self._tls_certfile = certfile
self._tls_keyfile = keyfile
self._tls_cert_reqs = cert_reqs
self._tls_version = tls_version
self._tls_ciphers = ciphers
def tls_insecure_set(self, value):
"""Configure verification of the server hostname in the server certificate.
If value is set to true, it is impossible to guarantee that the host
you are connecting to is not impersonating your server. This can be
useful in initial server testing, but makes it possible for a malicious
third party to impersonate your server through DNS spoofing, for
example.
Do not use this function in a real system. Setting value to true means
there is no point using encryption.
Must be called before connect()."""
self._tls_insecure = value
def connect(self, host, port=1883, keepalive=60, bind_address=""):
"""Connect to a remote broker.
host is the hostname or IP address of the remote broker.
port is the network port of the server host to connect to. Defaults to
1883. Note that the default port for MQTT over SSL/TLS is 8883 so if
you are using tls_set() the port may need providing.
keepalive: Maximum period in seconds between communications with the
broker. If no other messages are being exchanged, this controls the
rate at which the client will send ping messages to the broker.
"""
self.connect_async(host, port, keepalive, bind_address)
return self.reconnect()
def connect_async(self, host, port=1883, keepalive=60, bind_address=""):
"""Connect to a remote broker asynchronously. This is a non-blocking
connect call that can be used with loop_start() to provide very quick
start.
host is the hostname or IP address of the remote broker.
port is the network port of the server host to connect to. Defaults to
1883. Note that the default port for MQTT over SSL/TLS is 8883 so if
you are using tls_set() the port may need providing.
keepalive: Maximum period in seconds between communications with the
broker. If no other messages are being exchanged, this controls the
rate at which the client will send ping messages to the broker.
"""
if host is None or len(host) == 0:
raise ValueError('Invalid host.')
if port <= 0:
raise ValueError('Invalid port number.')
if keepalive < 0:
raise ValueError('Keepalive must be >=0.')
if bind_address != "" and bind_address is not None:
if (sys.version_info[0] == 2 and sys.version_info[1] < 7) or (sys.version_info[0] == 3 and sys.version_info[1] < 2):
raise ValueError('bind_address requires Python 2.7 or 3.2.')
self._host = host
self._port = port
self._keepalive = keepalive
self._bind_address = bind_address
self._state_mutex.acquire()
self._state = mosq_cs_connect_async
self._state_mutex.release()
def reconnect(self):
"""Reconnect the client after a disconnect. Can only be called after
connect()/connect_async()."""
if len(self._host) == 0:
raise ValueError('Invalid host.')
if self._port <= 0:
raise ValueError('Invalid port number.')
self._in_packet.cleanup()
self._out_packet_mutex.acquire()
self._out_packet = []
self._out_packet_mutex.release()
self._current_out_packet_mutex.acquire()
self._current_out_packet = None
self._current_out_packet_mutex.release()
self._msgtime_mutex.acquire()
self._last_msg_in = time.time()
self._last_msg_out = time.time()
self._msgtime_mutex.release()
self._ping_t = 0
self._state_mutex.acquire()
self._state = mosq_cs_new
self._state_mutex.release()
if self._ssl:
self._ssl.close()
self._ssl = None
self._sock = None
elif self._sock:
self._sock.close()
self._sock = None
# Put messages in progress in a valid state.
self._messages_reconnect_reset()
try:
if (sys.version_info[0] == 2 and sys.version_info[1] < 7) or (sys.version_info[0] == 3 and sys.version_info[1] < 2):
self._sock = socket.create_connection((self._host, self._port), source_address=(self._bind_address, 0))
else:
self._sock = socket.create_connection((self._host, self._port))
except socket.error as err:
(msg) = err
if msg.errno != errno.EINPROGRESS:
raise
if self._tls_ca_certs is not None:
self._ssl = ssl.wrap_socket(
self._sock,
certfile=self._tls_certfile,
keyfile=self._tls_keyfile,
ca_certs=self._tls_ca_certs,
cert_reqs=self._tls_cert_reqs,
ssl_version=self._tls_version,
ciphers=self._tls_ciphers)
if self._tls_insecure is False:
if sys.version_info[0] < 3 or (sys.version_info[0] == 3 and sys.version_info[1] < 2):
self._tls_match_hostname()
else:
ssl.match_hostname(self._ssl.getpeercert(), self._host)
self._sock.setblocking(0)
return self._send_connect(self._keepalive, self._clean_session)
    def loop(self, timeout=1.0, max_packets=1):
        """Process network events.
        This function must be called regularly to ensure communication with the
        broker is carried out. It calls select() on the network socket to wait
        for network events. If incoming data is present it will then be
        processed. Outgoing commands, from e.g. publish(), are normally sent
        immediately that their function is called, but this is not always
        possible. loop() will also attempt to send any remaining outgoing
        messages, which also includes commands that are part of the flow for
        messages with QoS>0.
        timeout: The time in seconds to wait for incoming/outgoing network
        traffic before timing out and returning.
        max_packets: Not currently used.
        Returns MOSQ_ERR_SUCCESS on success.
        Returns >0 on error.
        A ValueError will be raised if timeout < 0"""
        if timeout < 0.0:
            raise ValueError('Invalid timeout.')
        # Promote the next queued packet to "current" (if none is in flight)
        # while holding both queue locks, and decide whether the socket
        # needs to be watched for writability.
        self._current_out_packet_mutex.acquire()
        self._out_packet_mutex.acquire()
        if self._current_out_packet is None and len(self._out_packet) > 0:
            self._current_out_packet = self._out_packet.pop(0)
        if self._current_out_packet:
            wlist = [self.socket()]
        else:
            wlist = []
        self._out_packet_mutex.release()
        self._current_out_packet_mutex.release()
        rlist = [self.socket()]
        try:
            socklist = select.select(rlist, wlist, [], timeout)
        except TypeError:
            # Socket isn't correct type, in likelihood connection is lost
            return MOSQ_ERR_CONN_LOST
        if self.socket() in socklist[0]:
            # Readable: pull incoming packets. A read may close the socket
            # (connection lost), so bail out if that happened.
            rc = self.loop_read(max_packets)
            if rc or (self._ssl is None and self._sock is None):
                return rc
        if self.socket() in socklist[1]:
            # Writable: push pending outgoing packets, same caveat as above.
            rc = self.loop_write(max_packets)
            if rc or (self._ssl is None and self._sock is None):
                return rc
        # Housekeeping: keepalive pings and message retries.
        return self.loop_misc()
    def publish(self, topic, payload=None, qos=0, retain=False):
        """Publish a message on a topic.
        This causes a message to be sent to the broker and subsequently from
        the broker to any clients subscribing to matching topics.
        topic: The topic that the message should be published on.
        payload: The actual message to send. If not given, or set to None a
        zero length message will be used. Passing an int or float will result
        in the payload being converted to a string representing that number. If
        you wish to send a true int/float, use struct.pack() to create the
        payload you require.
        qos: The quality of service level to use.
        retain: If set to true, the message will be set as the "last known
        good"/retained message for the topic.
        Returns a tuple (result, mid), where result is MOSQ_ERR_SUCCESS to
        indicate success or MOSQ_ERR_NO_CONN if the client is not currently
        connected. mid is the message ID for the publish request. The mid
        value can be used to track the publish request by checking against the
        mid argument in the on_publish() callback if it is defined.
        A ValueError will be raised if topic is None, has zero length or is
        invalid (contains a wildcard), if qos is not one of 0, 1 or 2, or if
        the length of the payload is greater than 268435455 bytes."""
        if topic is None or len(topic) == 0:
            raise ValueError('Invalid topic.')
        if qos<0 or qos>2:
            raise ValueError('Invalid QoS level.')
        # Normalise the payload: strings/bytearrays pass through, numbers
        # are stringified, anything else is rejected.
        if isinstance(payload, str) or isinstance(payload, bytearray):
            local_payload = payload
        elif isinstance(payload, int) or isinstance(payload, float):
            local_payload = str(payload)
        elif payload is None:
            local_payload = None
        else:
            raise TypeError('payload must be a string, bytearray, int, float or None.')
        # 268435455 is the largest MQTT remaining-length value (256MB-1).
        if local_payload is not None and len(local_payload) > 268435455:
            raise ValueError('Payload too large.')
        if self._topic_wildcard_len_check(topic) != MOSQ_ERR_SUCCESS:
            raise ValueError('Publish topic cannot contain wildcards.')
        local_mid = self._mid_generate()
        if qos == 0:
            # QoS 0 needs no acknowledgement flow - send straight away.
            rc = self._send_publish(local_mid, topic, local_payload, qos, retain, False)
            return (rc, local_mid)
        else:
            # QoS 1/2: build a tracked message so the ack handshake can be
            # driven from the message queue.
            message = MosquittoMessage()
            message.timestamp = time.time()
            message.direction = mosq_md_out
            message.mid = local_mid
            message.topic = topic
            if local_payload is None or len(local_payload) == 0:
                message.payload = None
            else:
                message.payload = local_payload
            message.qos = qos
            message.retain = retain
            message.dup = False
            self._message_mutex.acquire()
            self._messages.append(message)
            # Only transmit now if we are under the inflight window;
            # otherwise the message waits in the queue until slots free up.
            if self._max_inflight_messages == 0 or self._inflight_messages < self._max_inflight_messages:
                self._inflight_messages = self._inflight_messages+1
                if qos == 1:
                    message.state = mosq_ms_wait_puback
                elif qos == 2:
                    message.state = mosq_ms_wait_pubrec
                # Release before the (potentially blocking) network send.
                self._message_mutex.release()
                rc = self._send_publish(message.mid, message.topic, message.payload, message.qos, message.retain, message.dup)
                return (rc, local_mid)
            # Inflight window full: message stays queued for later delivery.
            self._message_mutex.release()
            return (MOSQ_ERR_SUCCESS, local_mid)
def username_pw_set(self, username, password=None):
"""Set a username and optionally a password for broker authentication.
Must be called before connect() to have any effect.
Requires a broker that supports MQTT v3.1.
username: The username to authenticate with. Need have no relationship
to the client id.
password: The password to authenticate with. Optional, set to None if
not required.
"""
self._username = username
self._password = password
def disconnect(self):
"""Disconnect a connected client from the broker."""
self._state_mutex.acquire()
self._state = mosq_cs_disconnecting
self._state_mutex.release()
if self._sock is None and self._ssl is None:
return MOSQ_ERR_NO_CONN
return self._send_disconnect()
def subscribe(self, topic, qos=0):
"""Subscribe the client to one or more topics.
This function may be called in three different ways:
Simple string and integer
-------------------------
e.g. subscribe("my/topic", 2)
topic: A string specifying the subscription topic to subscribe to.
qos: The desired quality of service level for the subscription.
Defaults to 0.
String and integer tuple
------------------------
e.g. subscribe(("my/topic", 1))
topic: A tuple of (topic, qos). Both topic and qos must be present in
the tuple.
qos: Not used.
List of string and integer tuples
------------------------
e.g. subscribe([("my/topic", 0), ("another/topic", 2)])
This allows multiple topic subscriptions in a single SUBSCRIPTION
command, which is more efficient than using multiple calls to
subscribe().
topic: A list of tuple of format (topic, qos). Both topic and qos must
be present in all of the tuples.
qos: Not used.
The function returns a tuple (result, mid), where result is
MOSQ_ERR_SUCCESS to indicate success or MOSQ_ERR_NO_CONN if the client
is not currently connected. mid is the message ID for the subscribe
request. The mid value can be used to track the subscribe request by
checking against the mid argument in the on_subscribe() callback if it
is defined.
Raises a ValueError if qos is not 0, 1 or 2, or if topic is None or has
zero string length, or if topic is not a string, tuple or list.
"""
topic_qos_list = None
if isinstance(topic, str):
if qos<0 or qos>2:
raise ValueError('Invalid QoS level.')
if topic is None or len(topic) == 0:
raise ValueError('Invalid topic.')
topic_qos_list = [(topic, qos)]
elif isinstance(topic, tuple):
if topic[1]<0 or topic[1]>2:
raise ValueError('Invalid QoS level.')
if topic[0] is None or len(topic[0]) == 0 or not isinstance(topic[0], str):
raise ValueError('Invalid topic.')
topic_qos_list = [topic]
elif isinstance(topic, list):
for t in topic:
if t[1]<0 or t[1]>2:
raise ValueError('Invalid QoS level.')
if t[0] is None or len(t[0]) == 0 or not isinstance(t[0], str):
raise ValueError('Invalid topic.')
topic_qos_list = topic
if topic_qos_list is None:
raise ValueError("No topic specified, or incorrect topic type.")
if self._sock is None and self._ssl is None:
return MOSQ_ERR_NO_CONN
return self._send_subscribe(False, topic_qos_list)
def unsubscribe(self, topic):
"""Unsubscribe the client from one or more topics.
topic: A single string, or list of strings that are the subscription
topics to unsubscribe from.
Returns a tuple (result, mid), where result is MOSQ_ERR_SUCCESS
to indicate success or MOSQ_ERR_NO_CONN if the client is not currently
connected.
mid is the message ID for the unsubscribe request. The mid value can be
used to track the unsubscribe request by checking against the mid
argument in the on_unsubscribe() callback if it is defined.
Raises a ValueError if topic is None or has zero string length, or is
not a string or list.
"""
topic_list = None
if topic is None:
raise ValueError('Invalid topic.')
if isinstance(topic, str):
if len(topic) == 0:
raise ValueError('Invalid topic.')
topic_list = [topic]
elif isinstance(topic, list):
for t in topic:
if len(t) == 0 or not isinstance(t, str):
raise ValueError('Invalid topic.')
topic_list = topic
if topic_list is None:
raise ValueError("No topic specified, or incorrect topic type.")
if self._sock is None and self._ssl is None:
return MOSQ_ERR_NO_CONN
return self._send_unsubscribe(False, topic_list)
def loop_read(self, max_packets=1):
"""Process read network events. Use in place of calling loop() if you
wish to handle your client reads as part of your own application.
Use socket() to obtain the client socket to call select() or equivalent
on.
Do not use if you are using the threaded interface loop_start()."""
if self._sock is None and self._ssl is None:
return MOSQ_ERR_NO_CONN
max_packets = len(self._messages)
if max_packets < 1:
max_packets = 1
for i in range(0, max_packets):
rc = self._packet_read()
if rc > 0:
return self._loop_rc_handle(rc)
elif rc == MOSQ_ERR_AGAIN:
return MOSQ_ERR_SUCCESS
return MOSQ_ERR_SUCCESS
def loop_write(self, max_packets=1):
"""Process read network events. Use in place of calling loop() if you
wish to handle your client reads as part of your own application.
Use socket() to obtain the client socket to call select() or equivalent
on.
Use want_write() to determine if there is data waiting to be written.
Do not use if you are using the threaded interface loop_start()."""
if self._sock is None and self._ssl is None:
return MOSQ_ERR_NO_CONN
max_packets = len(self._messages)
if max_packets < 1:
max_packets = 1
for i in range(0, max_packets):
rc = self._packet_write()
if rc > 0:
return self._loop_rc_handle(rc)
elif rc == MOSQ_ERR_AGAIN:
return MOSQ_ERR_SUCCESS
return MOSQ_ERR_SUCCESS
def want_write(self):
"""Call to determine if there is network data waiting to be written.
Useful if you are calling select() yourself rather than using loop().
"""
if self._current_out_packet or len(self._out_packet) > 0:
return True
else:
return False
    def loop_misc(self):
        """Process miscellaneous network events. Use in place of calling loop()
        if you wish to call select() or equivalent on.
        Handles keepalive pings, message retries and detection of a broker
        that has stopped responding to PINGREQ.
        Do not use if you are using the threaded interface loop_start()."""
        if self._sock is None and self._ssl is None:
            return MOSQ_ERR_NO_CONN
        now = time.time()
        self._check_keepalive()
        if self._last_retry_check+1 < now:
            # Only check once a second at most
            self._message_retry_check()
            self._last_retry_check = now
        if self._ping_t > 0 and now - self._ping_t >= self._keepalive:
            # mosq->ping_t != 0 means we are waiting for a pingresp.
            # This hasn't happened in the keepalive time so we should
            # disconnect.
            if self._ssl:
                self._ssl.close()
                self._ssl = None
            elif self._sock:
                self._sock.close()
                self._sock = None
            self._callback_mutex.acquire()
            # Report a clean result only if disconnect() was requested;
            # otherwise signal an unexpected connection loss.
            if self._state == mosq_cs_disconnecting:
                rc = MOSQ_ERR_SUCCESS
            else:
                rc = 1
            if self.on_disconnect:
                self._in_callback = True
                self.on_disconnect(self, self._userdata, rc)
                self._in_callback = False
            self._callback_mutex.release()
            return MOSQ_ERR_CONN_LOST
        return MOSQ_ERR_SUCCESS
def max_inflight_messages_set(self, inflight):
"""Set the maximum number of messages with QoS>0 that can be part way
through their network flow at once. Defaults to 20."""
if inflight < 0:
raise ValueError('Invalid inflight.')
self._max_inflight_messages = inflight
def message_retry_set(self, retry):
"""Set the timeout in seconds before a message with QoS>0 is retried.
20 seconds by default."""
if retry < 0:
raise ValueError('Invalid retry.')
self._message_retry = retry
def reconnect_delay_set(self, delay, delay_max, exponential_backoff):
"""Set the amount of time that the client will wait before reconnecting
after losing its connection to the broker.
delay is the number of seconds to wait between successive reconnect
attempts. By default this is set to 1.
delay_max is the maximum number of seconds to wait between reconnection
attempts and is also set to 1 by default. This means that the default
behaviour is to attempt to reconnect every second.
If exponential_backoff is False and delay_max is greater than delay,
then on each successive reconnect failure the delay will increase
linearly in the form delay*failure_count.
If exponential_backoff is True and delay_max is greater than delay,
then on each successive reconnect failure the delay will increase
exponentially in the form delay*failure_count^2.
In both cases, the maximum delay ever used is set by delay_max.
Example 1:
delay=2, delay_max=10, exponential_backoff=False
Delays would be: 2, 4, 6, 8, 10, 10, ...
Example 2:
delay=3, delay_max=30, exponential_backoff=True
Delays would be: 3, 6, 12, 24, 30, 30, ...
"""
if not isinstance(delay, int) or delay <= 0:
ValueError("delay must be a positive integer.")
if not isinstance(delay_max, int) or delay_max < delay:
ValueError("delay_max must be a integer and greater than delay.")
if not isinstance(exponential_backoff, bool):
ValueError("exponential_backoff must be a bool.")
self._reconnect_delay = delay
self._reconnect_delay_max = delay_max
self._reconnect_exponential_backoff = exponential_backoff
def user_data_set(self, userdata):
"""Set the user data variable passed to callbacks. May be any data
type."""
self._userdata = userdata
def will_set(self, topic, payload=None, qos=0, retain=False):
"""Set a Will to be sent by the broker in case the client disconnects
unexpectedly.
This must be called before connect() to have any effect.
topic: The topic that the will message should be published on.
payload: The message to send as a will. If not given, or set to None a
zero length message will be used as the will. Passing an int or float
will result in the payload being converted to a string representing
that number. If you wish to send a true int/float, use struct.pack() to
create the payload you require.
qos: The quality of service level to use for the will.
retain: If set to true, the will message will be set as the "last known
good"/retained message for the topic.
Raises a ValueError if qos is not 0, 1 or 2, or if topic is None or has
zero string length.
"""
if topic is None or len(topic) == 0:
raise ValueError('Invalid topic.')
if qos<0 or qos>2:
raise ValueError('Invalid QoS level.')
if isinstance(payload, str) or isinstance(payload, bytearray):
self._will_payload = payload
elif isinstance(payload, int) or isinstance(payload, float):
self._will_payload = str(payload)
elif payload is None:
self._will_payload = None
else:
raise TypeError('payload must be a string, bytearray, int, float or None.')
self._will = True
self._will_topic = topic
self._will_qos = qos
self._will_retain = retain
def will_clear(self):
""" Removes a will that was previously configured with will_set().
Must be called before connect() to have any effect."""
self._will = False
self._will_topic = ""
self._will_payload = None
self._will_qos = 0
self._will_retain = False
def socket(self):
"""Return the socket or ssl object for this client."""
if self._ssl:
return self._ssl
else:
return self._sock
    def loop_forever(self, timeout=1.0, max_packets=1):
        """This function calls loop() for you in an infinite blocking loop. It
        is useful for the case where you only want to run the MQTT client loop
        in your program.
        loop_forever() will handle reconnecting for you. If you call
        disconnect() in a callback it will return."""
        # reconnects counts consecutive failed connection attempts and
        # drives the (optional) backoff delay below.
        reconnects = 0
        run = True
        # If connect_async() was used the network connection has not been
        # made yet - establish it now before entering the main loop.
        if self._state == mosq_cs_connect_async:
            self.reconnect()
        while run:
            rc = MOSQ_ERR_SUCCESS
            # Inner loop: keep servicing the connection until it fails or
            # a clean shutdown is requested.
            while rc == MOSQ_ERR_SUCCESS:
                rc = self.loop(timeout, max_packets)
                # We don't need to worry about locking here, because we've
                # either called loop_forever() when in single threaded mode, or
                # in multi threaded mode when loop_stop() has been called and
                # so no other threads can access _current_out_packet,
                # _out_packet or _messages.
                if (self._thread_terminate
                    and self._current_out_packet == None
                    and len(self._out_packet) == 0
                    and len(self._messages) == 0):
                    rc = 1
                    run = False
            if rc == MOSQ_ERR_SUCCESS:
                reconnects = 0
            self._state_mutex.acquire()
            if self._state == mosq_cs_disconnecting:
                # disconnect() was called - exit cleanly, no reconnect.
                run = False
                self._state_mutex.release()
            else:
                self._state_mutex.release()
                # Work out how long to wait before retrying, applying
                # exponential backoff if configured.
                if self._reconnect_delay > 0 and self._reconnect_exponential_backoff:
                    reconnect_delay = self._reconnect_delay*reconnects*reconnects
                else:
                    reconnect_delay = self._reconnect_delay
                if reconnect_delay > self._reconnect_delay_max:
                    reconnect_delay = self._reconnect_delay_max
                else:
                    # Only grow the failure count while the delay is still
                    # below its cap.
                    reconnects = reconnects + 1
                time.sleep(reconnect_delay)
                # Re-check after sleeping: disconnect() may have been called
                # from another thread in the meantime.
                self._state_mutex.acquire()
                if self._state == mosq_cs_disconnecting:
                    run = False
                    self._state_mutex.release()
                else:
                    self._state_mutex.release()
                    try:
                        self.reconnect()
                    except socket.error as err:
                        # Attempt failed; loop round and retry after the
                        # next delay.
                        pass
        return rc
def loop_start(self):
"""This is part of the threaded client interface. Call this once to
start a new thread to process network traffic. This provides an
alternative to repeatedly calling loop() yourself.
"""
if self._thread is not None:
return MOSQ_ERR_INVAL
self._thread = threading.Thread(target=self._thread_main)
self._thread.daemon = True
self._thread.start()
def loop_stop(self, force=False):
    """This is part of the threaded client interface. Call this once to
    stop the network thread previously created with loop_start(). This call
    will block until the network thread finishes.

    The force parameter is currently ignored.
    """
    if self._thread is None:
        return MOSQ_ERR_INVAL
    # Ask the network thread to exit, wait for it, then forget it.
    self._thread_terminate = True
    self._thread.join()
    self._thread = None
# ============================================================
# Private functions
# ============================================================
def _loop_rc_handle(self, rc):
    # Common error-path handling for loop_read()/loop_write(): on a non-zero
    # rc, tear down the transport and fire on_disconnect.  A deliberate
    # disconnect (state == mosq_cs_disconnecting) is reported as success.
    if rc:
        if self._ssl:
            self._ssl.close()
            self._ssl = None
        elif self._sock:
            self._sock.close()
            self._sock = None
        self._state_mutex.acquire()
        if self._state == mosq_cs_disconnecting:
            rc = MOSQ_ERR_SUCCESS
        self._state_mutex.release()
        # _in_callback guards against re-entrant network calls from the
        # user's callback.
        self._callback_mutex.acquire()
        if self.on_disconnect:
            self._in_callback = True
            self.on_disconnect(self, self._userdata, rc)
            self._in_callback = False
        self._callback_mutex.release()
    return rc
def _packet_read(self):
    # This gets called if pselect() indicates that there is network data
    # available - ie. at least one byte.  What we do depends on what data
    # we already have.
    # If we've not got a command, attempt to read one and save it. This
    # should always work because it's only a single byte.
    # Then try to read the remaining length. This may fail because it is
    # may be more than one byte - will need to save data pending next read
    # if it does fail.
    # Then try to read the remaining payload, where 'payload' here means
    # the combined variable header and actual payload. This is the most
    # likely to fail due to longer length, so save current data and current
    # position.  After all data is read, send to _mosquitto_handle_packet()
    # to deal with.  Finally, free the memory and reset everything to
    # starting conditions.
    if self._in_packet.command == 0:
        # Step 1: read the single fixed-header command byte.
        try:
            if self._ssl:
                command = self._ssl.read(1)
            else:
                command = self._sock.recv(1)
        except socket.error as err:
            (msg) = err
            # WANT_READ/WANT_WRITE and EAGAIN mean "no data yet", not error.
            if self._ssl and (msg.errno == ssl.SSL_ERROR_WANT_READ or msg.errno == ssl.SSL_ERROR_WANT_WRITE):
                return MOSQ_ERR_AGAIN
            if msg.errno == EAGAIN:
                return MOSQ_ERR_AGAIN
            raise
        else:
            if len(command) == 0:
                # Zero-length read: the peer closed the connection.
                return 1
            command = struct.unpack("!B", command)
            self._in_packet.command = command[0]
    if self._in_packet.have_remaining == 0:
        # Step 2: read the variable-length "remaining length" field.
        # Read remaining
        # Algorithm for decoding taken from pseudo code in the spec.
        while True:
            try:
                if self._ssl:
                    byte = self._ssl.read(1)
                else:
                    byte = self._sock.recv(1)
            except socket.error as err:
                (msg) = err
                if self._ssl and (msg.errno == ssl.SSL_ERROR_WANT_READ or msg.errno == ssl.SSL_ERROR_WANT_WRITE):
                    return MOSQ_ERR_AGAIN
                if msg.errno == EAGAIN:
                    return MOSQ_ERR_AGAIN
                raise
            else:
                byte = struct.unpack("!B", byte)
                byte = byte[0]
                self._in_packet.remaining_count.append(byte)
                # Max 4 bytes length for remaining length as defined by
                # protocol. Anything more likely means a broken/malicious
                # client.
                if len(self._in_packet.remaining_count) > 4:
                    return MOSQ_ERR_PROTOCOL
                self._in_packet.remaining_length = self._in_packet.remaining_length + (byte & 127)*self._in_packet.remaining_mult
                self._in_packet.remaining_mult = self._in_packet.remaining_mult * 128
                # High bit clear marks the last length byte.
                if (byte & 128) == 0:
                    break
        self._in_packet.have_remaining = 1
        self._in_packet.to_process = self._in_packet.remaining_length
    # Step 3: read the variable header + payload bytes.
    while self._in_packet.to_process > 0:
        try:
            if self._ssl:
                data = self._ssl.read(self._in_packet.to_process)
            else:
                data = self._sock.recv(self._in_packet.to_process)
        except socket.error as err:
            (msg) = err
            if self._ssl and (msg.errno == ssl.SSL_ERROR_WANT_READ or msg.errno == ssl.SSL_ERROR_WANT_WRITE):
                return MOSQ_ERR_AGAIN
            if msg.errno == EAGAIN:
                return MOSQ_ERR_AGAIN
            raise
        else:
            self._in_packet.to_process = self._in_packet.to_process - len(data)
            self._in_packet.packet = self._in_packet.packet + data
    # All data for this packet is read.
    self._in_packet.pos = 0
    rc = self._packet_handle()
    # Free data and reset values
    self._in_packet.cleanup()
    self._msgtime_mutex.acquire()
    self._last_msg_in = time.time()
    self._msgtime_mutex.release()
    return rc
def _packet_write(self):
    # Drain the outgoing packet queue, writing as many bytes as the socket
    # will accept.  Holds _current_out_packet_mutex for the duration.
    self._current_out_packet_mutex.acquire()
    while self._current_out_packet:
        packet = self._current_out_packet
        try:
            if self._ssl:
                write_length = self._ssl.write(packet.packet[packet.pos:])
            else:
                write_length = self._sock.send(packet.packet[packet.pos:])
        except AttributeError:
            # _ssl/_sock became None under us (disconnect); nothing to do.
            self._current_out_packet_mutex.release()
            return MOSQ_ERR_SUCCESS
        except socket.error as err:
            self._current_out_packet_mutex.release()
            (msg) = err
            if self._ssl and (msg.errno == ssl.SSL_ERROR_WANT_READ or msg.errno == ssl.SSL_ERROR_WANT_WRITE):
                return MOSQ_ERR_AGAIN
            if msg.errno == EAGAIN:
                return MOSQ_ERR_AGAIN
            raise
        if write_length > 0:
            packet.to_process = packet.to_process - write_length
            packet.pos = packet.pos + write_length
            if packet.to_process == 0:
                # The packet is fully on the wire.
                if (packet.command & 0xF0) == PUBLISH and packet.qos == 0:
                    # QoS 0 publish: report success as soon as it is sent.
                    self._callback_mutex.acquire()
                    if self.on_publish:
                        self._in_callback = True
                        self.on_publish(self, self._userdata, packet.mid)
                        self._in_callback = False
                    self._callback_mutex.release()
                if (packet.command & 0xF0) == DISCONNECT:
                    # DISCONNECT sent: fire on_disconnect(rc=0) and stop writing.
                    self._current_out_packet_mutex.release()
                    self._msgtime_mutex.acquire()
                    self._last_msg_out = time.time()
                    self._msgtime_mutex.release()
                    self._callback_mutex.acquire()
                    if self.on_disconnect:
                        self._in_callback = True
                        self.on_disconnect(self, self._userdata, 0)
                        self._in_callback = False
                    self._callback_mutex.release()
                    return MOSQ_ERR_SUCCESS
                # Promote the next queued packet, if any.
                self._out_packet_mutex.acquire()
                if len(self._out_packet) > 0:
                    self._current_out_packet = self._out_packet.pop(0)
                else:
                    self._current_out_packet = None
                self._out_packet_mutex.release()
        else:
            pass # FIXME
    self._current_out_packet_mutex.release()
    self._msgtime_mutex.acquire()
    self._last_msg_out = time.time()
    self._msgtime_mutex.release()
    return MOSQ_ERR_SUCCESS
def _easy_log(self, level, buf):
    """Forward a log line to the user's on_log callback, if one is set."""
    log_callback = self.on_log
    if log_callback:
        log_callback(self, self._userdata, level, buf)
def _check_keepalive(self):
    # If the keepalive interval has elapsed with no traffic in either
    # direction, send a PINGREQ; if a previous PINGREQ is still outstanding
    # (_ping_t != 0) or we are not connected, treat the link as dead.
    now = time.time()
    self._msgtime_mutex.acquire()
    last_msg_out = self._last_msg_out
    last_msg_in = self._last_msg_in
    self._msgtime_mutex.release()
    if (self._sock is not None or self._ssl is not None) and (now - last_msg_out >= self._keepalive or now - last_msg_in >= self._keepalive):
        if self._state == mosq_cs_connected and self._ping_t == 0:
            self._send_pingreq()
            self._msgtime_mutex.acquire()
            self._last_msg_out = now
            self._last_msg_in = now
            self._msgtime_mutex.release()
        else:
            # Keepalive expired without a PINGRESP: close the transport.
            if self._ssl:
                self._ssl.close()
                self._ssl = None
            elif self._sock:
                self._sock.close()
                self._sock = None
            # A deliberate disconnect is reported to the callback as success.
            if self._state == mosq_cs_disconnecting:
                rc = MOSQ_ERR_SUCCESS
            else:
                rc = 1
            self._callback_mutex.acquire()
            if self.on_disconnect:
                self._in_callback = True
                self.on_disconnect(self, self._userdata, rc)
                self._in_callback = False
            self._callback_mutex.release()
def _mid_generate(self):
    """Return the next MQTT message id, wrapping from 65535 back to 1.

    Message id 0 is never produced; it is reserved by the protocol.
    """
    next_mid = self._last_mid + 1
    if next_mid == 65536:
        next_mid = 1
    self._last_mid = next_mid
    return next_mid
def _topic_wildcard_len_check(self, topic):
    """Validate a publish topic.

    Returns MOSQ_ERR_INVAL if the topic contains a '+' or '#' wildcard,
    is empty, or is longer than 65535 bytes; MOSQ_ERR_SUCCESS otherwise.
    """
    if not topic or len(topic) > 65535:
        return MOSQ_ERR_INVAL
    if '+' in topic or '#' in topic:
        return MOSQ_ERR_INVAL
    return MOSQ_ERR_SUCCESS
def _send_pingreq(self):
    """Queue a PINGREQ; on success record when it was sent in _ping_t."""
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PINGREQ")
    rc = self._send_simple_command(PINGREQ)
    if rc == MOSQ_ERR_SUCCESS:
        # _ping_t != 0 marks an outstanding ping for _check_keepalive().
        self._ping_t = time.time()
    return rc
def _send_pingresp(self):
    """Queue a PINGRESP in reply to a broker PINGREQ."""
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PINGRESP")
    return self._send_simple_command(PINGRESP)
def _send_puback(self, mid):
    """Queue a PUBACK acknowledging the QoS 1 message ``mid``."""
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBACK (Mid: "+str(mid)+")")
    return self._send_command_with_mid(PUBACK, mid, False)
def _send_pubcomp(self, mid):
    """Queue a PUBCOMP completing the QoS 2 flow for message ``mid``."""
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBCOMP (Mid: "+str(mid)+")")
    return self._send_command_with_mid(PUBCOMP, mid, False)
def _pack_remaining_length(self, packet, remaining_length):
    """Append the MQTT variable-length "remaining length" encoding.

    Each byte carries 7 bits of the value, least significant group first;
    the high bit flags that more bytes follow.  The protocol caps the
    field at 4 bytes, i.e. a maximum value of 268,435,455.

    Raises ValueError for values outside that range (the original code
    carried a FIXME for silently emitting a malformed packet here).
    Returns ``packet`` for convenience.
    """
    if remaining_length < 0 or remaining_length > 268435455:
        raise ValueError("remaining length must be in 0..268435455")
    while True:
        byte = remaining_length % 128
        remaining_length = remaining_length // 128
        # If there are more digits to encode, set the top bit of this digit
        if remaining_length > 0:
            byte = byte | 0x80
        packet.extend(struct.pack("!B", byte))
        if remaining_length == 0:
            return packet
def _pack_str16(self, packet, data):
    """Append ``data`` as a 16-bit-big-endian-length-prefixed UTF-8 string.

    bytearrays are appended verbatim; str (and, on Python 2, unicode) is
    UTF-8 encoded first.  Raises TypeError for anything else.
    """
    if isinstance(data, bytearray):
        packet.extend(struct.pack("!H", len(data)))
        packet.extend(data)
        return
    if sys.version_info[0] < 3:
        # Python 2: str is raw bytes, unicode needs encoding.
        if isinstance(data, str):
            packet.extend(struct.pack("!H" + str(len(data)) + "s", len(data), data))
        elif isinstance(data, unicode):
            udata = data.encode('utf-8')
            packet.extend(struct.pack("!H" + str(len(udata)) + "s", len(udata), udata))
        else:
            raise TypeError
    else:
        # Python 3: all text must be encoded before packing.
        if isinstance(data, str):
            udata = data.encode('utf-8')
            packet.extend(struct.pack("!H" + str(len(udata)) + "s", len(udata), udata))
        else:
            raise TypeError
def _send_publish(self, mid, topic, payload=None, qos=0, retain=False, dup=False):
    # Build and queue a PUBLISH packet.  The fixed-header flags encode
    # dup (bit 3), qos (bits 1-2) and retain (bit 0).
    if self._sock is None and self._ssl is None:
        return MOSQ_ERR_NO_CONN
    command = PUBLISH | ((dup&0x1)<<3) | (qos<<1) | retain
    packet = bytearray()
    packet.extend(struct.pack("!B", command))
    # Remaining length: 2-byte topic length prefix + topic (+ payload).
    if payload is None:
        remaining_length = 2+len(topic)
        self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBLISH (d"+str(dup)+", q"+str(qos)+", r"+str(int(retain))+", m"+str(mid)+", '"+topic+"' (NULL payload)")
    else:
        remaining_length = 2+len(topic) + len(payload)
        self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBLISH (d"+str(dup)+", q"+str(qos)+", r"+str(int(retain))+", m"+str(mid)+", '"+topic+"', ... ("+str(len(payload))+" bytes)")
    if qos > 0:
        # For message id
        remaining_length = remaining_length + 2
    self._pack_remaining_length(packet, remaining_length)
    self._pack_str16(packet, topic)
    if qos > 0:
        # For message id
        packet.extend(struct.pack("!H", mid))
    if payload is not None:
        if isinstance(payload, str):
            if sys.version_info[0] < 3:
                # Python 2 str is already bytes.
                pack_format = str(len(payload)) + "s"
                packet.extend(struct.pack(pack_format, payload))
            else:
                upayload = payload.encode('utf-8')
                pack_format = str(len(upayload)) + "s"
                packet.extend(struct.pack(pack_format, upayload))
        elif isinstance(payload, bytearray):
            packet.extend(payload)
        elif isinstance(payload, unicode):
            # Python 2 unicode text: encode before packing.
            upayload = payload.encode('utf-8')
            pack_format = str(len(upayload)) + "s"
            packet.extend(struct.pack(pack_format, upayload))
        else:
            raise TypeError('payload must be a string, unicode or a bytearray.')
    return self._packet_queue(PUBLISH, packet, mid, qos)
def _send_pubrec(self, mid):
    """Queue a PUBREC (QoS 2, receiver side, step 1) for message ``mid``."""
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBREC (Mid: "+str(mid)+")")
    return self._send_command_with_mid(PUBREC, mid, False)
def _send_pubrel(self, mid, dup=False):
    """Queue a PUBREL (QoS 2, sender side, step 2) for message ``mid``.

    The |2 sets the protocol-mandated reserved flag bits for PUBREL.
    """
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBREL (Mid: "+str(mid)+")")
    return self._send_command_with_mid(PUBREL|2, mid, dup)
def _send_command_with_mid(self, command, mid, dup):
    """Queue a PUBACK/PUBCOMP/PUBREC/PUBREL packet carrying a message id."""
    # The DUP flag lives in bit 3 of the command byte.
    if dup:
        command |= 8
    # Fixed header + 2-byte message id, so the remaining length is 2.
    packet = struct.pack('!BBH', command, 2, mid)
    return self._packet_queue(command, packet, mid, 1)
def _send_simple_command(self, command):
    """Queue a packet with no variable header or payload
    (DISCONNECT, PINGREQ, PINGRESP)."""
    # Fixed header only: command byte plus a zero remaining-length byte.
    packet = struct.pack('!BB', command, 0)
    return self._packet_queue(command, packet, 0, 0)
def _send_connect(self, keepalive, clean_session):
    # Build and queue the CONNECT packet.  The fixed 12 bytes cover the
    # protocol name/version, connect flags and keepalive fields.
    remaining_length = 12 + 2+len(self._client_id)
    connect_flags = 0
    if clean_session:
        connect_flags = connect_flags | 0x02
    if self._will:
        # Will topic + (possibly empty) will payload, each length-prefixed.
        if self._will_payload is not None:
            remaining_length = remaining_length + 2+len(self._will_topic) + 2+len(self._will_payload)
        else:
            remaining_length = remaining_length + 2+len(self._will_topic) + 2
        connect_flags = connect_flags | 0x04 | ((self._will_qos&0x03) << 3) | ((self._will_retain&0x01) << 5)
    if self._username:
        remaining_length = remaining_length + 2+len(self._username)
        connect_flags = connect_flags | 0x80
        # A password is only legal when a username is present.
        if self._password:
            connect_flags = connect_flags | 0x40
            remaining_length = remaining_length + 2+len(self._password)
    command = CONNECT
    packet = bytearray()
    packet.extend(struct.pack("!B", command))
    self._pack_remaining_length(packet, remaining_length)
    packet.extend(struct.pack("!H6sBBH", len(PROTOCOL_NAME), PROTOCOL_NAME, PROTOCOL_VERSION, connect_flags, keepalive))
    self._pack_str16(packet, self._client_id)
    if self._will:
        self._pack_str16(packet, self._will_topic)
        if self._will_payload is None or len(self._will_payload) == 0:
            # Empty will payload still needs its zero length prefix.
            packet.extend(struct.pack("!H", 0))
        else:
            self._pack_str16(packet, self._will_payload)
    if self._username:
        self._pack_str16(packet, self._username)
        if self._password:
            self._pack_str16(packet, self._password)
    # Remember the keepalive so _check_keepalive() can enforce it.
    self._keepalive = keepalive
    return self._packet_queue(command, packet, 0, 0)
def _send_disconnect(self):
    """Queue an MQTT DISCONNECT packet."""
    return self._send_simple_command(DISCONNECT)
def _send_subscribe(self, dup, topics):
    """Queue a SUBSCRIBE packet.

    ``topics`` is a list of (topic, qos) pairs.  Returns a tuple of
    (_packet_queue rc, local message id).
    """
    # Variable header (2-byte mid) plus, per topic, a length-prefixed
    # string and one requested-QoS byte.
    remaining_length = 2
    for topic, requested_qos in topics:
        remaining_length += 2 + len(topic) + 1
    command = SUBSCRIBE | (dup<<3) | (1<<1)
    packet = bytearray(struct.pack("!B", command))
    self._pack_remaining_length(packet, remaining_length)
    local_mid = self._mid_generate()
    packet.extend(struct.pack("!H", local_mid))
    for topic, requested_qos in topics:
        self._pack_str16(packet, topic)
        packet.extend(struct.pack("B", requested_qos))
    return (self._packet_queue(command, packet, local_mid, 1), local_mid)
def _send_unsubscribe(self, dup, topics):
    """Queue an UNSUBSCRIBE packet for a list of topic strings.

    Returns a tuple of (_packet_queue rc, local message id).
    """
    # Variable header (2-byte mid) plus one length-prefixed string per topic.
    remaining_length = 2
    for topic in topics:
        remaining_length += 2 + len(topic)
    command = UNSUBSCRIBE | (dup<<3) | (1<<1)
    packet = bytearray(struct.pack("!B", command))
    self._pack_remaining_length(packet, remaining_length)
    local_mid = self._mid_generate()
    packet.extend(struct.pack("!H", local_mid))
    for topic in topics:
        self._pack_str16(packet, topic)
    return (self._packet_queue(command, packet, local_mid, 1), local_mid)
def _message_update(self, mid, direction, state):
    """Set the state (and refresh the timestamp) of the in-flight message
    matching ``mid``/``direction``.

    Returns MOSQ_ERR_SUCCESS if found, MOSQ_ERR_NOT_FOUND otherwise.
    """
    found = None
    self._message_mutex.acquire()
    for message in self._messages:
        if message.mid == mid and message.direction == direction:
            message.state = state
            message.timestamp = time.time()
            found = message
            break
    self._message_mutex.release()
    if found is not None:
        return MOSQ_ERR_SUCCESS
    return MOSQ_ERR_NOT_FOUND
def _message_retry_check(self):
    # Re-send the appropriate protocol step for any in-flight message whose
    # last activity is older than the retry interval, marking it DUP.
    self._message_mutex.acquire()
    now = time.time()
    for m in self._messages:
        if m.timestamp + self._message_retry < now:
            if m.state == mosq_ms_wait_puback or m.state == mosq_ms_wait_pubrec:
                # QoS 1/2 initial publish was never acknowledged.
                m.timestamp = now
                m.dup = True
                self._send_publish(m.mid, m.topic, m.payload, m.qos, m.retain, m.dup)
            elif m.state == mosq_ms_wait_pubrel:
                # Incoming QoS 2: our PUBREC went unanswered.
                m.timestamp = now
                m.dup = True
                self._send_pubrec(m.mid)
            elif m.state == mosq_ms_wait_pubcomp:
                # Outgoing QoS 2: our PUBREL went unanswered.
                m.timestamp = now
                m.dup = True
                self._send_pubrel(m.mid, True)
    self._message_mutex.release()
def _messages_reconnect_reset(self):
    """Reset in-flight message state after a reconnect.

    Outgoing QoS 1 messages restart at wait_puback; QoS 2 messages keep
    their current protocol step; anything over the in-flight limit that is
    not QoS 2 is dropped.  Incoming messages are preserved untouched.

    Bug fix: the original popped entries from ``self._messages`` while
    iterating the same list, which silently skipped the element following
    every dropped one.  This version builds a new list instead.
    """
    self._message_mutex.acquire()
    retained = []
    for m in self._messages:
        m.timestamp = 0
        if m.direction == mosq_md_out:
            if self._max_inflight_messages == 0 or self._inflight_messages < self._max_inflight_messages:
                if m.qos == 1:
                    m.state = mosq_ms_wait_puback
                elif m.qos == 2:
                    # Preserve current state
                    pass
                else:
                    m.state = mosq_ms_invalid
            else:
                # Over the in-flight limit: drop everything except QoS 2,
                # whose handshake must be preserved.
                if m.qos != 2:
                    continue
        retained.append(m)
    self._messages = retained
    self._message_mutex.release()
def _packet_queue(self, command, packet, mid, qos):
    # Append a packet to the outgoing queue and, if no packet is currently
    # being written, promote it to _current_out_packet.
    mpkt = MosquittoPacket(command, packet, mid, qos)
    self._out_packet_mutex.acquire()
    self._out_packet.append(mpkt)
    # Non-blocking acquire: if the writer holds the lock it will pick the
    # packet up itself when it finishes the current one.
    if self._current_out_packet_mutex.acquire(False):
        if self._current_out_packet is None and len(self._out_packet) > 0:
            self._current_out_packet = self._out_packet.pop(0)
        self._current_out_packet_mutex.release()
    self._out_packet_mutex.release()
    # Only write inline in single-threaded mode and when not inside a user
    # callback (which would recurse into the network code).
    if not self._in_callback and self._thread is None:
        return self.loop_write()
    else:
        return MOSQ_ERR_SUCCESS
def _packet_handle(self):
    """Dispatch a fully-read incoming packet to its command handler.

    Returns MOSQ_ERR_PROTOCOL for unrecognised command bytes.
    """
    # The high nibble of the first byte identifies the command.
    cmd = self._in_packet.command&0xF0
    handlers = {
        PINGREQ: self._handle_pingreq,
        PINGRESP: self._handle_pingresp,
        PUBLISH: self._handle_publish,
        PUBREC: self._handle_pubrec,
        PUBREL: self._handle_pubrel,
        CONNACK: self._handle_connack,
        SUBACK: self._handle_suback,
        UNSUBACK: self._handle_unsuback,
    }
    handler = handlers.get(cmd)
    if handler is not None:
        return handler()
    # PUBACK and PUBCOMP share one handler, parameterised by name.
    if cmd == PUBACK:
        return self._handle_pubackcomp("PUBACK")
    if cmd == PUBCOMP:
        return self._handle_pubackcomp("PUBCOMP")
    # If we don't recognise the command, return an error straight away.
    self._easy_log(MOSQ_LOG_ERR, "Error: Unrecognised command "+str(cmd))
    return MOSQ_ERR_PROTOCOL
def _handle_pingreq(self):
    """Answer a broker PINGREQ with a PINGRESP."""
    # PINGREQ carries no payload; in strict mode enforce that.
    if self._strict_protocol and self._in_packet.remaining_length != 0:
        return MOSQ_ERR_PROTOCOL
    self._easy_log(MOSQ_LOG_DEBUG, "Received PINGREQ")
    return self._send_pingresp()
def _handle_pingresp(self):
    """Process a PINGRESP: the outstanding keepalive ping was answered."""
    if self._strict_protocol and self._in_packet.remaining_length != 0:
        return MOSQ_ERR_PROTOCOL
    # No longer waiting for a PINGRESP.
    self._ping_t = 0
    self._easy_log(MOSQ_LOG_DEBUG, "Received PINGRESP")
    return MOSQ_ERR_SUCCESS
def _handle_connack(self):
    """Process CONNACK: fire on_connect and update connection state.

    Broker result codes 1-5 map to MOSQ_ERR_CONN_REFUSED; anything above
    is a protocol violation.
    """
    if self._strict_protocol and self._in_packet.remaining_length != 2:
        return MOSQ_ERR_PROTOCOL
    if len(self._in_packet.packet) != 2:
        return MOSQ_ERR_PROTOCOL
    (resvd, result) = struct.unpack("!BB", self._in_packet.packet)
    self._easy_log(MOSQ_LOG_DEBUG, "Received CONNACK ("+str(resvd)+", "+str(result)+")")
    self._callback_mutex.acquire()
    if self.on_connect:
        self._in_callback = True
        self.on_connect(self, self._userdata, result)
        self._in_callback = False
    self._callback_mutex.release()
    if result == 0:
        self._state = mosq_cs_connected
        return MOSQ_ERR_SUCCESS
    if 0 < result < 6:
        return MOSQ_ERR_CONN_REFUSED
    return MOSQ_ERR_PROTOCOL
def _handle_suback(self):
    """Process SUBACK: unpack the granted QoS list and fire on_subscribe."""
    self._easy_log(MOSQ_LOG_DEBUG, "Received SUBACK")
    # Variable header is the 2-byte mid; the rest is one QoS byte per topic.
    head_format = "!H" + str(len(self._in_packet.packet)-2) + 's'
    (mid, qos_bytes) = struct.unpack(head_format, self._in_packet.packet)
    granted_qos = struct.unpack("!" + "B"*len(qos_bytes), qos_bytes)
    self._callback_mutex.acquire()
    if self.on_subscribe:
        self._in_callback = True
        self.on_subscribe(self, self._userdata, mid, granted_qos)
        self._in_callback = False
    self._callback_mutex.release()
    return MOSQ_ERR_SUCCESS
def _handle_publish(self):
    # Decode an incoming PUBLISH and run the QoS-appropriate flow:
    # QoS 0 -> deliver immediately; QoS 1 -> PUBACK then deliver;
    # QoS 2 -> PUBREC and park the message until PUBREL arrives.
    rc = 0
    header = self._in_packet.command
    message = MosquittoMessage()
    message.direction = mosq_md_in
    # dup/qos/retain are encoded in the low nibble of the command byte.
    message.dup = (header & 0x08)>>3
    message.qos = (header & 0x06)>>1
    message.retain = (header & 0x01)
    # First field: 16-bit length-prefixed topic string.
    pack_format = "!H" + str(len(self._in_packet.packet)-2) + 's'
    (slen, packet) = struct.unpack(pack_format, self._in_packet.packet)
    pack_format = '!' + str(slen) + 's' + str(len(packet)-slen) + 's'
    (message.topic, packet) = struct.unpack(pack_format, packet)
    if len(message.topic) == 0:
        return MOSQ_ERR_PROTOCOL
    if sys.version_info[0] >= 3:
        message.topic = message.topic.decode('utf-8')
    if message.qos > 0:
        # QoS > 0 carries a 2-byte message id before the payload.
        pack_format = "!H" + str(len(packet)-2) + 's'
        (message.mid, packet) = struct.unpack(pack_format, packet)
    message.payload = packet
    self._easy_log(
        MOSQ_LOG_DEBUG,
        "Received PUBLISH (d"+str(message.dup)+
        ", q"+str(message.qos)+", r"+str(message.retain)+
        ", m"+str(message.mid)+", '"+message.topic+
        "', ... ("+str(len(message.payload))+" bytes)")
    message.timestamp = time.time()
    if message.qos == 0:
        self._callback_mutex.acquire()
        if self.on_message:
            self._in_callback = True
            self.on_message(self, self._userdata, message)
            self._in_callback = False
        self._callback_mutex.release()
        return MOSQ_ERR_SUCCESS
    elif message.qos == 1:
        rc = self._send_puback(message.mid)
        self._callback_mutex.acquire()
        if self.on_message:
            self._in_callback = True
            self.on_message(self, self._userdata, message)
            self._in_callback = False
        self._callback_mutex.release()
        return rc
    elif message.qos == 2:
        # Delivery is deferred until the broker's PUBREL (_handle_pubrel).
        rc = self._send_pubrec(message.mid)
        message.state = mosq_ms_wait_pubrel
        self._message_mutex.acquire()
        self._messages.append(message)
        self._message_mutex.release()
        return rc
    else:
        return MOSQ_ERR_PROTOCOL
def _handle_pubrel(self):
    # QoS 2, receiver side, step 2: the broker released the message, so
    # deliver it exactly once and answer with PUBCOMP.
    if self._strict_protocol:
        if self._in_packet.remaining_length != 2:
            return MOSQ_ERR_PROTOCOL
    if len(self._in_packet.packet) != 2:
        return MOSQ_ERR_PROTOCOL
    mid = struct.unpack("!H", self._in_packet.packet)
    mid = mid[0]
    self._easy_log(MOSQ_LOG_DEBUG, "Received PUBREL (Mid: "+str(mid)+")")
    self._message_mutex.acquire()
    for i in range(len(self._messages)):
        if self._messages[i].direction == mosq_md_in and self._messages[i].mid == mid:
            # Only pass the message on if we have removed it from the queue - this
            # prevents multiple callbacks for the same message.
            self._callback_mutex.acquire()
            if self.on_message:
                self._in_callback = True
                self.on_message(self, self._userdata, self._messages[i])
                self._in_callback = False
            self._callback_mutex.release()
            self._messages.pop(i)
            self._inflight_messages = self._inflight_messages - 1
            if self._max_inflight_messages > 0:
                # A slot freed up; try to start a queued outgoing message.
                rc = self._update_inflight()
                if rc != MOSQ_ERR_SUCCESS:
                    self._message_mutex.release()
                    return rc
            self._message_mutex.release()
            return self._send_pubcomp(mid)
    self._message_mutex.release()
    return MOSQ_ERR_SUCCESS
def _update_inflight(self):
    # Start sending queued outgoing messages while in-flight slots remain.
    # Dont lock message_mutex here
    # (every caller already holds it).
    for m in self._messages:
        if self._inflight_messages < self._max_inflight_messages:
            # mosq_ms_invalid marks a queued message not yet on the wire.
            if m.qos > 0 and m.state == mosq_ms_invalid and m.direction == mosq_md_out:
                self._inflight_messages = self._inflight_messages + 1
                if m.qos == 1:
                    m.state = mosq_ms_wait_puback
                elif m.qos == 2:
                    m.state = mosq_ms_wait_pubrec
                rc = self._send_publish(m.mid, m.topic, m.payload, m.qos, m.retain, m.dup)
                if rc != 0:
                    return rc
        else:
            # No free slots left; stop scanning.
            return MOSQ_ERR_SUCCESS
    return MOSQ_ERR_SUCCESS
def _handle_pubrec(self):
    """QoS 2, sender side, step 1: the broker received our publish.

    Advance the matching outgoing message to wait_pubcomp and reply
    with a PUBREL.
    """
    if self._strict_protocol and self._in_packet.remaining_length != 2:
        return MOSQ_ERR_PROTOCOL
    (mid,) = struct.unpack("!H", self._in_packet.packet)
    self._easy_log(MOSQ_LOG_DEBUG, "Received PUBREC (Mid: "+str(mid)+")")
    self._message_mutex.acquire()
    for message in self._messages:
        if message.direction == mosq_md_out and message.mid == mid:
            message.state = mosq_ms_wait_pubcomp
            message.timestamp = time.time()
            self._message_mutex.release()
            return self._send_pubrel(mid, False)
    self._message_mutex.release()
    return MOSQ_ERR_SUCCESS
def _handle_unsuback(self):
    """Process UNSUBACK: fire on_unsubscribe with the acknowledged mid."""
    if self._strict_protocol and self._in_packet.remaining_length != 2:
        return MOSQ_ERR_PROTOCOL
    (mid,) = struct.unpack("!H", self._in_packet.packet)
    self._easy_log(MOSQ_LOG_DEBUG, "Received UNSUBACK (Mid: "+str(mid)+")")
    self._callback_mutex.acquire()
    if self.on_unsubscribe:
        self._in_callback = True
        self.on_unsubscribe(self, self._userdata, mid)
        self._in_callback = False
    self._callback_mutex.release()
    return MOSQ_ERR_SUCCESS
def _handle_pubackcomp(self, cmd):
    # Shared handler for PUBACK (end of QoS 1) and PUBCOMP (end of QoS 2):
    # the outgoing message ``mid`` is fully acknowledged.  ``cmd`` is only
    # used for the log line.
    if self._strict_protocol:
        if self._in_packet.remaining_length != 2:
            return MOSQ_ERR_PROTOCOL
    mid = struct.unpack("!H", self._in_packet.packet)
    mid = mid[0]
    self._easy_log(MOSQ_LOG_DEBUG, "Received "+cmd+" (Mid: "+str(mid)+")")
    self._message_mutex.acquire()
    for i in range(len(self._messages)):
        try:
            if self._messages[i].direction == mosq_md_out and self._messages[i].mid == mid:
                # Only inform the client the message has been sent once.
                self._callback_mutex.acquire()
                if self.on_publish:
                    self._in_callback = True
                    self.on_publish(self, self._userdata, mid)
                    self._in_callback = False
                self._callback_mutex.release()
                self._messages.pop(i)
                self._inflight_messages = self._inflight_messages - 1
                if self._max_inflight_messages > 0:
                    # A slot freed up; try to start a queued message.
                    rc = self._update_inflight()
                    if rc != MOSQ_ERR_SUCCESS:
                        self._message_mutex.release()
                        return rc
                self._message_mutex.release()
                return MOSQ_ERR_SUCCESS
        except IndexError:
            # Have removed item so i>count.
            # Not really an error.
            pass
    self._message_mutex.release()
    return MOSQ_ERR_SUCCESS
def _thread_main(self):
    """Entry point of the network thread started by loop_start()."""
    self._thread_terminate = False
    # If connect_async() was used, complete the connection first.
    self._state_mutex.acquire()
    if self._state == mosq_cs_connect_async:
        self._state_mutex.release()
        self.reconnect()
    else:
        self._state_mutex.release()
    # Run the client loop until loop_stop() or disconnect().
    self.loop_forever()
def _tls_match_hostname(self):
    # Verify that the peer certificate matches self._host, checking
    # subjectAltName entries first and falling back to the subject
    # commonName only when no SAN DNS/IP entry exists.
    # NOTE(review): no wildcard (*.example.com) support, and the DNS
    # comparison is case-sensitive while the others are not — consider
    # ssl.match_hostname() instead; confirm before changing behaviour.
    cert = self._ssl.getpeercert()
    san = cert.get('subjectAltName')
    if san:
        have_san_dns = False
        for ((key, value),) in san:
            if key == 'DNS':
                have_san_dns = True
                if value == self._host:
                    return
            if key == 'IP Address':
                have_san_dns = True
                if value.lower() == self._host.lower():
                    return
        if have_san_dns:
            # Only check subject if subjectAltName dns not found.
            raise ssl.SSLError('Certificate subject does not match remote hostname.')
    subject = cert.get('subject')
    if subject:
        for ((key, value),) in subject:
            if key == 'commonName':
                if value.lower() == self._host.lower():
                    return
    raise ssl.SSLError('Certificate subject does not match remote hostname.')
| mit |
littlstar/chromium.src | third_party/cython/src/Cython/Compiler/ParseTreeTransforms.py | 86 | 115877 | import cython
cython.declare(PyrexTypes=object, Naming=object, ExprNodes=object, Nodes=object,
Options=object, UtilNodes=object, LetNode=object,
LetRefNode=object, TreeFragment=object, EncodedString=object,
error=object, warning=object, copy=object)
import PyrexTypes
import Naming
import ExprNodes
import Nodes
import Options
import Builtin
from Cython.Compiler.Visitor import VisitorTransform, TreeVisitor
from Cython.Compiler.Visitor import CythonTransform, EnvTransform, ScopeTrackingTransform
from Cython.Compiler.UtilNodes import LetNode, LetRefNode, ResultRefNode
from Cython.Compiler.TreeFragment import TreeFragment
from Cython.Compiler.StringEncoding import EncodedString
from Cython.Compiler.Errors import error, warning, CompileError, InternalError
from Cython.Compiler.Code import UtilityCode
import copy
class NameNodeCollector(TreeVisitor):
    """Collect all NameNodes of a (sub-)tree in the ``name_nodes``
    attribute.
    """

    def __init__(self):
        super(NameNodeCollector, self).__init__()
        self.name_nodes = []

    def visit_NameNode(self, node):
        # Record the node; NameNodes have no children worth descending into.
        self.name_nodes.append(node)

    def visit_Node(self, node):
        # Generic fallback: just recurse into the children.
        self._visitchildren(node, None)
class SkipDeclarations(object):
    """
    Variable and function declarations can often have a deep tree structure,
    and yet most transformations don't need to descend to this depth.

    Declaration nodes are removed after AnalyseDeclarationsTransform, so there
    is no need to use this for transformations after that point.
    """
    # Each visitor returns the node unchanged without visiting children,
    # pruning traversal of declaration subtrees.

    def visit_CTypeDefNode(self, node):
        return node

    def visit_CVarDefNode(self, node):
        return node

    def visit_CDeclaratorNode(self, node):
        return node

    def visit_CBaseTypeNode(self, node):
        return node

    def visit_CEnumDefNode(self, node):
        return node

    def visit_CStructOrUnionDefNode(self, node):
        return node
class NormalizeTree(CythonTransform):
    """
    This transform fixes up a few things after parsing
    in order to make the parse tree more suitable for
    transforms.

    a) After parsing, blocks with only one statement will
    be represented by that statement, not by a StatListNode.
    When doing transforms this is annoying and inconsistent,
    as one cannot in general remove a statement in a consistent
    way and so on. This transform wraps any single statements
    in a StatListNode containing a single statement.

    b) The PassStatNode is a noop and serves no purpose beyond
    plugging such one-statement blocks; i.e., once parsed a
    "pass" can just as well be represented using an empty
    StatListNode. This means less special cases to worry about
    in subsequent transforms (one always checks to see if a
    StatListNode has no children to see if the block is empty).
    """

    def __init__(self, context):
        super(NormalizeTree, self).__init__(context)
        # Track whether the current node sits directly in a statement list
        # or inside an expression; both suppress StatListNode wrapping.
        self.is_in_statlist = False
        self.is_in_expr = False

    def visit_ExprNode(self, node):
        # Save/restore so nested statements (e.g. in lambdas) know they
        # are inside an expression.
        stacktmp = self.is_in_expr
        self.is_in_expr = True
        self.visitchildren(node)
        self.is_in_expr = stacktmp
        return node

    def visit_StatNode(self, node, is_listcontainer=False):
        stacktmp = self.is_in_statlist
        self.is_in_statlist = is_listcontainer
        self.visitchildren(node)
        self.is_in_statlist = stacktmp
        if not self.is_in_statlist and not self.is_in_expr:
            # A bare statement becomes a one-element StatListNode (see a).
            return Nodes.StatListNode(pos=node.pos, stats=[node])
        else:
            return node

    def visit_StatListNode(self, node):
        self.is_in_statlist = True
        self.visitchildren(node)
        self.is_in_statlist = False
        return node

    def visit_ParallelAssignmentNode(self, node):
        # These nodes contain statement lists themselves.
        return self.visit_StatNode(node, True)

    def visit_CEnumDefNode(self, node):
        return self.visit_StatNode(node, True)

    def visit_CStructOrUnionDefNode(self, node):
        return self.visit_StatNode(node, True)

    def visit_PassStatNode(self, node):
        """Eliminate PassStatNode"""
        if not self.is_in_statlist:
            return Nodes.StatListNode(pos=node.pos, stats=[])
        else:
            return []

    def visit_ExprStatNode(self, node):
        """Eliminate useless string literals"""
        if node.expr.is_string_literal:
            # Docstring-like bare literals behave like pass statements.
            return self.visit_PassStatNode(node)
        else:
            return self.visit_StatNode(node)

    def visit_CDeclaratorNode(self, node):
        return node
class PostParseError(CompileError):
    """Compile error raised for problems detected by the PostParse transform."""
    pass

# error strings checked by unit tests, so define them
ERR_CDEF_INCLASS = 'Cannot assign default value to fields in cdef classes, structs or unions'
ERR_BUF_DEFAULTS = 'Invalid buffer defaults specification (see docs)'
ERR_INVALID_SPECIALATTR_TYPE = 'Special attributes must not have a type declared'
class PostParse(ScopeTrackingTransform):
"""
Basic interpretation of the parse tree, as well as validity
checking that can be done on a very basic level on the parse
tree (while still not being a problem with the basic syntax,
as such).
Specifically:
- Default values to cdef assignments are turned into single
assignments following the declaration (everywhere but in class
bodies, where they raise a compile error)
- Interpret some node structures into Python runtime values.
Some nodes take compile-time arguments (currently:
TemplatedTypeNode[args] and __cythonbufferdefaults__ = {args}),
which should be interpreted. This happens in a general way
and other steps should be taken to ensure validity.
Type arguments cannot be interpreted in this way.
- For __cythonbufferdefaults__ the arguments are checked for
validity.
TemplatedTypeNode has its directives interpreted:
Any first positional argument goes into the "dtype" attribute,
any "ndim" keyword argument goes into the "ndim" attribute and
so on. Also it is checked that the directive combination is valid.
- __cythonbufferdefaults__ attributes are parsed and put into the
type information.
Note: Currently Parsing.py does a lot of interpretation and
reorganization that can be refactored into this transform
if a more pure Abstract Syntax Tree is wanted.
"""
def __init__(self, context):
    super(PostParse, self).__init__(context)
    # Map special class-attribute names to their interpretation handlers
    # (used by visit_CVarDefNode for cdef class bodies).
    self.specialattribute_handlers = {
        '__cythonbufferdefaults__' : self.handle_bufferdefaults
    }
def visit_ModuleNode(self, node):
    # Per-module counters used to generate unique names for lambdas
    # and generator expressions.
    self.lambda_counter = 1
    self.genexpr_counter = 1
    return super(PostParse, self).visit_ModuleNode(node)
def visit_LambdaNode(self, node):
    # unpack a lambda expression into the corresponding DefNode
    lambda_id = self.lambda_counter
    self.lambda_counter += 1
    node.lambda_name = EncodedString(u'lambda%d' % lambda_id)
    collector = YieldNodeCollector()
    collector.visitchildren(node.result_expr)
    if collector.yields or isinstance(node.result_expr, ExprNodes.YieldExprNode):
        # A yielding lambda is a generator: keep the expression as a bare
        # statement rather than returning its value.
        body = Nodes.ExprStatNode(
            node.result_expr.pos, expr=node.result_expr)
    else:
        body = Nodes.ReturnStatNode(
            node.result_expr.pos, value=node.result_expr)
    node.def_node = Nodes.DefNode(
        node.pos, name=node.name, lambda_name=node.lambda_name,
        args=node.args, star_arg=node.star_arg,
        starstar_arg=node.starstar_arg,
        body=body, doc=None)
    self.visitchildren(node)
    return node
def visit_GeneratorExpressionNode(self, node):
    # unpack a generator expression into the corresponding DefNode
    genexpr_id = self.genexpr_counter
    self.genexpr_counter += 1
    node.genexpr_name = EncodedString(u'genexpr%d' % genexpr_id)
    # The generated function takes no arguments; its body is the loop.
    node.def_node = Nodes.DefNode(node.pos, name=node.name,
                                  doc=None,
                                  args=[], star_arg=None,
                                  starstar_arg=None,
                                  body=node.loop)
    self.visitchildren(node)
    return node
# cdef variables
def handle_bufferdefaults(self, decl):
    # Interpret a ``__cythonbufferdefaults__ = {...}`` declaration: the
    # default must be a dict literal, stored on the enclosing scope node.
    if not isinstance(decl.default, ExprNodes.DictNode):
        raise PostParseError(decl.pos, ERR_BUF_DEFAULTS)
    self.scope_node.buffer_defaults_node = decl.default
    self.scope_node.buffer_defaults_pos = decl.pos
    def visit_CVarDefNode(self, node):
        """Split default values off cdef declarations into assignments.

        This assumes only plain names and pointers are assignable on
        declaration. Also, it makes use of the fact that a cdef decl
        must appear before the first use, so we don't have to deal with
        "i = 3; cdef int i = i" and can simply move the nodes around.
        """
        try:
            self.visitchildren(node)
            stats = [node]
            newdecls = []
            for decl in node.declarators:
                declbase = decl
                # unwrap pointer declarators down to the underlying name
                while isinstance(declbase, Nodes.CPtrDeclaratorNode):
                    declbase = declbase.base
                if isinstance(declbase, Nodes.CNameDeclaratorNode):
                    if declbase.default is not None:
                        if self.scope_type in ('cclass', 'pyclass', 'struct'):
                            if isinstance(self.scope_node, Nodes.CClassDefNode):
                                # special attributes like __cythonbufferdefaults__
                                # are interpreted here and dropped from the tree
                                handler = self.specialattribute_handlers.get(decl.name)
                                if handler:
                                    if decl is not declbase:
                                        raise PostParseError(decl.pos, ERR_INVALID_SPECIALATTR_TYPE)
                                    handler(decl)
                                    continue # Remove declaration
                            raise PostParseError(decl.pos, ERR_CDEF_INCLASS)
                        first_assignment = self.scope_type != 'module'
                        stats.append(Nodes.SingleAssignmentNode(node.pos,
                            lhs=ExprNodes.NameNode(node.pos, name=declbase.name),
                            rhs=declbase.default, first=first_assignment))
                        declbase.default = None
                newdecls.append(decl)
            node.declarators = newdecls
            return stats
        except PostParseError, e:
            # An error in a cdef clause is ok, simply remove the declaration
            # and try to move on to report more errors
            self.context.nonfatal_error(e)
            return None
# Split parallel assignments (a,b = b,a) into separate partial
# assignments that are executed rhs-first using temps. This
# restructuring must be applied before type analysis so that known
# types on rhs and lhs can be matched directly. It is required in
# the case that the types cannot be coerced to a Python type in
# order to assign from a tuple.
def visit_SingleAssignmentNode(self, node):
self.visitchildren(node)
return self._visit_assignment_node(node, [node.lhs, node.rhs])
def visit_CascadedAssignmentNode(self, node):
self.visitchildren(node)
return self._visit_assignment_node(node, node.lhs_list + [node.rhs])
    def _visit_assignment_node(self, node, expr_list):
        """Flatten parallel assignments into separate single
        assignments or cascaded assignments.

        expr_list is [lhs1, lhs2, ..., rhs].  Returns the original node
        unchanged when there is nothing to split, otherwise a (possibly
        LetNode-wrapped) assignment tree that evaluates each rhs exactly
        once.
        """
        if sum([ 1 for expr in expr_list
                 if expr.is_sequence_constructor or expr.is_string_literal ]) < 2:
            # no parallel assignments => nothing to do
            return node

        expr_list_list = []
        flatten_parallel_assignments(expr_list, expr_list_list)
        temp_refs = []
        eliminate_rhs_duplicates(expr_list_list, temp_refs)

        nodes = []
        for expr_list in expr_list_list:
            lhs_list = expr_list[:-1]
            rhs = expr_list[-1]
            if len(lhs_list) == 1:
                node = Nodes.SingleAssignmentNode(rhs.pos,
                    lhs = lhs_list[0], rhs = rhs)
            else:
                node = Nodes.CascadedAssignmentNode(rhs.pos,
                    lhs_list = lhs_list, rhs = rhs)
            nodes.append(node)

        if len(nodes) == 1:
            assign_node = nodes[0]
        else:
            assign_node = Nodes.ParallelAssignmentNode(nodes[0].pos, stats = nodes)

        if temp_refs:
            duplicates_and_temps = [ (temp.expression, temp)
                                     for temp in temp_refs ]
            sort_common_subsequences(duplicates_and_temps)
            # wrap the assignment in LetNodes so each duplicated rhs is
            # evaluated once and reused via its temp, innermost first
            for _, temp_ref in duplicates_and_temps[::-1]:
                assign_node = LetNode(temp_ref, assign_node)

        return assign_node
def _flatten_sequence(self, seq, result):
for arg in seq.args:
if arg.is_sequence_constructor:
self._flatten_sequence(arg, result)
else:
result.append(arg)
return result
def visit_DelStatNode(self, node):
self.visitchildren(node)
node.args = self._flatten_sequence(node, [])
return node
    def visit_ExceptClauseNode(self, node):
        """Implement 'except ... as target' semantics: the target name
        is deleted when the handler body exits (via try/finally)."""
        if node.is_except_as:
            # except-as must delete NameNode target at the end
            del_target = Nodes.DelStatNode(
                node.pos,
                args=[ExprNodes.NameNode(
                    node.target.pos, name=node.target.name)],
                ignore_nonexisting=True)
            node.body = Nodes.StatListNode(
                node.pos,
                stats=[Nodes.TryFinallyStatNode(
                    node.pos,
                    body=node.body,
                    finally_clause=Nodes.StatListNode(
                        node.pos,
                        stats=[del_target]))])
        self.visitchildren(node)
        return node
def eliminate_rhs_duplicates(expr_list_list, ref_node_sequence):
    """Replace rhs items by LetRefNodes if they appear more than once.
    Creates a sequence of LetRefNodes that set up the required temps
    and appends them to ref_node_sequence. The input list is modified
    in-place.
    """
    seen_nodes = set()
    ref_nodes = {}
    def find_duplicates(node):
        if node.is_literal or node.is_name:
            # no need to replace those; can't include attributes here
            # as their access is not necessarily side-effect free
            return
        if node in seen_nodes:
            if node not in ref_nodes:
                # second occurrence: introduce a temp for it
                ref_node = LetRefNode(node)
                ref_nodes[node] = ref_node
                ref_node_sequence.append(ref_node)
        else:
            seen_nodes.add(node)
            if node.is_sequence_constructor:
                for item in node.args:
                    find_duplicates(item)

    for expr_list in expr_list_list:
        rhs = expr_list[-1]
        find_duplicates(rhs)
    if not ref_nodes:
        return

    def substitute_nodes(node):
        if node in ref_nodes:
            return ref_nodes[node]
        elif node.is_sequence_constructor:
            node.args = list(map(substitute_nodes, node.args))
        return node

    # replace nodes inside of the common subexpressions
    for node in ref_nodes:
        if node.is_sequence_constructor:
            node.args = list(map(substitute_nodes, node.args))

    # replace common subexpressions on all rhs items
    for expr_list in expr_list_list:
        expr_list[-1] = substitute_nodes(expr_list[-1])
def sort_common_subsequences(items):
    """Sort items/subsequences so that all items and subsequences that
    an item contains appear before the item itself. This is needed
    because each rhs item must only be evaluated once, so its value
    must be evaluated first and then reused when packing sequences
    that contain it.

    Each entry of ``items`` is a pair (rhs expression, injected temp
    ref).  This implies a partial order, and the sort must be stable to
    preserve the original order as much as possible, so we use a
    simple insertion sort (which is very fast for short sequences, the
    normal case in practice).

    Fix over the original: use ``range`` instead of the Python-2-only
    ``xrange`` (identical iteration semantics, works on both 2 and 3).
    """
    def contains(seq, x):
        # True if x is (recursively) one of the sequence's elements.
        for item in seq:
            if item is x:
                return True
            elif item.is_sequence_constructor and contains(item.args, x):
                return True
        return False
    def lower_than(a, b):
        # a must be evaluated before b if b is a sequence packing a
        return b.is_sequence_constructor and contains(b.args, a)

    for pos, item in enumerate(items):
        key = item[1] # the ResultRefNode which has already been injected into the sequences
        new_pos = pos
        # find the left-most earlier item whose expression contains key
        for i in range(pos-1, -1, -1):
            if lower_than(key, items[i][0]):
                new_pos = i
        if new_pos != pos:
            # shift items[new_pos:pos] one slot right, insert item
            for i in range(pos, new_pos, -1):
                items[i] = items[i-1]
            items[new_pos] = item
def unpack_string_to_character_literals(literal):
    """Explode a string literal node into a list of one-character
    literal nodes of the same node class, preserving the literal's
    position and string subtype for each character."""
    node_type = literal.__class__
    text_type = literal.value.__class__
    single_chars = []
    for ch in literal.value:
        wrapped = text_type(ch)
        single_chars.append(
            node_type(literal.pos, value=wrapped, constant_result=wrapped))
    return single_chars
def flatten_parallel_assignments(input, output):
    """Flatten one (possibly cascaded) parallel assignment.

    The input is a list of expression nodes, representing the LHSs
    and RHS of one (possibly cascaded) assignment statement. For
    sequence constructors, rearranges the matching parts of both
    sides into a list of equivalent assignments between the
    individual elements. This transformation is applied
    recursively, so that nested structures get matched as well.
    """
    rhs = input[-1]
    if (not (rhs.is_sequence_constructor or isinstance(rhs, ExprNodes.UnicodeNode))
        or not sum([lhs.is_sequence_constructor for lhs in input[:-1]])):
        # nothing to unpack on either side => emit unchanged
        output.append(input)
        return

    complete_assignments = []

    if rhs.is_sequence_constructor:
        rhs_args = rhs.args
    elif rhs.is_string_literal:
        rhs_args = unpack_string_to_character_literals(rhs)

    rhs_size = len(rhs_args)
    # lhs_targets[i] collects all lhs expressions assigned from rhs_args[i]
    lhs_targets = [ [] for _ in xrange(rhs_size) ]
    starred_assignments = []
    for lhs in input[:-1]:
        if not lhs.is_sequence_constructor:
            if lhs.is_starred:
                error(lhs.pos, "starred assignment target must be in a list or tuple")
            # non-sequence lhs receives the whole rhs unchanged
            complete_assignments.append(lhs)
            continue
        lhs_size = len(lhs.args)
        starred_targets = sum([1 for expr in lhs.args if expr.is_starred])
        if starred_targets > 1:
            error(lhs.pos, "more than 1 starred expression in assignment")
            output.append([lhs,rhs])
            continue
        elif lhs_size - starred_targets > rhs_size:
            error(lhs.pos, "need more than %d value%s to unpack"
                  % (rhs_size, (rhs_size != 1) and 's' or ''))
            output.append([lhs,rhs])
            continue
        elif starred_targets:
            map_starred_assignment(lhs_targets, starred_assignments,
                                   lhs.args, rhs_args)
        elif lhs_size < rhs_size:
            error(lhs.pos, "too many values to unpack (expected %d, got %d)"
                  % (lhs_size, rhs_size))
            output.append([lhs,rhs])
            continue
        else:
            for targets, expr in zip(lhs_targets, lhs.args):
                targets.append(expr)

    if complete_assignments:
        complete_assignments.append(rhs)
        output.append(complete_assignments)

    # recursively flatten partial assignments
    for cascade, rhs in zip(lhs_targets, rhs_args):
        if cascade:
            cascade.append(rhs)
            flatten_parallel_assignments(cascade, output)

    # recursively flatten starred assignments
    for cascade in starred_assignments:
        if cascade[0].is_sequence_constructor:
            flatten_parallel_assignments(cascade, output)
        else:
            output.append(cascade)
def map_starred_assignment(lhs_targets, starred_assignments, lhs_args, rhs_args):
    """Distribute lhs targets around a single starred target.

    Appends the fixed-position LHS targets to the target list that
    appear left and right of the starred argument.

    The starred_assignments list receives a new tuple
    (lhs_target, rhs_values_list) that maps the remaining arguments
    (those that match the starred target) to a list.
    """
    # left side of the starred target
    for i, (targets, expr) in enumerate(zip(lhs_targets, lhs_args)):
        if expr.is_starred:
            starred = i
            lhs_remaining = len(lhs_args) - i - 1
            break
        targets.append(expr)
    else:
        raise InternalError("no starred arg found when splitting starred assignment")

    # right side of the starred target
    for i, (targets, expr) in enumerate(zip(lhs_targets[-lhs_remaining:],
                                            lhs_args[starred + 1:])):
        targets.append(expr)

    # the starred target itself, must be assigned a (potentially empty) list
    target = lhs_args[starred].target # unpack starred node
    starred_rhs = rhs_args[starred:]
    if lhs_remaining:
        starred_rhs = starred_rhs[:-lhs_remaining]
    if starred_rhs:
        pos = starred_rhs[0].pos
    else:
        pos = target.pos
    starred_assignments.append([
        target, ExprNodes.ListNode(pos=pos, args=starred_rhs)])
class PxdPostParse(CythonTransform, SkipDeclarations):
    """
    Basic interpretation/validity checking that should only be
    done on pxd trees.

    A lot of this checking currently happens in the parser; but
    what is listed below happens here.

    - "def" functions are let through only if they fill the
      getbuffer/releasebuffer slots

    - cdef functions are let through only if they are on the
      top level and are declared "inline"
    """
    ERR_INLINE_ONLY = "function definition in pxd file must be declared 'cdef inline'"
    ERR_NOGO_WITH_INLINE = "inline function definition in pxd file cannot be '%s'"

    def __call__(self, node):
        # the whole tree is visited in 'pxd' scope unless overridden below
        self.scope_type = 'pxd'
        return super(PxdPostParse, self).__call__(node)

    def visit_CClassDefNode(self, node):
        # visit cdef class bodies in 'cclass' scope, then restore
        old = self.scope_type
        self.scope_type = 'cclass'
        self.visitchildren(node)
        self.scope_type = old
        return node

    def visit_FuncDefNode(self, node):
        """Filter function definitions: only buffer slot methods and
        top-level/cclass 'cdef inline' functions are allowed in a pxd;
        everything else is reported and removed from the tree."""
        # FuncDefNode always come with an implementation (without
        # an imp they are CVarDefNodes..)
        err = self.ERR_INLINE_ONLY

        if (isinstance(node, Nodes.DefNode) and self.scope_type == 'cclass'
            and node.name in ('__getbuffer__', '__releasebuffer__')):
            err = None # allow these slots

        if isinstance(node, Nodes.CFuncDefNode):
            if (u'inline' in node.modifiers and
                self.scope_type in ('pxd', 'cclass')):
                node.inline_in_pxd = True
                if node.visibility != 'private':
                    err = self.ERR_NOGO_WITH_INLINE % node.visibility
                elif node.api:
                    err = self.ERR_NOGO_WITH_INLINE % 'api'
                else:
                    err = None # allow inline function
            else:
                err = self.ERR_INLINE_ONLY

        if err:
            self.context.nonfatal_error(PostParseError(node.pos, err))
            return None
        else:
            return node
class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
    """
    After parsing, directives can be stored in a number of places:
    - #cython-comments at the top of the file (stored in ModuleNode)
    - Command-line arguments overriding these
    - @cython.directivename decorators
    - with cython.directivename: statements

    This transform is responsible for interpreting these various sources
    and store the directive in two ways:
    - Set the directives attribute of the ModuleNode for global directives.
    - Use a CompilerDirectivesNode to override directives for a subtree.

    (The first one is primarily to not have to modify with the tree
    structure, so that ModuleNode stay on top.)

    The directives are stored in dictionaries from name to value in effect.
    Each such dictionary is always filled in for all possible directives,
    using default values where no value is given by the user.

    The available directives are controlled in Options.py.

    Note that we have to run this prior to analysis, and so some minor
    duplication of functionality has to occur: We manually track cimports
    and which names the "cython" module may have been imported to.
    """
    # maps 'cython.xyz' pseudo-function names to the AST node replacing a
    # call with a single argument
    unop_method_nodes = {
        'typeof': ExprNodes.TypeofNode,

        'operator.address': ExprNodes.AmpersandNode,
        'operator.dereference': ExprNodes.DereferenceNode,
        'operator.preincrement' : ExprNodes.inc_dec_constructor(True, '++'),
        'operator.predecrement' : ExprNodes.inc_dec_constructor(True, '--'),
        'operator.postincrement': ExprNodes.inc_dec_constructor(False, '++'),
        'operator.postdecrement': ExprNodes.inc_dec_constructor(False, '--'),

        # For backwards compatability.
        'address': ExprNodes.AmpersandNode,
    }

    binop_method_nodes = {
        'operator.comma'        : ExprNodes.c_binop_constructor(','),
    }

    # names that are special in the 'cython' module but are not directives
    special_methods = set(['declare', 'union', 'struct', 'typedef',
                           'sizeof', 'cast', 'pointer', 'compiled',
                           'NULL', 'fused_type', 'parallel'])
    special_methods.update(unop_method_nodes.keys())

    valid_parallel_directives = set([
        "parallel",
        "prange",
        "threadid",
        # "threadsavailable",
    ])

    def __init__(self, context, compilation_directive_defaults):
        super(InterpretCompilerDirectives, self).__init__(context)
        # normalize command-line defaults to unicode keys and deep-copy
        # mutable values so later updates don't leak across compilations
        self.compilation_directive_defaults = {}
        for key, value in compilation_directive_defaults.items():
            self.compilation_directive_defaults[unicode(key)] = copy.deepcopy(value)
        self.cython_module_names = set()
        self.directive_names = {}
        self.parallel_directives = {}

    def check_directive_scope(self, pos, directive, scope):
        """Report (non-fatally) if 'directive' may not be used in 'scope';
        returns False in that case, True otherwise."""
        legal_scopes = Options.directive_scopes.get(directive, None)
        if legal_scopes and scope not in legal_scopes:
            self.context.nonfatal_error(PostParseError(pos, 'The %s compiler directive '
                                        'is not allowed in %s scope' % (directive, scope)))
            return False
        else:
            if (directive not in Options.directive_defaults
                    and directive not in Options.directive_types):
                error(pos, "Invalid directive: '%s'." % (directive,))
            return True

    # Set up processing and handle the cython: comments.
    def visit_ModuleNode(self, node):
        """Collect '# cython:' comment directives, merge them with the
        defaults, and store the result on the module node."""
        for key, value in node.directive_comments.items():
            if not self.check_directive_scope(node.pos, key, 'module'):
                # NOTE(review): wrong_scope_error is not defined anywhere in
                # this class (and check_directive_scope already reported the
                # error) -- this path looks like it would raise
                # AttributeError; verify against the base classes.
                self.wrong_scope_error(node.pos, key, 'module')
                del node.directive_comments[key]

        self.module_scope = node.scope

        directives = copy.deepcopy(Options.directive_defaults)
        directives.update(copy.deepcopy(self.compilation_directive_defaults))
        directives.update(node.directive_comments)
        self.directives = directives
        node.directives = directives
        node.parallel_directives = self.parallel_directives
        self.visitchildren(node)
        node.cython_module_names = self.cython_module_names
        return node

    # The following four functions track imports and cimports that
    # begin with "cython"
    def is_cython_directive(self, name):
        return (name in Options.directive_types or
                name in self.special_methods or
                PyrexTypes.parse_basic_type(name))

    def is_parallel_directive(self, full_name, pos):
        """
        Checks to see if fullname (e.g. cython.parallel.prange) is a valid
        parallel directive. If it is a star import it also updates the
        parallel_directives.
        """
        result = (full_name + ".").startswith("cython.parallel.")

        if result:
            directive = full_name.split('.')
            if full_name == u"cython.parallel":
                self.parallel_directives[u"parallel"] = u"cython.parallel"
            elif full_name == u"cython.parallel.*":
                for name in self.valid_parallel_directives:
                    self.parallel_directives[name] = u"cython.parallel.%s" % name
            elif (len(directive) != 3 or
                  directive[-1] not in self.valid_parallel_directives):
                error(pos, "No such directive: %s" % full_name)

            # any use of cython.parallel requires thread initialisation
            self.module_scope.use_utility_code(
                UtilityCode.load_cached("InitThreads", "ModuleSetupCode.c"))

        return result

    def visit_CImportStatNode(self, node):
        """Track 'cimport cython[...]' statements; directive cimports are
        removed from the tree, other cimports pass through."""
        if node.module_name == u"cython":
            self.cython_module_names.add(node.as_name or u"cython")
        elif node.module_name.startswith(u"cython."):
            if node.module_name.startswith(u"cython.parallel."):
                error(node.pos, node.module_name + " is not a module")
            if node.module_name == u"cython.parallel":
                if node.as_name and node.as_name != u"cython":
                    self.parallel_directives[node.as_name] = node.module_name
                else:
                    self.cython_module_names.add(u"cython")
                    self.parallel_directives[
                                    u"cython.parallel"] = node.module_name
                self.module_scope.use_utility_code(
                    UtilityCode.load_cached("InitThreads", "ModuleSetupCode.c"))
            elif node.as_name:
                self.directive_names[node.as_name] = node.module_name[7:]
            else:
                self.cython_module_names.add(u"cython")
            # if this cimport was a compiler directive, we don't
            # want to leave the cimport node sitting in the tree
            return None
        return node

    def visit_FromCImportStatNode(self, node):
        """Track 'from cython[...] cimport ...'; directive names are
        recorded and stripped from the import list."""
        if (node.module_name == u"cython") or \
               node.module_name.startswith(u"cython."):
            submodule = (node.module_name + u".")[7:]
            newimp = []

            for pos, name, as_name, kind in node.imported_names:
                full_name = submodule + name
                qualified_name = u"cython." + full_name

                if self.is_parallel_directive(qualified_name, node.pos):
                    # from cython cimport parallel, or
                    # from cython.parallel cimport parallel, prange, ...
                    self.parallel_directives[as_name or name] = qualified_name
                elif self.is_cython_directive(full_name):
                    if as_name is None:
                        as_name = full_name

                    self.directive_names[as_name] = full_name
                    if kind is not None:
                        self.context.nonfatal_error(PostParseError(pos,
                            "Compiler directive imports must be plain imports"))
                else:
                    newimp.append((pos, name, as_name, kind))

            if not newimp:
                return None

            node.imported_names = newimp
        return node

    def visit_FromImportStatNode(self, node):
        """Track 'from cython[...] import ...' (Python-level imports)."""
        if (node.module.module_name.value == u"cython") or \
               node.module.module_name.value.startswith(u"cython."):
            submodule = (node.module.module_name.value + u".")[7:]
            newimp = []
            for name, name_node in node.items:
                full_name = submodule + name
                qualified_name = u"cython." + full_name
                if self.is_parallel_directive(qualified_name, node.pos):
                    self.parallel_directives[name_node.name] = qualified_name
                elif self.is_cython_directive(full_name):
                    self.directive_names[name_node.name] = full_name
                else:
                    newimp.append((name, name_node))
            if not newimp:
                return None
            node.items = newimp
        return node

    def visit_SingleAssignmentNode(self, node):
        """Rewrite 'x = __import__-style cython import' assignments into
        CImportStatNodes so they are tracked like cimports."""
        if isinstance(node.rhs, ExprNodes.ImportNode):
            module_name = node.rhs.module_name.value
            is_parallel = (module_name + u".").startswith(u"cython.parallel.")

            if module_name != u"cython" and not is_parallel:
                return node

            module_name = node.rhs.module_name.value
            as_name = node.lhs.name

            node = Nodes.CImportStatNode(node.pos,
                                         module_name = module_name,
                                         as_name = as_name)
            node = self.visit_CImportStatNode(node)
        else:
            self.visitchildren(node)

        return node

    def visit_NameNode(self, node):
        # mark references to the cython module / imported directive names
        if node.name in self.cython_module_names:
            node.is_cython_module = True
        else:
            node.cython_attribute = self.directive_names.get(node.name)
        return node

    def try_to_parse_directives(self, node):
        # If node is the contents of an directive (in a with statement or
        # decorator), returns a list of (directivename, value) pairs.
        # Otherwise, returns None
        if isinstance(node, ExprNodes.CallNode):
            self.visit(node.function)
            optname = node.function.as_cython_attribute()
            if optname:
                directivetype = Options.directive_types.get(optname)
                if directivetype:
                    args, kwds = node.explicit_args_kwds()
                    directives = []
                    key_value_pairs = []
                    if kwds is not None and directivetype is not dict:
                        # keyword args may themselves be sub-directives
                        # (e.g. directive.subname=value)
                        for keyvalue in kwds.key_value_pairs:
                            key, value = keyvalue
                            sub_optname = "%s.%s" % (optname, key.value)
                            if Options.directive_types.get(sub_optname):
                                directives.append(self.try_to_parse_directive(sub_optname, [value], None, keyvalue.pos))
                            else:
                                key_value_pairs.append(keyvalue)
                        if not key_value_pairs:
                            kwds = None
                        else:
                            kwds.key_value_pairs = key_value_pairs
                        if directives and not kwds and not args:
                            return directives
                    directives.append(self.try_to_parse_directive(optname, args, kwds, node.function.pos))
                    return directives
        elif isinstance(node, (ExprNodes.AttributeNode, ExprNodes.NameNode)):
            self.visit(node)
            optname = node.as_cython_attribute()
            if optname:
                directivetype = Options.directive_types.get(optname)
                if directivetype is bool:
                    # bare name of a boolean directive means True
                    return [(optname, True)]
                elif directivetype is None:
                    return [(optname, None)]
                else:
                    raise PostParseError(
                        node.pos, "The '%s' directive should be used as a function call." % optname)
        return None

    def try_to_parse_directive(self, optname, args, kwds, pos):
        """Validate and convert one directive call's arguments according
        to the directive's declared type; raises PostParseError on
        malformed usage."""
        directivetype = Options.directive_types.get(optname)
        if len(args) == 1 and isinstance(args[0], ExprNodes.NoneNode):
            # explicit None resets the directive to its default
            return optname, Options.directive_defaults[optname]
        elif directivetype is bool:
            if kwds is not None or len(args) != 1 or not isinstance(args[0], ExprNodes.BoolNode):
                raise PostParseError(pos,
                    'The %s directive takes one compile-time boolean argument' % optname)
            return (optname, args[0].value)
        elif directivetype is int:
            if kwds is not None or len(args) != 1 or not isinstance(args[0], ExprNodes.IntNode):
                raise PostParseError(pos,
                    'The %s directive takes one compile-time integer argument' % optname)
            return (optname, int(args[0].value))
        elif directivetype is str:
            if kwds is not None or len(args) != 1 or not isinstance(
                    args[0], (ExprNodes.StringNode, ExprNodes.UnicodeNode)):
                raise PostParseError(pos,
                    'The %s directive takes one compile-time string argument' % optname)
            return (optname, str(args[0].value))
        elif directivetype is type:
            if kwds is not None or len(args) != 1:
                raise PostParseError(pos,
                    'The %s directive takes one type argument' % optname)
            return (optname, args[0])
        elif directivetype is dict:
            if len(args) != 0:
                raise PostParseError(pos,
                    'The %s directive takes no prepositional arguments' % optname)
            return optname, dict([(key.value, value) for key, value in kwds.key_value_pairs])
        elif directivetype is list:
            # NOTE(review): len(kwds) assumes the kwds node supports len();
            # verify against the node class used by explicit_args_kwds().
            if kwds and len(kwds) != 0:
                raise PostParseError(pos,
                    'The %s directive takes no keyword arguments' % optname)
            return optname, [ str(arg.value) for arg in args ]
        elif callable(directivetype):
            if kwds is not None or len(args) != 1 or not isinstance(
                    args[0], (ExprNodes.StringNode, ExprNodes.UnicodeNode)):
                raise PostParseError(pos,
                    'The %s directive takes one compile-time string argument' % optname)
            return (optname, directivetype(optname, str(args[0].value)))
        else:
            assert False

    def visit_with_directives(self, body, directives):
        """Wrap 'body' in a CompilerDirectivesNode carrying the merged
        directive set, visiting it with the new directives in effect."""
        olddirectives = self.directives
        newdirectives = copy.copy(olddirectives)
        newdirectives.update(directives)
        self.directives = newdirectives
        assert isinstance(body, Nodes.StatListNode), body
        retbody = self.visit_Node(body)
        directive = Nodes.CompilerDirectivesNode(pos=retbody.pos, body=retbody,
                                                 directives=newdirectives)
        self.directives = olddirectives
        return directive

    # Handle decorators
    def visit_FuncDefNode(self, node):
        directives = self._extract_directives(node, 'function')
        if not directives:
            return self.visit_Node(node)
        body = Nodes.StatListNode(node.pos, stats=[node])
        return self.visit_with_directives(body, directives)

    def visit_CVarDefNode(self, node):
        """Cdef functions only accept cython.locals() and 'final'
        decorators; anything else is reported non-fatally."""
        directives = self._extract_directives(node, 'function')
        if not directives:
            return node
        for name, value in directives.iteritems():
            if name == 'locals':
                node.directive_locals = value
            elif name != 'final':
                self.context.nonfatal_error(PostParseError(
                    node.pos,
                    "Cdef functions can only take cython.locals() "
                    "or final decorators, got %s." % name))
        body = Nodes.StatListNode(node.pos, stats=[node])
        return self.visit_with_directives(body, directives)

    def visit_CClassDefNode(self, node):
        directives = self._extract_directives(node, 'cclass')
        if not directives:
            return self.visit_Node(node)
        body = Nodes.StatListNode(node.pos, stats=[node])
        return self.visit_with_directives(body, directives)

    def visit_PyClassDefNode(self, node):
        directives = self._extract_directives(node, 'class')
        if not directives:
            return self.visit_Node(node)
        body = Nodes.StatListNode(node.pos, stats=[node])
        return self.visit_with_directives(body, directives)

    def _extract_directives(self, node, scope_name):
        """Partition node.decorators into compiler directives (returned
        as a merged name->value dict) and real decorators (kept on the
        node)."""
        if not node.decorators:
            return {}
        # Split the decorators into two lists -- real decorators and directives
        directives = []
        realdecs = []
        for dec in node.decorators:
            new_directives = self.try_to_parse_directives(dec.decorator)
            if new_directives is not None:
                for directive in new_directives:
                    if self.check_directive_scope(node.pos, directive[0], scope_name):
                        directives.append(directive)
            else:
                realdecs.append(dec)
        if realdecs and isinstance(node, (Nodes.CFuncDefNode, Nodes.CClassDefNode, Nodes.CVarDefNode)):
            raise PostParseError(realdecs[0].pos, "Cdef functions/classes cannot take arbitrary decorators.")
        else:
            node.decorators = realdecs
        # merge or override repeated directives
        optdict = {}
        directives.reverse() # Decorators coming first take precedence
        for directive in directives:
            name, value = directive
            if name in optdict:
                old_value = optdict[name]
                # keywords and arg lists can be merged, everything
                # else overrides completely
                if isinstance(old_value, dict):
                    old_value.update(value)
                elif isinstance(old_value, list):
                    old_value.extend(value)
                else:
                    optdict[name] = value
            else:
                optdict[name] = value
        return optdict

    # Handle with statements
    def visit_WithStatNode(self, node):
        """Interpret 'with cython.directive(...):' blocks; plain with
        statements pass through unchanged."""
        directive_dict = {}
        for directive in self.try_to_parse_directives(node.manager) or []:
            if directive is not None:
                if node.target is not None:
                    self.context.nonfatal_error(
                        PostParseError(node.pos, "Compiler directive with statements cannot contain 'as'"))
                else:
                    name, value = directive
                    if name in ('nogil', 'gil'):
                        # special case: in pure mode, "with nogil" spells "with cython.nogil"
                        node = Nodes.GILStatNode(node.pos, state = name, body = node.body)
                        return self.visit_Node(node)
                    if self.check_directive_scope(node.pos, name, 'with statement'):
                        directive_dict[name] = value
        if directive_dict:
            return self.visit_with_directives(node.body, directive_dict)
        return self.visit_Node(node)
class ParallelRangeTransform(CythonTransform, SkipDeclarations):
    """
    Transform cython.parallel stuff. The parallel_directives come from the
    module node, set there by InterpretCompilerDirectives.

        x = cython.parallel.threadavailable()   -> ParallelThreadAvailableNode
        with nogil, cython.parallel.parallel(): -> ParallelWithBlockNode
            print cython.parallel.threadid()    -> ParallelThreadIdNode

        for i in cython.parallel.prange(...):   -> ParallelRangeNode
            ...
    """

    # a list of names, maps 'cython.parallel.prange' in the code to
    # ['cython', 'parallel', 'prange']
    parallel_directive = None

    # Indicates whether a namenode in an expression is the cython module
    namenode_is_cython_module = False

    # Keep track of whether we are the context manager of a 'with' statement
    in_context_manager_section = False

    # One of 'prange' or 'with parallel'. This is used to disallow closely
    # nested 'with parallel:' blocks
    state = None

    directive_to_node = {
        u"cython.parallel.parallel": Nodes.ParallelWithBlockNode,
        # u"cython.parallel.threadsavailable": ExprNodes.ParallelThreadsAvailableNode,
        u"cython.parallel.threadid": ExprNodes.ParallelThreadIdNode,
        u"cython.parallel.prange": Nodes.ParallelRangeNode,
    }

    def node_is_parallel_directive(self, node):
        # either imported under its own name or accessed via the module
        return node.name in self.parallel_directives or node.is_cython_module

    def get_directive_class_node(self, node):
        """
        Figure out which parallel directive was used and return the associated
        Node class.

        E.g. for a cython.parallel.prange() call we return ParallelRangeNode
        """
        if self.namenode_is_cython_module:
            directive = '.'.join(self.parallel_directive)
        else:
            directive = self.parallel_directives[self.parallel_directive[0]]
            directive = '%s.%s' % (directive,
                                   '.'.join(self.parallel_directive[1:]))
            directive = directive.rstrip('.')

        cls = self.directive_to_node.get(directive)
        if cls is None and not (self.namenode_is_cython_module and
                                self.parallel_directive[0] != 'parallel'):
            error(node.pos, "Invalid directive: %s" % directive)

        # reset lookup state for the next expression
        self.namenode_is_cython_module = False
        self.parallel_directive = None

        return cls

    def visit_ModuleNode(self, node):
        """
        If any parallel directives were imported, copy them over and visit
        the AST
        """
        if node.parallel_directives:
            self.parallel_directives = node.parallel_directives
            return self.visit_Node(node)

        # No parallel directives were imported, so they can't be used :)
        return node

    def visit_NameNode(self, node):
        if self.node_is_parallel_directive(node):
            # start accumulating the dotted directive path
            self.parallel_directive = [node.name]
            self.namenode_is_cython_module = node.is_cython_module
        return node

    def visit_AttributeNode(self, node):
        self.visitchildren(node)
        if self.parallel_directive:
            # extend the dotted path, e.g. ['cython', 'parallel', 'prange']
            self.parallel_directive.append(node.attribute)
        return node

    def visit_CallNode(self, node):
        self.visit(node.function)
        if not self.parallel_directive:
            return node

        # We are a parallel directive, replace this node with the
        # corresponding ParallelSomethingSomething node

        if isinstance(node, ExprNodes.GeneralCallNode):
            args = node.positional_args.args
            kwargs = node.keyword_args
        else:
            args = node.args
            kwargs = {}

        parallel_directive_class = self.get_directive_class_node(node)
        if parallel_directive_class:
            # Note: in case of a parallel() the body is set by
            # visit_WithStatNode
            node = parallel_directive_class(node.pos, args=args, kwargs=kwargs)

        return node

    def visit_WithStatNode(self, node):
        "Rewrite with cython.parallel.parallel() blocks"
        newnode = self.visit(node.manager)
        if isinstance(newnode, Nodes.ParallelWithBlockNode):
            if self.state == 'parallel with':
                error(node.manager.pos,
                      "Nested parallel with blocks are disallowed")

            self.state = 'parallel with'
            body = self.visit(node.body)
            self.state = None

            newnode.body = body
            return newnode
        elif self.parallel_directive:
            parallel_directive_class = self.get_directive_class_node(node)

            if not parallel_directive_class:
                # There was an error, stop here and now
                return None

            if parallel_directive_class is Nodes.ParallelWithBlockNode:
                error(node.pos, "The parallel directive must be called")
                return None

        node.body = self.visit(node.body)
        return node

    def visit_ForInStatNode(self, node):
        "Rewrite 'for i in cython.parallel.prange(...):'"
        self.visit(node.iterator)
        self.visit(node.target)

        in_prange = isinstance(node.iterator.sequence,
                               Nodes.ParallelRangeNode)
        previous_state = self.state

        if in_prange:
            # This will replace the entire ForInStatNode, so copy the
            # attributes
            parallel_range_node = node.iterator.sequence

            parallel_range_node.target = node.target
            parallel_range_node.body = node.body
            parallel_range_node.else_clause = node.else_clause

            node = parallel_range_node

            if not isinstance(node.target, ExprNodes.NameNode):
                error(node.target.pos,
                      "Can only iterate over an iteration variable")

            self.state = 'prange'

        self.visit(node.body)
        self.state = previous_state
        self.visit(node.else_clause)
        return node

    def visit(self, node):
        "Visit a node that may be None"
        if node is not None:
            return super(ParallelRangeTransform, self).visit(node)
class WithTransform(CythonTransform, SkipDeclarations):
    """Expand 'with' statements into the equivalent __enter__/__exit__
    protocol: a try/except/finally structure around the body with the
    exit call receiving the exception info (or three Nones)."""

    def visit_WithStatNode(self, node):
        self.visitchildren(node, 'body')
        pos = node.pos
        body, target, manager = node.body, node.target, node.manager
        # manager.__enter__() -- evaluated once, result kept in a temp
        node.enter_call = ExprNodes.SimpleCallNode(
            pos, function=ExprNodes.AttributeNode(
                pos, obj=ExprNodes.CloneNode(manager),
                attribute=EncodedString('__enter__'),
                is_special_lookup=True),
            args=[],
            is_temp=True)
        if target is not None:
            # 'with m as t:' -- assign the __enter__ result to the target
            body = Nodes.StatListNode(
                pos, stats = [
                    Nodes.WithTargetAssignmentStatNode(
                        pos, lhs = target,
                        rhs = ResultRefNode(node.enter_call),
                        orig_rhs = node.enter_call),
                    body])

        # on exception: call __exit__(exc_type, exc_value, tb) and
        # re-raise unless it returned a true value
        excinfo_target = ExprNodes.TupleNode(pos, slow=True, args=[
            ExprNodes.ExcValueNode(pos) for _ in range(3)])
        except_clause = Nodes.ExceptClauseNode(
            pos, body=Nodes.IfStatNode(
                pos, if_clauses=[
                    Nodes.IfClauseNode(
                        pos, condition=ExprNodes.NotNode(
                            pos, operand=ExprNodes.WithExitCallNode(
                                pos, with_stat=node,
                                test_if_run=False,
                                args=excinfo_target)),
                        body=Nodes.ReraiseStatNode(pos),
                        ),
                    ],
                else_clause=None),
            pattern=None,
            target=None,
            excinfo_target=excinfo_target,
            )

        # normal exit: __exit__(None, None, None), guarded so it only
        # runs if it wasn't already called by the except clause
        node.body = Nodes.TryFinallyStatNode(
            pos, body=Nodes.TryExceptStatNode(
                pos, body=body,
                except_clauses=[except_clause],
                else_clause=None,
                ),
            finally_clause=Nodes.ExprStatNode(
                pos, expr=ExprNodes.WithExitCallNode(
                    pos, with_stat=node,
                    test_if_run=True,
                    args=ExprNodes.TupleNode(
                        pos, args=[ExprNodes.NoneNode(pos) for _ in range(3)]
                        ))),
            handle_error_case=False,
            )
        return node

    def visit_ExprNode(self, node):
        # With statements are never inside expressions.
        return node
class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations):
    """Apply decorators to cdef class methods.

    Decorators are normally expanded directly in DefNode and PyClassDefNode
    to avoid reassignments to the function/class name.  cdef class methods
    are the exception: they originate in the PyMethodDef struct, so the
    decorated result must be assigned back under the method's name.  The
    IndirectionNode allows DefNode to override the decorator.
    """

    def visit_DefNode(self, func_node):
        enclosing_scope_type = self.scope_type
        func_node = self.visit_FuncDefNode(func_node)
        if enclosing_scope_type != 'cclass' or not func_node.decorators:
            return func_node
        return self.handle_decorators(
            func_node, func_node.decorators, func_node.name)

    def handle_decorators(self, node, decorators, name):
        # Build decorator(...(decorator(name))) innermost-last, i.e. the
        # decorator closest to the def line is applied first.
        decorated = ExprNodes.NameNode(node.pos, name=name)
        for decorator in reversed(decorators):
            decorated = ExprNodes.SimpleCallNode(
                decorator.pos,
                function=decorator.decorator,
                args=[decorated])

        # name = <decorated result>, wrapped so DefNode can override it.
        reassignment = Nodes.SingleAssignmentNode(
            node.pos,
            lhs=ExprNodes.NameNode(node.pos, name=name),
            rhs=decorated)
        reassignment = Nodes.IndirectionNode([reassignment])
        node.decorator_indirection = reassignment
        return [node, reassignment]
class CnameDirectivesTransform(CythonTransform, SkipDeclarations):
    """
    Only part of the CythonUtilityCode pipeline. Must be run before
    DecoratorTransform in case this is a decorator for a cdef class.
    It filters out @cname('my_cname') decorators and rewrites them to
    CnameDecoratorNodes.
    """

    def handle_function(self, node):
        # Scan the decorator list for a single @cname("...") call; if found,
        # strip it and wrap the definition in a CnameDecoratorNode.
        if not getattr(node, 'decorators', None):
            return self.visit_Node(node)
        for i, decorator in enumerate(node.decorators):
            decorator = decorator.decorator
            if (isinstance(decorator, ExprNodes.CallNode) and
                    decorator.function.is_name and
                    decorator.function.name == 'cname'):
                args, kwargs = decorator.explicit_args_kwds()
                # Utility-code-internal decorator, so misuse is a compiler
                # bug: report with AssertionError rather than user errors.
                if kwargs:
                    raise AssertionError(
                        "cname decorator does not take keyword arguments")
                if len(args) != 1:
                    raise AssertionError(
                        "cname decorator takes exactly one argument")
                if not (args[0].is_literal and
                        args[0].type == Builtin.str_type):
                    raise AssertionError(
                        "argument to cname decorator must be a string literal")
                cname = args[0].compile_time_value(None).decode('UTF-8')
                # Deleting during enumeration is safe: we break immediately.
                del node.decorators[i]
                node = Nodes.CnameDecoratorNode(pos=node.pos, node=node,
                                                cname=cname)
                break
        return self.visit_Node(node)

    visit_FuncDefNode = handle_function
    visit_CClassDefNode = handle_function
    visit_CEnumDefNode = handle_function
    visit_CStructOrUnionDefNode = handle_function
class ForwardDeclareTypes(CythonTransform):
    """Pre-declare module-level enums, structs/unions and extension types
    in the module scope so later declarations can refer to them."""

    def visit_CompilerDirectivesNode(self, node):
        env = self.module_scope
        outer_directives = env.directives
        env.directives = node.directives
        self.visitchildren(node)
        env.directives = outer_directives
        return node

    def visit_ModuleNode(self, node):
        self.module_scope = node.scope
        self.module_scope.directives = node.directives
        self.visitchildren(node)
        return node

    def visit_CDefExternNode(self, node):
        # Declarations inside 'cdef extern' blocks are flagged as such.
        saved_cinclude_flag = self.module_scope.in_cinclude
        self.module_scope.in_cinclude = 1
        self.visitchildren(node)
        self.module_scope.in_cinclude = saved_cinclude_flag
        return node

    def visit_CEnumDefNode(self, node):
        node.declare(self.module_scope)
        return node

    def visit_CStructOrUnionDefNode(self, node):
        # Only declare once; redeclarations are handled later.
        if node.name not in self.module_scope.entries:
            node.declare(self.module_scope)
        return node

    def visit_CClassDefNode(self, node):
        if node.class_name not in self.module_scope.entries:
            node.declare(self.module_scope)
        return node
class AnalyseDeclarationsTransform(EnvTransform):
    """Declaration-analysis phase.

    Declares names in their scopes, synthesises property wrappers for
    public/readonly cdef class attributes, replaces fused cdef/def
    functions by FusedCFuncDefNode containers of their specializations,
    synthesises module-level assignments for def functions, and drops
    declaration nodes that are no longer needed after this phase.
    """

    # Template: read/write property forwarding to a C attribute.
    basic_property = TreeFragment(u"""
property NAME:
    def __get__(self):
        return ATTR
    def __set__(self, value):
        ATTR = value
    """, level='c_class', pipeline=[NormalizeTree(None)])
    # Template: as above, but clears the object attribute on __del__.
    basic_pyobject_property = TreeFragment(u"""
property NAME:
    def __get__(self):
        return ATTR
    def __set__(self, value):
        ATTR = value
    def __del__(self):
        ATTR = None
    """, level='c_class', pipeline=[NormalizeTree(None)])
    # Template: read-only property.
    basic_property_ro = TreeFragment(u"""
property NAME:
    def __get__(self):
        return ATTR
    """, level='c_class', pipeline=[NormalizeTree(None)])

    # Template for a Python wrapper class around a struct/union type.
    struct_or_union_wrapper = TreeFragment(u"""
cdef class NAME:
    cdef TYPE value
    def __init__(self, MEMBER=None):
        cdef int count
        count = 0
        INIT_ASSIGNMENTS
        if IS_UNION and count > 1:
            raise ValueError, "At most one union member should be specified."
    def __str__(self):
        return STR_FORMAT % MEMBER_TUPLE
    def __repr__(self):
        return REPR_FORMAT % MEMBER_TUPLE
    """, pipeline=[NormalizeTree(None)])

    # Template for one member assignment inside the wrapper's __init__.
    init_assignment = TreeFragment(u"""
if VALUE is not None:
    ATTR = VALUE
    count += 1
    """, pipeline=[NormalizeTree(None)])

    fused_function = None  # currently processed FusedCFuncDefNode, if any
    in_lambda = 0          # lambda nesting depth

    def __call__(self, root):
        # needed to determine if a cdef var is declared after it's used.
        self.seen_vars_stack = []
        self.fused_error_funcs = set()
        super_class = super(AnalyseDeclarationsTransform, self)
        self._super_visit_FuncDefNode = super_class.visit_FuncDefNode
        return super_class.__call__(root)

    def visit_NameNode(self, node):
        # Record name usage for the 'declared after use' warning.
        self.seen_vars_stack[-1].add(node.name)
        return node

    def visit_ModuleNode(self, node):
        self.seen_vars_stack.append(set())
        node.analyse_declarations(self.current_env())
        self.visitchildren(node)
        self.seen_vars_stack.pop()
        return node

    def visit_LambdaNode(self, node):
        self.in_lambda += 1
        node.analyse_declarations(self.current_env())
        self.visitchildren(node)
        self.in_lambda -= 1
        return node

    def visit_CClassDefNode(self, node):
        node = self.visit_ClassDefNode(node)
        if node.scope and node.scope.implemented:
            # Synthesize property wrappers for public/readonly attributes.
            stats = []
            for entry in node.scope.var_entries:
                if entry.needs_property:
                    property = self.create_Property(entry)
                    property.analyse_declarations(node.scope)
                    self.visit(property)
                    stats.append(property)
            if stats:
                node.body.stats += stats
        return node

    def _handle_fused_def_decorators(self, old_decorators, env, node):
        """
        Create function calls to the decorators and reassignments to
        the function.
        """
        # Delete staticmethod and classmethod decorators, this is
        # handled directly by the fused function object.
        decorators = []
        for decorator in old_decorators:
            func = decorator.decorator
            if (not func.is_name or
                    func.name not in ('staticmethod', 'classmethod') or
                    env.lookup_here(func.name)):
                # not a static or classmethod
                decorators.append(decorator)
        if decorators:
            transform = DecoratorTransform(self.context)
            def_node = node.node
            _, reassignments = transform.handle_decorators(
                def_node, decorators, def_node.name)
            reassignments.analyse_declarations(env)
            node = [node, reassignments]
        return node

    def _handle_def(self, decorators, env, node):
        "Handle def or cpdef fused functions"
        # Create PyCFunction nodes for each specialization
        node.stats.insert(0, node.py_func)
        node.py_func = self.visit(node.py_func)
        node.update_fused_defnode_entry(env)
        pycfunc = ExprNodes.PyCFunctionNode.from_defnode(node.py_func,
                                                         True)
        pycfunc = ExprNodes.ProxyNode(pycfunc.coerce_to_temp(env))
        node.resulting_fused_function = pycfunc
        # Create assignment node for our def function
        node.fused_func_assignment = self._create_assignment(
            node.py_func, ExprNodes.CloneNode(pycfunc), env)
        if decorators:
            node = self._handle_fused_def_decorators(decorators, env, node)
        return node

    def _create_fused_function(self, env, node):
        "Create a fused function for a DefNode with fused arguments"
        from Cython.Compiler import FusedNode
        if self.fused_function or self.in_lambda:
            # Fused functions cannot be nested or appear in lambdas:
            # report once per function, then degrade to a pass body with
            # the first specialization so compilation can continue.
            if self.fused_function not in self.fused_error_funcs:
                if self.in_lambda:
                    error(node.pos, "Fused lambdas not allowed")
                else:
                    error(node.pos, "Cannot nest fused functions")
            self.fused_error_funcs.add(self.fused_function)
            node.body = Nodes.PassStatNode(node.pos)
            for arg in node.args:
                if arg.type.is_fused:
                    arg.type = arg.type.get_fused_types()[0]
            return node
        decorators = getattr(node, 'decorators', None)
        node = FusedNode.FusedCFuncDefNode(node, env)
        self.fused_function = node
        self.visitchildren(node)
        self.fused_function = None
        if node.py_func:
            node = self._handle_def(decorators, env, node)
        return node

    def _handle_nogil_cleanup(self, lenv, node):
        "Handle cleanup for 'with gil' blocks in nogil functions."
        if lenv.nogil and lenv.has_with_gil_block:
            # Acquire the GIL for cleanup in 'nogil' functions, by wrapping
            # the entire function body in try/finally.
            # The corresponding release will be taken care of by
            # Nodes.FuncDefNode.generate_function_definitions()
            node.body = Nodes.NogilTryFinallyStatNode(
                node.body.pos,
                body=node.body,
                finally_clause=Nodes.EnsureGILNode(node.body.pos))

    def _handle_fused(self, node):
        # Returns True if the node needs replacement by a fused container.
        if node.is_generator and node.has_fused_arguments:
            node.has_fused_arguments = False
            error(node.pos, "Fused generators not supported")
            node.gbody = Nodes.StatListNode(node.pos,
                                            stats=[],
                                            body=Nodes.PassStatNode(node.pos))
        return node.has_fused_arguments

    def visit_FuncDefNode(self, node):
        """
        Analyse a function and its body, as that hasn't happened yet. Also
        analyse the directive_locals set by @cython.locals(). Then, if we are
        a function with fused arguments, replace the function (after it has
        declared itself in the symbol table!) with a FusedCFuncDefNode, and
        analyse its children (which are in turn normal functions). If we're a
        normal function, just analyse the body of the function.
        """
        env = self.current_env()
        self.seen_vars_stack.append(set())
        lenv = node.local_scope
        node.declare_arguments(lenv)
        # Declare the types given via @cython.locals().
        for var, type_node in node.directive_locals.items():
            if not lenv.lookup_here(var):  # don't redeclare args
                type = type_node.analyse_as_type(lenv)
                if type:
                    lenv.declare_var(var, type, type_node.pos)
                else:
                    error(type_node.pos, "Not a type")
        if self._handle_fused(node):
            node = self._create_fused_function(env, node)
        else:
            node.body.analyse_declarations(lenv)
            self._handle_nogil_cleanup(lenv, node)
            self._super_visit_FuncDefNode(node)
        self.seen_vars_stack.pop()
        return node

    def visit_DefNode(self, node):
        node = self.visit_FuncDefNode(node)
        env = self.current_env()
        # Synthesize 'name = <function>' right after the def, when needed.
        if (not isinstance(node, Nodes.DefNode) or
                node.fused_py_func or node.is_generator_body or
                not node.needs_assignment_synthesis(env)):
            return node
        return [node, self._synthesize_assignment(node, env)]

    def visit_GeneratorBodyDefNode(self, node):
        return self.visit_FuncDefNode(node)

    def _synthesize_assignment(self, node, env):
        # Synthesize assignment node and put it right after defnode
        genv = env
        while genv.is_py_class_scope or genv.is_c_class_scope:
            genv = genv.outer_scope
        if genv.is_closure_scope:
            rhs = node.py_cfunc_node = ExprNodes.InnerFunctionNode(
                node.pos, def_node=node,
                pymethdef_cname=node.entry.pymethdef_cname,
                code_object=ExprNodes.CodeObjectNode(node))
        else:
            binding = self.current_directives.get('binding')
            rhs = ExprNodes.PyCFunctionNode.from_defnode(node, binding)
        if env.is_py_class_scope:
            rhs.binding = True
        node.is_cyfunction = rhs.binding
        return self._create_assignment(node, rhs, env)

    def _create_assignment(self, def_node, rhs, env):
        if def_node.decorators:
            # Apply decorators innermost-first (reverse source order).
            for decorator in def_node.decorators[::-1]:
                rhs = ExprNodes.SimpleCallNode(
                    decorator.pos,
                    function = decorator.decorator,
                    args = [rhs])
            def_node.decorators = None
        assmt = Nodes.SingleAssignmentNode(
            def_node.pos,
            lhs=ExprNodes.NameNode(def_node.pos, name=def_node.name),
            rhs=rhs)
        assmt.analyse_declarations(env)
        return assmt

    def visit_ScopedExprNode(self, node):
        env = self.current_env()
        node.analyse_declarations(env)
        # the node may or may not have a local scope
        if node.has_local_scope:
            self.seen_vars_stack.append(set(self.seen_vars_stack[-1]))
            self.enter_scope(node, node.expr_scope)
            node.analyse_scoped_declarations(node.expr_scope)
            self.visitchildren(node)
            self.exit_scope()
            self.seen_vars_stack.pop()
        else:
            node.analyse_scoped_declarations(env)
            self.visitchildren(node)
        return node

    def visit_TempResultFromStatNode(self, node):
        self.visitchildren(node)
        node.analyse_declarations(self.current_env())
        return node

    def visit_CppClassNode(self, node):
        if node.visibility == 'extern':
            return None
        else:
            return self.visit_ClassDefNode(node)

    def visit_CStructOrUnionDefNode(self, node):
        # Create a wrapper node if needed.
        # We want to use the struct type information (so it can't happen
        # before this phase) but also create new objects to be declared
        # (so it can't happen later).
        # Note that we don't return the original node, as it is
        # never used after this phase.
        # NOTE(review): the wrapper-generation code below is currently
        # disabled by this unconditional guard - structs stay private.
        if True: # private (default)
            return None

        self_value = ExprNodes.AttributeNode(
            pos = node.pos,
            obj = ExprNodes.NameNode(pos=node.pos, name=u"self"),
            attribute = EncodedString(u"value"))
        var_entries = node.entry.type.scope.var_entries
        attributes = []
        for entry in var_entries:
            attributes.append(ExprNodes.AttributeNode(pos = entry.pos,
                                                      obj = self_value,
                                                      attribute = entry.name))
        # __init__ assignments
        init_assignments = []
        for entry, attr in zip(var_entries, attributes):
            # TODO: branch on visibility
            init_assignments.append(self.init_assignment.substitute({
                    u"VALUE": ExprNodes.NameNode(entry.pos, name = entry.name),
                    u"ATTR": attr,
                }, pos = entry.pos))

        # create the class
        str_format = u"%s(%s)" % (node.entry.type.name, ("%s, " * len(attributes))[:-2])
        wrapper_class = self.struct_or_union_wrapper.substitute({
            u"INIT_ASSIGNMENTS": Nodes.StatListNode(node.pos, stats = init_assignments),
            u"IS_UNION": ExprNodes.BoolNode(node.pos, value = not node.entry.type.is_struct),
            u"MEMBER_TUPLE": ExprNodes.TupleNode(node.pos, args=attributes),
            u"STR_FORMAT": ExprNodes.StringNode(node.pos, value = EncodedString(str_format)),
            u"REPR_FORMAT": ExprNodes.StringNode(node.pos, value = EncodedString(str_format.replace("%s", "%r"))),
        }, pos = node.pos).stats[0]
        wrapper_class.class_name = node.name
        wrapper_class.shadow = True
        class_body = wrapper_class.body.stats

        # fix value type
        assert isinstance(class_body[0].base_type, Nodes.CSimpleBaseTypeNode)
        class_body[0].base_type.name = node.name

        # fix __init__ arguments
        init_method = class_body[1]
        assert isinstance(init_method, Nodes.DefNode) and init_method.name == '__init__'
        arg_template = init_method.args[1]
        if not node.entry.type.is_struct:
            arg_template.kw_only = True
        del init_method.args[1]
        for entry, attr in zip(var_entries, attributes):
            arg = copy.deepcopy(arg_template)
            arg.declarator.name = entry.name
            init_method.args.append(arg)

        # setters/getters
        for entry, attr in zip(var_entries, attributes):
            # TODO: branch on visibility
            if entry.type.is_pyobject:
                template = self.basic_pyobject_property
            else:
                template = self.basic_property
            property = template.substitute({
                    u"ATTR": attr,
                }, pos = entry.pos).stats[0]
            property.name = entry.name
            wrapper_class.body.stats.append(property)

        wrapper_class.analyse_declarations(self.current_env())
        return self.visit_CClassDefNode(wrapper_class)

    # Some nodes are no longer needed after declaration
    # analysis and can be dropped. The analysis was performed
    # on these nodes in a separate recursive process from the
    # enclosing function or module, so we can simply drop them.
    def visit_CDeclaratorNode(self, node):
        # necessary to ensure that all CNameDeclaratorNodes are visited.
        self.visitchildren(node)
        return node

    def visit_CTypeDefNode(self, node):
        return node

    def visit_CBaseTypeNode(self, node):
        return None

    def visit_CEnumDefNode(self, node):
        if node.visibility == 'public':
            return node
        else:
            return None

    def visit_CNameDeclaratorNode(self, node):
        # Warn when a cdef variable is declared after its first use.
        if node.name in self.seen_vars_stack[-1]:
            entry = self.current_env().lookup(node.name)
            if (entry is None or entry.visibility != 'extern'
                    and not entry.scope.is_c_class_scope):
                warning(node.pos, "cdef variable '%s' declared after it is used" % node.name, 2)
        self.visitchildren(node)
        return node

    def visit_CVarDefNode(self, node):
        # to ensure all CNameDeclaratorNodes are visited.
        self.visitchildren(node)
        return None

    def visit_CnameDecoratorNode(self, node):
        child_node = self.visit(node.node)
        if not child_node:
            return None
        if type(child_node) is list: # Assignment synthesized
            node.child_node = child_node[0]
            return [node] + child_node[1:]
        node.node = child_node
        return node

    def create_Property(self, entry):
        # Only called for entries with needs_property set, i.e. visibility
        # 'public' or 'readonly' - otherwise 'template' would be unbound.
        if entry.visibility == 'public':
            if entry.type.is_pyobject:
                template = self.basic_pyobject_property
            else:
                template = self.basic_property
        elif entry.visibility == 'readonly':
            template = self.basic_property_ro
        property = template.substitute({
                u"ATTR": ExprNodes.AttributeNode(pos=entry.pos,
                                                 obj=ExprNodes.NameNode(pos=entry.pos, name="self"),
                                                 attribute=entry.name),
            }, pos=entry.pos).stats[0]
        property.name = entry.name
        property.doc = entry.doc
        return property
class CalculateQualifiedNamesTransform(EnvTransform):
    """
    Calculate and store the '__qualname__' and the global
    module name on some nodes.
    """

    def visit_ModuleNode(self, node):
        self.module_name = self.global_scope().qualified_name
        # Stack of name components for the current lexical position.
        self.qualified_name = []
        _super = super(CalculateQualifiedNamesTransform, self)
        self._super_visit_FuncDefNode = _super.visit_FuncDefNode
        self._super_visit_ClassDefNode = _super.visit_ClassDefNode
        self.visitchildren(node)
        return node

    def _set_qualname(self, node, name=None):
        # Store the dotted qualname (optionally extended by 'name') and the
        # module name on the node, then recurse.
        if name:
            qualname = self.qualified_name[:]
            qualname.append(name)
        else:
            qualname = self.qualified_name
        node.qualname = EncodedString('.'.join(qualname))
        node.module_name = self.module_name
        self.visitchildren(node)
        return node

    def _append_entry(self, entry):
        # Module-level names restart the qualname path.
        if entry.is_pyglobal and not entry.is_pyclass_attr:
            self.qualified_name = [entry.name]
        else:
            self.qualified_name.append(entry.name)

    def visit_ClassNode(self, node):
        return self._set_qualname(node, node.name)

    def visit_PyClassNamespaceNode(self, node):
        # class name was already added by parent node
        return self._set_qualname(node)

    def visit_PyCFunctionNode(self, node):
        return self._set_qualname(node, node.def_node.name)

    def visit_FuncDefNode(self, node):
        # Push '<name>.<locals>' for the duration of the function body.
        orig_qualified_name = self.qualified_name[:]
        if getattr(node, 'name', None) == '<lambda>':
            self.qualified_name.append('<lambda>')
        else:
            self._append_entry(node.entry)
        self.qualified_name.append('<locals>')
        self._super_visit_FuncDefNode(node)
        self.qualified_name = orig_qualified_name
        return node

    def visit_ClassDefNode(self, node):
        orig_qualified_name = self.qualified_name[:]
        entry = (getattr(node, 'entry', None) or             # PyClass
                 self.current_env().lookup_here(node.name))  # CClass
        self._append_entry(entry)
        self._super_visit_ClassDefNode(node)
        self.qualified_name = orig_qualified_name
        return node
class AnalyseExpressionsTransform(CythonTransform):
    """Run type inference and expression analysis on each scope."""

    def visit_ModuleNode(self, node):
        node.scope.infer_types()
        node.body = node.body.analyse_expressions(node.scope)
        self.visitchildren(node)
        return node

    def visit_FuncDefNode(self, node):
        node.local_scope.infer_types()
        node.body = node.body.analyse_expressions(node.local_scope)
        self.visitchildren(node)
        return node

    def visit_ScopedExprNode(self, node):
        if node.has_local_scope:
            node.expr_scope.infer_types()
            node = node.analyse_scoped_expressions(node.expr_scope)
        self.visitchildren(node)
        return node

    def visit_IndexNode(self, node):
        """
        Replace index nodes used to specialize cdef functions with fused
        argument types with the Attribute- or NameNode referring to the
        function. We then need to copy over the specialization properties to
        the attribute or name node.

        Because the indexing might be a Python indexing operation on a fused
        function, or (usually) a Cython indexing operation, we need to
        re-analyse the types.
        """
        self.visit_Node(node)
        if node.is_fused_index and not node.type.is_error:
            node = node.base
        elif node.memslice_ellipsis_noop:
            # memoryviewslice[...] expression, drop the IndexNode
            node = node.base
        return node
class FindInvalidUseOfFusedTypes(CythonTransform):
    """Report fused types that escape from fused-argument functions."""

    def visit_FuncDefNode(self, node):
        # Errors related to use in functions with fused args will already
        # have been detected
        if node.has_fused_arguments:
            return node
        if not node.is_generator_body and node.return_type.is_fused:
            error(node.pos, "Return type is not specified as argument type")
        else:
            self.visitchildren(node)
        return node

    def visit_ExprNode(self, node):
        if node.type and node.type.is_fused:
            error(node.pos, "Invalid use of fused types, type cannot be specialized")
        else:
            self.visitchildren(node)
        return node
class ExpandInplaceOperators(EnvTransform):
    """Expand 'lhs op= rhs' into 'lhs = lhs op rhs', evaluating any
    side-effecting subexpressions of lhs only once via LetRefNode temps."""

    def visit_InPlaceAssignmentNode(self, node):
        lhs = node.lhs
        rhs = node.rhs
        if lhs.type.is_cpp_class:
            # No getting around this exact operator here.
            return node
        if isinstance(lhs, ExprNodes.IndexNode) and lhs.is_buffer_access:
            # There is code to handle this case.
            return node

        env = self.current_env()

        def side_effect_free_reference(node, setting=False):
            # Return an (expression, temps) pair where the expression can be
            # evaluated twice (once as target, once as operand) without
            # repeating side effects; temps are the LetRefNodes introduced.
            if isinstance(node, ExprNodes.NameNode):
                return node, []
            elif node.type.is_pyobject and not setting:
                node = LetRefNode(node)
                return node, [node]
            elif isinstance(node, ExprNodes.IndexNode):
                if node.is_buffer_access:
                    raise ValueError("Buffer access")
                base, temps = side_effect_free_reference(node.base)
                index = LetRefNode(node.index)
                return ExprNodes.IndexNode(node.pos, base=base, index=index), temps + [index]
            elif isinstance(node, ExprNodes.AttributeNode):
                obj, temps = side_effect_free_reference(node.obj)
                return ExprNodes.AttributeNode(node.pos, obj=obj, attribute=node.attribute), temps
            else:
                node = LetRefNode(node)
                return node, [node]

        try:
            lhs, let_ref_nodes = side_effect_free_reference(lhs, setting=True)
        except ValueError:
            return node
        # 'dup' is a shallow clone of lhs used as the binop's left operand.
        dup = lhs.__class__(**lhs.__dict__)
        binop = ExprNodes.binop_node(node.pos,
                                     operator = node.operator,
                                     operand1 = dup,
                                     operand2 = rhs,
                                     inplace=True)
        # Manually analyse types for new node.
        lhs.analyse_target_types(env)
        dup.analyse_types(env)
        binop.analyse_operation(env)
        node = Nodes.SingleAssignmentNode(
            node.pos,
            lhs = lhs,
            rhs=binop.coerce_to(lhs.type, env))
        # Use LetRefNode to avoid side effects.
        let_ref_nodes.reverse()
        for t in let_ref_nodes:
            node = LetNode(t, node)
        return node

    def visit_ExprNode(self, node):
        # In-place assignments can't happen within an expression.
        return node
class AdjustDefByDirectives(CythonTransform, SkipDeclarations):
    """
    Adjust function and class definitions by the decorator directives:

        @cython.cfunc
        @cython.cclass
        @cython.ccall
    """

    def visit_ModuleNode(self, node):
        self.directives = node.directives
        self.in_py_class = False
        self.visitchildren(node)
        return node

    def visit_CompilerDirectivesNode(self, node):
        saved_directives = self.directives
        self.directives = node.directives
        self.visitchildren(node)
        self.directives = saved_directives
        return node

    def visit_DefNode(self, node):
        directives = self.directives
        if 'ccall' in directives:
            # Overridable C function with Python wrapper (cpdef semantics).
            node = node.as_cfunction(
                overridable=True, returns=directives.get('returns'))
            return self.visit(node)
        if 'cfunc' in directives:
            if self.in_py_class:
                error(node.pos, "cfunc directive is not allowed here")
            else:
                node = node.as_cfunction(
                    overridable=False, returns=directives.get('returns'))
                return self.visit(node)
        self.visitchildren(node)
        return node

    def visit_PyClassDefNode(self, node):
        if 'cclass' in self.directives:
            return self.visit(node.as_cclass())
        was_in_py_class = self.in_py_class
        self.in_py_class = True
        self.visitchildren(node)
        self.in_py_class = was_in_py_class
        return node

    def visit_CClassDefNode(self, node):
        was_in_py_class = self.in_py_class
        self.in_py_class = False
        self.visitchildren(node)
        self.in_py_class = was_in_py_class
        return node
class AlignFunctionDefinitions(CythonTransform):
    """
    This class takes the signatures from a .pxd file and applies them to
    the def methods in a .py file.
    """

    def visit_ModuleNode(self, node):
        self.scope = node.scope
        self.directives = node.directives
        self.imported_names = set()  # hack, see visit_FromImportStatNode()
        self.visitchildren(node)
        return node

    def visit_PyClassDefNode(self, node):
        # Upgrade a Python class to a cdef class if the .pxd declares one.
        pxd_def = self.scope.lookup(node.name)
        if pxd_def:
            if pxd_def.is_cclass:
                return self.visit_CClassDefNode(node.as_cclass(), pxd_def)
            elif not pxd_def.scope or not pxd_def.scope.is_builtin_scope:
                error(node.pos, "'%s' redeclared" % node.name)
                if pxd_def.pos:
                    error(pxd_def.pos, "previous declaration here")
                return None
        return node

    def visit_CClassDefNode(self, node, pxd_def=None):
        # Visit the class body inside the scope declared by the .pxd.
        if pxd_def is None:
            pxd_def = self.scope.lookup(node.class_name)
        if pxd_def:
            outer_scope = self.scope
            self.scope = pxd_def.type.scope
        self.visitchildren(node)
        if pxd_def:
            self.scope = outer_scope
        return node

    def visit_DefNode(self, node):
        # Turn a def into a cdef/cpdef function if the .pxd declared one,
        # or auto-cpdef module-level functions when the directive is on.
        pxd_def = self.scope.lookup(node.name)
        if pxd_def and (not pxd_def.scope or not pxd_def.scope.is_builtin_scope):
            if not pxd_def.is_cfunction:
                error(node.pos, "'%s' redeclared" % node.name)
                if pxd_def.pos:
                    error(pxd_def.pos, "previous declaration here")
                return None
            node = node.as_cfunction(pxd_def)
        elif (self.scope.is_module_scope and self.directives['auto_cpdef']
              and not node.name in self.imported_names
              and node.is_cdef_func_compatible()):
            # FIXME: cpdef-ing should be done in analyse_declarations()
            node = node.as_cfunction(scope=self.scope)
        # Enable this when nested cdef functions are allowed.
        # self.visitchildren(node)
        return node

    def visit_FromImportStatNode(self, node):
        # hack to prevent conditional import fallback functions from
        # being cpdef-ed (global Python variables currently conflict
        # with imports)
        if self.scope.is_module_scope:
            for name, _ in node.items:
                self.imported_names.add(name)
        return node

    def visit_ExprNode(self, node):
        # ignore lambdas and everything else that appears in expressions
        return node
class RemoveUnreachableCode(CythonTransform):
    """Drop statements after a terminator (return/raise/...) and propagate
    the is_terminator flag up through compound statements."""

    def visit_StatListNode(self, node):
        if not self.current_directives['remove_unreachable']:
            return node
        self.visitchildren(node)
        # Truncate the list after the first terminating statement.
        for idx, stat in enumerate(node.stats):
            idx += 1
            if stat.is_terminator:
                if idx < len(node.stats):
                    if self.current_directives['warn.unreachable']:
                        warning(node.stats[idx].pos, "Unreachable code", 2)
                    node.stats = node.stats[:idx]
                node.is_terminator = True
                break
        return node

    def visit_IfClauseNode(self, node):
        self.visitchildren(node)
        if node.body.is_terminator:
            node.is_terminator = True
        return node

    def visit_IfStatNode(self, node):
        self.visitchildren(node)
        # Terminates only if the else clause and every if clause terminate.
        if node.else_clause and node.else_clause.is_terminator:
            for clause in node.if_clauses:
                if not clause.is_terminator:
                    break
            else:
                node.is_terminator = True
        return node

    def visit_TryExceptStatNode(self, node):
        self.visitchildren(node)
        # An else clause is unreachable when the try body always terminates.
        if node.body.is_terminator and node.else_clause:
            if self.current_directives['warn.unreachable']:
                warning(node.else_clause.pos, "Unreachable code", 2)
            node.else_clause = None
        return node
class YieldNodeCollector(TreeVisitor):
    """Collect the yield expressions and return statements belonging to a
    single function body, without descending into nested scopes."""

    def __init__(self):
        super(YieldNodeCollector, self).__init__()
        self.yields = []
        self.returns = []
        self.has_return_value = False

    def visit_Node(self, node):
        self.visitchildren(node)

    def visit_YieldExprNode(self, node):
        self.yields.append(node)
        self.visitchildren(node)

    def visit_ReturnStatNode(self, node):
        self.visitchildren(node)
        if node.value:
            self.has_return_value = True
        self.returns.append(node)

    # Nested scopes own their yields/returns - do not descend.
    def visit_ClassDefNode(self, node):
        pass

    def visit_FuncDefNode(self, node):
        pass

    def visit_LambdaNode(self, node):
        pass

    def visit_GeneratorExpressionNode(self, node):
        pass
class MarkClosureVisitor(CythonTransform):
    """Mark functions that need a closure and convert generators.

    self.needs_closure is set to True after handling a nested function,
    lambda or class, so the enclosing function (whose visit reads the
    flag after visitchildren) knows it contains an inner scope.
    """

    def visit_ModuleNode(self, node):
        self.needs_closure = False
        self.visitchildren(node)
        return node

    def visit_FuncDefNode(self, node):
        self.needs_closure = False
        self.visitchildren(node)
        node.needs_closure = self.needs_closure
        # Signal the enclosing scope that it contains a nested function.
        self.needs_closure = True
        collector = YieldNodeCollector()
        collector.visitchildren(node)
        if collector.yields:
            if isinstance(node, Nodes.CFuncDefNode):
                # Will report error later
                return node
            for i, yield_expr in enumerate(collector.yields):
                yield_expr.label_num = i + 1 # no enumerate start arg in Py2.4
            for retnode in collector.returns:
                retnode.in_generator = True
            # Replace the def by a generator definition wrapping the body.
            gbody = Nodes.GeneratorBodyDefNode(
                pos=node.pos, name=node.name, body=node.body)
            generator = Nodes.GeneratorDefNode(
                pos=node.pos, name=node.name, args=node.args,
                star_arg=node.star_arg, starstar_arg=node.starstar_arg,
                doc=node.doc, decorators=node.decorators,
                gbody=gbody, lambda_name=node.lambda_name)
            return generator
        return node

    def visit_CFuncDefNode(self, node):
        self.visit_FuncDefNode(node)
        if node.needs_closure:
            error(node.pos, "closures inside cdef functions not yet supported")
        return node

    def visit_LambdaNode(self, node):
        self.needs_closure = False
        self.visitchildren(node)
        node.needs_closure = self.needs_closure
        self.needs_closure = True
        return node

    def visit_ClassDefNode(self, node):
        self.visitchildren(node)
        self.needs_closure = True
        return node
class CreateClosureClasses(CythonTransform):
    # Output closure classes in module scope for all functions
    # that really need it.

    def __init__(self, context):
        super(CreateClosureClasses, self).__init__(context)
        self.path = []          # stack of enclosing FuncDefNodes
        self.in_lambda = False

    def visit_ModuleNode(self, node):
        self.module_scope = node.scope
        self.visitchildren(node)
        return node

    def find_entries_used_in_closures(self, node):
        # Split the function's entries into those read from an outer
        # closure and those captured by inner functions.
        from_closure = []
        in_closure = []
        for name, entry in node.local_scope.entries.items():
            if entry.from_closure:
                from_closure.append((name, entry))
            elif entry.in_closure:
                in_closure.append((name, entry))
        return from_closure, in_closure

    def create_class_from_scope(self, node, target_module_scope, inner_node=None):
        # move local variables into closure
        if node.is_generator:
            # Generators keep all their locals in the closure so state
            # survives across yields.
            for entry in node.local_scope.entries.values():
                if not entry.from_closure:
                    entry.in_closure = True

        from_closure, in_closure = self.find_entries_used_in_closures(node)
        in_closure.sort()

        # Now from the beginning
        node.needs_closure = False
        node.needs_outer_scope = False

        func_scope = node.local_scope
        cscope = node.entry.scope
        while cscope.is_py_class_scope or cscope.is_c_class_scope:
            cscope = cscope.outer_scope

        if not from_closure and (self.path or inner_node):
            if not inner_node:
                if not node.py_cfunc_node:
                    raise InternalError("DefNode does not have assignment node")
                inner_node = node.py_cfunc_node
            inner_node.needs_self_code = False
            node.needs_outer_scope = False

        if node.is_generator:
            pass
        elif not in_closure and not from_closure:
            # Nothing captured in either direction: no closure class needed.
            return
        elif not in_closure:
            # Only reads outer state: pass the outer scope object through.
            func_scope.is_passthrough = True
            func_scope.scope_class = cscope.scope_class
            node.needs_outer_scope = True
            return

        # Synthesize an extension type holding the captured variables.
        as_name = '%s_%s' % (
            target_module_scope.next_id(Naming.closure_class_prefix),
            node.entry.cname)

        entry = target_module_scope.declare_c_class(
            name=as_name, pos=node.pos, defining=True,
            implementing=True)
        entry.type.is_final_type = True

        func_scope.scope_class = entry
        class_scope = entry.type.scope
        class_scope.is_internal = True
        if Options.closure_freelist_size:
            class_scope.directives['freelist'] = Options.closure_freelist_size

        if from_closure:
            assert cscope.is_closure_scope
            # Link to the enclosing closure object.
            class_scope.declare_var(pos=node.pos,
                                    name=Naming.outer_scope_cname,
                                    cname=Naming.outer_scope_cname,
                                    type=cscope.scope_class.type,
                                    is_cdef=True)
            node.needs_outer_scope = True
        for name, entry in in_closure:
            closure_entry = class_scope.declare_var(pos=entry.pos,
                                                    name=entry.name,
                                                    cname=entry.cname,
                                                    type=entry.type,
                                                    is_cdef=True)
            if entry.is_declared_generic:
                closure_entry.is_declared_generic = 1
        node.needs_closure = True
        # Do it here because other classes are already checked
        target_module_scope.check_c_class(func_scope.scope_class)

    def visit_LambdaNode(self, node):
        if not isinstance(node.def_node, Nodes.DefNode):
            # fused function, an error has been previously issued
            return node

        was_in_lambda = self.in_lambda
        self.in_lambda = True
        self.create_class_from_scope(node.def_node, self.module_scope, node)
        self.visitchildren(node)
        self.in_lambda = was_in_lambda
        return node

    def visit_FuncDefNode(self, node):
        if self.in_lambda:
            self.visitchildren(node)
            return node
        if node.needs_closure or self.path:
            self.create_class_from_scope(node, self.module_scope)
            self.path.append(node)
            self.visitchildren(node)
            self.path.pop()
        return node

    def visit_GeneratorBodyDefNode(self, node):
        self.visitchildren(node)
        return node

    def visit_CFuncDefNode(self, node):
        self.visitchildren(node)
        return node
class GilCheck(VisitorTransform):
"""
Call `node.gil_check(env)` on each node to make sure we hold the
GIL when we need it. Raise an error when on Python operations
inside a `nogil` environment.
Additionally, raise exceptions for closely nested with gil or with nogil
statements. The latter would abort Python.
"""
    def __call__(self, root):
        self.env_stack = [root.scope]
        self.nogil = False
        # True for 'cdef func() nogil:' functions, as the GIL may be held while
        # calling this function (thus contained 'nogil' blocks may be valid).
        self.nogil_declarator_only = False
        return super(GilCheck, self).__call__(root)
def visit_FuncDefNode(self, node):
self.env_stack.append(node.local_scope)
was_nogil = self.nogil
self.nogil = node.local_scope.nogil
if self.nogil:
self.nogil_declarator_only = True
if self.nogil and node.nogil_check:
node.nogil_check(node.local_scope)
self.visitchildren(node)
# This cannot be nested, so it doesn't need backup/restore
self.nogil_declarator_only = False
self.env_stack.pop()
self.nogil = was_nogil
return node
def visit_GILStatNode(self, node):
if self.nogil and node.nogil_check:
node.nogil_check()
was_nogil = self.nogil
self.nogil = (node.state == 'nogil')
if was_nogil == self.nogil and not self.nogil_declarator_only:
if not was_nogil:
error(node.pos, "Trying to acquire the GIL while it is "
"already held.")
else:
error(node.pos, "Trying to release the GIL while it was "
"previously released.")
if isinstance(node.finally_clause, Nodes.StatListNode):
# The finally clause of the GILStatNode is a GILExitNode,
# which is wrapped in a StatListNode. Just unpack that.
node.finally_clause, = node.finally_clause.stats
self.visitchildren(node)
self.nogil = was_nogil
return node
def visit_ParallelRangeNode(self, node):
if node.nogil:
node.nogil = False
node = Nodes.GILStatNode(node.pos, state='nogil', body=node)
return self.visit_GILStatNode(node)
if not self.nogil:
error(node.pos, "prange() can only be used without the GIL")
# Forget about any GIL-related errors that may occur in the body
return None
node.nogil_check(self.env_stack[-1])
self.visitchildren(node)
return node
def visit_ParallelWithBlockNode(self, node):
if not self.nogil:
error(node.pos, "The parallel section may only be used without "
"the GIL")
return None
if node.nogil_check:
# It does not currently implement this, but test for it anyway to
# avoid potential future surprises
node.nogil_check(self.env_stack[-1])
self.visitchildren(node)
return node
def visit_TryFinallyStatNode(self, node):
"""
Take care of try/finally statements in nogil code sections.
"""
if not self.nogil or isinstance(node, Nodes.GILStatNode):
return self.visit_Node(node)
node.nogil_check = None
node.is_try_finally_in_nogil = True
self.visitchildren(node)
return node
def visit_Node(self, node):
if self.env_stack and self.nogil and node.nogil_check:
node.nogil_check(self.env_stack[-1])
self.visitchildren(node)
node.in_nogil_context = self.nogil
return node
class TransformBuiltinMethods(EnvTransform):
    """Expand Cython-specific attributes and special builtin calls at
    compile time: ``cython.*`` names, and the builtins ``locals()``,
    ``vars()``, ``dir()``, ``exec``, ``eval()`` and zero-argument
    ``super()``.
    """

    def visit_SingleAssignmentNode(self, node):
        if node.declaration_only:
            # Pure declarations leave no runtime statement behind.
            return None
        else:
            self.visitchildren(node)
            return node

    def visit_AttributeNode(self, node):
        self.visitchildren(node)
        return self.visit_cython_attribute(node)

    def visit_NameNode(self, node):
        return self.visit_cython_attribute(node)

    def visit_cython_attribute(self, node):
        """Replace references to 'cython.<attr>' with dedicated nodes."""
        attribute = node.as_cython_attribute()
        if attribute:
            if attribute == u'compiled':
                node = ExprNodes.BoolNode(node.pos, value=True)
            elif attribute == u'__version__':
                import Cython
                node = ExprNodes.StringNode(node.pos, value=EncodedString(Cython.__version__))
            elif attribute == u'NULL':
                node = ExprNodes.NullNode(node.pos)
            elif attribute in (u'set', u'frozenset'):
                node = ExprNodes.NameNode(node.pos, name=EncodedString(attribute),
                                          entry=self.current_env().builtin_scope().lookup_here(attribute))
            elif PyrexTypes.parse_basic_type(attribute):
                pass
            elif self.context.cython_scope.lookup_qualified_name(attribute):
                pass
            else:
                error(node.pos, u"'%s' not a valid cython attribute or is being used incorrectly" % attribute)
        return node

    def visit_ExecStatNode(self, node):
        lenv = self.current_env()
        self.visitchildren(node)
        if len(node.args) == 1:
            # Supply the implicit globals (and locals in non-module scopes).
            node.args.append(ExprNodes.GlobalsExprNode(node.pos))
            if not lenv.is_module_scope:
                node.args.append(
                    ExprNodes.LocalsExprNode(
                        node.pos, self.current_scope_node(), lenv))
        return node

    def _inject_locals(self, node, func_name):
        # locals()/dir()/vars() builtins
        lenv = self.current_env()
        entry = lenv.lookup_here(func_name)
        if entry:
            # not the builtin
            return node
        pos = node.pos
        if func_name in ('locals', 'vars'):
            if func_name == 'locals' and len(node.args) > 0:
                # BUG FIX: was error(self.pos, ...); the transform object has
                # no 'pos' attribute - the position belongs to the call node.
                error(node.pos, "Builtin 'locals()' called with wrong number of args, expected 0, got %d"
                      % len(node.args))
                return node
            elif func_name == 'vars':
                if len(node.args) > 1:
                    # BUG FIX: was error(self.pos, ...), see above.
                    error(node.pos, "Builtin 'vars()' called with wrong number of args, expected 0-1, got %d"
                          % len(node.args))
                if len(node.args) > 0:
                    return node  # nothing to do
            return ExprNodes.LocalsExprNode(pos, self.current_scope_node(), lenv)
        else:  # dir()
            if len(node.args) > 1:
                # BUG FIX: was error(self.pos, ...), see above.
                error(node.pos, "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d"
                      % len(node.args))
            if len(node.args) > 0:
                # optimised in Builtin.py
                return node
            if lenv.is_py_class_scope or lenv.is_module_scope:
                if lenv.is_py_class_scope:
                    pyclass = self.current_scope_node()
                    locals_dict = ExprNodes.CloneNode(pyclass.dict)
                else:
                    locals_dict = ExprNodes.GlobalsExprNode(pos)
                return ExprNodes.SortedDictKeysNode(locals_dict)
            local_names = [var.name for var in lenv.entries.values() if var.name]
            items = [ExprNodes.IdentifierStringNode(pos, value=var)
                     for var in local_names]
            return ExprNodes.ListNode(pos, args=items)

    def visit_PrimaryCmpNode(self, node):
        # special case: for in/not-in test, we do not need to sort locals()
        self.visitchildren(node)
        if node.operator in 'not_in':  # matches both 'in' and 'not_in'
            if isinstance(node.operand2, ExprNodes.SortedDictKeysNode):
                arg = node.operand2.arg
                if isinstance(arg, ExprNodes.NoneCheckNode):
                    arg = arg.arg
                node.operand2 = arg
        return node

    def visit_CascadedCmpNode(self, node):
        return self.visit_PrimaryCmpNode(node)

    def _inject_eval(self, node, func_name):
        lenv = self.current_env()
        entry = lenv.lookup_here(func_name)
        if entry or len(node.args) != 1:
            # Shadowed builtin or explicit globals/locals given: leave as-is.
            return node
        # Inject globals and locals
        node.args.append(ExprNodes.GlobalsExprNode(node.pos))
        if not lenv.is_module_scope:
            node.args.append(
                ExprNodes.LocalsExprNode(
                    node.pos, self.current_scope_node(), lenv))
        return node

    def _inject_super(self, node, func_name):
        lenv = self.current_env()
        entry = lenv.lookup_here(func_name)
        if entry or node.args:
            # Shadowed builtin or explicit-argument super(): leave as-is.
            return node
        # Inject no-args super
        def_node = self.current_scope_node()
        if (not isinstance(def_node, Nodes.DefNode) or not def_node.args or
                len(self.env_stack) < 2):
            return node
        class_node, class_scope = self.env_stack[-2]
        if class_scope.is_py_class_scope:
            def_node.requires_classobj = True
            class_node.class_cell.is_active = True
            node.args = [
                ExprNodes.ClassCellNode(
                    node.pos, is_generator=def_node.is_generator),
                ExprNodes.NameNode(node.pos, name=def_node.args[0].name)
                ]
        elif class_scope.is_c_class_scope:
            node.args = [
                ExprNodes.NameNode(
                    node.pos, name=class_node.scope.name,
                    entry=class_node.entry),
                ExprNodes.NameNode(node.pos, name=def_node.args[0].name)
                ]
        return node

    def visit_SimpleCallNode(self, node):
        # cython.foo
        function = node.function.as_cython_attribute()
        if function:
            if function in InterpretCompilerDirectives.unop_method_nodes:
                if len(node.args) != 1:
                    error(node.function.pos, u"%s() takes exactly one argument" % function)
                else:
                    node = InterpretCompilerDirectives.unop_method_nodes[function](node.function.pos, operand=node.args[0])
            elif function in InterpretCompilerDirectives.binop_method_nodes:
                if len(node.args) != 2:
                    error(node.function.pos, u"%s() takes exactly two arguments" % function)
                else:
                    node = InterpretCompilerDirectives.binop_method_nodes[function](node.function.pos, operand1=node.args[0], operand2=node.args[1])
            elif function == u'cast':
                if len(node.args) != 2:
                    error(node.function.pos, u"cast() takes exactly two arguments")
                else:
                    type = node.args[0].analyse_as_type(self.current_env())
                    if type:
                        node = ExprNodes.TypecastNode(node.function.pos, type=type, operand=node.args[1])
                    else:
                        error(node.args[0].pos, "Not a type")
            elif function == u'sizeof':
                if len(node.args) != 1:
                    error(node.function.pos, u"sizeof() takes exactly one argument")
                else:
                    type = node.args[0].analyse_as_type(self.current_env())
                    if type:
                        node = ExprNodes.SizeofTypeNode(node.function.pos, arg_type=type)
                    else:
                        node = ExprNodes.SizeofVarNode(node.function.pos, operand=node.args[0])
            elif function == 'cmod':
                if len(node.args) != 2:
                    error(node.function.pos, u"cmod() takes exactly two arguments")
                else:
                    node = ExprNodes.binop_node(node.function.pos, '%', node.args[0], node.args[1])
                    node.cdivision = True
            elif function == 'cdiv':
                if len(node.args) != 2:
                    error(node.function.pos, u"cdiv() takes exactly two arguments")
                else:
                    node = ExprNodes.binop_node(node.function.pos, '/', node.args[0], node.args[1])
                    node.cdivision = True
            elif function == u'set':
                node.function = ExprNodes.NameNode(node.pos, name=EncodedString('set'))
            elif self.context.cython_scope.lookup_qualified_name(function):
                pass
            else:
                error(node.function.pos,
                      u"'%s' not a valid cython language construct" % function)

        self.visitchildren(node)

        if isinstance(node, ExprNodes.SimpleCallNode) and node.function.is_name:
            func_name = node.function.name
            if func_name in ('dir', 'locals', 'vars'):
                return self._inject_locals(node, func_name)
            if func_name == 'eval':
                return self._inject_eval(node, func_name)
            if func_name == 'super':
                return self._inject_super(node, func_name)
        return node
class ReplaceFusedTypeChecks(VisitorTransform):
    """
    This is not a transform in the pipeline. It is invoked on the specific
    versions of a cdef function with fused argument types. It filters out any
    type branches that don't match. e.g.

        if fused_t is mytype:
            ...
        elif fused_t in other_fused_type:
            ...
    """
    def __init__(self, local_scope):
        super(ReplaceFusedTypeChecks, self).__init__()
        self.local_scope = local_scope
        # defer the import until now to avoid circular import time dependencies
        from Cython.Compiler import Optimize
        self.transform = Optimize.ConstantFolding(reevaluate=True)

    def visit_IfStatNode(self, node):
        """
        Filters out any if clauses with false compile time type check
        expression.
        """
        self.visitchildren(node)
        # Constant folding prunes branches whose condition became a
        # compile-time constant below.
        return self.transform(node)

    def visit_PrimaryCmpNode(self, node):
        # Reduce type comparisons on fused types to compile-time booleans.
        type1 = node.operand1.analyse_as_type(self.local_scope)
        type2 = node.operand2.analyse_as_type(self.local_scope)
        if type1 and type2:
            false_node = ExprNodes.BoolNode(node.pos, value=False)
            true_node = ExprNodes.BoolNode(node.pos, value=True)

            type1 = self.specialize_type(type1, node.operand1.pos)
            op = node.operator

            if op in ('is', 'is_not', '==', '!='):
                type2 = self.specialize_type(type2, node.operand2.pos)

                is_same = type1.same_as(type2)
                eq = op in ('is', '==')

                if (is_same and eq) or (not is_same and not eq):
                    return true_node

            elif op in ('in', 'not_in'):
                # We have to do an instance check directly, as operand2
                # needs to be a fused type and not a type with a subtype
                # that is fused. First unpack the typedef
                if isinstance(type2, PyrexTypes.CTypedefType):
                    type2 = type2.typedef_base_type

                if type1.is_fused:
                    error(node.operand1.pos, "Type is fused")
                elif not type2.is_fused:
                    error(node.operand2.pos,
                          "Can only use 'in' or 'not in' on a fused type")
                else:
                    types = PyrexTypes.get_specialized_types(type2)

                    for specialized_type in types:
                        if type1.same_as(specialized_type):
                            if op == 'in':
                                return true_node
                            else:
                                return false_node

                    if op == 'not_in':
                        return true_node

            return false_node

        return node

    def specialize_type(self, type, pos):
        """Map a (possibly fused) type to its concrete specialization."""
        try:
            return type.specialize(self.local_scope.fused_to_specific)
        except KeyError:
            error(pos, "Type is not specific")
        return type

    def visit_Node(self, node):
        # Default: plain traversal.
        self.visitchildren(node)
        return node
class DebugTransform(CythonTransform):
    """
    Write debug information for this Cython module.
    """

    def __init__(self, context, options, result):
        super(DebugTransform, self).__init__(context)
        # Qualified names already serialized (avoids double output).
        self.visited = set()
        # our treebuilder and debug output writer
        # (see Cython.Debugger.debug_output.CythonDebugWriter)
        self.tb = self.context.gdb_debug_outputwriter
        #self.c_output_file = options.output_file
        self.c_output_file = result.c_file

        # Closure support, basically treat nested functions as if the AST were
        # never nested
        self.nested_funcdefs = []

        # tells visit_NameNode whether it should register step-into functions
        self.register_stepinto = False

    def visit_ModuleNode(self, node):
        """Serialize the whole module: functions first, then globals."""
        self.tb.module_name = node.full_module_name
        attrs = dict(
            module_name=node.full_module_name,
            filename=node.pos[0].filename,
            c_filename=self.c_output_file)

        self.tb.start('Module', attrs)

        # serialize functions
        self.tb.start('Functions')
        # First, serialize functions normally...
        self.visitchildren(node)

        # ... then, serialize nested functions
        for nested_funcdef in self.nested_funcdefs:
            self.visit_FuncDefNode(nested_funcdef)

        self.register_stepinto = True
        self.serialize_modulenode_as_function(node)
        self.register_stepinto = False
        self.tb.end('Functions')

        # 2.3 compatibility. Serialize global variables
        self.tb.start('Globals')
        entries = {}

        for k, v in node.scope.entries.iteritems():
            # Skip compiler-generated names, functions and extension types;
            # only plain global variables are listed here.
            if (v.qualified_name not in self.visited and not
                    v.name.startswith('__pyx_') and not
                    v.type.is_cfunction and not
                    v.type.is_extension_type):
                entries[k]= v

        self.serialize_local_variables(entries)
        self.tb.end('Globals')
        # self.tb.end('Module') # end Module after the line number mapping in
        # Cython.Compiler.ModuleNode.ModuleNode._serialize_lineno_map
        return node

    def visit_FuncDefNode(self, node):
        """Serialize one function: attributes, locals, args, step-into set."""
        self.visited.add(node.local_scope.qualified_name)

        if getattr(node, 'is_wrapper', False):
            return node

        if self.register_stepinto:
            # We're inside another function's serialization; defer this
            # nested function so it is emitted at top level later.
            self.nested_funcdefs.append(node)
            return node

        # node.entry.visibility = 'extern'
        if node.py_func is None:
            pf_cname = ''
        else:
            pf_cname = node.py_func.entry.func_cname

        attrs = dict(
            name=node.entry.name or getattr(node, 'name', '<unknown>'),
            cname=node.entry.func_cname,
            pf_cname=pf_cname,
            qualified_name=node.local_scope.qualified_name,
            lineno=str(node.pos[1]))

        self.tb.start('Function', attrs=attrs)

        self.tb.start('Locals')
        self.serialize_local_variables(node.local_scope.entries)
        self.tb.end('Locals')

        self.tb.start('Arguments')
        for arg in node.local_scope.arg_entries:
            self.tb.start(arg.name)
            self.tb.end(arg.name)
        self.tb.end('Arguments')

        self.tb.start('StepIntoFunctions')
        self.register_stepinto = True
        self.visitchildren(node)
        self.register_stepinto = False
        self.tb.end('StepIntoFunctions')

        self.tb.end('Function')
        return node

    def visit_NameNode(self, node):
        # Record called C functions so the debugger can step into them.
        if (self.register_stepinto and
                node.type.is_cfunction and
                getattr(node, 'is_called', False) and
                node.entry.func_cname is not None):
            # don't check node.entry.in_cinclude, as 'cdef extern: ...'
            # declared functions are not 'in_cinclude'.
            # This means we will list called 'cdef' functions as
            # "step into functions", but this is not an issue as they will be
            # recognized as Cython functions anyway.
            attrs = dict(name=node.entry.func_cname)
            self.tb.start('StepIntoFunction', attrs=attrs)
            self.tb.end('StepIntoFunction')

        self.visitchildren(node)
        return node

    def serialize_modulenode_as_function(self, node):
        """
        Serialize the module-level code as a function so the debugger will know
        it's a "relevant frame" and it will know where to set the breakpoint
        for 'break modulename'.
        """
        name = node.full_module_name.rpartition('.')[-1]

        # Module init entry points for Python 2 and 3 respectively.
        cname_py2 = 'init' + name
        cname_py3 = 'PyInit_' + name

        py2_attrs = dict(
            name=name,
            cname=cname_py2,
            pf_cname='',
            # Ignore the qualified_name, breakpoints should be set using
            # `cy break modulename:lineno` for module-level breakpoints.
            qualified_name='',
            lineno='1',
            is_initmodule_function="True",
        )

        py3_attrs = dict(py2_attrs, cname=cname_py3)

        self._serialize_modulenode_as_function(node, py2_attrs)
        self._serialize_modulenode_as_function(node, py3_attrs)

    def _serialize_modulenode_as_function(self, node, attrs):
        # Emit a Function element with the module scope's variables.
        self.tb.start('Function', attrs=attrs)

        self.tb.start('Locals')
        self.serialize_local_variables(node.scope.entries)
        self.tb.end('Locals')

        self.tb.start('Arguments')
        self.tb.end('Arguments')

        self.tb.start('StepIntoFunctions')
        self.register_stepinto = True
        self.visitchildren(node)
        self.register_stepinto = False
        self.tb.end('StepIntoFunctions')

        self.tb.end('Function')

    def serialize_local_variables(self, entries):
        """Emit a LocalVar element for every named variable entry."""
        for entry in entries.values():
            if not entry.cname:
                # not a local variable
                continue
            if entry.type.is_pyobject:
                vartype = 'PythonObject'
            else:
                vartype = 'CObject'

            if entry.from_closure:
                # We're dealing with a closure where a variable from an outer
                # scope is accessed, get it from the scope object.
                cname = '%s->%s' % (Naming.cur_scope_cname,
                                    entry.outer_entry.cname)

                qname = '%s.%s.%s' % (entry.scope.outer_scope.qualified_name,
                                      entry.scope.name,
                                      entry.name)
            elif entry.in_closure:
                cname = '%s->%s' % (Naming.cur_scope_cname,
                                    entry.cname)
                qname = entry.qualified_name
            else:
                cname = entry.cname
                qname = entry.qualified_name

            if not entry.pos:
                # this happens for variables that are not in the user's code,
                # e.g. for the global __builtins__, __doc__, etc. We can just
                # set the lineno to 0 for those.
                lineno = '0'
            else:
                lineno = str(entry.pos[1])

            attrs = dict(
                name=entry.name,
                cname=cname,
                qualified_name=qname,
                type=vartype,
                lineno=lineno)

            self.tb.start('LocalVar', attrs)
            self.tb.end('LocalVar')
| bsd-3-clause |
dsiddharth/access-keys | keystone/tests/_sql_livetest.py | 8 | 1451 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone import config
from keystone.tests import test_sql_upgrade
CONF = config.CONF
class PostgresqlMigrateTests(test_sql_upgrade.SqlUpgradeTests):
    """Run the SQL upgrade tests against a live PostgreSQL backend."""

    def config_files(self):
        """Extend the base config file list with the PostgreSQL backend config."""
        base_files = list(test_sql_upgrade.SqlUpgradeTests._config_file_list)
        return base_files + ["backend_postgresql.conf"]
class MysqlMigrateTests(test_sql_upgrade.SqlUpgradeTests):
    """Run the SQL upgrade tests against a live MySQL backend."""

    def config_files(self):
        """Extend the base config file list with the MySQL backend config."""
        base_files = list(test_sql_upgrade.SqlUpgradeTests._config_file_list)
        return base_files + ["backend_mysql.conf"]
class Db2MigrateTests(test_sql_upgrade.SqlUpgradeTests):
    """Run the SQL upgrade tests against a live DB2 backend."""

    def config_files(self):
        """Extend the base config file list with the DB2 backend config."""
        base_files = list(test_sql_upgrade.SqlUpgradeTests._config_file_list)
        return base_files + ["backend_db2.conf"]
| apache-2.0 |
SmartPeople/zulip | zerver/lib/url_preview/preview.py | 27 | 2235 | from __future__ import absolute_import
import re
import logging
import traceback
from typing import Any, Optional, Text
from typing.re import Match
import requests
from zerver.lib.cache import cache_with_key, get_cache_with_key
from zerver.lib.url_preview.oembed import get_oembed_data
from zerver.lib.url_preview.parsers import OpenGraphParser, GenericParser
CACHE_NAME = "database"

# Based on django.core.validators.URLValidator, with ftp support removed.
_LINK_PATTERN = (
    r'^(?:http)s?://'  # http:// or https://
    r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
    r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
    r'(?::\d+)?'  # optional port
    r'(?:/?|[/?]\S+)$'
)
link_regex = re.compile(_LINK_PATTERN, re.IGNORECASE)


def is_link(url):
    # type: (Text) -> Match[Text]
    """Return a regex match when *url* looks like an http(s) URL, else None."""
    return link_regex.match(str(url))
def cache_key_func(url):
    # type: (Text) -> Text
    # The URL itself serves as the cache key for its preview data.
    return url
@cache_with_key(cache_key_func, cache_name=CACHE_NAME, with_statsd_key="urlpreview_data")
def get_link_embed_data(url, maxwidth=640, maxheight=480):
    # type: (Text, Optional[int], Optional[int]) -> Any
    """Fetch preview metadata (e.g. title, description, image) for *url*.

    Returns None for non-http(s) URLs or when the initial OEmbed fetch
    fails with a request error. Results are cached via @cache_with_key.
    """
    if not is_link(url):
        return None
    # Fetch information from URL.
    # We are using three sources in next order:
    # 1. OEmbed
    # 2. Open Graph
    # 3. Meta tags
    try:
        data = get_oembed_data(url, maxwidth=maxwidth, maxheight=maxheight)
    except requests.exceptions.RequestException:
        msg = 'Unable to fetch information from url {0}, traceback: {1}'
        logging.error(msg.format(url, traceback.format_exc()))
        return None
    data = data or {}
    response = requests.get(url)
    if response.ok:
        og_data = OpenGraphParser(response.text).extract_data()
        if og_data:
            data.update(og_data)
        # Fall back to generic page metadata for any field OEmbed/OpenGraph
        # did not provide.
        generic_data = GenericParser(response.text).extract_data() or {}
        for key in ['title', 'description', 'image']:
            if not data.get(key) and generic_data.get(key):
                data[key] = generic_data[key]
    return data
@get_cache_with_key(cache_key_func, cache_name=CACHE_NAME)
def link_embed_data_from_cache(url, maxwidth=640, maxheight=480):
    # type: (Text, Optional[int], Optional[int]) -> Any
    # Cache-read-only accessor: the body is intentionally a bare return;
    # presumably @get_cache_with_key returns the cached value before the
    # body runs - confirm against the decorator's implementation.
    return
| apache-2.0 |
nagyistoce/electron-atom-shell | script/upload.py | 5 | 5904 | #!/usr/bin/env python
import argparse
import errno
import os
import subprocess
import sys
import tempfile
from lib.config import DIST_ARCH, TARGET_PLATFORM
from lib.util import execute, get_atom_shell_version, parse_version, \
get_chromedriver_version, scoped_cwd
from lib.github import GitHub
# Target GitHub repository and the versions reported by the local checkout.
ATOM_SHELL_REPO = 'atom/atom-shell'
ATOM_SHELL_VERSION = get_atom_shell_version()
CHROMEDRIVER_VERSION = get_chromedriver_version()

# Directory layout of the build/dist tree relative to this script.
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'Release')
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')

# Artifact names embed version, platform and architecture so uploads from
# different builders do not collide.
DIST_NAME = 'atom-shell-{0}-{1}-{2}.zip'.format(ATOM_SHELL_VERSION,
                                                TARGET_PLATFORM,
                                                DIST_ARCH)
SYMBOLS_NAME = 'atom-shell-{0}-{1}-{2}-symbols.zip'.format(ATOM_SHELL_VERSION,
                                                           TARGET_PLATFORM,
                                                           DIST_ARCH)
CHROMEDRIVER_NAME = 'chromedriver-{0}-{1}-{2}.zip'.format(CHROMEDRIVER_VERSION,
                                                          TARGET_PLATFORM,
                                                          DIST_ARCH)
def main():
  """Build (if stale), verify the version tag, and upload release assets.

  Returns 1 when the tag does not match the built binary's version.
  """
  args = parse_args()

  # Rebuild the distribution if it is older than the latest commit.
  if not dist_newer_than_head():
    create_dist = os.path.join(SOURCE_ROOT, 'script', 'create-dist.py')
    execute([sys.executable, create_dist])

  build_version = get_atom_shell_build_version()
  if not ATOM_SHELL_VERSION.startswith(build_version):
    error = 'Tag name ({0}) should match build version ({1})\n'.format(
        ATOM_SHELL_VERSION, build_version)
    sys.stderr.write(error)
    sys.stderr.flush()
    return 1

  # Upload atom-shell with GitHub Releases API.
  github = GitHub(auth_token())
  release_id = create_or_get_release_draft(github, args.version)
  upload_atom_shell(github, release_id, os.path.join(DIST_DIR, DIST_NAME))
  upload_atom_shell(github, release_id, os.path.join(DIST_DIR, SYMBOLS_NAME))

  # Upload chromedriver for minor version update.
  if parse_version(args.version)[2] == '0':
    upload_atom_shell(github, release_id,
                      os.path.join(DIST_DIR, CHROMEDRIVER_NAME))

  if args.publish_release:
    if TARGET_PLATFORM == 'win32':
      # Upload PDBs to Windows symbol server.
      execute([sys.executable,
               os.path.join(SOURCE_ROOT, 'script', 'upload-windows-pdb.py')])

    # Upload node headers.
    execute([sys.executable,
             os.path.join(SOURCE_ROOT, 'script', 'upload-node-headers.py'),
             '-v', ATOM_SHELL_VERSION])

    # Press the publish button.
    publish_release(github, release_id)
def parse_args():
  """Build and parse the command-line options for this upload script."""
  parser = argparse.ArgumentParser(description='upload distribution file')
  parser.add_argument('-v', '--version',
                      default=ATOM_SHELL_VERSION,
                      help='Specify the version')
  parser.add_argument('-p', '--publish-release',
                      action='store_true',
                      help='Publish the release')
  return parser.parse_args()
def get_atom_shell_build_version():
  """Return the version string reported by the built binary via --version."""
  # The binary's path differs per platform.
  if TARGET_PLATFORM == 'darwin':
    atom_shell = os.path.join(SOURCE_ROOT, 'out', 'Release', 'Atom.app',
                              'Contents', 'MacOS', 'Atom')
  elif TARGET_PLATFORM == 'win32':
    atom_shell = os.path.join(SOURCE_ROOT, 'out', 'Release', 'atom.exe')
  else:
    atom_shell = os.path.join(SOURCE_ROOT, 'out', 'Release', 'atom')
  return subprocess.check_output([atom_shell, '--version']).strip()
def dist_newer_than_head():
  """Return True when the dist zip is newer than the latest git commit.

  A missing dist file (ENOENT) means a build is needed, so False is
  returned; any other OSError is re-raised.
  """
  with scoped_cwd(SOURCE_ROOT):
    try:
      # %at = author timestamp (unix epoch) of the most recent commit.
      head_time = subprocess.check_output(['git', 'log', '--pretty=format:%at',
                                           '-n', '1']).strip()
      dist_time = os.path.getmtime(os.path.join(DIST_DIR, DIST_NAME))
    except OSError as e:
      if e.errno != errno.ENOENT:
        raise
      return False

  return dist_time > int(head_time)
def get_text_with_editor(name):
  """Open $EDITOR (default nano) on a temp file and return the typed text.

  Lines starting with '#' are treated as template comments and stripped,
  similar to a git commit message template.
  """
  editor = os.environ.get('EDITOR', 'nano')
  initial_message = '\n# Please enter the body of your release note for %s.' \
                    % name

  # delete=False so the file survives closing and can be reopened after
  # the editor subprocess exits.
  t = tempfile.NamedTemporaryFile(suffix='.tmp', delete=False)
  t.write(initial_message)
  t.close()
  subprocess.call([editor, t.name])

  text = ''
  for line in open(t.name, 'r'):
    if len(line) == 0 or line[0] != '#':
      text += line

  os.unlink(t.name)
  return text
def create_or_get_release_draft(github, tag):
  """Return the id of the GitHub release for *tag*, creating a draft if absent."""
  name = 'atom-shell %s' % tag
  releases = github.repos(ATOM_SHELL_REPO).releases.get()
  for release in releases:
    # The untagged commit doesn't have a matching tag_name, so also check name.
    if release['tag_name'] == tag or release['name'] == name:
      return release['id']

  return create_release_draft(github, tag)
def create_release_draft(github, tag):
  """Create a draft release for *tag* with a user-edited release note."""
  name = 'atom-shell %s' % tag
  body = get_text_with_editor(name)
  if body == '':
    # An empty note aborts the upload instead of creating a blank draft.
    sys.stderr.write('Quit due to empty release note.\n')
    sys.exit(0)
  data = dict(tag_name=tag, name=name, body=body, draft=True)
  r = github.repos(ATOM_SHELL_REPO).releases.post(data=data)
  return r['id']
def upload_atom_shell(github, release_id, file_path):
  """Attach *file_path* as a zip asset to GitHub release *release_id*."""
  params = {'name': os.path.basename(file_path)}
  headers = {'Content-Type': 'application/zip'}
  with open(file_path, 'rb') as f:
    # NOTE(review): verify=False disables TLS certificate verification for
    # this upload request - confirm whether this is still required.
    github.repos(ATOM_SHELL_REPO).releases(release_id).assets.post(
        params=params, headers=headers, data=f, verify=False)
def publish_release(github, release_id):
  """Clear the draft flag on the release, making it public."""
  data = dict(draft=False)
  github.repos(ATOM_SHELL_REPO).releases(release_id).patch(data=data)
def auth_token():
  """Return the GitHub API token from the environment, failing loudly."""
  token = os.environ.get('ATOM_SHELL_GITHUB_TOKEN')
  assert token, ('Error: Please set the $ATOM_SHELL_GITHUB_TOKEN '
                 'environment variable, which is your personal token')
  return token
if __name__ == '__main__':
  # 'sys' is already imported at module level; the redundant local
  # 'import sys' has been removed.
  sys.exit(main())
| mit |
aioue/ansible | lib/ansible/module_utils/facts/network/freebsd.py | 232 | 1190 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.network.base import NetworkCollector
from ansible.module_utils.facts.network.generic_bsd import GenericBsdIfconfigNetwork
class FreeBSDNetwork(GenericBsdIfconfigNetwork):
    """
    This is the FreeBSD Network Class.
    It uses the GenericBsdIfconfigNetwork unchanged.
    """
    # Matched against the detected platform to select this implementation.
    platform = 'FreeBSD'
class FreeBSDNetworkCollector(NetworkCollector):
    """Network fact collector for FreeBSD hosts."""
    # The fact class instantiated by the collector framework, and the
    # platform string it is registered for.
    _fact_class = FreeBSDNetwork
    _platform = 'FreeBSD'
| gpl-3.0 |
Cheridan/-tg-station | tools/mapmerge/maintloot.py | 118 | 1940 | #!/usr/bin/env python3
import argparse
import collections
import re
from map_helpers import parse_map
if __name__=='__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("mapfile")
    args = parser.parse_args()

    M = parse_map(args.mapfile)
    # Format of this map parsing
    # dict(coordinates->mapkey)
    grid = M["grid"]
    # dict(mapkey->tilecontents); tilecontents are a list of atoms, path is first
    dictionary = M["dictionary"]

    lootdrop_path = "/obj/effect/spawner/lootdrop/maintenance"
    area_path = "/area"

    # Collect every tile-definition key that contains a maintenance lootdrop.
    follow_up = set()
    for key, atoms in dictionary.items():
        # atom is a string
        for atom in atoms:
            if atom.startswith(lootdrop_path):
                if key in follow_up:
                    # BUG FIX: the original format string had no .format(key),
                    # so a literal '{}' was printed instead of the tile key.
                    print("Hey, '{}' has multiple maintlootdrops...".format(key))
                follow_up.add(key)

    # Count the number of times each map key appears
    appears = collections.Counter()
    for coord, key in grid.items():
        if key in follow_up:
            appears[key] += 1

    tally = collections.Counter()
    for key in follow_up:
        # Because I am a terrible person, and don't actually care about
        # building a proper parser for this "object notation" that byond
        # uses, I'm just going to cheat.
        area = None
        count = 0
        for atom in dictionary[key]:
            if atom.startswith(lootdrop_path):
                # A spawner defaults to one drop unless it sets 'lootcount'.
                amount = 1
                mo = re.search(r'lootcount = (\d+)', atom)
                if mo is not None:
                    amount = int(mo.group(1))
                count += amount
            elif atom.startswith(area_path):
                area = atom
        # Multiply by the number of times this model is used
        tally[area] += (count * appears[key])

    for area, total in tally.items():
        print("{}: {}".format(area, total))
    print("TOTAL: {}".format(sum(tally.values())))
| agpl-3.0 |
t-hey/QGIS-Original | python/plugins/processing/algs/qgis/TinInterpolation.py | 2 | 8218 | # -*- coding: utf-8 -*-
"""
***************************************************************************
TinInterpolation.py
---------------------
Date : October 2016
Copyright : (C) 2016 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'October 2016'
__copyright__ = '(C) 2016, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsProcessingUtils,
QgsProcessing,
QgsProcessingParameterDefinition,
QgsProcessingParameterEnum,
QgsProcessingParameterNumber,
QgsProcessingParameterExtent,
QgsProcessingParameterRasterDestination,
QgsWkbTypes,
QgsProcessingParameterFeatureSink,
QgsProcessingException,
QgsCoordinateReferenceSystem)
from qgis.analysis import (QgsInterpolator,
QgsTinInterpolator,
QgsGridFileWriter)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class ParameterInterpolationData(QgsProcessingParameterDefinition):
    """Custom processing parameter carrying interpolation input data.

    The value is a string of ';'-separated records, each of the form
    'source, valueSource, attribute, sourceType' (see dataToString).
    """

    def __init__(self, name='', description=''):
        super().__init__(name, description)
        # Pair this parameter type with its dedicated Qt widget wrapper.
        self.setMetadata({
            'widget_wrapper': 'processing.algs.qgis.ui.InterpolationDataWidget.InterpolationDataWidgetWrapper'
        })

    def type(self):
        return 'tin_interpolation_data'

    def clone(self):
        return ParameterInterpolationData(self.name(), self.description())

    @staticmethod
    def parseValue(value):
        """Normalize *value* to the serialized string form, or None if empty.

        Strings are assumed to already be serialized; other iterables are
        converted via dataToString.
        """
        if value is None or value == '':
            return None
        if isinstance(value, str):
            # Non-empty string: already serialized. (The original code had an
            # unreachable "else None" branch here, removed.)
            return value
        return ParameterInterpolationData.dataToString(value)

    @staticmethod
    def dataToString(data):
        """Serialize an iterable of 4-tuples into the ';'-joined record string."""
        return ';'.join('{}, {}, {:d}, {:d}'.format(c[0], c[1], c[2], c[3])
                        for c in data)
class TinInterpolation(QgisAlgorithm):
    """Processing algorithm performing TIN (triangulated irregular network)
    interpolation over one or more vector inputs."""

    # Parameter/output identifiers.
    INTERPOLATION_DATA = 'INTERPOLATION_DATA'
    METHOD = 'METHOD'
    COLUMNS = 'COLUMNS'
    ROWS = 'ROWS'
    EXTENT = 'EXTENT'
    OUTPUT = 'OUTPUT'
    TRIANGULATION = 'TRIANGULATION'

    def icon(self):
        return QIcon(os.path.join(pluginPath, 'images', 'interpolation.png'))

    def group(self):
        return self.tr('Interpolation')

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        """Declare the algorithm's inputs and outputs."""
        self.METHODS = [self.tr('Linear'),
                        self.tr('Clough-Toucher (cubic)')
                        ]

        self.addParameter(ParameterInterpolationData(self.INTERPOLATION_DATA,
                                                     self.tr('Input layer(s)')))
        self.addParameter(QgsProcessingParameterEnum(self.METHOD,
                                                     self.tr('Interpolation method'),
                                                     options=self.METHODS,
                                                     defaultValue=0))
        self.addParameter(QgsProcessingParameterNumber(self.COLUMNS,
                                                       self.tr('Number of columns'),
                                                       minValue=0, maxValue=10000000, defaultValue=300))
        self.addParameter(QgsProcessingParameterNumber(self.ROWS,
                                                       self.tr('Number of rows'),
                                                       minValue=0, maxValue=10000000, defaultValue=300))
        self.addParameter(QgsProcessingParameterExtent(self.EXTENT,
                                                       self.tr('Extent'),
                                                       optional=False))
        self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT,
                                                                  self.tr('Interpolated')))

        # Optional line-vector output with the triangulation edges; not
        # created unless the user asks for it.
        triangulation_file_param = QgsProcessingParameterFeatureSink(self.TRIANGULATION,
                                                                     self.tr('Triangulation'),
                                                                     type=QgsProcessing.TypeVectorLine,
                                                                     optional=True)
        triangulation_file_param.setCreateByDefault(False)
        self.addParameter(triangulation_file_param)

    def name(self):
        return 'tininterpolation'

    def displayName(self):
        return self.tr('TIN interpolation')

    def processAlgorithm(self, parameters, context, feedback):
        """Run the interpolation and write the output raster (and the
        optional triangulation lines)."""
        interpolationData = ParameterInterpolationData.parseValue(parameters[self.INTERPOLATION_DATA])
        method = self.parameterAsEnum(parameters, self.METHOD, context)
        columns = self.parameterAsInt(parameters, self.COLUMNS, context)
        rows = self.parameterAsInt(parameters, self.ROWS, context)
        bbox = self.parameterAsExtent(parameters, self.EXTENT, context)
        output = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)

        if interpolationData is None:
            raise QgsProcessingException(
                self.tr('You need to specify at least one input layer.'))

        layerData = []
        layers = []
        crs = QgsCoordinateReferenceSystem()
        # Each ';'-separated record is 'source, valueSource, attribute, sourceType'.
        for row in interpolationData.split(';'):
            v = row.split(',')
            data = QgsInterpolator.LayerData()

            # need to keep a reference until interpolation is complete
            layer = QgsProcessingUtils.variantToSource(v[0], context)
            data.source = layer
            layers.append(layer)
            # First valid layer CRS wins for the outputs.
            if not crs.isValid():
                crs = layer.sourceCrs()

            data.valueSource = int(v[1])
            data.interpolationAttribute = int(v[2])
            if v[3] == '0':
                data.sourceType = QgsInterpolator.SourcePoints
            elif v[3] == '1':
                data.sourceType = QgsInterpolator.SourceStructureLines
            else:
                data.sourceType = QgsInterpolator.SourceBreakLines
            layerData.append(data)

        if method == 0:
            interpolationMethod = QgsTinInterpolator.Linear
        else:
            interpolationMethod = QgsTinInterpolator.CloughTocher

        (triangulation_sink, triangulation_dest_id) = self.parameterAsSink(parameters, self.TRIANGULATION, context,
                                                                           QgsTinInterpolator.triangulationFields(), QgsWkbTypes.LineString, crs)

        interpolator = QgsTinInterpolator(layerData, interpolationMethod, feedback)
        if triangulation_sink is not None:
            interpolator.setTriangulationSink(triangulation_sink)

        writer = QgsGridFileWriter(interpolator,
                                   output,
                                   bbox,
                                   columns,
                                   rows)
        writer.writeFile(feedback)

        return {self.OUTPUT: output, self.TRIANGULATION: triangulation_dest_id}
| gpl-2.0 |
palashahuja/pgmpy | pgmpy/models/ClusterGraph.py | 2 | 9575 | #!/usr/bin/env python3
from collections import defaultdict
import numpy as np
from pgmpy.base import UndirectedGraph
from pgmpy.exceptions import CardinalityError
from pgmpy.factors import factor_product
class ClusterGraph(UndirectedGraph):
    r"""
    Base class for representing Cluster Graph.

    Cluster graph is an undirected graph which is associated with a subset of
    variables. The graph contains undirected edges that connects clusters
    whose scopes have a non-empty intersection.

    Formally, a cluster graph is :math:`\mathcal{U}` for a set of factors
    :math:`\Phi` over :math:`\mathcal{X}` is an undirected graph, each of
    whose nodes :math:`i` is associated with a subset :math:`C_i \subseteq X`.
    A cluster graph must be family-preserving - each factor
    :math:`\phi \in \Phi` must be associated with a cluster C, denoted
    :math:`\alpha(\phi)`, such that :math:`Scope[\phi] \subseteq C_i`. Each
    edge between a pair of clusters :math:`C_i` and :math:`C_j` is associated
    with a sepset :math:`S_{i,j} \subseteq C_i \cap C_j`.

    Parameters
    ----------
    data: input graph
        Data to initialize graph. If data=None (default) an empty graph is
        created. The data is an edge list.

    Examples
    --------
    Create an empty ClusterGraph with no nodes and no edges

    >>> from pgmpy.models import ClusterGraph
    >>> G = ClusterGraph()

    G can be grown by adding clique nodes.

    **Nodes:**

    Add a tuple (or list or set) of nodes as single clique node.

    >>> G.add_node(('a', 'b', 'c'))
    >>> G.add_nodes_from([('a', 'b'), ('a', 'b', 'c')])

    **Edges:**

    G can also be grown by adding edges.

    >>> G.add_edge(('a', 'b', 'c'), ('a', 'b'))

    or a list of edges

    >>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
    ...                   (('a', 'b', 'c'), ('a', 'c'))])
    """

    def __init__(self, ebunch=None):
        super().__init__()
        if ebunch:
            self.add_edges_from(ebunch)
        self.factors = []
        # variable name -> cardinality; 0 (the defaultdict default) means
        # "not seen yet".
        self.cardinalities = defaultdict(int)

    def add_node(self, node, **kwargs):
        """
        Add a single node to the cluster graph.

        Parameters
        ----------
        node: node
            A node should be a collection of nodes forming a clique. It can be
            a list, set or tuple of nodes

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> G = ClusterGraph()
        >>> G.add_node(('a', 'b', 'c'))
        """
        if not isinstance(node, (list, set, tuple)):
            raise TypeError('Node can only be a list, set or tuple of nodes forming a clique')
        # Store cliques as tuples so the nodes are hashable.
        node = tuple(node)
        super().add_node(node, **kwargs)

    def add_nodes_from(self, nodes, **kwargs):
        """
        Add multiple nodes to the cluster graph.

        Parameters
        ----------
        nodes: iterable container
            A container of nodes (list, dict, set, etc.).

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> G = ClusterGraph()
        >>> G.add_nodes_from([('a', 'b'), ('a', 'b', 'c')])
        """
        for node in nodes:
            self.add_node(node, **kwargs)

    def add_edge(self, u, v, **kwargs):
        """
        Add an edge between two clique nodes.

        Parameters
        ----------
        u, v: nodes
            Nodes can be any list or set or tuple of nodes forming a clique.

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> G = ClusterGraph()
        >>> G.add_nodes_from([('a', 'b', 'c'), ('a', 'b'), ('a', 'c')])
        >>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
        ...                   (('a', 'b', 'c'), ('a', 'c'))])
        """
        # Connected clusters must share at least one variable (the sepset).
        if set(u).isdisjoint(set(v)):
            raise ValueError('No sepset found between these two edges.')
        super().add_edge(u, v)

    def add_factors(self, *factors):
        """
        Associate a factor to the graph.
        See factors class for the order of potential values

        Parameters
        ----------
        *factor: pgmpy.factors.factors object
            A factor object on any subset of the variables of the model which
            is to be associated with the model.

        Returns
        -------
        None

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> from pgmpy.factors import Factor
        >>> student = ClusterGraph()
        >>> student.add_node(('Alice', 'Bob'))
        >>> factor = Factor(['Alice', 'Bob'], cardinality=[3, 2],
        ...                 value=np.random.rand(6))
        >>> student.add_factors(factor)
        """
        # The node set does not change while appending factors, so compute
        # it once instead of per factor.
        nodes = [set(node) for node in self.nodes()]
        for factor in factors:
            factor_scope = set(factor.scope())
            if factor_scope not in nodes:
                # Fixed: the split literals previously lacked a separating
                # space and rendered as '...notpresent...'.
                raise ValueError('Factors defined on clusters of variable not '
                                 'present in model')
            self.factors.append(factor)

    def get_factors(self, node=None):
        """
        Return the factors that have been added till now to the graph.

        If node is not None, it would return the factor corresponding to the
        given node.

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> from pgmpy.factors import Factor
        >>> G = ClusterGraph()
        >>> G.add_nodes_from([('a', 'b', 'c'), ('a', 'b'), ('a', 'c')])
        >>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
        ...                   (('a', 'b', 'c'), ('a', 'c'))])
        >>> phi1 = Factor(['a', 'b', 'c'], [2, 2, 2], np.random.rand(8))
        >>> phi2 = Factor(['a', 'b'], [2, 2], np.random.rand(4))
        >>> phi3 = Factor(['a', 'c'], [2, 2], np.random.rand(4))
        >>> G.add_factors(phi1, phi2, phi3)
        >>> G.get_factors()
        >>> G.get_factors(node=('a', 'b', 'c'))
        """
        if node is None:
            return self.factors
        nodes = [set(n) for n in self.nodes()]
        if set(node) not in nodes:
            raise ValueError('Node not present in Cluster Graph')
        # First factor whose scope is exactly the clique. NOTE: like the
        # original implementation this raises StopIteration when the clique
        # has no associated factor.
        factors = filter(lambda x: set(x.scope()) == set(node), self.factors)
        return next(factors)

    def remove_factors(self, *factors):
        """
        Removes the given factors from the added factors.

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> from pgmpy.factors import Factor
        >>> student = ClusterGraph()
        >>> factor = Factor(['Alice', 'Bob'], cardinality=[2, 2],
        ...                 value=np.random.rand(4))
        >>> student.add_factors(factor)
        >>> student.remove_factors(factor)
        """
        for factor in factors:
            self.factors.remove(factor)

    def get_partition_function(self):
        r"""
        Returns the partition function for a given undirected graph.

        A partition function is defined as

        .. math:: \sum_{X}(\prod_{i=1}^{m} \phi_i)

        where m is the number of factors present in the graph
        and X are all the random variables present.

        Examples
        --------
        >>> from pgmpy.models import ClusterGraph
        >>> from pgmpy.factors import Factor
        >>> G = ClusterGraph()
        >>> G.add_nodes_from([('a', 'b', 'c'), ('a', 'b'), ('a', 'c')])
        >>> G.add_edges_from([(('a', 'b', 'c'), ('a', 'b')),
        ...                   (('a', 'b', 'c'), ('a', 'c'))])
        >>> phi1 = Factor(['a', 'b', 'c'], [2, 2, 2], np.random.rand(8))
        >>> phi2 = Factor(['a', 'b'], [2, 2], np.random.rand(4))
        >>> phi3 = Factor(['a', 'c'], [2, 2], np.random.rand(4))
        >>> G.add_factors(phi1, phi2, phi3)
        >>> G.get_partition_function()
        """
        if self.check_model():
            factor = self.factors[0]
            # Multiply the remaining factors into the first one.
            factor = factor_product(factor, *self.factors[1:])
            return np.sum(factor.values)

    def check_model(self):
        """
        Check the model for various errors. This method checks for the following
        errors. In the same time also updates the cardinalities of all the random
        variables.

        * Checks if clique potentials are defined for all the cliques or not.
        * Check for running intersection property is not done explicitly over
          here as it done in the add_edges method.

        Returns
        -------
        check: boolean
            True if all the checks are passed
        """
        for clique in self.nodes():
            # get_factors raises StopIteration when the clique has no factor,
            # so this branch only fires for a falsy factor object.
            if not self.get_factors(clique):
                # Fixed: the split literals previously rendered as
                # '...notdefined.'.
                raise ValueError('Factors for all the cliques or clusters not '
                                 'defined.')
        if len(self.factors) != len(self.nodes()):
            # Fixed: the split literals previously rendered as '...clusteris...'.
            raise ValueError('One to one mapping of factor to clique or cluster '
                             'is not there.')
        for factor in self.factors:
            for variable, cardinality in zip(factor.scope(), factor.cardinality):
                if ((self.cardinalities[variable]) and
                        (self.cardinalities[variable] != cardinality)):
                    raise CardinalityError(
                        'Cardinality of variable %s not matching among factors' % variable)
                else:
                    self.cardinalities[variable] = cardinality
        return True
| mit |
zhoffice/minos | owl/machine/management/commands/import_xman.py | 5 | 2543 | import csv
import logging
import json
import sys
import urllib2
from django.conf import settings
from django.core.management.base import BaseCommand
from machine.models import Machine
logger = logging.getLogger(__name__)
# Endpoint of the xman CMDB; '%s' is substituted with a hostname.
XMAN_URL = "http://10.180.2.243/api/hostinfo.php?sql=hostname+=+'%s'"
# Maps the site name reported by xman to the short IDC code stored in the
# local Machine model.
IDC_ABBR = {
    'shangdi': 'sd',
    'lugu': 'lg',
    'lugu6': 'lg',
    'haihang': 'hh',
    'wucaicheng': 'dp',
}
class Command(BaseCommand):
  """Management command (Python 2) that syncs local Machine records
  against the xman CMDB: collects differing fields per host and lets the
  operator confirm each change interactively."""
  def handle(self, *args, **options):
    # Pairs of (machine, diff-dict); an empty diff means "delete the host".
    changes = []
    for machine in Machine.objects.order_by('hostname'):
      hostname = machine.hostname
      url = XMAN_URL % hostname
      data = json.load(urllib2.urlopen(url))
      xman = {}
      if data and type(data) is dict:
        # The API wraps the host record in a single-entry dict.
        k, v = data.popitem()
        if v and type(v) is dict:
          try:
            xman = {
              'ip': v['ipaddr'],
              'idc': IDC_ABBR[v['site'].lower()],
              'rack': v['location'].lower(),
            }
          except Exception as e:
            print 'Error on host: %s' % hostname
            raise
      if not xman:
        # the machine doesn't exist in xman, delete it later.
        changes.append((machine, xman, ))
      else:
        # check if any field changed.
        # can't use iteritems as the dict might change.
        for k, v in xman.items():
          if getattr(machine, k) == v:
            del xman[k]
        if xman:
          # some fields changed.
          changes.append((machine, xman, ))
    if not changes:
      print 'Nothing updated from xman, exiting.'
    else:
      # Show the full diff first, then confirm change by change; answering
      # 'a' applies all remaining changes without further prompting.
      print 'All changes from xman:'
      for machine, xman in changes:
        self.print_change(machine, xman)
      print
      print 'Confirm following changes...'
      answer = None
      for machine, xman in changes:
        self.print_change(machine, xman)
        while answer != 'a':
          answer = raw_input('Apply this or all following change[s]? '
                             '<y[es]/n[o]/a[ll]>: ')
          if answer in ['y', 'n', 'a']: break
        if answer == 'n': continue
        # apply change
        self.apply_change(machine, xman)
  def print_change(self, machine, xman):
    # Render one pending change; an empty diff means deletion.
    if not xman:
      action = 'host deleted'
    else:
      action = ', '.join(['%s: %s ==> %s' % (k, getattr(machine, k), v)
                          for k, v in xman.iteritems()])
    print '%s: %s' % (machine.hostname, action)
  def apply_change(self, machine, xman):
    # Persist one confirmed change: delete the record or update its fields.
    if not xman:
      machine.delete()
    else:
      for k, v in xman.iteritems():
        setattr(machine, k, v)
      machine.save()
| apache-2.0 |
NcLang/vimrc | sources_non_forked/YouCompleteMe/third_party/ycmd/ycmd/hmac_utils.py | 5 | 3286 | # Copyright (C) 2015 Google Inc.
#
# This file is part of ycmd.
#
# ycmd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ycmd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ycmd. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import * # noqa
from builtins import bytes
import hmac
import hashlib
def CreateHmac( content, hmac_secret ):
  """Return the raw SHA-256 HMAC digest of |content| keyed by |hmac_secret|.

  Both arguments must already be bytes; anything else is a programming error.
  """
  # Note that py2's str type passes these checks (and that's ok).
  if not isinstance( content, bytes ):
    raise TypeError( 'content was not of bytes type; you have a bug!' )
  if not isinstance( hmac_secret, bytes ):
    raise TypeError( 'hmac_secret was not of bytes type; you have a bug!' )

  mac = hmac.new( hmac_secret, msg = content, digestmod = hashlib.sha256 )
  return bytes( mac.digest() )
def CreateRequestHmac( method, path, body, hmac_secret ):
  """Compute the request HMAC: HMAC( HMAC(method) + HMAC(path) + HMAC(body) ),
  every HMAC keyed with the same |hmac_secret|. All arguments must be bytes."""
  # Note that py2's str type passes these checks (and that's ok). The check
  # order (body, hmac_secret, method, path) matches the original so the same
  # argument raises first when several are wrong.
  for argument_name, argument in ( ( 'body', body ),
                                   ( 'hmac_secret', hmac_secret ),
                                   ( 'method', method ),
                                   ( 'path', path ) ):
    if not isinstance( argument, bytes ):
      raise TypeError(
          '{0} was not of bytes type; you have a bug!'.format( argument_name ) )

  joined_hmac_input = bytes().join(
      CreateHmac( part, hmac_secret ) for part in ( method, path, body ) )
  return CreateHmac( joined_hmac_input, hmac_secret )
# This is the compare_digest function from python 3.4
#   http://hg.python.org/cpython/file/460407f35aa9/Lib/hmac.py#l16
def SecureBytesEqual( a, b ):
  """Returns the equivalent of 'a == b', but avoids content based short
  circuiting to reduce the vulnerability to timing attacks."""
  # Consistent timing matters more here than data type flexibility; py2's
  # str is rejected on purpose because iterating it yields characters, not
  # integers, which would change both the result and the timing profile.
  if type( a ) != bytes or type( b ) != bytes:
    raise TypeError( "inputs must be bytes instances" )

  # The length of the expected digest is treated as public knowledge, so
  # returning early here leaks nothing an attacker wouldn't already know.
  if len( a ) != len( b ):
    return False

  # XOR every byte pair and OR the results together; the loop always runs
  # to the end, so the time taken does not depend on where (or whether) the
  # first difference occurs.
  mismatch = 0
  for byte_a, byte_b in zip( a, b ):
    mismatch |= byte_a ^ byte_b
  return mismatch == 0
| mit |
p0deje/selenium | py/selenium/webdriver/common/proxy.py | 71 | 10352 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
The Proxy implementation.
"""
class ProxyTypeFactory:
    """
    Factory for proxy types.
    """

    @staticmethod
    def make(ff_value, string):
        # A proxy type is a plain dict pairing the Firefox profile
        # preference value with the wire-protocol identifier string.
        return dict(ff_value=ff_value, string=string)
class ProxyType:
    """
    Set of possible types of proxy.
    Each proxy type has 2 properties:
    'ff_value' is value of Firefox profile preference,
    'string' is id of proxy type.
    """
    DIRECT = ProxyTypeFactory.make(0, 'DIRECT') # Direct connection, no proxy (default on Windows).
    MANUAL = ProxyTypeFactory.make(1, 'MANUAL') # Manual proxy settings (e.g., for httpProxy).
    PAC = ProxyTypeFactory.make(2, 'PAC') # Proxy autoconfiguration from URL.
    RESERVED_1 = ProxyTypeFactory.make(3, 'RESERVED1') # Never used.
    AUTODETECT = ProxyTypeFactory.make(4, 'AUTODETECT') # Proxy autodetection (presumably with WPAD).
    SYSTEM = ProxyTypeFactory.make(5, 'SYSTEM') # Use system settings (default on Linux).
    UNSPECIFIED = ProxyTypeFactory.make(6, 'UNSPECIFIED') # Not initialized (for internal use).
    @classmethod
    def load(cls, value):
        # Accept a type dict (as produced above) or any value whose str()
        # matches a type id; matching is case-insensitive.
        if isinstance(value, dict) and 'string' in value:
            value = value['string']
        value = str(value).upper()
        # Scan the class attributes for a descriptor with a matching id.
        for attr in dir(cls):
            attr_value = getattr(cls, attr)
            if isinstance(attr_value, dict) and 'string' in attr_value and \
                attr_value['string'] is not None and attr_value['string'] == value:
                return attr_value
        raise Exception("No proxy type is found for %s" % (value))
class Proxy(object):
    """
    Proxy contains information about proxy type and necessary proxy settings.
    """
    # Class-level defaults; the property setters below create shadowing
    # instance attributes, so these also serve as the "unset" values.
    proxyType = ProxyType.UNSPECIFIED
    autodetect = False
    ftpProxy = ''
    httpProxy = ''
    noProxy = ''
    proxyAutoconfigUrl = ''
    sslProxy = ''
    socksProxy = ''
    socksUsername = ''
    socksPassword = ''
    def __init__(self, raw=None):
        """
        Creates a new Proxy.
        :Args:
         - raw: raw proxy data. If None, default class values are used.
        """
        if raw is not None:
            if 'proxyType' in raw and raw['proxyType'] is not None:
                self.proxy_type = ProxyType.load(raw['proxyType'])
            if 'ftpProxy' in raw and raw['ftpProxy'] is not None:
                self.ftp_proxy = raw['ftpProxy']
            if 'httpProxy' in raw and raw['httpProxy'] is not None:
                self.http_proxy = raw['httpProxy']
            if 'noProxy' in raw and raw['noProxy'] is not None:
                self.no_proxy = raw['noProxy']
            if 'proxyAutoconfigUrl' in raw and raw['proxyAutoconfigUrl'] is not None:
                self.proxy_autoconfig_url = raw['proxyAutoconfigUrl']
            if 'sslProxy' in raw and raw['sslProxy'] is not None:
                # NOTE(review): assigns the attribute directly, unlike the
                # ftp/http/noProxy branches above which go through the
                # property setters -- so proxyType is not switched to MANUAL
                # and no compatibility check runs. Confirm this asymmetry is
                # intended; the same applies to the socks* branches below.
                self.sslProxy = raw['sslProxy']
            if 'autodetect' in raw and raw['autodetect'] is not None:
                self.auto_detect = raw['autodetect']
            if 'socksProxy' in raw and raw['socksProxy'] is not None:
                self.socksProxy = raw['socksProxy']
            if 'socksUsername' in raw and raw['socksUsername'] is not None:
                self.socksUsername = raw['socksUsername']
            if 'socksPassword' in raw and raw['socksPassword'] is not None:
                self.socksPassword = raw['socksPassword']
    @property
    def proxy_type(self):
        """
        Returns proxy type as `ProxyType`.
        """
        return self.proxyType
    @proxy_type.setter
    def proxy_type(self, value):
        """
        Sets proxy type.
        :Args:
         - value: The proxy type.
        """
        self._verify_proxy_type_compatibility(value)
        self.proxyType = value
    @property
    def auto_detect(self):
        """
        Returns autodetect setting.
        """
        return self.autodetect
    @auto_detect.setter
    def auto_detect(self, value):
        """
        Sets autodetect setting.
        :Args:
         - value: The autodetect value.
        """
        if isinstance(value, bool):
            if self.autodetect is not value:
                self._verify_proxy_type_compatibility(ProxyType.AUTODETECT)
                self.proxyType = ProxyType.AUTODETECT
                self.autodetect = value
        else:
            raise ValueError("Autodetect proxy value needs to be a boolean")
    @property
    def ftp_proxy(self):
        """
        Returns ftp proxy setting.
        """
        return self.ftpProxy
    @ftp_proxy.setter
    def ftp_proxy(self, value):
        """
        Sets ftp proxy setting.
        :Args:
         - value: The ftp proxy value.
        """
        # Setting any manual host also forces the MANUAL proxy type.
        self._verify_proxy_type_compatibility(ProxyType.MANUAL)
        self.proxyType = ProxyType.MANUAL
        self.ftpProxy = value
    @property
    def http_proxy(self):
        """
        Returns http proxy setting.
        """
        return self.httpProxy
    @http_proxy.setter
    def http_proxy(self, value):
        """
        Sets http proxy setting.
        :Args:
         - value: The http proxy value.
        """
        self._verify_proxy_type_compatibility(ProxyType.MANUAL)
        self.proxyType = ProxyType.MANUAL
        self.httpProxy = value
    @property
    def no_proxy(self):
        """
        Returns noproxy setting.
        """
        return self.noProxy
    @no_proxy.setter
    def no_proxy(self, value):
        """
        Sets noproxy setting.
        :Args:
         - value: The noproxy value.
        """
        self._verify_proxy_type_compatibility(ProxyType.MANUAL)
        self.proxyType = ProxyType.MANUAL
        self.noProxy = value
    @property
    def proxy_autoconfig_url(self):
        """
        Returns proxy autoconfig url setting.
        """
        return self.proxyAutoconfigUrl
    @proxy_autoconfig_url.setter
    def proxy_autoconfig_url(self, value):
        """
        Sets proxy autoconfig url setting.
        :Args:
         - value: The proxy autoconfig url value.
        """
        # An autoconfig URL implies the PAC proxy type.
        self._verify_proxy_type_compatibility(ProxyType.PAC)
        self.proxyType = ProxyType.PAC
        self.proxyAutoconfigUrl = value
    @property
    def ssl_proxy(self):
        """
        Returns https proxy setting.
        """
        return self.sslProxy
    @ssl_proxy.setter
    def ssl_proxy(self, value):
        """
        Sets https proxy setting.
        :Args:
         - value: The https proxy value.
        """
        self._verify_proxy_type_compatibility(ProxyType.MANUAL)
        self.proxyType = ProxyType.MANUAL
        self.sslProxy = value
    @property
    def socks_proxy(self):
        """
        Returns socks proxy setting.
        """
        return self.socksProxy
    @socks_proxy.setter
    def socks_proxy(self, value):
        """
        Sets socks proxy setting.
        :Args:
         - value: The socks proxy value.
        """
        self._verify_proxy_type_compatibility(ProxyType.MANUAL)
        self.proxyType = ProxyType.MANUAL
        self.socksProxy = value
    @property
    def socks_username(self):
        """
        Returns socks proxy username setting.
        """
        return self.socksUsername
    @socks_username.setter
    def socks_username(self, value):
        """
        Sets socks proxy username setting.
        :Args:
         - value: The socks proxy username value.
        """
        self._verify_proxy_type_compatibility(ProxyType.MANUAL)
        self.proxyType = ProxyType.MANUAL
        self.socksUsername = value
    @property
    def socks_password(self):
        """
        Returns socks proxy password setting.
        """
        return self.socksPassword
    @socks_password.setter
    def socks_password(self, value):
        """
        Sets socks proxy password setting.
        :Args:
         - value: The socks proxy password value.
        """
        self._verify_proxy_type_compatibility(ProxyType.MANUAL)
        self.proxyType = ProxyType.MANUAL
        self.socksPassword = value
    def _verify_proxy_type_compatibility(self, compatibleProxy):
        # A proxy type can only be changed while it is still UNSPECIFIED;
        # mixing e.g. PAC and MANUAL settings raises.
        if self.proxyType != ProxyType.UNSPECIFIED and self.proxyType != compatibleProxy:
            raise Exception(" Specified proxy type (%s) not compatible with current setting (%s)" % \
                (compatibleProxy, self.proxyType))
    def add_to_capabilities(self, capabilities):
        """
        Adds proxy information as capability in specified capabilities.
        :Args:
         - capabilities: The capabilities to which proxy will be added.
        """
        # Only emit the fields that were actually set (truthy values).
        proxy_caps = {}
        proxy_caps['proxyType'] = self.proxyType['string']
        if self.autodetect:
            proxy_caps['autodetect'] = self.autodetect
        if self.ftpProxy:
            proxy_caps['ftpProxy'] = self.ftpProxy
        if self.httpProxy:
            proxy_caps['httpProxy'] = self.httpProxy
        if self.proxyAutoconfigUrl:
            proxy_caps['proxyAutoconfigUrl'] = self.proxyAutoconfigUrl
        if self.sslProxy:
            proxy_caps['sslProxy'] = self.sslProxy
        if self.noProxy:
            proxy_caps['noProxy'] = self.noProxy
        if self.socksProxy:
            proxy_caps['socksProxy'] = self.socksProxy
        if self.socksUsername:
            proxy_caps['socksUsername'] = self.socksUsername
        if self.socksPassword:
            proxy_caps['socksPassword'] = self.socksPassword
        capabilities['proxy'] = proxy_caps
| apache-2.0 |
mancoast/CPythonPyc_test | cpython/279_test_compare.py | 194 | 1488 | import unittest
from test import test_support
class Empty:
    # Object with default (identity based) comparison semantics.
    def __repr__(self):
        return '<Empty>'
class Coerce:
    # Wrapper that takes part in comparisons through the (Python 2 only)
    # __coerce__ protocol, comparing its wrapped value.
    def __init__(self, arg):
        self.arg = arg
    def __repr__(self):
        return '<Coerce %s>' % self.arg
    def __coerce__(self, other):
        # Coerce-to-Coerce unwraps both sides; otherwise unwrap self only.
        if isinstance(other, Coerce):
            return self.arg, other.arg
        else:
            return self.arg, other
class Cmp:
    # Wrapper whose (Python 2 only) __cmp__ compares the wrapped value, so
    # it is equal to anything that compares equal to self.arg.
    def __init__(self,arg):
        self.arg = arg
    def __repr__(self):
        return '<Cmp %s>' % self.arg
    def __cmp__(self, other):
        return cmp(self.arg, other)
class ComparisonTest(unittest.TestCase):
    # set1 members all compare equal to the number 2 (directly, via
    # coercion, or via __cmp__); set2 members equal only themselves.
    set1 = [2, 2.0, 2L, 2+0j, Coerce(2), Cmp(2.0)]
    set2 = [[1], (3,), None, Empty()]
    candidates = set1 + set2
    def test_comparisons(self):
        # Any two values drawn from set1 must be mutually equal; every
        # other pairing is equal only for the identical object.
        for a in self.candidates:
            for b in self.candidates:
                if ((a in self.set1) and (b in self.set1)) or a is b:
                    self.assertEqual(a, b)
                else:
                    self.assertNotEqual(a, b)
    def test_id_comparisons(self):
        # Ensure default comparison compares id() of args
        L = []
        for i in range(10):
            L.insert(len(L)//2, Empty())
        for a in L:
            for b in L:
                self.assertEqual(cmp(a, b), cmp(id(a), id(b)),
                                 'a=%r, b=%r' % (a, b))
def test_main():
    # Run the suite through test_support so regrtest bookkeeping applies.
    test_support.run_unittest(ComparisonTest)
if __name__ == '__main__':
    test_main()
| gpl-3.0 |
npapak/NXT_Labyrinth | toolchain/libnxt-0.3/make_flash_header.py | 3 | 3024 | #!/usr/bin/env python
#
# Take the flash_routine.bin file, and embed it as an array of bytes
# in a flash_routine.h, ready for packaging with the C firmware
# flasher.
#
# If a file name is provided on the commandline, load that file as the
# firmware flashing routine instead.
#
import sys
import os
import os.path
import urllib2
import sha
# Location of the locally built flash driver binary.
FLASH_DIR = 'flash_write'
FLASH_BIN = 'flash.bin'
FLASH_PATH = os.path.join(FLASH_DIR, FLASH_BIN)
# SHA-1 of the known-good prebuilt driver offered for download.
DOWNLOAD_FLASH_CHECKSUM = '589501072d76be483f873a787080adcab20841f4'
DOWNLOAD_FLASH_URL = 'http://libnxt.googlecode.com/files/flash.bin'
def check_flash_size():
    # Refuse to embed a driver bigger than 1024 bytes; larger binaries are
    # rejected (presumably the size of the target RAM area -- see the ARM
    # flashing code for the actual constraint).
    statinfo = os.stat(FLASH_PATH)
    if statinfo.st_size > 1024:
        print "The flash driver looks too big, refusing to embed."
        return False
    return True
def ensure_flash_bin():
    # Make sure a usable flash driver binary exists at FLASH_PATH, either a
    # locally built one or -- with the user's consent -- a checksummed copy
    # downloaded from the project site. Returns True when a size-checked
    # binary is in place.
    # If the flash binary does exist, just check its size and return.
    if os.path.isfile(FLASH_PATH):
        return check_flash_size()
    # If the binary doesn't exist, offer to download a binary build
    # from Google Code.
    print """
Embedded flash driver not found. This is required to build LibNXT.
If you have an ARM7 cross-compiler toolchain available, you can build
the flash driver by interrupting (ctrl-C) this build and running
'make' in the 'flash_write' subdirectory. Then rerun this build again,
and everything should work great.
If you do not have a cross-compiler, do not despair! I can also
download a copy of the compiled driver (built by the libnxt developer)
from the project website and use that.
"""
    reply = raw_input("Is that okay? (y/n) ")
    if reply not in ('y', 'Y', 'yes'):
        print ("Okay, you're the boss. But that does mean I can't build "
               "LibNXT. Sorry.")
        return False
    f = urllib2.urlopen(DOWNLOAD_FLASH_URL)
    data = f.read()
    f.close()
    # Verify the SHA-1 checksum
    checksum = sha.new(data).hexdigest()
    if checksum != DOWNLOAD_FLASH_CHECKSUM:
        print "Oops, the flash binary I downloaded has the wrong checksum!"
        print "Aborting :("
        return False
    f = open(FLASH_PATH, 'w')
    f.write(data)
    f.close()
    if os.path.isfile(FLASH_PATH):
        return check_flash_size()
    # Dude, you're really not lucky, are you.
    return False
def main():
    # Embed the flash driver binary as a C byte array: read flash.bin, hex
    # encode it, and substitute it into the flash_routine.h.base template.
    if not ensure_flash_bin():
        sys.exit(1)
    f = file(FLASH_PATH)
    fwbin = f.read()
    f.close()
    # One '0xNN' token per byte, with a line break every 12 entries.
    data = ['0x%s' % c.encode('hex') for c in fwbin]
    for i in range(0, len(data), 12):
        data[i] = "\n " + data[i]
    data_str = ', '.join(data)
    len_data = "%d" % len(data)
    # Read in the template
    tplfile = file('flash_routine.h.base')
    template = tplfile.read()
    tplfile.close()
    # Replace the values in the template
    template = template.replace('___FLASH_BIN___', data_str + '\n')
    template = template.replace('___FLASH_LEN___', len_data)
    # Output the done header
    out = file('flash_routine.h', 'w')
    out.write(template)
    out.close()
if __name__ == '__main__':
    main()
| gpl-3.0 |
tenstream/tenstream | plexrt/gen_lwc_inp.py | 1 | 5860 | from pylab import *
import netCDF4 as NC
import sys
def get_z_grid(): # grid info from: https://code.mpimet.mpg.de/projects/icon-lem/wiki/Short_model_description
    # Fixed ICON-LEM vertical grid, top of atmosphere first:
    # dz are the layer thicknesses and zm the matching half-level heights
    # (presumably metres -- the referenced wiki page documents the units).
    dz = [ 382.208, 327.557, 307.888, 294.912, 285.042, 276.981, 270.110, 264.084, 258.691, 253.790, 249.284, 245.104, 241.195, 237.518, 234.042, 230.740, 227.593, 224.582, 221.694, 218.917, 216.241, 213.656, 211.156, 208.733, 206.381, 204.096, 201.873, 199.707, 197.595, 195.534, 193.521, 191.552, 189.626, 187.740, 185.891, 184.079, 182.301, 180.555, 178.841, 177.156, 175.500, 173.870, 172.266, 170.687, 169.132, 167.600, 166.090, 164.600, 163.131, 161.681, 160.250, 158.837, 157.441, 156.062, 154.699, 153.352, 152.019, 150.702, 149.398, 148.107, 146.830, 145.565, 144.312, 143.071, 141.841, 140.622, 139.413, 138.215, 137.026, 135.847, 134.677, 133.516, 132.363, 131.218, 130.081, 128.952, 127.830, 126.715, 125.606, 124.503, 123.407, 122.317, 121.232, 120.152, 119.077, 118.006, 116.941, 115.879, 114.821, 113.767, 112.716, 111.667, 110.622, 109.579, 108.539, 107.500, 106.463, 105.427, 104.392, 103.357, 102.323, 101.289, 100.255, 99.219, 98.183, 97.145, 96.106, 95.064, 94.019, 92.972, 91.920, 90.865, 89.804, 88.739, 87.667, 86.589, 85.504, 84.411, 83.310, 82.198, 81.077, 79.943, 78.798, 77.638, 76.463, 75.272, 74.063, 72.834, 71.583, 70.308, 69.007, 67.676, 66.312, 64.911, 63.469, 61.981, 60.440, 58.839, 57.169, 55.417, 53.570, 51.609, 49.507, 47.231, 44.728, 41.923, 38.683, 34.760, 29.554, 20.000, 0.0 ]
    zm = [ 21000.000 , 20617.792 , 20290.235 , 19982.347 , 19687.435 , 19402.393 , 19125.412 , 18855.302 , 18591.217 , 18332.527 , 18078.737 , 17829.452 , 17584.349 , 17343.154 , 17105.635 , 16871.593 , 16640.853 , 16413.261 , 16188.679 , 15966.985 , 15748.067 , 15531.826 , 15318.170 , 15107.015 , 14898.282 , 14691.901 , 14487.805 , 14285.932 , 14086.225 , 13888.630 , 13693.095 , 13499.575 , 13308.022 , 13118.397 , 12930.657 , 12744.766 , 12560.687 , 12378.386 , 12197.831 , 12018.990 , 11841.834 , 11666.334 , 11492.464 , 11320.198 , 11149.510 , 10980.378 , 10812.778 , 10646.689 , 10482.089 , 10318.958 , 10157.277 , 9997.027 , 9838.190 , 9680.749 , 9524.686 , 9369.987 , 9216.635 , 9064.616 , 8913.914 , 8764.517 , 8616.410 , 8469.580 , 8324.016 , 8179.704 , 8036.633 , 7894.793 , 7754.171 , 7614.758 , 7476.543 , 7339.516 , 7203.669 , 7068.992 , 6935.476 , 6803.113 , 6671.895 , 6541.813 , 6412.861 , 6285.031 , 6158.317 , 6032.711 , 5908.207 , 5784.800 , 5662.484 , 5541.252 , 5421.100 , 5302.024 , 5184.017 , 5067.077 , 4951.198 , 4836.377 , 4722.610 , 4609.895 , 4498.227 , 4387.605 , 4278.026 , 4169.487 , 4061.987 , 3955.525 , 3850.098 , 3745.707 , 3642.350 , 3540.027 , 3438.738 , 3338.483 , 3239.264 , 3141.081 , 3043.935 , 2947.829 , 2852.765 , 2758.746 , 2665.774 , 2573.854 , 2482.990 , 2393.185 , 2304.447 , 2216.779 , 2130.190 , 2044.686 , 1960.274 , 1876.965 , 1794.766 , 1713.690 , 1633.747 , 1554.949 , 1477.311 , 1400.848 , 1325.575 , 1251.512 , 1178.678 , 1107.095 , 1036.786 , 967.780 , 900.104 , 833.792 , 768.881 , 705.412 , 643.431 , 582.990 , 524.151 , 466.982 , 411.564 , 357.994 , 306.385 , 256.878 , 209.648 , 164.919 , 122.997 , 84.314 , 49.554 , 20.000 , 0.000 ]
    return np.array(zm), np.array(dz)
def simple_ex(Ncells=24, Nz=1, default_lwc=1e-3, default_iwc=0, dz=100):
    """Write a synthetic water content file 'lwc_ex_<Ncells>_<Nz>.nc'.

    Builds a uniform vertical grid of Nz layers of thickness dz and, for the
    default 24-cell mesh, puts default_lwc (liquid) / default_iwc (ice) into
    a handful of cells of the middle layer.
    """
    D = NC.Dataset('lwc_ex_{}_{}.nc'.format(Ncells, Nz), 'w')
    D.createDimension('ncells', Ncells)
    D.createDimension('hhl_level', Nz + 1)
    D.createDimension('hhl', Nz)
    # Layer-mid heights, stored top-down (reversed) like the ICON output.
    hhl = D.createVariable('height', float32, dimensions=('hhl',))
    for i in range(Nz):
        hhl[i] = dz / 2 + dz * i
    hhl[:] = hhl[::-1]
    # Layer-interface heights, also stored top-down.
    hl = D.createVariable('height_level', float32, dimensions=('hhl_level',))
    for i in range(Nz + 1):
        hl[i] = 0 + dz * i
    hl[:] = hl[::-1]
    lwc = D.createVariable('clw', float32, dimensions=('hhl', 'ncells'))
    lwc[:] = 0
    if Ncells == 24:
        # Floor division: the previous Nz/2 yields a float index under
        # Python 3; Nz//2 is identical under Python 2.
        lwc[Nz // 2, [18, 19, 20, 21, 23]] = default_lwc
    iwc = D.createVariable('cli', float32, dimensions=('hhl', 'ncells'))
    iwc[:] = 0
    if Ncells == 24:
        iwc[Nz // 2, [18, 19, 20, 21, 23]] = default_iwc
    D.sync()
    D.close()
def icon_2_lwcfile(fname='/home/f/Fabian.Jakub/work/icon_3d_fine_day_DOM01_ML_20140729T120230Z/3d_fine_day_DOM01_ML_20140729T120230Z.nc'):
    # Convert one ICON output file into 'lwc.nc': copy the cloud liquid and
    # ice water fields of the first timestep and attach the matching
    # lowest-Nz+1 part of the fixed vertical grid from get_z_grid().
    DI = NC.Dataset(fname, 'r')
    Nt, Nz, Ncells = np.shape(DI['clw'])
    D=NC.Dataset('lwc.nc','w')
    D.createDimension('ncells', Ncells)
    D.createDimension('hhl_level', Nz+1)
    D.createDimension('hhl', Nz)
    zm, dz = get_z_grid()
    # Keep the Nz+1 lowest interface heights (arrays are top-down).
    szm = zm[::-1][:Nz+1][::-1]
    hl=D.createVariable('height_level',float32, dimensions=('hhl_level',))
    hl[:] = szm
    # Layer-mid heights as the mean of adjacent interfaces.
    hhl=D.createVariable('height',float32, dimensions=('hhl',))
    hhl[:] = (szm[0:-1]+szm[1:])/2
    def copy_var(varname):
        # Copy timestep 0 of a (time, level, cell) variable, keeping only
        # the lowest Nz levels.
        var=D.createVariable(varname, float32, dimensions=('hhl','ncells'))
        invar = DI[varname][0][::-1][:Nz][::-1]
        var[:] = invar
    copy_var('clw')
    copy_var('cli')
    D.sync()
    D.close()
    DI.close()
# Command-line entry point: convert the given ICON output file to lwc.nc.
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('icon_file', help='icon data file, e.g. /home/f/Fabian.Jakub/work/icon_3d_fine_day_DOM01_ML_20140729T120230Z/3d_fine_day_DOM01_ML_20140729T120230Z.nc')
args = parser.parse_args()
# NOTE(review): 'icon_file' is a required positional argument, so this test
# only guards against an empty string being passed on the command line.
if args.icon_file:
    icon_2_lwcfile(fname=args.icon_file)
#simple_ex()
| gpl-3.0 |
kenshay/ImageScripter | Script_Runner/PYTHON/Lib/multiprocessing/managers.py | 5 | 40074 | #
# Module providing the `SyncManager` class for dealing
# with shared objects
#
# multiprocessing/managers.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
__all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token' ]
#
# Imports
#
import sys
import threading
import array
import queue
from time import time as _time
from traceback import format_exc
from . import connection
from .context import reduction, get_spawning_popen, ProcessError
from . import pool
from . import process
from . import util
from . import get_context
#
# Register some things for pickling
#
def reduce_array(a):
    """Pickle helper: reduce an array.array to (constructor, args)."""
    typecode, payload = a.typecode, a.tobytes()
    return array.array, (typecode, payload)
# Teach the multiprocessing pickler how to ship array.array instances.
reduction.register(array.array, reduce_array)
# Dict view objects (items/keys/values) are not picklable; register a
# reducer that sends them across as plain lists.
view_types = [type(getattr({}, name)()) for name in ('items','keys','values')]
if view_types[0] is not list:       # only needed in Py3.0
    def rebuild_as_list(obj):
        return list, (list(obj),)
    for view_type in view_types:
        reduction.register(view_type, rebuild_as_list)
#
# Type for identifying shared objects
#
class Token(object):
    """Uniquely identifies a shared object: the registry typeid, the
    address of the owning server, and the referent's id on that server."""
    __slots__ = ('typeid', 'address', 'id')

    def __init__(self, typeid, address, id):
        self.typeid = typeid
        self.address = address
        self.id = id

    def __getstate__(self):
        # __slots__ classes have no __dict__, so pickle the fields by hand.
        return (self.typeid, self.address, self.id)

    def __setstate__(self, state):
        self.typeid, self.address, self.id = state

    def __repr__(self):
        return (f'{type(self).__name__}(typeid={self.typeid!r}, '
                f'address={self.address!r}, id={self.id!r})')
#
# Function for communication with a manager's server process
#
def dispatch(c, id, methodname, args=(), kwds={}):
    '''
    Send a message to manager using connection `c` and return response
    '''
    # NOTE: the mutable default kwds is never mutated here, so sharing the
    # default dict across calls is safe.
    c.send((id, methodname, args, kwds))
    kind, result = c.recv()     # blocks until the server replies
    if kind == '#RETURN':
        return result
    # Any non-'#RETURN' reply is converted into an exception and raised.
    raise convert_to_error(kind, result)
def convert_to_error(kind, result):
    """Map a (kind, result) reply from the server to an exception instance."""
    if kind == '#ERROR':
        # The server shipped a real exception object; return it unchanged.
        return result
    if kind not in ('#TRACEBACK', '#UNSERIALIZABLE'):
        return ValueError('Unrecognized message type {!r}'.format(kind))
    # Both remaining kinds carry a traceback rendered as text.
    if not isinstance(result, str):
        raise TypeError(
            "Result {0!r} (kind '{1}') type is {2}, not str".format(
                result, kind, type(result)))
    if kind == '#UNSERIALIZABLE':
        return RemoteError('Unserializable message: %s\n' % result)
    return RemoteError(result)
class RemoteError(Exception):
    """Exception carrying a traceback string produced in a remote process."""
    def __str__(self):
        bar = '-' * 75
        return '\n{}\n{}{}'.format(bar, str(self.args[0]), bar)
#
# Functions for finding the method names of an object
#
def all_methods(obj):
    '''
    Return a list of names of methods of `obj`
    '''
    # A "method" here is any callable attribute, dunders included; the
    # comprehension replaces the original append loop without changing
    # the result (dir() order is preserved).
    return [name for name in dir(obj) if callable(getattr(obj, name))]
def public_methods(obj):
    """Return the method names of `obj` that are not underscore-prefixed."""
    return [name for name in all_methods(obj) if not name.startswith('_')]
#
# Server which is run in a process controlled by a manager
#
class Server(object):
    '''
    Server class which runs in a process controlled by a manager object
    '''
    # Whitelist of administrative methods a client may invoke directly via
    # dispatch(); enforced in handle_request before the call is made.
    public = ['shutdown', 'create', 'accept_connection', 'get_methods',
              'debug_info', 'number_of_objects', 'dummy', 'incref', 'decref']
    def __init__(self, registry, address, authkey, serializer):
        if not isinstance(authkey, bytes):
            raise TypeError(
                "Authkey {0!r} is type {1!s}, not bytes".format(
                    authkey, type(authkey)))
        self.registry = registry
        self.authkey = process.AuthenticationString(authkey)
        Listener, Client = listener_client[serializer]
        # do authentication later
        self.listener = Listener(address=address, backlog=16)
        self.address = self.listener.address
        # ident '0' is a reserved dummy entry so administrative dispatches
        # that are not tied to a real shared object have a valid target.
        self.id_to_obj = {'0': (None, ())}
        self.id_to_refcount = {}
        self.id_to_local_proxy_obj = {}
        self.mutex = threading.Lock()
    def serve_forever(self):
        '''
        Run the server forever
        '''
        self.stop_event = threading.Event()
        process.current_process()._manager_server = self
        try:
            # Accept connections on a daemon thread; this thread just polls
            # the stop event (polling keeps KeyboardInterrupt responsive).
            accepter = threading.Thread(target=self.accepter)
            accepter.daemon = True
            accepter.start()
            try:
                while not self.stop_event.is_set():
                    self.stop_event.wait(1)
            except (KeyboardInterrupt, SystemExit):
                pass
        finally:
            if sys.stdout != sys.__stdout__: # what about stderr?
                util.debug('resetting stdout, stderr')
                sys.stdout = sys.__stdout__
                sys.stderr = sys.__stderr__
            sys.exit(0)
    def accepter(self):
        while True:
            try:
                c = self.listener.accept()
            except OSError:
                continue
            # One daemon thread per incoming connection.
            t = threading.Thread(target=self.handle_request, args=(c,))
            t.daemon = True
            t.start()
    def handle_request(self, c):
        '''
        Handle a new connection
        '''
        funcname = result = request = None
        try:
            # Mutual authentication using the shared authkey.
            connection.deliver_challenge(c, self.authkey)
            connection.answer_challenge(c, self.authkey)
            request = c.recv()
            ignore, funcname, args, kwds = request
            assert funcname in self.public, '%r unrecognized' % funcname
            func = getattr(self, funcname)
        except Exception:
            msg = ('#TRACEBACK', format_exc())
        else:
            try:
                result = func(c, *args, **kwds)
            except Exception:
                msg = ('#TRACEBACK', format_exc())
            else:
                msg = ('#RETURN', result)
        try:
            c.send(msg)
        except Exception as e:
            try:
                c.send(('#TRACEBACK', format_exc()))
            except Exception:
                pass
            util.info('Failure to send message: %r', msg)
            util.info(' ... request was %r', request)
            util.info(' ... exception was %r', e)
        c.close()
    def serve_client(self, conn):
        '''
        Handle requests from the proxies in a particular process/thread
        '''
        util.debug('starting server thread to service %r',
                   threading.current_thread().name)
        recv = conn.recv
        send = conn.send
        id_to_obj = self.id_to_obj
        # Request loop: each iteration serves one method call on a shared
        # object and sends back exactly one reply tuple.
        while not self.stop_event.is_set():
            try:
                methodname = obj = None
                request = recv()
                ident, methodname, args, kwds = request
                try:
                    obj, exposed, gettypeid = id_to_obj[ident]
                except KeyError as ke:
                    try:
                        obj, exposed, gettypeid = \
                            self.id_to_local_proxy_obj[ident]
                    except KeyError as second_ke:
                        raise ke
                if methodname not in exposed:
                    raise AttributeError(
                        'method %r of %r object is not in exposed=%r' %
                        (methodname, type(obj), exposed)
                        )
                function = getattr(obj, methodname)
                try:
                    res = function(*args, **kwds)
                except Exception as e:
                    msg = ('#ERROR', e)
                else:
                    # If the result type is registered for this method,
                    # wrap it in a new shared object and return a proxy.
                    typeid = gettypeid and gettypeid.get(methodname, None)
                    if typeid:
                        rident, rexposed = self.create(conn, typeid, res)
                        token = Token(typeid, self.address, rident)
                        msg = ('#PROXY', (rexposed, token))
                    else:
                        msg = ('#RETURN', res)
            except AttributeError:
                if methodname is None:
                    msg = ('#TRACEBACK', format_exc())
                else:
                    # Unexposed method: try the fallback pseudo-methods
                    # (__str__/__repr__/#GETVALUE) before giving up.
                    try:
                        fallback_func = self.fallback_mapping[methodname]
                        result = fallback_func(
                            self, conn, ident, obj, *args, **kwds
                            )
                        msg = ('#RETURN', result)
                    except Exception:
                        msg = ('#TRACEBACK', format_exc())
            except EOFError:
                util.debug('got EOF -- exiting thread serving %r',
                           threading.current_thread().name)
                sys.exit(0)
            except Exception:
                msg = ('#TRACEBACK', format_exc())
            try:
                try:
                    send(msg)
                except Exception as e:
                    # Reply was not picklable; report that instead.
                    send(('#UNSERIALIZABLE', format_exc()))
            except Exception as e:
                util.info('exception in thread serving %r',
                          threading.current_thread().name)
                util.info(' ... message was %r', msg)
                util.info(' ... exception was %r', e)
                conn.close()
                sys.exit(1)
    def fallback_getvalue(self, conn, ident, obj):
        return obj
    def fallback_str(self, conn, ident, obj):
        return str(obj)
    def fallback_repr(self, conn, ident, obj):
        return repr(obj)
    # Handlers a proxy may always invoke even if not in `exposed`.
    fallback_mapping = {
        '__str__':fallback_str,
        '__repr__':fallback_repr,
        '#GETVALUE':fallback_getvalue
        }
    def dummy(self, c):
        # No-op used by BaseManager.connect() to validate address/authkey.
        pass
    def debug_info(self, c):
        '''
        Return some info --- useful to spot problems with refcounting
        '''
        # Perhaps include debug info about 'c'?
        with self.mutex:
            result = []
            keys = list(self.id_to_refcount.keys())
            keys.sort()
            for ident in keys:
                if ident != '0':
                    result.append('  %s:       refcount=%s\n    %s' %
                                  (ident, self.id_to_refcount[ident],
                                   str(self.id_to_obj[ident][0])[:75]))
            return '\n'.join(result)
    def number_of_objects(self, c):
        '''
        Number of shared objects
        '''
        # Doesn't use (len(self.id_to_obj) - 1) as we shouldn't count ident='0'
        return len(self.id_to_refcount)
    def shutdown(self, c):
        '''
        Shutdown this process
        '''
        try:
            util.debug('manager received shutdown message')
            c.send(('#RETURN', None))
        except:
            import traceback
            traceback.print_exc()
        finally:
            # serve_forever() exits once this event is set.
            self.stop_event.set()
    def create(self, c, typeid, *args, **kwds):
        '''
        Create a new shared object and return its id
        '''
        with self.mutex:
            callable, exposed, method_to_typeid, proxytype = \
                self.registry[typeid]
            if callable is None:
                if kwds or (len(args) != 1):
                    raise ValueError(
                        "Without callable, must have one non-keyword argument")
                obj = args[0]
            else:
                obj = callable(*args, **kwds)
            if exposed is None:
                exposed = public_methods(obj)
            if method_to_typeid is not None:
                if not isinstance(method_to_typeid, dict):
                    raise TypeError(
                        "Method_to_typeid {0!r}: type {1!s}, not dict".format(
                            method_to_typeid, type(method_to_typeid)))
                exposed = list(exposed) + list(method_to_typeid)
            ident = '%x' % id(obj)  # convert to string because xmlrpclib
                                    # only has 32 bit signed integers
            util.debug('%r callable returned object with id %r', typeid, ident)
            self.id_to_obj[ident] = (obj, set(exposed), method_to_typeid)
            if ident not in self.id_to_refcount:
                self.id_to_refcount[ident] = 0
        self.incref(c, ident)
        return ident, tuple(exposed)
    def get_methods(self, c, token):
        '''
        Return the methods of the shared object indicated by token
        '''
        return tuple(self.id_to_obj[token.id][1])
    def accept_connection(self, c, name):
        '''
        Spawn a new thread to serve this connection
        '''
        # The request-handling thread becomes the per-client serving thread.
        threading.current_thread().name = name
        c.send(('#RETURN', None))
        self.serve_client(c)
    def incref(self, c, ident):
        with self.mutex:
            try:
                self.id_to_refcount[ident] += 1
            except KeyError as ke:
                # If no external references exist but an internal (to the
                # manager) still does and a new external reference is created
                # from it, restore the manager's tracking of it from the
                # previously stashed internal ref.
                if ident in self.id_to_local_proxy_obj:
                    self.id_to_refcount[ident] = 1
                    self.id_to_obj[ident] = \
                        self.id_to_local_proxy_obj[ident]
                    obj, exposed, gettypeid = self.id_to_obj[ident]
                    util.debug('Server re-enabled tracking & INCREF %r', ident)
                else:
                    raise ke
    def decref(self, c, ident):
        if ident not in self.id_to_refcount and \
           ident in self.id_to_local_proxy_obj:
            util.debug('Server DECREF skipping %r', ident)
            return
        with self.mutex:
            if self.id_to_refcount[ident] <= 0:
                raise AssertionError(
                    "Id {0!s} ({1!r}) has refcount {2:n}, not 1+".format(
                        ident, self.id_to_obj[ident],
                        self.id_to_refcount[ident]))
            self.id_to_refcount[ident] -= 1
            if self.id_to_refcount[ident] == 0:
                del self.id_to_refcount[ident]
        if ident not in self.id_to_refcount:
            # Two-step process in case the object turns out to contain other
            # proxy objects (e.g. a managed list of managed lists).
            # Otherwise, deleting self.id_to_obj[ident] would trigger the
            # deleting of the stored value (another managed object) which would
            # in turn attempt to acquire the mutex that is already held here.
            self.id_to_obj[ident] = (None, (), None)  # thread-safe
            util.debug('disposing of obj with id %r', ident)
            with self.mutex:
                del self.id_to_obj[ident]
#
# Class to represent state of a manager
#
class State(object):
    # Tiny mutable holder for a manager's lifecycle stage.  It is a separate
    # object (rather than a plain int attribute) so finalizers and proxies
    # that captured it can observe later transitions.
    __slots__ = ['value']
    INITIAL = 0
    STARTED = 1
    SHUTDOWN = 2
#
# Mapping from serializer name to Listener and Client types
#
# Maps a serializer name to the (Listener, Client) connection classes that
# implement that wire format.
listener_client = {
    'pickle' : (connection.Listener, connection.Client),
    'xmlrpclib' : (connection.XmlListener, connection.XmlClient)
    }
#
# Definition of BaseManager
#
class BaseManager(object):
    '''
    Base class for managers
    '''
    _registry = {}
    _Server = Server
    def __init__(self, address=None, authkey=None, serializer='pickle',
                 ctx=None):
        if authkey is None:
            authkey = process.current_process().authkey
        self._address = address     # XXX not final address if eg ('', 0)
        self._authkey = process.AuthenticationString(authkey)
        self._state = State()
        self._state.value = State.INITIAL
        self._serializer = serializer
        self._Listener, self._Client = listener_client[serializer]
        self._ctx = ctx or get_context()
    def get_server(self):
        '''
        Return server object with serve_forever() method and address attribute
        '''
        if self._state.value != State.INITIAL:
            if self._state.value == State.STARTED:
                raise ProcessError("Already started server")
            elif self._state.value == State.SHUTDOWN:
                raise ProcessError("Manager has shut down")
            else:
                raise ProcessError(
                    "Unknown state {!r}".format(self._state.value))
        return Server(self._registry, self._address,
                      self._authkey, self._serializer)
    def connect(self):
        '''
        Connect manager object to the server process
        '''
        Listener, Client = listener_client[self._serializer]
        conn = Client(self._address, authkey=self._authkey)
        # 'dummy' round-trip validates the address and authkey.
        dispatch(conn, None, 'dummy')
        self._state.value = State.STARTED
    def start(self, initializer=None, initargs=()):
        '''
        Spawn a server process for this manager object
        '''
        if self._state.value != State.INITIAL:
            if self._state.value == State.STARTED:
                raise ProcessError("Already started server")
            elif self._state.value == State.SHUTDOWN:
                raise ProcessError("Manager has shut down")
            else:
                raise ProcessError(
                    "Unknown state {!r}".format(self._state.value))
        if initializer is not None and not callable(initializer):
            raise TypeError('initializer must be a callable')
        # pipe over which we will retrieve address of server
        reader, writer = connection.Pipe(duplex=False)
        # spawn process which runs a server
        self._process = self._ctx.Process(
            target=type(self)._run_server,
            args=(self._registry, self._address, self._authkey,
                  self._serializer, writer, initializer, initargs),
            )
        ident = ':'.join(str(i) for i in self._process._identity)
        self._process.name = type(self).__name__  + '-' + ident
        self._process.start()
        # get address of server
        writer.close()
        self._address = reader.recv()
        reader.close()
        # register a finalizer
        self._state.value = State.STARTED
        self.shutdown = util.Finalize(
            self, type(self)._finalize_manager,
            args=(self._process, self._address, self._authkey,
                  self._state, self._Client),
            exitpriority=0
            )
    @classmethod
    def _run_server(cls, registry, address, authkey, serializer, writer,
                    initializer=None, initargs=()):
        '''
        Create a server, report its address and run it
        '''
        if initializer is not None:
            initializer(*initargs)
        # create server
        server = cls._Server(registry, address, authkey, serializer)
        # inform parent process of the server's address
        writer.send(server.address)
        writer.close()
        # run the manager
        util.info('manager serving at %r', server.address)
        server.serve_forever()
    def _create(self, typeid, *args, **kwds):
        '''
        Create a new shared object; return the token and exposed tuple
        '''
        assert self._state.value == State.STARTED, 'server not yet started'
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            id, exposed = dispatch(conn, None, 'create', (typeid,)+args, kwds)
        finally:
            conn.close()
        return Token(typeid, self._address, id), exposed
    def join(self, timeout=None):
        '''
        Join the manager process (if it has been spawned)
        '''
        if self._process is not None:
            self._process.join(timeout)
            if not self._process.is_alive():
                self._process = None
    def _debug_info(self):
        '''
        Return some info about the servers shared objects and connections
        '''
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            return dispatch(conn, None, 'debug_info')
        finally:
            conn.close()
    def _number_of_objects(self):
        '''
        Return the number of shared objects
        '''
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            return dispatch(conn, None, 'number_of_objects')
        finally:
            conn.close()
    def __enter__(self):
        # Starting on __enter__ lets `with Manager() as m:` work without an
        # explicit start() call.
        if self._state.value == State.INITIAL:
            self.start()
        if self._state.value != State.STARTED:
            if self._state.value == State.INITIAL:
                raise ProcessError("Unable to start server")
            elif self._state.value == State.SHUTDOWN:
                raise ProcessError("Manager has shut down")
            else:
                raise ProcessError(
                    "Unknown state {!r}".format(self._state.value))
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.shutdown()
    @staticmethod
    def _finalize_manager(process, address, authkey, state, _Client):
        '''
        Shutdown the manager process; will be registered as a finalizer
        '''
        if process.is_alive():
            util.info('sending shutdown message to manager')
            try:
                conn = _Client(address, authkey=authkey)
                try:
                    dispatch(conn, None, 'shutdown')
                finally:
                    conn.close()
            except Exception:
                pass
            # Polite shutdown first; escalate to terminate() if it hangs.
            process.join(timeout=1.0)
            if process.is_alive():
                util.info('manager still alive')
                if hasattr(process, 'terminate'):
                    util.info('trying to `terminate()` manager process')
                    process.terminate()
                    process.join(timeout=0.1)
                    if process.is_alive():
                        util.info('manager still alive after terminate')
        state.value = State.SHUTDOWN
        try:
            del BaseProxy._address_to_local[address]
        except KeyError:
            pass
    @property
    def address(self):
        return self._address
    @classmethod
    def register(cls, typeid, callable=None, proxytype=None, exposed=None,
                 method_to_typeid=None, create_method=True):
        '''
        Register a typeid with the manager type
        '''
        # Copy-on-write of the registry so subclasses do not mutate the
        # parent class's registrations.
        if '_registry' not in cls.__dict__:
            cls._registry = cls._registry.copy()
        if proxytype is None:
            proxytype = AutoProxy
        exposed = exposed or getattr(proxytype, '_exposed_', None)
        method_to_typeid = method_to_typeid or \
                           getattr(proxytype, '_method_to_typeid_', None)
        if method_to_typeid:
            for key, value in list(method_to_typeid.items()): # isinstance?
                assert type(key) is str, '%r is not a string' % key
                assert type(value) is str, '%r is not a string' % value
        cls._registry[typeid] = (
            callable, exposed, method_to_typeid, proxytype
            )
        if create_method:
            # Synthesize a creator method named after the typeid, e.g.
            # SyncManager.list(...), which returns a proxy for the new object.
            def temp(self, *args, **kwds):
                util.debug('requesting creation of a shared %r object', typeid)
                token, exp = self._create(typeid, *args, **kwds)
                proxy = proxytype(
                    token, self._serializer, manager=self,
                    authkey=self._authkey, exposed=exp
                    )
                # Drop the extra reference create() took on our behalf; the
                # proxy's own _incref() keeps the referent alive.
                conn = self._Client(token.address, authkey=self._authkey)
                dispatch(conn, None, 'decref', (token.id,))
                return proxy
            temp.__name__ = typeid
            setattr(cls, typeid, temp)
#
# Subclass of set which get cleared after a fork
#
class ProcessLocalSet(set):
    # A set that empties itself in the child after a fork and pickles as a
    # fresh empty set: its contents are meaningful only in this process.
    def __init__(self):
        util.register_after_fork(self, lambda obj: obj.clear())
    def __reduce__(self):
        return type(self), ()
#
# Definition of BaseProxy
#
class BaseProxy(object):
    '''
    A base for proxies of shared objects
    '''
    # Per-server-address (thread-local-connections, owned-id-set) pairs,
    # shared by every proxy in this process; guarded by _mutex.
    _address_to_local = {}
    _mutex = util.ForkAwareThreadLock()
    def __init__(self, token, serializer, manager=None,
                 authkey=None, exposed=None, incref=True, manager_owned=False):
        with BaseProxy._mutex:
            tls_idset = BaseProxy._address_to_local.get(token.address, None)
            if tls_idset is None:
                tls_idset = util.ForkAwareLocal(), ProcessLocalSet()
                BaseProxy._address_to_local[token.address] = tls_idset
        # self._tls is used to record the connection used by this
        # thread to communicate with the manager at token.address
        self._tls = tls_idset[0]
        # self._idset is used to record the identities of all shared
        # objects for which the current process owns references and
        # which are in the manager at token.address
        self._idset = tls_idset[1]
        self._token = token
        self._id = self._token.id
        self._manager = manager
        self._serializer = serializer
        self._Client = listener_client[serializer][1]
        # Should be set to True only when a proxy object is being created
        # on the manager server; primary use case: nested proxy objects.
        # RebuildProxy detects when a proxy is being created on the manager
        # and sets this value appropriately.
        self._owned_by_manager = manager_owned
        if authkey is not None:
            self._authkey = process.AuthenticationString(authkey)
        elif self._manager is not None:
            self._authkey = self._manager._authkey
        else:
            self._authkey = process.current_process().authkey
        if incref:
            self._incref()
        util.register_after_fork(self, BaseProxy._after_fork)
    def _connect(self):
        # Open (and cache) this thread's dedicated connection to the server.
        util.debug('making connection to manager')
        name = process.current_process().name
        if threading.current_thread().name != 'MainThread':
            name += '|' + threading.current_thread().name
        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'accept_connection', (name,))
        self._tls.connection = conn
    def _callmethod(self, methodname, args=(), kwds={}):
        '''
        Try to call a method of the referrent and return a copy of the result
        '''
        try:
            conn = self._tls.connection
        except AttributeError:
            # First call from this thread: lazily open a connection.
            util.debug('thread %r does not own a connection',
                       threading.current_thread().name)
            self._connect()
            conn = self._tls.connection
        conn.send((self._id, methodname, args, kwds))
        kind, result = conn.recv()
        if kind == '#RETURN':
            return result
        elif kind == '#PROXY':
            # The server created a nested shared object; build a local
            # proxy for it and release the server-side bootstrap reference.
            exposed, token = result
            proxytype = self._manager._registry[token.typeid][-1]
            token.address = self._token.address
            proxy = proxytype(
                token, self._serializer, manager=self._manager,
                authkey=self._authkey, exposed=exposed
                )
            conn = self._Client(token.address, authkey=self._authkey)
            dispatch(conn, None, 'decref', (token.id,))
            return proxy
        raise convert_to_error(kind, result)
    def _getvalue(self):
        '''
        Get a copy of the value of the referent
        '''
        return self._callmethod('#GETVALUE')
    def _incref(self):
        if self._owned_by_manager:
            util.debug('owned_by_manager skipped INCREF of %r', self._token.id)
            return
        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'incref', (self._id,))
        util.debug('INCREF %r', self._token.id)
        self._idset.add(self._id)
        state = self._manager and self._manager._state
        # Finalizer sends the matching decref when this proxy is collected.
        self._close = util.Finalize(
            self, BaseProxy._decref,
            args=(self._token, self._authkey, state,
                  self._tls, self._idset, self._Client),
            exitpriority=10
            )
    @staticmethod
    def _decref(token, authkey, state, tls, idset, _Client):
        idset.discard(token.id)
        # check whether manager is still alive
        if state is None or state.value == State.STARTED:
            # tell manager this process no longer cares about referent
            try:
                util.debug('DECREF %r', token.id)
                conn = _Client(token.address, authkey=authkey)
                dispatch(conn, None, 'decref', (token.id,))
            except Exception as e:
                util.debug('... decref failed %s', e)
        else:
            util.debug('DECREF %r -- manager already shutdown', token.id)
        # check whether we can close this thread's connection because
        # the process owns no more references to objects for this manager
        if not idset and hasattr(tls, 'connection'):
            util.debug('thread %r has no more proxies so closing conn',
                       threading.current_thread().name)
            tls.connection.close()
            del tls.connection
    def _after_fork(self):
        # The manager object belongs to the parent; in the child we only
        # keep the token/authkey and take our own reference.
        self._manager = None
        try:
            self._incref()
        except Exception as e:
            # the proxy may just be for a manager which has shutdown
            util.info('incref failed: %s' % e)
    def __reduce__(self):
        kwds = {}
        if get_spawning_popen() is not None:
            kwds['authkey'] = self._authkey
        if getattr(self, '_isauto', False):
            kwds['exposed'] = self._exposed_
            return (RebuildProxy,
                    (AutoProxy, self._token, self._serializer, kwds))
        else:
            return (RebuildProxy,
                    (type(self), self._token, self._serializer, kwds))
    def __deepcopy__(self, memo):
        # Deep-copying a proxy yields a local copy of the referent itself.
        return self._getvalue()
    def __repr__(self):
        return '<%s object, typeid %r at %#x>' % \
               (type(self).__name__, self._token.typeid, id(self))
    def __str__(self):
        '''
        Return representation of the referent (or a fall-back if that fails)
        '''
        try:
            return self._callmethod('__repr__')
        except Exception:
            return repr(self)[:-1] + "; '__str__()' failed>"
#
# Function used for unpickling
#
def RebuildProxy(func, token, serializer, kwds):
    '''
    Function used for unpickling proxy objects.
    '''
    server = getattr(process.current_process(), '_manager_server', None)
    if server and server.address == token.address:
        # Unpickling inside the manager server itself: mark the proxy as
        # manager-owned and stash a local reference so the referent stays
        # alive without an external refcount.
        util.debug('Rebuild a proxy owned by manager, token=%r', token)
        kwds['manager_owned'] = True
        if token.id not in server.id_to_local_proxy_obj:
            server.id_to_local_proxy_obj[token.id] = \
                server.id_to_obj[token.id]
    # Skip the incref when the child inherited the reference from its
    # parent (the parent's refcount already covers it).
    incref = (
        kwds.pop('incref', True) and
        not getattr(process.current_process(), '_inheriting', False)
        )
    return func(token, serializer, incref=incref, **kwds)
#
# Functions to create proxies and proxy types
#
def MakeProxyType(name, exposed, _cache={}):
    '''
    Return a proxy type whose methods are given by `exposed`
    '''
    # The mutable default `_cache` is deliberate: it memoizes generated
    # proxy types across calls for the lifetime of the process.
    exposed = tuple(exposed)
    try:
        return _cache[(name, exposed)]
    except KeyError:
        pass
    dic = {}
    # Generate one forwarding method per exposed name, each delegating to
    # BaseProxy._callmethod.
    for meth in exposed:
        exec('''def %s(self, *args, **kwds):
        return self._callmethod(%r, args, kwds)''' % (meth, meth), dic)
    ProxyType = type(name, (BaseProxy,), dic)
    ProxyType._exposed_ = exposed
    _cache[(name, exposed)] = ProxyType
    return ProxyType
def AutoProxy(token, serializer, manager=None, authkey=None,
              exposed=None, incref=True):
    '''
    Return an auto-proxy for `token`
    '''
    _Client = listener_client[serializer][1]
    if exposed is None:
        # Ask the server which methods the referent exposes.
        conn = _Client(token.address, authkey=authkey)
        try:
            exposed = dispatch(conn, None, 'get_methods', (token,))
        finally:
            conn.close()
    if authkey is None and manager is not None:
        authkey = manager._authkey
    if authkey is None:
        authkey = process.current_process().authkey
    ProxyType = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed)
    proxy = ProxyType(token, serializer, manager=manager, authkey=authkey,
                      incref=incref)
    proxy._isauto = True    # lets BaseProxy.__reduce__ rebuild via AutoProxy
    return proxy
#
# Types/callables which we will register with SyncManager
#
class Namespace(object):
    """Simple attribute bag; repr lists non-underscore attributes sorted."""
    def __init__(self, **kwds):
        self.__dict__.update(kwds)
    def __repr__(self):
        shown = sorted(
            '%s=%r' % (name, value)
            for name, value in self.__dict__.items()
            if not name.startswith('_')
        )
        return '%s(%s)' % (type(self).__name__, ', '.join(shown))
class Value(object):
    """Mutable scalar holder used as the referent behind ValueProxy."""
    def __init__(self, typecode, value, lock=True):
        # `lock` is accepted for API symmetry but unused: the manager
        # server serializes all access to the referent.
        self._typecode = typecode
        self._value = value
    def get(self):
        return self._value
    def set(self, value):
        self._value = value
    def __repr__(self):
        return f'{type(self).__name__}({self._typecode!r}, {self._value!r})'
    value = property(get, set)
def Array(typecode, sequence, lock=True):
    # `lock` is accepted for API symmetry but ignored: the manager server
    # serializes access, so a plain array.array suffices as the referent.
    return array.array(typecode, sequence)
#
# Proxy types used by SyncManager
#
class IteratorProxy(BaseProxy):
    # Proxy for iterator/generator referents; forwards the iterator and
    # generator protocols to the manager process.
    _exposed_ = ('__next__', 'send', 'throw', 'close')
    def __iter__(self):
        # The proxy itself is the iterator; the iteration state lives
        # remotely in the referent.
        return self
    def __next__(self, *args):
        return self._callmethod('__next__', args)
    def send(self, *args):
        return self._callmethod('send', args)
    def throw(self, *args):
        return self._callmethod('throw', args)
    def close(self, *args):
        return self._callmethod('close', args)
class AcquirerProxy(BaseProxy):
    # Proxy for lock-like referents (Lock, RLock, Semaphore, ...); also
    # usable as a context manager.
    _exposed_ = ('acquire', 'release')
    def acquire(self, blocking=True, timeout=None):
        # Only forward `timeout` when given, so referents whose acquire()
        # takes no timeout argument still work.
        args = (blocking,) if timeout is None else (blocking, timeout)
        return self._callmethod('acquire', args)
    def release(self):
        return self._callmethod('release')
    def __enter__(self):
        return self._callmethod('acquire')
    def __exit__(self, exc_type, exc_val, exc_tb):
        return self._callmethod('release')
class ConditionProxy(AcquirerProxy):
    # Proxy for threading.Condition referents.
    _exposed_ = ('acquire', 'release', 'wait', 'notify', 'notify_all')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
    def notify(self, n=1):
        return self._callmethod('notify', (n,))
    def notify_all(self):
        return self._callmethod('notify_all')
    def wait_for(self, predicate, timeout=None):
        # Local re-implementation of threading.Condition.wait_for: the
        # predicate runs in *this* process, so the call cannot simply be
        # forwarded to the server.
        result = predicate()
        if result:
            return result
        if timeout is not None:
            endtime = _time() + timeout
        else:
            endtime = None
        waittime = None
        while not result:
            if endtime is not None:
                waittime = endtime - _time()
                if waittime <= 0:
                    # Timed out; return the (falsey) last predicate result.
                    break
            self.wait(waittime)
            result = predicate()
        return result
class EventProxy(BaseProxy):
    # Proxy for threading.Event referents.
    _exposed_ = ('is_set', 'set', 'clear', 'wait')
    def is_set(self):
        return self._callmethod('is_set')
    def set(self):
        return self._callmethod('set')
    def clear(self):
        return self._callmethod('clear')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
class BarrierProxy(BaseProxy):
    # Proxy for threading.Barrier referents.  The read-only properties are
    # fetched via a remote __getattribute__ call on the referent.
    _exposed_ = ('__getattribute__', 'wait', 'abort', 'reset')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
    def abort(self):
        return self._callmethod('abort')
    def reset(self):
        return self._callmethod('reset')
    @property
    def parties(self):
        return self._callmethod('__getattribute__', ('parties',))
    @property
    def n_waiting(self):
        return self._callmethod('__getattribute__', ('n_waiting',))
    @property
    def broken(self):
        return self._callmethod('__getattribute__', ('broken',))
class NamespaceProxy(BaseProxy):
    # Proxy for Namespace referents: ordinary attribute access is forwarded
    # to the remote object.
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')
    def __getattr__(self, key):
        # Underscore-prefixed names belong to the proxy machinery itself;
        # everything else is fetched from the remote Namespace.
        if key[0] == '_':
            return object.__getattribute__(self, key)
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__getattribute__', (key,))
    def __setattr__(self, key, value):
        if key[0] == '_':
            return object.__setattr__(self, key, value)
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__setattr__', (key, value))
    def __delattr__(self, key):
        if key[0] == '_':
            return object.__delattr__(self, key)
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__delattr__', (key,))
class ValueProxy(BaseProxy):
    # Proxy for Value referents; `value` mirrors the referent's property.
    _exposed_ = ('get', 'set')
    def get(self):
        return self._callmethod('get')
    def set(self, value):
        return self._callmethod('set', (value,))
    value = property(get, set)
# Generated proxy exposing (most of) the list interface; each call runs on
# the referent inside the manager process.
BaseListProxy = MakeProxyType('BaseListProxy', (
    '__add__', '__contains__', '__delitem__', '__getitem__', '__len__',
    '__mul__', '__reversed__', '__rmul__', '__setitem__',
    'append', 'count', 'extend', 'index', 'insert', 'pop', 'remove',
    'reverse', 'sort', '__imul__'
    ))
class ListProxy(BaseListProxy):
    def __iadd__(self, value):
        # In-place operators must return self so `lst += ...` rebinds the
        # name to the proxy, not to the remote method's return value.
        self._callmethod('extend', (value,))
        return self
    def __imul__(self, value):
        self._callmethod('__imul__', (value,))
        return self
# Generated proxies for dict, Array and Pool referents.
DictProxy = MakeProxyType('DictProxy', (
    '__contains__', '__delitem__', '__getitem__', '__len__',
    '__setitem__', 'clear', 'copy', 'get', 'has_key', 'items',
    'keys', 'pop', 'popitem', 'setdefault', 'update', 'values'
    ))
ArrayProxy = MakeProxyType('ArrayProxy', (
    '__len__', '__getitem__', '__setitem__'
    ))
BasePoolProxy = MakeProxyType('PoolProxy', (
    'apply', 'apply_async', 'close', 'imap', 'imap_unordered', 'join',
    'map', 'map_async', 'starmap', 'starmap_async', 'terminate',
    ))
# Results of these Pool methods come back as proxies of the registered
# typeids below rather than by value (see Server.serve_client).
BasePoolProxy._method_to_typeid_ = {
    'apply_async': 'AsyncResult',
    'map_async': 'AsyncResult',
    'starmap_async': 'AsyncResult',
    'imap': 'Iterator',
    'imap_unordered': 'Iterator'
    }
class PoolProxy(BasePoolProxy):
    # Adds context-manager support mirroring multiprocessing.Pool.
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.terminate()
#
# Definition of SyncManager
#
class SyncManager(BaseManager):
    '''
    Subclass of `BaseManager` which supports a number of shared object types.
    The types registered are those intended for the synchronization
    of threads, plus `dict`, `list` and `Namespace`.
    The `multiprocessing.Manager()` function creates started instances of
    this class.
    '''
    # The supported typeids are registered at module level below.
# Register the supported typeids.  Where no proxytype is given, an
# AutoProxy is generated from the referent's public methods.
SyncManager.register('Queue', queue.Queue)
# JoinableQueue uses the same queue.Queue referent; presumably its
# task_done()/join() are exposed via public_methods() -- confirm.
SyncManager.register('JoinableQueue', queue.Queue)
SyncManager.register('Event', threading.Event, EventProxy)
SyncManager.register('Lock', threading.Lock, AcquirerProxy)
SyncManager.register('RLock', threading.RLock, AcquirerProxy)
SyncManager.register('Semaphore', threading.Semaphore, AcquirerProxy)
SyncManager.register('BoundedSemaphore', threading.BoundedSemaphore,
                     AcquirerProxy)
SyncManager.register('Condition', threading.Condition, ConditionProxy)
SyncManager.register('Barrier', threading.Barrier, BarrierProxy)
SyncManager.register('Pool', pool.Pool, PoolProxy)
SyncManager.register('list', list, ListProxy)
SyncManager.register('dict', dict, DictProxy)
SyncManager.register('Value', Value, ValueProxy)
SyncManager.register('Array', Array, ArrayProxy)
SyncManager.register('Namespace', Namespace, NamespaceProxy)
# types returned by methods of PoolProxy
SyncManager.register('Iterator', proxytype=IteratorProxy, create_method=False)
SyncManager.register('AsyncResult', create_method=False)
| gpl-3.0 |
GauravSahu/odoo | addons/account_payment/wizard/__init__.py | 436 | 1144 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_payment_order
import account_payment_populate_statement
import account_payment_pay
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
hsuchifeng/cuda-convnet2 | python_util/util.py | 181 | 2825 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import cPickle
import os
from cStringIO import StringIO
class UnpickleError(Exception):
    """Raised when a pickle file cannot be located or read."""
    pass
# Sentinel values for GPU lock acquisition:
# no locking script is available / no lock could be acquired.
GPU_LOCK_NO_SCRIPT = -2
GPU_LOCK_NO_LOCK = -1
def pickle(filename, data):
    """Pickle `data` to `filename`, which may be a path or an open file.

    The file is opened in binary mode: cPickle.HIGHEST_PROTOCOL is a
    binary protocol, so text mode ("w") would corrupt the stream on
    platforms that translate newlines.

    Note: the file object is closed even when the caller passed it in
    (pre-existing behavior, kept for compatibility).
    """
    fo = filename
    if isinstance(filename, str):
        fo = open(filename, "wb")  # was "w": binary protocol needs binary mode
    cPickle.dump(data, fo, protocol=cPickle.HIGHEST_PROTOCOL)
    fo.close()
def unpickle(filename):
    """Load and return the object pickled in `filename`.

    The file is read in 1GB chunks into an in-memory buffer before
    unpickling (works around cPickle issues with very large reads).

    Raises:
        UnpickleError: if `filename` does not exist.
    """
    if not os.path.exists(filename):
        raise UnpickleError("Path '%s' does not exist." % filename)
    fo = open(filename, 'rb')  # was 'r': pickled data is binary
    z = StringIO()
    file_size = os.fstat(fo.fileno()).st_size
    # Read 1GB at a time to avoid overflow
    while fo.tell() < file_size:
        z.write(fo.read(1 << 30))
    fo.close()
    # Renamed from `dict`: do not shadow the builtin.
    data = cPickle.loads(z.getvalue())
    z.close()
    return data
def is_intel_machine():
    """Return True if /proc/cpuinfo reports a GenuineIntel CPU vendor."""
    VENDOR_ID_REGEX = re.compile(r'^vendor_id\s+: (\S+)')
    # `with` guarantees the file is closed even if an exception is raised
    # mid-scan (the original leaked the handle on that path).
    with open('/proc/cpuinfo') as f:
        for line in f:
            m = VENDOR_ID_REGEX.match(line)
            if m:
                return m.group(1) == 'GenuineIntel'
    return False
# Returns the CPUs associated with a given GPU
def get_cpus_for_gpu(gpu):
    """Return the list of CPU ids local to GPU `gpu`, or [-1] if unknown.

    Parses the GPU's PCI bus location out of the nvidia driver's /proc
    entry, then expands the kernel's local_cpulist (e.g. "0-7,16-23")
    for that PCI device into an explicit list of CPU ids.
    """
    with open('/proc/driver/nvidia/gpus/%d/information' % gpu) as f:
        for line in f:
            if line.startswith('Bus Location'):
                bus_id = line.split(':', 1)[1].strip()
                # Re-insert the ':' the driver formats differently than sysfs.
                bus_id = bus_id[:7] + ':' + bus_id[8:]
                # `with` ensures the sysfs handle is closed even if the
                # range parsing below raises (the original leaked it).
                with open('/sys/module/nvidia/drivers/pci:nvidia/%s/local_cpulist' % bus_id) as ff:
                    cpus_str = ff.readline()
                cpus = [cpu for s in cpus_str.split(',') for cpu in range(int(s.split('-')[0]), int(s.split('-')[1]) + 1)]
                return cpus
    return [-1]
def get_cpu():
    """Return the CPU vendor tag: 'intel' or 'amd'."""
    return 'intel' if is_intel_machine() else 'amd'
def is_windows_machine():
    """Return True when running on Windows."""
    running_on_windows = os.name == 'nt'
    return running_on_windows
def tryint(s):
    """Return int(s) when s parses as an integer, else s unchanged."""
    try:
        return int(s)
    except (TypeError, ValueError):  # was a bare except: don't mask real errors
        return s

def alphanum_key(s):
    """Split s into alternating text/int pieces for natural sort order.

    E.g. "file10" sorts after "file2" when used as a sort key.
    """
    return [tryint(c) for c in re.split('([0-9]+)', s)]
| apache-2.0 |
bioinformatics-ua/montra | emif/searchengine/management/commands/index_all.py | 2 | 1125 | # -*- coding: utf-8 -*-
# Copyright (C) 2014 Universidade de Aveiro, DETI/IEETA, Bioinformatics Group - http://bioinformatics.ua.pt/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.management.base import BaseCommand, CommandError
from fingerprint.models import Fingerprint
class Command(BaseCommand):
    """Management command that rebuilds the SOLR index for all fingerprints."""
    args = ''
    help = 'Indexes all Fingerprints in SOLR (WIP)'

    def handle(self, *args, **options):
        """Run the full re-index and report completion on stdout."""
        Fingerprint.index_all()
        done_message = '-- Finished indexing all fingerprints in SOLR.\n'
        self.stdout.write(done_message)
| gpl-3.0 |
denisenkom/pytds | tests/settings.py | 2 | 1030 | import os
# Connection arguments shared by the test suite; empty unless live tests run.
CONNECT_ARGS = []
CONNECT_KWARGS = {}
# Live-server tests only run when a HOST is supplied via the environment.
LIVE_TEST = 'HOST' in os.environ
if LIVE_TEST:
    HOST = os.environ['HOST']
    DATABASE = os.environ.get('DATABASE', 'test')
    USER = os.environ.get('SQLUSER', 'sa')
    PASSWORD = os.environ.get('SQLPASSWORD', 'sa')
    # NOTE(review): bool() on an environment string is truthy for ANY
    # non-empty value, so USE_MARS="0" or "false" still yields True, and
    # USE_MARS="" yields False -- confirm this is the intended contract.
    USE_MARS = bool(os.environ.get('USE_MARS', True))
    SKIP_SQL_AUTH = bool(os.environ.get('SKIP_SQL_AUTH'))
    import pytds
    CONNECT_KWARGS = {
        'server': HOST,
        'database': DATABASE,
        'user': USER,
        'password': PASSWORD,
        'use_mars': USE_MARS,
        'bytes_to_unicode': True,
        'pooling': True,
        'timeout': 30,
    }
    # Optional overrides: look up the named attribute on the pytds modules,
    # e.g. tds_version=TDS74 or auth=SspiAuth.
    if 'tds_version' in os.environ:
        CONNECT_KWARGS['tds_version'] = getattr(pytds, os.environ['tds_version'])
    if 'auth' in os.environ:
        import pytds.login
        CONNECT_KWARGS['auth'] = getattr(pytds.login, os.environ['auth'])()
    if 'bytes_to_unicode' in os.environ:
        # Same truthiness caveat as USE_MARS above.
        CONNECT_KWARGS['bytes_to_unicode'] = bool(os.environ.get('bytes_to_unicode'))
| mit |
dagwieers/ansible | lib/ansible/plugins/action/bigiq.py | 41 | 4252 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
import copy
from ansible import constants as C
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.common.utils import load_provider
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.utils.display import Display
# Prefer an in-tree "library" copy of the F5 provider spec when present
# (development layout); fall back to the spec shipped with Ansible.
try:
    from library.module_utils.network.f5.common import f5_provider_spec
except Exception:
    from ansible.module_utils.network.f5.common import f5_provider_spec
# Module-level display handle used for warning/verbose output below.
display = Display()
class ActionModule(_ActionModule):
    """Action plugin that prepares the connection for BIG-IQ modules.

    Depending on the play's connection type and the module's `provider`
    argument, this either warns about redundant settings, or builds a
    persistent `network_cli` connection before delegating to the normal
    action-module run.
    """
    def run(self, tmp=None, task_vars=None):
        socket_path = None
        # Default transport; may be overridden by the provider below.
        transport = 'rest'
        if self._play_context.connection == 'network_cli':
            provider = self._task.args.get('provider', {})
            if any(provider.values()):
                display.warning("'provider' is unnecessary when using 'network_cli' and will be ignored")
        elif self._play_context.connection == 'local':
            provider = load_provider(f5_provider_spec, self._task.args)
            transport = provider['transport'] or transport
            display.vvvv('connection transport is %s' % transport, self._play_context.remote_addr)
            if transport == 'cli':
                # Build a copy of the play context describing a network_cli
                # connection from the provider settings (falling back to the
                # play's own values), then open a persistent connection.
                pc = copy.deepcopy(self._play_context)
                pc.connection = 'network_cli'
                pc.network_os = 'bigiq'
                pc.remote_addr = provider.get('server', self._play_context.remote_addr)
                pc.port = int(provider['server_port'] or self._play_context.port or 22)
                pc.remote_user = provider.get('user', self._play_context.connection_user)
                pc.password = provider.get('password', self._play_context.password)
                pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
                command_timeout = int(provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT)
                display.vvv('using connection plugin %s' % pc.connection, pc.remote_addr)
                connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
                connection.set_options(direct={'persistent_command_timeout': command_timeout})
                socket_path = connection.run()
                display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
                if not socket_path:
                    return {'failed': True,
                            'msg': 'Unable to open shell. Please see: '
                                   'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}
                task_vars['ansible_socket'] = socket_path
        if (self._play_context.connection == 'local' and transport == 'cli') or self._play_context.connection == 'network_cli':
            # make sure we are in the right cli context which should be
            # enable mode and not config module
            if socket_path is None:
                socket_path = self._connection.socket_path
            conn = Connection(socket_path)
            out = conn.get_prompt()
            # Keep sending 'exit' until the prompt no longer shows config mode.
            while '(config' in to_text(out, errors='surrogate_then_replace').strip():
                display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
                conn.send_command('exit')
                out = conn.get_prompt()
        # Delegate to the standard action-module execution.
        result = super(ActionModule, self).run(tmp, task_vars)
        return result
| gpl-3.0 |
40123210/-2015cd_40123210 | static/Brython3.1.1-20150328-091302/Lib/calendar.py | 828 | 22940 | """Calendar printing functions
Note when comparing these calendars to the ones printed by cal(1): By
default, these calendars have Monday as the first day of the week, and
Sunday as the last (the European convention). Use setfirstweekday() to
set the first day of the week (0=Monday, 6=Sunday)."""
import sys
import datetime
import locale as _locale
# Public API of the module.
__all__ = ["IllegalMonthError", "IllegalWeekdayError", "setfirstweekday",
           "firstweekday", "isleap", "leapdays", "weekday", "monthrange",
           "monthcalendar", "prmonth", "month", "prcal", "calendar",
           "timegm", "month_name", "month_abbr", "day_name", "day_abbr"]
# Exception raised for bad input (with string parameter for details)
error = ValueError
# Exceptions raised for bad input
class IllegalMonthError(ValueError):
    """Raised for a month number outside the valid range 1-12."""
    def __init__(self, month):
        self.month = month

    def __str__(self):
        return "bad month number {!r}; must be 1-12".format(self.month)
class IllegalWeekdayError(ValueError):
    """Raised for a weekday number outside the valid range 0-6."""
    def __init__(self, weekday):
        self.weekday = weekday

    def __str__(self):
        return "bad weekday number {!r}; must be 0 (Monday) to 6 (Sunday)".format(self.weekday)
# Constants for months referenced later
January = 1
February = 2
# Number of days per month (except for February in leap years)
# mdays[0] is a dummy so month numbers 1-12 can index directly.
mdays = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
# This module used to have hard-coded lists of day and month names, as
# English strings. The classes following emulate a read-only version of
# that, but supply localized names. Note that the values are computed
# fresh on each call, in case the user changes locale between calls.
class _localized_month:
_months = [datetime.date(2001, i+1, 1).strftime for i in range(12)]
_months.insert(0, lambda x: "")
def __init__(self, format):
self.format = format
def __getitem__(self, i):
funcs = self._months[i]
if isinstance(i, slice):
return [f(self.format) for f in funcs]
else:
return funcs(self.format)
def __len__(self):
return 13
class _localized_day:
# January 1, 2001, was a Monday.
_days = [datetime.date(2001, 1, i+1).strftime for i in range(7)]
def __init__(self, format):
self.format = format
def __getitem__(self, i):
funcs = self._days[i]
if isinstance(i, slice):
return [f(self.format) for f in funcs]
else:
return funcs(self.format)
def __len__(self):
return 7
# Full and abbreviated names of weekdays
day_name = _localized_day('%A')
day_abbr = _localized_day('%a')
# Full and abbreviated names of months (1-based arrays!!!)
month_name = _localized_month('%B')
month_abbr = _localized_month('%b')
# Constants for weekdays
# Monday is 0 ... Sunday is 6, matching datetime.date.weekday().
(MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY) = range(7)
def isleap(year):
    """Return True for leap years, False for non-leap years."""
    # Divisible by 4, except century years that are not divisible by 400.
    if year % 4:
        return False
    return year % 100 != 0 or year % 400 == 0
def leapdays(y1, y2):
    """Return number of leap years in range [y1, y2).
    Assume y1 <= y2."""
    def leaps_through(y):
        # Leap years in [1, y] by inclusion-exclusion.
        return y // 4 - y // 100 + y // 400
    return leaps_through(y2 - 1) - leaps_through(y1 - 1)
def weekday(year, month, day):
    """Return weekday (0-6 ~ Mon-Sun) for year (1970-...), month (1-12),
    day (1-31)."""
    the_date = datetime.date(year, month, day)
    return the_date.weekday()
def monthrange(year, month):
    """Return weekday (0-6 ~ Mon-Sun) and number of days (28-31) for
    year, month."""
    if month < 1 or month > 12:
        raise IllegalMonthError(month)
    first_weekday = weekday(year, month, 1)
    length = mdays[month]
    if month == February and isleap(year):
        length += 1
    return first_weekday, length
class Calendar(object):
    """
    Base calendar class. This class doesn't do any formatting. It simply
    provides data to subclasses.
    """
    def __init__(self, firstweekday=0):
        self.firstweekday = firstweekday # 0 = Monday, 6 = Sunday
    def getfirstweekday(self):
        # % 7 normalizes any integer the caller stored into the 0..6 range.
        return self._firstweekday % 7
    def setfirstweekday(self, firstweekday):
        self._firstweekday = firstweekday
    # Expose first weekday as a property so assignment goes through the setter.
    firstweekday = property(getfirstweekday, setfirstweekday)
    def iterweekdays(self):
        """
        Return a iterator for one week of weekday numbers starting with the
        configured first one.
        """
        for i in range(self.firstweekday, self.firstweekday + 7):
            yield i%7
    def itermonthdates(self, year, month):
        """
        Return an iterator for one month. The iterator will yield datetime.date
        values and will always iterate through complete weeks, so it will yield
        dates outside the specified month.
        """
        date = datetime.date(year, month, 1)
        # Go back to the beginning of the week
        days = (date.weekday() - self.firstweekday) % 7
        date -= datetime.timedelta(days=days)
        oneday = datetime.timedelta(days=1)
        while True:
            yield date
            try:
                date += oneday
            except OverflowError:
                # Adding one day could fail after datetime.MAXYEAR
                break
            # Stop once we have left the month AND completed the final week.
            if date.month != month and date.weekday() == self.firstweekday:
                break
    def itermonthdays2(self, year, month):
        """
        Like itermonthdates(), but will yield (day number, weekday number)
        tuples. For days outside the specified month the day number is 0.
        """
        for date in self.itermonthdates(year, month):
            if date.month != month:
                yield (0, date.weekday())
            else:
                yield (date.day, date.weekday())
    def itermonthdays(self, year, month):
        """
        Like itermonthdates(), but will yield day numbers. For days outside
        the specified month the day number is 0.
        """
        for date in self.itermonthdates(year, month):
            if date.month != month:
                yield 0
            else:
                yield date.day
    def monthdatescalendar(self, year, month):
        """
        Return a matrix (list of lists) representing a month's calendar.
        Each row represents a week; week entries are datetime.date values.
        """
        dates = list(self.itermonthdates(year, month))
        return [ dates[i:i+7] for i in range(0, len(dates), 7) ]
    def monthdays2calendar(self, year, month):
        """
        Return a matrix representing a month's calendar.
        Each row represents a week; week entries are
        (day number, weekday number) tuples. Day numbers outside this month
        are zero.
        """
        days = list(self.itermonthdays2(year, month))
        return [ days[i:i+7] for i in range(0, len(days), 7) ]
    def monthdayscalendar(self, year, month):
        """
        Return a matrix representing a month's calendar.
        Each row represents a week; days outside this month are zero.
        """
        days = list(self.itermonthdays(year, month))
        return [ days[i:i+7] for i in range(0, len(days), 7) ]
    def yeardatescalendar(self, year, width=3):
        """
        Return the data for the specified year ready for formatting. The return
        value is a list of month rows. Each month row contains up to width months.
        Each month contains between 4 and 6 weeks and each week contains 1-7
        days. Days are datetime.date objects.
        """
        months = [
            self.monthdatescalendar(year, i)
            for i in range(January, January+12)
        ]
        return [months[i:i+width] for i in range(0, len(months), width) ]
    def yeardays2calendar(self, year, width=3):
        """
        Return the data for the specified year ready for formatting (similar to
        yeardatescalendar()). Entries in the week lists are
        (day number, weekday number) tuples. Day numbers outside this month are
        zero.
        """
        months = [
            self.monthdays2calendar(year, i)
            for i in range(January, January+12)
        ]
        return [months[i:i+width] for i in range(0, len(months), width) ]
    def yeardayscalendar(self, year, width=3):
        """
        Return the data for the specified year ready for formatting (similar to
        yeardatescalendar()). Entries in the week lists are day numbers.
        Day numbers outside this month are zero.
        """
        months = [
            self.monthdayscalendar(year, i)
            for i in range(January, January+12)
        ]
        return [months[i:i+width] for i in range(0, len(months), width) ]
class TextCalendar(Calendar):
    """
    Subclass of Calendar that outputs a calendar as a simple plain text
    similar to the UNIX program cal.

    Common parameter names: w = date column width, l = lines per week,
    c = spacing between month columns, m = months per row.
    """
    def prweek(self, theweek, width):
        """
        Print a single week (no newline).
        """
        # end=' ' keeps the cursor on the same line for subsequent output.
        print(self.formatweek(theweek, width), end=' ')
    def formatday(self, day, weekday, width):
        """
        Returns a formatted day.
        """
        if day == 0:
            # Day 0 marks a day that belongs to a neighboring month.
            s = ''
        else:
            s = '%2i' % day             # right-align single-digit days
        return s.center(width)
    def formatweek(self, theweek, width):
        """
        Returns a single week in a string (no newline).
        """
        return ' '.join(self.formatday(d, wd, width) for (d, wd) in theweek)
    def formatweekday(self, day, width):
        """
        Returns a formatted week day name.
        """
        # Use full names only when the column is wide enough to be readable.
        if width >= 9:
            names = day_name
        else:
            names = day_abbr
        return names[day][:width].center(width)
    def formatweekheader(self, width):
        """
        Return a header for a week.
        """
        return ' '.join(self.formatweekday(i, width) for i in self.iterweekdays())
    def formatmonthname(self, theyear, themonth, width, withyear=True):
        """
        Return a formatted month name.
        """
        s = month_name[themonth]
        if withyear:
            s = "%s %r" % (s, theyear)
        return s.center(width)
    def prmonth(self, theyear, themonth, w=0, l=0):
        """
        Print a month's calendar.
        """
        print(self.formatmonth(theyear, themonth, w, l), end=' ')
    def formatmonth(self, theyear, themonth, w=0, l=0):
        """
        Return a month's calendar string (multi-line).
        """
        # Enforce minimum sizes so the layout always lines up.
        w = max(2, w)
        l = max(1, l)
        s = self.formatmonthname(theyear, themonth, 7 * (w + 1) - 1)
        s = s.rstrip()
        s += '\n' * l
        s += self.formatweekheader(w).rstrip()
        s += '\n' * l
        for week in self.monthdays2calendar(theyear, themonth):
            s += self.formatweek(week, w).rstrip()
            s += '\n' * l
        return s
    def formatyear(self, theyear, w=2, l=1, c=6, m=3):
        """
        Returns a year's calendar as a multi-line string.
        """
        w = max(2, w)
        l = max(1, l)
        c = max(2, c)
        colwidth = (w + 1) * 7 - 1
        v = []
        a = v.append
        a(repr(theyear).center(colwidth*m+c*(m-1)).rstrip())
        a('\n'*l)
        header = self.formatweekheader(w)
        for (i, row) in enumerate(self.yeardays2calendar(theyear, m)):
            # months in this row
            months = range(m*i+1, min(m*(i+1)+1, 13))
            a('\n'*l)
            names = (self.formatmonthname(theyear, k, colwidth, False)
                     for k in months)
            a(formatstring(names, colwidth, c).rstrip())
            a('\n'*l)
            headers = (header for k in months)
            a(formatstring(headers, colwidth, c).rstrip())
            a('\n'*l)
            # max number of weeks for this row
            height = max(len(cal) for cal in row)
            for j in range(height):
                weeks = []
                for cal in row:
                    # Months with fewer weeks get an empty column entry.
                    if j >= len(cal):
                        weeks.append('')
                    else:
                        weeks.append(self.formatweek(cal[j], w))
                a(formatstring(weeks, colwidth, c).rstrip())
                a('\n' * l)
        return ''.join(v)
    def pryear(self, theyear, w=0, l=0, c=6, m=3):
        """Print a year's calendar."""
        print(self.formatyear(theyear, w, l, c, m))
class HTMLCalendar(Calendar):
    """
    This calendar returns complete HTML pages.
    """
    # CSS classes for the day <td>s
    cssclasses = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
    def formatday(self, day, weekday):
        """
        Return a day as a table cell.
        """
        if day == 0:
            # NOTE(review): upstream CPython uses '&nbsp;' as the cell
            # content here; verify this literal was not mangled in transit.
            return '<td class="noday"> </td>' # day outside month
        else:
            return '<td class="%s">%d</td>' % (self.cssclasses[weekday], day)
    def formatweek(self, theweek):
        """
        Return a complete week as a table row.
        """
        s = ''.join(self.formatday(d, wd) for (d, wd) in theweek)
        return '<tr>%s</tr>' % s
    def formatweekday(self, day):
        """
        Return a weekday name as a table header.
        """
        return '<th class="%s">%s</th>' % (self.cssclasses[day], day_abbr[day])
    def formatweekheader(self):
        """
        Return a header for a week as a table row.
        """
        s = ''.join(self.formatweekday(i) for i in self.iterweekdays())
        return '<tr>%s</tr>' % s
    def formatmonthname(self, theyear, themonth, withyear=True):
        """
        Return a month name as a table row.
        """
        if withyear:
            s = '%s %s' % (month_name[themonth], theyear)
        else:
            s = '%s' % month_name[themonth]
        return '<tr><th colspan="7" class="month">%s</th></tr>' % s
    def formatmonth(self, theyear, themonth, withyear=True):
        """
        Return a formatted month as a table.
        """
        v = []
        a = v.append
        a('<table border="0" cellpadding="0" cellspacing="0" class="month">')
        a('\n')
        a(self.formatmonthname(theyear, themonth, withyear=withyear))
        a('\n')
        a(self.formatweekheader())
        a('\n')
        for week in self.monthdays2calendar(theyear, themonth):
            a(self.formatweek(week))
            a('\n')
        a('</table>')
        a('\n')
        return ''.join(v)
    def formatyear(self, theyear, width=3):
        """
        Return a formatted year as a table of tables.
        """
        v = []
        a = v.append
        width = max(width, 1)
        a('<table border="0" cellpadding="0" cellspacing="0" class="year">')
        a('\n')
        a('<tr><th colspan="%d" class="year">%s</th></tr>' % (width, theyear))
        for i in range(January, January+12, width):
            # months in this row
            months = range(i, min(i+width, 13))
            a('<tr>')
            for m in months:
                a('<td>')
                a(self.formatmonth(theyear, m, withyear=False))
                a('</td>')
            a('</tr>')
        a('</table>')
        return ''.join(v)
    def formatyearpage(self, theyear, width=3, css='calendar.css', encoding=None):
        """
        Return a formatted year as a complete HTML page.
        """
        if encoding is None:
            encoding = sys.getdefaultencoding()
        v = []
        a = v.append
        a('<?xml version="1.0" encoding="%s"?>\n' % encoding)
        a('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n')
        a('<html>\n')
        a('<head>\n')
        a('<meta http-equiv="Content-Type" content="text/html; charset=%s" />\n' % encoding)
        if css is not None:
            a('<link rel="stylesheet" type="text/css" href="%s" />\n' % css)
        a('<title>Calendar for %d</title>\n' % theyear)
        a('</head>\n')
        a('<body>\n')
        a(self.formatyear(theyear, width))
        a('</body>\n')
        a('</html>\n')
        # Encode to bytes; unrepresentable characters become XML entities.
        return ''.join(v).encode(encoding, "xmlcharrefreplace")
class different_locale:
def __init__(self, locale):
self.locale = locale
def __enter__(self):
self.oldlocale = _locale.getlocale(_locale.LC_TIME)
_locale.setlocale(_locale.LC_TIME, self.locale)
def __exit__(self, *args):
_locale.setlocale(_locale.LC_TIME, self.oldlocale)
class LocaleTextCalendar(TextCalendar):
    """
    This class can be passed a locale name in the constructor and will return
    month and weekday names in the specified locale. If this locale includes
    an encoding all strings containing month and weekday names will be returned
    as unicode.
    """
    def __init__(self, firstweekday=0, locale=None):
        TextCalendar.__init__(self, firstweekday)
        if locale is None:
            # NOTE(review): getdefaultlocale() returns a (lang, encoding)
            # tuple which is later handed to setlocale(); confirm this is
            # accepted on all target platforms.
            locale = _locale.getdefaultlocale()
        self.locale = locale
    def formatweekday(self, day, width):
        # Render the name under the configured locale, then restore it.
        with different_locale(self.locale):
            if width >= 9:
                names = day_name
            else:
                names = day_abbr
            name = names[day]
            return name[:width].center(width)
    def formatmonthname(self, theyear, themonth, width, withyear=True):
        with different_locale(self.locale):
            s = month_name[themonth]
            if withyear:
                s = "%s %r" % (s, theyear)
            return s.center(width)
class LocaleHTMLCalendar(HTMLCalendar):
    """
    This class can be passed a locale name in the constructor and will return
    month and weekday names in the specified locale. If this locale includes
    an encoding all strings containing month and weekday names will be returned
    as unicode.
    """
    def __init__(self, firstweekday=0, locale=None):
        HTMLCalendar.__init__(self, firstweekday)
        if locale is None:
            # See LocaleTextCalendar: a (lang, encoding) tuple is stored here.
            locale = _locale.getdefaultlocale()
        self.locale = locale
    def formatweekday(self, day):
        # Render the name under the configured locale, then restore it.
        with different_locale(self.locale):
            s = day_abbr[day]
            return '<th class="%s">%s</th>' % (self.cssclasses[day], s)
    def formatmonthname(self, theyear, themonth, withyear=True):
        with different_locale(self.locale):
            s = month_name[themonth]
            if withyear:
                s = '%s %s' % (s, theyear)
            return '<tr><th colspan="7" class="month">%s</th></tr>' % s
# Support for old module level interface
# A shared TextCalendar instance backs all of the module-level functions.
c = TextCalendar()
firstweekday = c.getfirstweekday
def setfirstweekday(firstweekday):
    """Set the weekday (0=Monday, 6=Sunday) weeks start on, module-wide."""
    if not MONDAY <= firstweekday <= SUNDAY:
        raise IllegalWeekdayError(firstweekday)
    c.firstweekday = firstweekday
# Module-level aliases for the shared instance's bound methods.
monthcalendar = c.monthdayscalendar
prweek = c.prweek
week = c.formatweek
weekheader = c.formatweekheader
prmonth = c.prmonth
month = c.formatmonth
calendar = c.formatyear
prcal = c.pryear
# Spacing of month columns for multi-column year calendar
_colwidth = 7*3 - 1 # Amount printed by prweek()
_spacing = 6 # Number of spaces between columns
def format(cols, colwidth=_colwidth, spacing=_spacing):
"""Prints multi-column formatting for year calendars"""
print(formatstring(cols, colwidth, spacing))
def formatstring(cols, colwidth=_colwidth, spacing=_spacing):
"""Returns a string formatted from n strings, centered within n columns."""
spacing *= ' '
return spacing.join(c.center(colwidth) for c in cols)
EPOCH = 1970
_EPOCH_ORD = datetime.date(EPOCH, 1, 1).toordinal()
def timegm(tuple):
"""Unrelated but handy function to calculate Unix timestamp from GMT."""
year, month, day, hour, minute, second = tuple[:6]
days = datetime.date(year, month, 1).toordinal() - _EPOCH_ORD + day - 1
hours = days*24 + hour
minutes = hours*60 + minute
seconds = minutes*60 + second
return seconds
def main(args):
    """Command-line entry point: print a text or HTML calendar.

    `args` is the full argv list (args[0] is the program name); the
    remaining positional arguments select [year [month]].
    """
    import optparse
    parser = optparse.OptionParser(usage="usage: %prog [options] [year [month]]")
    parser.add_option(
        "-w", "--width",
        dest="width", type="int", default=2,
        help="width of date column (default 2, text only)"
    )
    parser.add_option(
        "-l", "--lines",
        dest="lines", type="int", default=1,
        help="number of lines for each week (default 1, text only)"
    )
    parser.add_option(
        "-s", "--spacing",
        dest="spacing", type="int", default=6,
        help="spacing between months (default 6, text only)"
    )
    parser.add_option(
        "-m", "--months",
        dest="months", type="int", default=3,
        help="months per row (default 3, text only)"
    )
    parser.add_option(
        "-c", "--css",
        dest="css", default="calendar.css",
        help="CSS to use for page (html only)"
    )
    parser.add_option(
        "-L", "--locale",
        dest="locale", default=None,
        help="locale to be used from month and weekday names"
    )
    parser.add_option(
        "-e", "--encoding",
        dest="encoding", default=None,
        help="Encoding to use for output."
    )
    parser.add_option(
        "-t", "--type",
        dest="type", default="text",
        choices=("text", "html"),
        help="output type (text or html)"
    )
    (options, args) = parser.parse_args(args)
    if options.locale and not options.encoding:
        parser.error("if --locale is specified --encoding is required")
        # NOTE(review): parser.error() normally exits on its own; this
        # sys.exit(1) looks like a defensive fallback.
        sys.exit(1)
    locale = options.locale, options.encoding
    if options.type == "html":
        if options.locale:
            cal = LocaleHTMLCalendar(locale=locale)
        else:
            cal = HTMLCalendar()
        encoding = options.encoding
        if encoding is None:
            encoding = sys.getdefaultencoding()
        optdict = dict(encoding=encoding, css=options.css)
        # HTML output is bytes, so write via the binary buffer.
        write = sys.stdout.buffer.write
        if len(args) == 1:
            write(cal.formatyearpage(datetime.date.today().year, **optdict))
        elif len(args) == 2:
            write(cal.formatyearpage(int(args[1]), **optdict))
        else:
            parser.error("incorrect number of arguments")
            sys.exit(1)
    else:
        if options.locale:
            cal = LocaleTextCalendar(locale=locale)
        else:
            cal = TextCalendar()
        optdict = dict(w=options.width, l=options.lines)
        # Year output also takes the spacing/months-per-row options;
        # single-month output (3 args) does not.
        if len(args) != 3:
            optdict["c"] = options.spacing
            optdict["m"] = options.months
        if len(args) == 1:
            result = cal.formatyear(datetime.date.today().year, **optdict)
        elif len(args) == 2:
            result = cal.formatyear(int(args[1]), **optdict)
        elif len(args) == 3:
            result = cal.formatmonth(int(args[1]), int(args[2]), **optdict)
        else:
            parser.error("incorrect number of arguments")
            sys.exit(1)
        write = sys.stdout.write
        if options.encoding:
            # Explicit encoding requested: emit bytes instead of text.
            result = result.encode(options.encoding)
            write = sys.stdout.buffer.write
        write(result)
if __name__ == "__main__":
    # Allow the module to be used as a command-line calendar tool.
    main(sys.argv)
| gpl-3.0 |
gadomski/fgt | vendor/googletest-release-1.10.0/googletest/test/gtest_skip_environment_check_output_test.py | 91 | 2209 | #!/usr/bin/env python
#
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests Google Test's gtest skip in environment setup behavior.
This script invokes gtest_skip_in_environment_setup_test and verifies its
output.
"""
import gtest_test_utils
# Path to the gtest_skip_in_environment_setup_test binary
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
    'gtest_skip_in_environment_setup_test')
# Run the binary once at import time and capture its combined output.
OUTPUT = gtest_test_utils.Subprocess([EXE_PATH]).output
# Test.
class SkipEntireEnvironmentTest(gtest_test_utils.TestCase):
  def testSkipEntireEnvironmentTest(self):
    """The skip message must appear and no test may be reported as failed."""
    self.assertIn('Skipping the entire environment', OUTPUT)
    self.assertNotIn('FAILED', OUTPUT)
if __name__ == '__main__':
  gtest_test_utils.Main()
| lgpl-2.1 |
oandrew/home-assistant | tests/components/sensor/test_mqtt.py | 13 | 2148 | """The tests for the MQTT sensor platform."""
import unittest
from homeassistant.bootstrap import setup_component
import homeassistant.components.sensor as sensor
from tests.common import mock_mqtt_component, fire_mqtt_message
from tests.common import get_test_home_assistant
class TestSensorMQTT(unittest.TestCase):
    """Test the MQTT sensor."""
    def setUp(self): # pylint: disable=invalid-name
        """Setup things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        # Replace the real MQTT component with a mock so no broker is needed.
        mock_mqtt_component(self.hass)
    def tearDown(self): # pylint: disable=invalid-name
        """Stop down everything that was started."""
        self.hass.stop()
    def test_setting_sensor_value_via_mqtt_message(self):
        """Test the setting of the value via MQTT."""
        self.hass.config.components = ['mqtt']
        assert setup_component(self.hass, sensor.DOMAIN, {
            sensor.DOMAIN: {
                'platform': 'mqtt',
                'name': 'test',
                'state_topic': 'test-topic',
                'unit_of_measurement': 'fav unit'
            }
        })
        # Simulate an incoming MQTT message and wait for it to be processed.
        fire_mqtt_message(self.hass, 'test-topic', '100')
        self.hass.block_till_done()
        state = self.hass.states.get('sensor.test')
        self.assertEqual('100', state.state)
        self.assertEqual('fav unit',
                         state.attributes.get('unit_of_measurement'))
    def test_setting_sensor_value_via_mqtt_json_message(self):
        """Test the setting of the value via MQTT with JSON playload."""
        self.hass.config.components = ['mqtt']
        assert setup_component(self.hass, sensor.DOMAIN, {
            sensor.DOMAIN: {
                'platform': 'mqtt',
                'name': 'test',
                'state_topic': 'test-topic',
                'unit_of_measurement': 'fav unit',
                # The template extracts the 'val' field from the JSON payload.
                'value_template': '{{ value_json.val }}'
            }
        })
        fire_mqtt_message(self.hass, 'test-topic', '{ "val": "100" }')
        self.hass.block_till_done()
        state = self.hass.states.get('sensor.test')
        self.assertEqual('100', state.state)
| mit |
tdsmith/celltool | celltool/command_line/extract_contours.py | 1 | 5720 | # Copyright 2007 Zachary Pincus
# This file is part of CellTool.
#
# CellTool is free software; you can redistribute it and/or modify
# it under the terms of version 2 of the GNU General Public License as
# published by the Free Software Foundation.
"""Extract contours from images, optionally rescaling and resampling them.
This tool extracts contours from images at a given threshold value. Optionally
a minimum and maximum area for acceptable contours can be specified.
Once extracted, contours can be rescaled so that they are defined in terms of
some spatial length scale (instead of pixels). If a scale is specified, it is
highly recommended to also specify the units of that scale. (Note that
'microns' or 'um' will be converted to the micron symbol.)
After scaling, the contours can be re-sampled to smooth them and ensure that
each contour has the same number of points, and that those points are evenly
spaced. Smoothing is controlled by a smoothing parameter, which sets a maximum
on the mean squared-distance between the points of the original contour and
the points of the smoothed contour. This distance will be in terms of the true
spatial units, if specified.
If the primary source of measurement error in the shapes is from pixel
quantization, then a smoothing factor on the order of the squared distance
between pixels is appropriate. This would be 1 for un-scaled contours, or
the square of the units-per-pixel value otherwise.
Contours will be named based on the images that they were extracted from. If
there were multiple contours for a given image, then a number will be appended
to the image name.
"""
import celltool.utility.optparse as optparse
import celltool.simple_interface as simple_interface
import celltool.utility.path as path
import cli_tools
# Command-line interface definition for the contour-extraction tool.
# __doc__ (the module docstring above) supplies the long help text.
usage = "usage: %prog [options] image_1 ... image_n"
parser = optparse.OptionParser(usage=usage, description=__doc__.strip(),
    formatter=cli_tools.CelltoolFormatter())
# Defaults for every option below; parse_args() leaves these values in
# place when the corresponding flag is not given on the command line.
parser.set_defaults(
    show_progress=True,
    min_area=10,
    units='',
    resample=True,
    resample_points=100,
    smoothing_factor=0,
    destination='.'
)
# Output verbosity.
parser.add_option('-q', '--quiet', action='store_false', dest='show_progress',
    help='suppress progress bars and other status updates')
# Contour extraction parameters.
parser.add_option('-v', '--contour-value', action='store', type='float', metavar='VALUE',
    help='intensity level at which to extract contours [default: use the mid-point intensity of each image]')
parser.add_option('--min-area', action='store', type='float', metavar='AREA',
    help='minimum area for extracted contours; those smaller will be rejected [default: %default]')
parser.add_option('--max-area', action='store', type='float', metavar='AREA',
    help='maximum area for extracted contours; those larger will be rejected')
# Spatial calibration (pixels -> physical units).
parser.add_option('-s', '--scale', action='store', type='float',
    help='size of one pixel in spatial units (if specified, contours will be scaled in terms of those units)')
parser.add_option('-u', '--units', action='store',
    help='name of the units in which contours are measured [default: "pixels" if no scale is specified, otherwise none]')
# Resampling / smoothing controls.
parser.add_option('-n', '--no-resample', action='store_false', dest='resample',
    help='do not resample/smooth contours')
parser.add_option('-p', '--resample-points', type='int', metavar='POINTS',
    help='number of points in each contour after resampling (if resampling is enabled) [default: %default]')
parser.add_option('-f', '--smoothing-factor', type='float', metavar='SMOOTHING',
    help='maximum mean-squared-distance between original and resampled points (if resampling is enabled) [default: %default]')
# Output location.
parser.add_option('-d', '--destination', metavar='DIRECTORY',
    help='directory in which to write the output contours [default: %default]')
def main(name, arguments):
    """Run the contour-extraction pipeline.

    Parses `arguments` with the module-level option parser, extracts
    contours from the given image files, optionally rescales and
    resamples them, and writes one .contour file per contour into the
    destination directory.

    Parameters:
        name: program name shown in usage/help text (sets parser.prog).
        arguments: command-line argument list (without the program name).

    Raises:
        ValueError: if no image files are specified after globbing.
    """
    parser.prog = name
    options, args = parser.parse_args(arguments)
    args = cli_tools.glob_args(args)
    if not args:
        raise ValueError('Some image files must be specified!')
    filenames = [path.path(arg) for arg in args]
    contours_groups = simple_interface.extract_contours(filenames, options.contour_value,
        options.min_area, options.max_area, options.show_progress)
    contours = []
    names = []
    destination = path.path(options.destination)
    if not destination.exists():
        destination.makedirs()
    # Name each contour after its source image; append a numeric suffix
    # only when an image yielded more than one contour.
    for contour_group, image_name in zip(contours_groups, filenames):
        num_contours = len(contour_group)
        if num_contours == 1:
            contours.append(contour_group[0])
            # note that with path objects, the '/' operator means 'join path components.'
            names.append(destination / image_name.namebase + '.contour')
            contour_group[0]._filename = image_name.namebase
        else:
            # Zero-pad the index to the width of the largest index so the
            # generated names sort correctly.
            width = len(str(num_contours))
            for i, contour in enumerate(contour_group):
                contours.append(contour)
                names.append(destination / image_name.namebase + '-%.*d.contour' % (width, i + 1))
                contour._filename = image_name.namebase + '-%.*d' % (width, i + 1)
    if options.scale is not None:
        # If not rescaling, contours are already denominated in pixels, so do nothing.
        units = options.units
        if units.lower() in ('um', 'micron', 'microns'):
            units = u'\N{MICRO SIGN}m'
        contours = simple_interface.transform_contours(contours, scale_factor=options.scale,
            units=units, show_progress=options.show_progress, title='Rescaling Contours')
    if options.resample:
        contours = simple_interface.resample_contours(contours, options.resample_points,
            options.smoothing_factor, options.show_progress)
    simple_interface.save_contours(contours, names, options.show_progress)
# Allow the module to be run directly as a standalone script.
if __name__ == '__main__':
    import sys
    import os
main(os.path.basename(sys.argv[0]), sys.argv[1:]) | gpl-2.0 |
kennethlyn/enclustra_zynq_linux | tools/perf/scripts/python/futex-contention.py | 11261 | 1486 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
# Per-thread state for futexes currently being waited on.
thread_thislock = {}   # tid -> futex address the thread is currently blocked on
thread_blocktime = {}  # tid -> timestamp (ns) at which the thread blocked
lock_waits = {}  # long-lived stats on (tid,lock) blockage elapsed time
process_names = {}  # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
        nr, uaddr, op, val, utime, uaddr2, val3):
    """Record the start of a FUTEX_WAIT for the calling thread."""
    # Only waiters are interesting; skip originators of WAKE events.
    if (op & FUTEX_CMD_MASK) != FUTEX_WAIT:
        return
    # Remember who is blocking, since when, and on which futex.
    process_names[tid] = comm
    thread_blocktime[tid] = nsecs(s, ns)
    thread_thislock[tid] = uaddr
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
        nr, ret):
    """Record the end of a FUTEX_WAIT and accumulate the blocked time."""
    # Ignore exits from threads we never saw enter FUTEX_WAIT.
    # (dict.has_key is deprecated and gone in Python 3; use `in`.)
    if tid not in thread_blocktime:
        return
    elapsed = nsecs(s, ns) - thread_blocktime.pop(tid)
    add_stats(lock_waits, (tid, thread_thislock.pop(tid)), elapsed)
def trace_begin():
    # Called once by perf before event processing starts.
    print "Press control+C to stop and show the summary"
def trace_end():
for (tid, lock) in lock_waits:
min, max, avg, count = lock_waits[tid, lock]
print "%s[%d] lock %x contended %d times, %d avg ns" % \
(process_names[tid], tid, lock, count, avg)
| gpl-2.0 |
pombredanne/pygments-1 | pygments/console.py | 365 | 1850 | # -*- coding: utf-8 -*-
"""
pygments.console
~~~~~~~~~~~~~~~~
Format colored console output.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# ANSI SGR escape sequences, keyed by a human-readable name.
esc = "\x1b["

codes = {}
codes[""] = ""
codes["reset"] = esc + "39;49;00m"

# Text attributes: SGR parameters 1 through 6.
_attr_names = ["bold", "faint", "standout", "underline", "blink", "overline"]
for _n, _name in enumerate(_attr_names, 1):
    codes[_name] = esc + "%02dm" % _n

# Foreground colors occupy SGR 30-37; the "light" variant of each color
# is the same number with the bold attribute (";01") appended.
dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue",
               "purple", "teal", "lightgray"]
light_colors = ["darkgray", "red", "green", "yellow", "blue",
                "fuchsia", "turquoise", "white"]
for _n, (_dark, _light) in enumerate(zip(dark_colors, light_colors), 30):
    codes[_dark] = esc + "%im" % _n
    codes[_light] = esc + "%i;01m" % _n
del _attr_names, _n, _name, _dark, _light

# Convenience aliases (including a common misspelling of "fuchsia").
codes["darkteal"] = codes["turquoise"]
codes["darkyellow"] = codes["brown"]
codes["fuscia"] = codes["fuchsia"]
codes["white"] = codes["bold"]
def reset_color():
    """Return the escape sequence that resets colors and attributes."""
    return codes["reset"]
def colorize(color_key, text):
    """Return *text* wrapped in the escape code named *color_key*,
    followed by a reset sequence."""
    return "".join((codes[color_key], text, codes["reset"]))
def ansiformat(attr, text):
    """
    Format ``text`` with a color and/or some attributes::

        color       normal color
        *color*     bold color
        _color_     underlined color
        +color+     blinking color
    """
    pieces = []
    # Peel decoration markers off the attribute name from the outside in;
    # each marker contributes one attribute code before the color code.
    for marker, code_name in (("+", "blink"), ("*", "bold"), ("_", "underline")):
        if attr[:1] == attr[-1:] == marker:
            pieces.append(codes[code_name])
            attr = attr[1:-1]
    pieces.append(codes[attr])
    pieces.append(text)
    pieces.append(codes["reset"])
    return "".join(pieces)
| bsd-2-clause |
benoitsteiner/tensorflow-opencl | tensorflow/python/kernel_tests/random/random_shuffle_queue_test.py | 22 | 50588 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.data_flow_ops.Queue."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import re
import time
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
class RandomShuffleQueueTest(test.TestCase):
def setUp(self):
    """Log the test name so a hung or timed-out test is identifiable."""
    # Useful for debugging when a test times out.
    super(RandomShuffleQueueTest, self).setUp()
    tf_logging.error("Starting: %s", self._testMethodName)
def tearDown(self):
    """Log test completion; pairs with the 'Starting' message in setUp."""
    super(RandomShuffleQueueTest, self).tearDown()
    tf_logging.error("Finished: %s", self._testMethodName)
def testEnqueue(self):
    # A single enqueue grows the queue size from 0 to 1.
    with self.test_session():
        # Positional args: capacity=10, min_after_dequeue=5.
        q = data_flow_ops.RandomShuffleQueue(10, 5, dtypes_lib.float32)
        enqueue_op = q.enqueue((10.0,))
        self.assertAllEqual(0, q.size().eval())
        enqueue_op.run()
        self.assertAllEqual(1, q.size().eval())
def testEnqueueWithShape(self):
    # Enqueued elements must match the queue's declared element shape;
    # a mismatched shape is rejected at graph-construction time.
    with self.test_session():
        q = data_flow_ops.RandomShuffleQueue(
            10, 5, dtypes_lib.float32, shapes=tensor_shape.TensorShape([3, 2]))
        enqueue_correct_op = q.enqueue(([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]],))
        enqueue_correct_op.run()
        self.assertAllEqual(1, q.size().eval())
        # A (2, 3) element does not match the declared (3, 2) shape.
        with self.assertRaises(ValueError):
            q.enqueue(([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],))
def testEnqueueManyWithShape(self):
    # enqueue_many splits its input along axis 0 into individual queue
    # elements, each of which must match the declared per-element shape.
    with self.test_session():
        q = data_flow_ops.RandomShuffleQueue(
            10, 5, [dtypes_lib.int32, dtypes_lib.int32], shapes=[(), (2,)])
        q.enqueue_many([[1, 2, 3, 4], [[1, 1], [2, 2], [3, 3], [4, 4]]]).run()
        self.assertAllEqual(4, q.size().eval())

        # Single-component queue: enqueue one [3] element, then
        # enqueue_many a batch containing one [3] element.
        q2 = data_flow_ops.RandomShuffleQueue(
            10, 5, dtypes_lib.int32, shapes=tensor_shape.TensorShape([3]))
        q2.enqueue(([1, 2, 3],))
        q2.enqueue_many(([[1, 2, 3]],))
def testScalarShapes(self):
    # Mixed scalar and vector components dequeue correctly both one at a
    # time and via dequeue_many.
    with self.test_session() as sess:
        q = data_flow_ops.RandomShuffleQueue(
            10, 0, [dtypes_lib.int32, dtypes_lib.int32], shapes=[(), (1,)])
        q.enqueue_many([[1, 2, 3, 4], [[5], [6], [7], [8]]]).run()
        q.enqueue([9, [10]]).run()
        dequeue_t = q.dequeue()
        results = []
        for _ in range(2):
            a, b = sess.run(dequeue_t)
            results.append((a, b))
        a, b = sess.run(q.dequeue_many(3))
        for i in range(3):
            results.append((a[i], b[i]))
        # Dequeue order is random, so compare as multisets.
        self.assertItemsEqual([(1, [5]), (2, [6]), (3, [7]), (4, [8]), (9, [10])],
                              results)
def testParallelEnqueue(self):
    # Ten producer threads each enqueue one element; a single consumer
    # must observe all of them (in some random order).
    with self.test_session() as sess:
        q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32)
        elems = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
        enqueue_ops = [q.enqueue((x,)) for x in elems]
        dequeued_t = q.dequeue()

        # Run one producer thread for each element in elems.
        def enqueue(enqueue_op):
            sess.run(enqueue_op)

        threads = [
            self.checkedThread(
                target=enqueue, args=(e,)) for e in enqueue_ops
        ]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        # Dequeue every element using a single thread.
        results = []
        for _ in xrange(len(elems)):
            results.append(dequeued_t.eval())
        self.assertItemsEqual(elems, results)
def testParallelDequeue(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32)
elems = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
enqueue_ops = [q.enqueue((x,)) for x in elems]
dequeued_t = q.dequeue()
# Enqueue every element using a single thread.
for enqueue_op in enqueue_ops:
enqueue_op.run()
# Run one consumer thread for each element in elems.
results = []
def dequeue():
results.append(sess.run(dequeued_t))
threads = [self.checkedThread(target=dequeue) for _ in enqueue_ops]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
self.assertItemsEqual(elems, results)
def testDequeue(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32)
elems = [10.0, 20.0, 30.0]
enqueue_ops = [q.enqueue((x,)) for x in elems]
dequeued_t = q.dequeue()
for enqueue_op in enqueue_ops:
enqueue_op.run()
vals = [dequeued_t.eval() for _ in xrange(len(elems))]
self.assertItemsEqual(elems, vals)
def testEnqueueAndBlockingDequeue(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(3, 0, dtypes_lib.float32)
elems = [10.0, 20.0, 30.0]
enqueue_ops = [q.enqueue((x,)) for x in elems]
dequeued_t = q.dequeue()
def enqueue():
# The enqueue_ops should run after the dequeue op has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
for enqueue_op in enqueue_ops:
sess.run(enqueue_op)
results = []
def dequeue():
for _ in xrange(len(elems)):
results.append(sess.run(dequeued_t))
enqueue_thread = self.checkedThread(target=enqueue)
dequeue_thread = self.checkedThread(target=dequeue)
enqueue_thread.start()
dequeue_thread.start()
enqueue_thread.join()
dequeue_thread.join()
self.assertItemsEqual(elems, results)
def testMultiEnqueueAndDequeue(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(
10, 0, (dtypes_lib.int32, dtypes_lib.float32))
elems = [(5, 10.0), (10, 20.0), (15, 30.0)]
enqueue_ops = [q.enqueue((x, y)) for x, y in elems]
dequeued_t = q.dequeue()
for enqueue_op in enqueue_ops:
enqueue_op.run()
results = []
for _ in xrange(len(elems)):
x, y = sess.run(dequeued_t)
results.append((x, y))
self.assertItemsEqual(elems, results)
def testQueueSizeEmpty(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 5, dtypes_lib.float32)
self.assertEqual(0, q.size().eval())
def testQueueSizeAfterEnqueueAndDequeue(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32)
enqueue_op = q.enqueue((10.0,))
dequeued_t = q.dequeue()
size = q.size()
self.assertEqual([], size.get_shape())
enqueue_op.run()
self.assertEqual([1], size.eval())
dequeued_t.op.run()
self.assertEqual([0], size.eval())
def testEnqueueMany(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32)
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue()
enqueue_op.run()
enqueue_op.run()
results = []
for _ in range(8):
results.append(dequeued_t.eval())
self.assertItemsEqual(elems + elems, results)
def testEmptyEnqueueMany(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 5, dtypes_lib.float32)
empty_t = constant_op.constant(
[], dtype=dtypes_lib.float32, shape=[0, 2, 3])
enqueue_op = q.enqueue_many((empty_t,))
size_t = q.size()
self.assertEqual(0, size_t.eval())
enqueue_op.run()
self.assertEqual(0, size_t.eval())
def testEmptyDequeueMany(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32, shapes=())
enqueue_op = q.enqueue((10.0,))
dequeued_t = q.dequeue_many(0)
self.assertEqual([], dequeued_t.eval().tolist())
enqueue_op.run()
self.assertEqual([], dequeued_t.eval().tolist())
def testEmptyDequeueUpTo(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32, shapes=())
enqueue_op = q.enqueue((10.0,))
dequeued_t = q.dequeue_up_to(0)
self.assertEqual([], dequeued_t.eval().tolist())
enqueue_op.run()
self.assertEqual([], dequeued_t.eval().tolist())
def testEmptyDequeueManyWithNoShape(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32)
enqueue_op = q.enqueue((constant_op.constant(
[10.0, 20.0], shape=(1, 2)),))
dequeued_t = q.dequeue_many(0)
# Expect the operation to fail due to the shape not being constrained.
with self.assertRaisesOpError(
"require the components to have specified shapes"):
dequeued_t.eval()
enqueue_op.run()
# RandomShuffleQueue does not make any attempt to support DequeueMany
# with unspecified shapes, even if a shape could be inferred from the
# elements enqueued.
with self.assertRaisesOpError(
"require the components to have specified shapes"):
dequeued_t.eval()
def testEmptyDequeueUpToWithNoShape(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32)
enqueue_op = q.enqueue((constant_op.constant(
[10.0, 20.0], shape=(1, 2)),))
dequeued_t = q.dequeue_up_to(0)
# Expect the operation to fail due to the shape not being constrained.
with self.assertRaisesOpError(
"require the components to have specified shapes"):
dequeued_t.eval()
enqueue_op.run()
# RandomShuffleQueue does not make any attempt to support DequeueUpTo
# with unspecified shapes, even if a shape could be inferred from the
# elements enqueued.
with self.assertRaisesOpError(
"require the components to have specified shapes"):
dequeued_t.eval()
def testMultiEnqueueMany(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(
10, 0, (dtypes_lib.float32, dtypes_lib.int32))
float_elems = [10.0, 20.0, 30.0, 40.0]
int_elems = [[1, 2], [3, 4], [5, 6], [7, 8]]
enqueue_op = q.enqueue_many((float_elems, int_elems))
dequeued_t = q.dequeue()
enqueue_op.run()
enqueue_op.run()
results = []
for _ in range(8):
float_val, int_val = sess.run(dequeued_t)
results.append((float_val, [int_val[0], int_val[1]]))
expected = list(zip(float_elems, int_elems)) * 2
self.assertItemsEqual(expected, results)
def testDequeueMany(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32, ((),))
elems = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_many(5)
enqueue_op.run()
results = dequeued_t.eval().tolist()
results.extend(dequeued_t.eval())
self.assertItemsEqual(elems, results)
def testDequeueUpToNoBlocking(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32, ((),))
elems = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_up_to(5)
enqueue_op.run()
results = dequeued_t.eval().tolist()
results.extend(dequeued_t.eval())
self.assertItemsEqual(elems, results)
def testMultiDequeueMany(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(
10, 0, (dtypes_lib.float32, dtypes_lib.int32), shapes=((), (2,)))
float_elems = [
10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0
]
int_elems = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11, 12], [13, 14],
[15, 16], [17, 18], [19, 20]]
enqueue_op = q.enqueue_many((float_elems, int_elems))
dequeued_t = q.dequeue_many(4)
dequeued_single_t = q.dequeue()
enqueue_op.run()
results = []
float_val, int_val = sess.run(dequeued_t)
self.assertEqual(float_val.shape, dequeued_t[0].get_shape())
self.assertEqual(int_val.shape, dequeued_t[1].get_shape())
results.extend(zip(float_val, int_val.tolist()))
float_val, int_val = sess.run(dequeued_t)
results.extend(zip(float_val, int_val.tolist()))
float_val, int_val = sess.run(dequeued_single_t)
self.assertEqual(float_val.shape, dequeued_single_t[0].get_shape())
self.assertEqual(int_val.shape, dequeued_single_t[1].get_shape())
results.append((float_val, int_val.tolist()))
float_val, int_val = sess.run(dequeued_single_t)
results.append((float_val, int_val.tolist()))
self.assertItemsEqual(zip(float_elems, int_elems), results)
def testMultiDequeueUpToNoBlocking(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(
10, 0, (dtypes_lib.float32, dtypes_lib.int32), shapes=((), (2,)))
float_elems = [
10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0
]
int_elems = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11, 12], [13, 14],
[15, 16], [17, 18], [19, 20]]
enqueue_op = q.enqueue_many((float_elems, int_elems))
dequeued_t = q.dequeue_up_to(4)
dequeued_single_t = q.dequeue()
enqueue_op.run()
results = []
float_val, int_val = sess.run(dequeued_t)
# dequeue_up_to has undefined shape.
self.assertEqual([None], dequeued_t[0].get_shape().as_list())
self.assertEqual([None, 2], dequeued_t[1].get_shape().as_list())
results.extend(zip(float_val, int_val.tolist()))
float_val, int_val = sess.run(dequeued_t)
results.extend(zip(float_val, int_val.tolist()))
float_val, int_val = sess.run(dequeued_single_t)
self.assertEqual(float_val.shape, dequeued_single_t[0].get_shape())
self.assertEqual(int_val.shape, dequeued_single_t[1].get_shape())
results.append((float_val, int_val.tolist()))
float_val, int_val = sess.run(dequeued_single_t)
results.append((float_val, int_val.tolist()))
self.assertItemsEqual(zip(float_elems, int_elems), results)
def testHighDimension(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.int32, (
(4, 4, 4, 4)))
elems = np.array([[[[[x] * 4] * 4] * 4] * 4 for x in range(10)], np.int32)
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_many(10)
enqueue_op.run()
self.assertItemsEqual(dequeued_t.eval().tolist(), elems.tolist())
def testParallelEnqueueMany(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(
1000, 0, dtypes_lib.float32, shapes=())
elems = [10.0 * x for x in range(100)]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_many(1000)
# Enqueue 100 items in parallel on 10 threads.
def enqueue():
sess.run(enqueue_op)
threads = [self.checkedThread(target=enqueue) for _ in range(10)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
self.assertItemsEqual(dequeued_t.eval(), elems * 10)
def testParallelDequeueMany(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(
1000, 0, dtypes_lib.float32, shapes=())
elems = [10.0 * x for x in range(1000)]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_many(100)
enqueue_op.run()
# Dequeue 100 items in parallel on 10 threads.
dequeued_elems = []
def dequeue():
dequeued_elems.extend(sess.run(dequeued_t))
threads = [self.checkedThread(target=dequeue) for _ in range(10)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
self.assertItemsEqual(elems, dequeued_elems)
def testParallelDequeueUpTo(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(
1000, 0, dtypes_lib.float32, shapes=())
elems = [10.0 * x for x in range(1000)]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_up_to(100)
enqueue_op.run()
# Dequeue 100 items in parallel on 10 threads.
dequeued_elems = []
def dequeue():
dequeued_elems.extend(sess.run(dequeued_t))
threads = [self.checkedThread(target=dequeue) for _ in range(10)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
self.assertItemsEqual(elems, dequeued_elems)
def testParallelDequeueUpToRandomPartition(self):
with self.test_session() as sess:
dequeue_sizes = [random.randint(50, 150) for _ in xrange(10)]
total_elements = sum(dequeue_sizes)
q = data_flow_ops.RandomShuffleQueue(
total_elements, 0, dtypes_lib.float32, shapes=())
elems = [10.0 * x for x in xrange(total_elements)]
enqueue_op = q.enqueue_many((elems,))
dequeue_ops = [q.dequeue_up_to(size) for size in dequeue_sizes]
enqueue_op.run()
# Dequeue random number of items in parallel on 10 threads.
dequeued_elems = []
def dequeue(dequeue_op):
dequeued_elems.extend(sess.run(dequeue_op))
threads = []
for dequeue_op in dequeue_ops:
threads.append(self.checkedThread(target=dequeue, args=(dequeue_op,)))
for thread in threads:
thread.start()
for thread in threads:
thread.join()
self.assertItemsEqual(elems, dequeued_elems)
def testBlockingDequeueMany(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32, ((),))
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_many(4)
dequeued_elems = []
def enqueue():
# The enqueue_op should run after the dequeue op has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
sess.run(enqueue_op)
def dequeue():
dequeued_elems.extend(sess.run(dequeued_t).tolist())
enqueue_thread = self.checkedThread(target=enqueue)
dequeue_thread = self.checkedThread(target=dequeue)
enqueue_thread.start()
dequeue_thread.start()
enqueue_thread.join()
dequeue_thread.join()
self.assertItemsEqual(elems, dequeued_elems)
def testBlockingDequeueUpTo(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32, ((),))
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_up_to(4)
dequeued_elems = []
def enqueue():
# The enqueue_op should run after the dequeue op has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
sess.run(enqueue_op)
def dequeue():
dequeued_elems.extend(sess.run(dequeued_t).tolist())
enqueue_thread = self.checkedThread(target=enqueue)
dequeue_thread = self.checkedThread(target=dequeue)
enqueue_thread.start()
dequeue_thread.start()
enqueue_thread.join()
dequeue_thread.join()
self.assertItemsEqual(elems, dequeued_elems)
def testDequeueManyWithTensorParameter(self):
with self.test_session():
# Define a first queue that contains integer counts.
dequeue_counts = [random.randint(1, 10) for _ in range(100)]
count_q = data_flow_ops.RandomShuffleQueue(100, 0, dtypes_lib.int32)
enqueue_counts_op = count_q.enqueue_many((dequeue_counts,))
total_count = sum(dequeue_counts)
# Define a second queue that contains total_count elements.
elems = [random.randint(0, 100) for _ in range(total_count)]
q = data_flow_ops.RandomShuffleQueue(total_count, 0, dtypes_lib.int32, (
(),))
enqueue_elems_op = q.enqueue_many((elems,))
# Define a subgraph that first dequeues a count, then DequeuesMany
# that number of elements.
dequeued_t = q.dequeue_many(count_q.dequeue())
enqueue_counts_op.run()
enqueue_elems_op.run()
dequeued_elems = []
for _ in dequeue_counts:
dequeued_elems.extend(dequeued_t.eval())
self.assertItemsEqual(elems, dequeued_elems)
def testDequeueUpToWithTensorParameter(self):
with self.test_session():
# Define a first queue that contains integer counts.
dequeue_counts = [random.randint(1, 10) for _ in range(100)]
count_q = data_flow_ops.RandomShuffleQueue(100, 0, dtypes_lib.int32)
enqueue_counts_op = count_q.enqueue_many((dequeue_counts,))
total_count = sum(dequeue_counts)
# Define a second queue that contains total_count elements.
elems = [random.randint(0, 100) for _ in range(total_count)]
q = data_flow_ops.RandomShuffleQueue(total_count, 0, dtypes_lib.int32, (
(),))
enqueue_elems_op = q.enqueue_many((elems,))
# Define a subgraph that first dequeues a count, then DequeuesUpTo
# that number of elements.
dequeued_t = q.dequeue_up_to(count_q.dequeue())
enqueue_counts_op.run()
enqueue_elems_op.run()
dequeued_elems = []
for _ in dequeue_counts:
dequeued_elems.extend(dequeued_t.eval())
self.assertItemsEqual(elems, dequeued_elems)
def testDequeueFromClosedQueue(self):
with self.test_session():
q = data_flow_ops.RandomShuffleQueue(10, 2, dtypes_lib.float32)
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
close_op = q.close()
dequeued_t = q.dequeue()
enqueue_op.run()
close_op.run()
results = [dequeued_t.eval() for _ in elems]
expected = [[elem] for elem in elems]
self.assertItemsEqual(expected, results)
# Expect the operation to fail due to the queue being closed.
with self.assertRaisesRegexp(errors_impl.OutOfRangeError,
"is closed and has insufficient"):
dequeued_t.eval()
def testBlockingDequeueFromClosedQueue(self):
with self.test_session() as sess:
min_size = 2
q = data_flow_ops.RandomShuffleQueue(10, min_size, dtypes_lib.float32)
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
close_op = q.close()
dequeued_t = q.dequeue()
enqueue_op.run()
results = []
# Manually dequeue until we hit min_size.
results.append(sess.run(dequeued_t))
results.append(sess.run(dequeued_t))
def blocking_dequeue():
results.append(sess.run(dequeued_t))
results.append(sess.run(dequeued_t))
self.assertItemsEqual(elems, results)
# Expect the operation to fail due to the queue being closed.
with self.assertRaisesRegexp(errors_impl.OutOfRangeError,
"is closed and has insufficient"):
sess.run(dequeued_t)
dequeue_thread = self.checkedThread(target=blocking_dequeue)
dequeue_thread.start()
time.sleep(0.1)
# The dequeue thread blocked when it hit the min_size requirement.
self.assertEqual(len(results), 2)
close_op.run()
dequeue_thread.join()
# Once the queue is closed, the min_size requirement is lifted.
self.assertEqual(len(results), 4)
def testBlockingDequeueFromClosedEmptyQueue(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32)
close_op = q.close()
dequeued_t = q.dequeue()
finished = [] # Needs to be a mutable type
def dequeue():
# Expect the operation to fail due to the queue being closed.
with self.assertRaisesRegexp(errors_impl.OutOfRangeError,
"is closed and has insufficient"):
sess.run(dequeued_t)
finished.append(True)
dequeue_thread = self.checkedThread(target=dequeue)
dequeue_thread.start()
# The close_op should run after the dequeue_thread has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
self.assertEqual(len(finished), 0)
close_op.run()
dequeue_thread.join()
self.assertEqual(len(finished), 1)
def testBlockingDequeueManyFromClosedQueue(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32, ((),))
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
close_op = q.close()
dequeued_t = q.dequeue_many(4)
enqueue_op.run()
progress = [] # Must be mutable
def dequeue():
self.assertItemsEqual(elems, sess.run(dequeued_t))
progress.append(1)
# Expect the operation to fail due to the queue being closed.
with self.assertRaisesRegexp(errors_impl.OutOfRangeError,
"is closed and has insufficient"):
sess.run(dequeued_t)
progress.append(2)
self.assertEqual(len(progress), 0)
dequeue_thread = self.checkedThread(target=dequeue)
dequeue_thread.start()
# The close_op should run after the dequeue_thread has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
for _ in range(100):
time.sleep(0.01)
if len(progress) == 1:
break
self.assertEqual(len(progress), 1)
time.sleep(0.01)
close_op.run()
dequeue_thread.join()
self.assertEqual(len(progress), 2)
def testBlockingDequeueUpToFromClosedQueueReturnsRemainder(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32, ((),))
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
close_op = q.close()
dequeued_t = q.dequeue_up_to(3)
enqueue_op.run()
results = []
def dequeue():
results.extend(sess.run(dequeued_t))
self.assertEquals(3, len(results))
results.extend(sess.run(dequeued_t))
self.assertEquals(4, len(results))
dequeue_thread = self.checkedThread(target=dequeue)
dequeue_thread.start()
# The close_op should run after the dequeue_thread has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
close_op.run()
dequeue_thread.join()
self.assertItemsEqual(results, elems)
def testBlockingDequeueUpToSmallerThanMinAfterDequeue(self):
with self.test_session() as sess:
q = data_flow_ops.RandomShuffleQueue(
capacity=10,
min_after_dequeue=2,
dtypes=dtypes_lib.float32,
shapes=((),))
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
close_op = q.close()
dequeued_t = q.dequeue_up_to(3)
enqueue_op.run()
results = []
def dequeue():
results.extend(sess.run(dequeued_t))
self.assertEquals(3, len(results))
# min_after_dequeue is 2, we ask for 3 elements, and we end up only
# getting the remaining 1.
results.extend(sess.run(dequeued_t))
self.assertEquals(4, len(results))
dequeue_thread = self.checkedThread(target=dequeue)
dequeue_thread.start()
# The close_op should run after the dequeue_thread has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
close_op.run()
dequeue_thread.join()
self.assertItemsEqual(results, elems)
def testBlockingDequeueManyFromClosedQueueWithElementsRemaining(self):
  """A failed dequeue_many on a closed queue releases reserved elements."""
  with self.test_session() as sess:
    q = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32, ((),))
    elems = [10.0, 20.0, 30.0, 40.0]
    enqueue_op = q.enqueue_many((elems,))
    close_op = q.close()
    dequeued_t = q.dequeue_many(3)
    cleanup_dequeue_t = q.dequeue_many(q.size())

    enqueue_op.run()

    results = []

    def dequeue():
      results.extend(sess.run(dequeued_t))
      self.assertEqual(len(results), 3)
      # Expect the operation to fail due to the queue being closed.
      with self.assertRaisesRegexp(errors_impl.OutOfRangeError,
                                   "is closed and has insufficient"):
        sess.run(dequeued_t)
      # While the last dequeue failed, we want to insure that it returns
      # any elements that it potentially reserved to dequeue. Thus the
      # next cleanup should return a single element.
      results.extend(sess.run(cleanup_dequeue_t))

    dequeue_thread = self.checkedThread(target=dequeue)
    dequeue_thread.start()
    # The close_op should run after the dequeue_thread has blocked.
    # TODO(mrry): Figure out how to do this without sleeping.
    time.sleep(0.1)
    close_op.run()
    dequeue_thread.join()
    self.assertEqual(len(results), 4)
def testBlockingDequeueManyFromClosedEmptyQueue(self):
  """dequeue_many on a closed, empty queue raises OutOfRangeError."""
  with self.test_session() as sess:
    q = data_flow_ops.RandomShuffleQueue(10, 5, dtypes_lib.float32, ((),))
    close_op = q.close()
    dequeued_t = q.dequeue_many(4)

    def dequeue():
      # Expect the operation to fail due to the queue being closed.
      with self.assertRaisesRegexp(errors_impl.OutOfRangeError,
                                   "is closed and has insufficient"):
        sess.run(dequeued_t)

    dequeue_thread = self.checkedThread(target=dequeue)
    dequeue_thread.start()
    # The close_op should run after the dequeue_thread has blocked.
    # TODO(mrry): Figure out how to do this without sleeping.
    time.sleep(0.1)
    close_op.run()
    dequeue_thread.join()
def testBlockingDequeueUpToFromClosedEmptyQueue(self):
  """dequeue_up_to on a closed, empty queue raises OutOfRangeError."""
  with self.test_session() as sess:
    q = data_flow_ops.RandomShuffleQueue(10, 5, dtypes_lib.float32, ((),))
    close_op = q.close()
    dequeued_t = q.dequeue_up_to(4)

    def dequeue():
      # Expect the operation to fail due to the queue being closed.
      with self.assertRaisesRegexp(errors_impl.OutOfRangeError,
                                   "is closed and has insufficient"):
        sess.run(dequeued_t)

    dequeue_thread = self.checkedThread(target=dequeue)
    dequeue_thread.start()
    # The close_op should run after the dequeue_thread has blocked.
    # TODO(mrry): Figure out how to do this without sleeping.
    time.sleep(0.1)
    close_op.run()
    dequeue_thread.join()
def testEnqueueToClosedQueue(self):
  """Enqueueing after close() must raise CancelledError."""
  with self.test_session():
    queue = data_flow_ops.RandomShuffleQueue(10, 4, dtypes_lib.float32)
    enqueue = queue.enqueue((10.0,))
    close = queue.close()

    # One successful enqueue, then close the queue.
    enqueue.run()
    close.run()

    # Any further enqueue attempt must now be rejected.
    with self.assertRaisesRegexp(errors_impl.CancelledError, "is closed"):
      enqueue.run()
def testEnqueueManyToClosedQueue(self):
  """enqueue_many after close() must raise CancelledError."""
  with self.test_session():
    queue = data_flow_ops.RandomShuffleQueue(10, 5, dtypes_lib.float32, ((),))
    values = [10.0, 20.0, 30.0, 40.0]
    enqueue = queue.enqueue_many((values,))
    close = queue.close()

    # Populate the queue, then close it.
    enqueue.run()
    close.run()

    # A subsequent batched enqueue must now be rejected.
    with self.assertRaisesRegexp(errors_impl.CancelledError, "is closed"):
      enqueue.run()
def testBlockingEnqueueToFullQueue(self):
  """An enqueue on a full queue blocks until a dequeue makes room."""
  with self.test_session() as sess:
    q = data_flow_ops.RandomShuffleQueue(4, 0, dtypes_lib.float32, ((),))
    elems = [10.0, 20.0, 30.0, 40.0]
    enqueue_op = q.enqueue_many((elems,))
    blocking_enqueue_op = q.enqueue((50.0,))
    dequeued_t = q.dequeue()

    enqueue_op.run()

    def blocking_enqueue():
      sess.run(blocking_enqueue_op)

    thread = self.checkedThread(target=blocking_enqueue)
    thread.start()
    # The dequeue ops should run after the blocking_enqueue_op has blocked.
    # TODO(mrry): Figure out how to do this without sleeping.
    time.sleep(0.1)
    results = []
    for _ in elems:
      results.append(dequeued_t.eval())
    results.append(dequeued_t.eval())
    self.assertItemsEqual(elems + [50.0], results)
    # There wasn't room for 50.0 in the queue when the first element was
    # dequeued.
    self.assertNotEqual(50.0, results[0])
    thread.join()
def testBlockingEnqueueManyToFullQueue(self):
  """enqueue_many on a full queue blocks until enough room is freed."""
  with self.test_session() as sess:
    q = data_flow_ops.RandomShuffleQueue(4, 0, dtypes_lib.float32, ((),))
    elems = [10.0, 20.0, 30.0, 40.0]
    enqueue_op = q.enqueue_many((elems,))
    blocking_enqueue_op = q.enqueue_many(([50.0, 60.0],))
    dequeued_t = q.dequeue()

    enqueue_op.run()

    def blocking_enqueue():
      sess.run(blocking_enqueue_op)

    thread = self.checkedThread(target=blocking_enqueue)
    thread.start()
    # The dequeue ops should run after the blocking_enqueue_op has blocked.
    # TODO(mrry): Figure out how to do this without sleeping.
    time.sleep(0.1)
    results = []
    for _ in elems:
      time.sleep(0.01)
      results.append(dequeued_t.eval())
    results.append(dequeued_t.eval())
    results.append(dequeued_t.eval())
    self.assertItemsEqual(elems + [50.0, 60.0], results)
    # There wasn't room for 50.0 or 60.0 in the queue when the first
    # element was dequeued.
    self.assertNotEqual(50.0, results[0])
    self.assertNotEqual(60.0, results[0])
    # Similarly for 60.0 and the second element.
    self.assertNotEqual(60.0, results[1])
    thread.join()
def testBlockingEnqueueToClosedQueue(self):
  """close() waits for an in-flight enqueue; later enqueues are cancelled."""
  with self.test_session() as sess:
    q = data_flow_ops.RandomShuffleQueue(4, 0, dtypes_lib.float32, ((),))
    elems = [10.0, 20.0, 30.0, 40.0]
    enqueue_op = q.enqueue_many((elems,))
    blocking_enqueue_op = q.enqueue((50.0,))
    dequeued_t = q.dequeue()
    close_op = q.close()

    enqueue_op.run()

    def blocking_enqueue():
      # Expect the operation to succeed since it will complete
      # before the queue is closed.
      sess.run(blocking_enqueue_op)

      # Expect the operation to fail due to the queue being closed.
      with self.assertRaisesRegexp(errors_impl.CancelledError, "closed"):
        sess.run(blocking_enqueue_op)

    thread1 = self.checkedThread(target=blocking_enqueue)
    thread1.start()

    # The close_op should run after the first blocking_enqueue_op has blocked.
    # TODO(mrry): Figure out how to do this without sleeping.
    time.sleep(0.1)

    def blocking_close():
      sess.run(close_op)

    thread2 = self.checkedThread(target=blocking_close)
    thread2.start()

    # Wait for the close op to block before unblocking the enqueue.
    # TODO(mrry): Figure out how to do this without sleeping.
    time.sleep(0.1)

    results = []
    # Dequeue to unblock the first blocking_enqueue_op, after which the
    # close will complete.
    results.append(dequeued_t.eval())
    self.assertTrue(results[0] in elems)

    thread2.join()
    thread1.join()
def testBlockingEnqueueManyToClosedQueue(self):
  """A partially-applied enqueue_many finishes before close() completes."""
  with self.test_session() as sess:
    q = data_flow_ops.RandomShuffleQueue(4, 0, dtypes_lib.float32, ((),))
    elems = [10.0, 20.0, 30.0]
    enqueue_op = q.enqueue_many((elems,))
    blocking_enqueue_op = q.enqueue_many(([50.0, 60.0],))
    close_op = q.close()
    size_t = q.size()

    enqueue_op.run()
    self.assertEqual(size_t.eval(), 3)

    def blocking_enqueue():
      # This will block until the dequeue after the close.
      sess.run(blocking_enqueue_op)

    thread1 = self.checkedThread(target=blocking_enqueue)
    thread1.start()

    # First blocking_enqueue_op of blocking_enqueue has enqueued 1 of 2
    # elements, and is blocked waiting for one more element to be dequeue.
    for i in range(50):
      queue_size = size_t.eval()
      if queue_size == 4:
        break
      elif i == 49:
        self.fail(
            "Blocking enqueue op did not execute within the expected time.")
      time.sleep(0.1)

    def blocking_close():
      sess.run(close_op)

    thread2 = self.checkedThread(target=blocking_close)
    thread2.start()

    # Unblock the first blocking_enqueue_op in blocking_enqueue.
    q.dequeue().eval()

    thread2.join()
    thread1.join()

    # At this point the close operation will complete, so the next enqueue
    # will fail.
    with self.assertRaisesRegexp(errors_impl.CancelledError, "closed"):
      sess.run(blocking_enqueue_op)
def testSharedQueueSameSession(self):
  """Two queue handles with the same shared_name address one queue."""
  with self.test_session():
    q1 = data_flow_ops.RandomShuffleQueue(
        1, 0, dtypes_lib.float32, ((),), shared_name="shared_queue")
    q1.enqueue((10.0,)).run()

    # TensorFlow TestCase adds a default graph seed (=87654321). We check if
    # the seed computed from the default graph seed is reproduced.
    seed = 887634792
    q2 = data_flow_ops.RandomShuffleQueue(
        1,
        0,
        dtypes_lib.float32, ((),),
        shared_name="shared_queue",
        seed=seed)

    q1_size_t = q1.size()
    q2_size_t = q2.size()

    # Both handles observe the single element enqueued through q1.
    self.assertEqual(q1_size_t.eval(), 1)
    self.assertEqual(q2_size_t.eval(), 1)

    self.assertEqual(q2.dequeue().eval(), 10.0)

    self.assertEqual(q1_size_t.eval(), 0)
    self.assertEqual(q2_size_t.eval(), 0)

    q2.enqueue((20.0,)).run()

    self.assertEqual(q1_size_t.eval(), 1)
    self.assertEqual(q2_size_t.eval(), 1)

    self.assertEqual(q1.dequeue().eval(), 20.0)

    self.assertEqual(q1_size_t.eval(), 0)
    self.assertEqual(q2_size_t.eval(), 0)
def testSharedQueueSameSessionGraphSeedNone(self):
  """A seedless handle accepts the seed of an existing shared queue."""
  with self.test_session():
    q1 = data_flow_ops.RandomShuffleQueue(
        1,
        0,
        dtypes_lib.float32, ((),),
        shared_name="shared_queue",
        seed=98765432)
    q1.enqueue((10.0,)).run()

    # If both graph and op seeds are not provided, the default value must be
    # used, and in case a shared queue is already created, the second queue op
    # must accept any previous seed value.
    random_seed.set_random_seed(None)
    q2 = data_flow_ops.RandomShuffleQueue(
        1, 0, dtypes_lib.float32, ((),), shared_name="shared_queue")

    q1_size_t = q1.size()
    q2_size_t = q2.size()

    self.assertEqual(q1_size_t.eval(), 1)
    self.assertEqual(q2_size_t.eval(), 1)
def testIncompatibleSharedQueueErrors(self):
  """Re-opening a shared queue with mismatched attributes must fail.

  Each pair below shares a shared_name but differs in exactly one
  attribute (capacity, min_after_dequeue, dtypes, shapes, or seed);
  running the second queue op must raise with a matching message.
  """
  with self.test_session():
    q_a_1 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, shared_name="q_a")
    q_a_2 = data_flow_ops.RandomShuffleQueue(
        15, 5, dtypes_lib.float32, shared_name="q_a")
    q_a_1.queue_ref.op.run()
    with self.assertRaisesOpError("capacity"):
      q_a_2.queue_ref.op.run()

    q_b_1 = data_flow_ops.RandomShuffleQueue(
        10, 0, dtypes_lib.float32, shared_name="q_b")
    q_b_2 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, shared_name="q_b")
    q_b_1.queue_ref.op.run()
    with self.assertRaisesOpError("min_after_dequeue"):
      q_b_2.queue_ref.op.run()

    q_c_1 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, shared_name="q_c")
    q_c_2 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.int32, shared_name="q_c")
    q_c_1.queue_ref.op.run()
    with self.assertRaisesOpError("component types"):
      q_c_2.queue_ref.op.run()

    q_d_1 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, shared_name="q_d")
    q_d_2 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, shapes=[(1, 1, 2, 3)], shared_name="q_d")
    q_d_1.queue_ref.op.run()
    with self.assertRaisesOpError("component shapes"):
      q_d_2.queue_ref.op.run()

    q_e_1 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, shapes=[(1, 1, 2, 3)], shared_name="q_e")
    q_e_2 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, shared_name="q_e")
    q_e_1.queue_ref.op.run()
    with self.assertRaisesOpError("component shapes"):
      q_e_2.queue_ref.op.run()

    q_f_1 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, shapes=[(1, 1, 2, 3)], shared_name="q_f")
    q_f_2 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, shapes=[(1, 1, 2, 4)], shared_name="q_f")
    q_f_1.queue_ref.op.run()
    with self.assertRaisesOpError("component shapes"):
      q_f_2.queue_ref.op.run()

    q_g_1 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, shared_name="q_g")
    q_g_2 = data_flow_ops.RandomShuffleQueue(
        10, 5, (dtypes_lib.float32, dtypes_lib.int32), shared_name="q_g")
    q_g_1.queue_ref.op.run()
    with self.assertRaisesOpError("component types"):
      q_g_2.queue_ref.op.run()

    q_h_1 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, seed=12, shared_name="q_h")
    q_h_2 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.float32, seed=21, shared_name="q_h")
    q_h_1.queue_ref.op.run()
    with self.assertRaisesOpError("random seeds"):
      q_h_2.queue_ref.op.run()
def testSelectQueue(self):
  """Round-trips a value through a queue picked dynamically via from_list."""
  with self.test_session():
    num_queues = 10
    queues = [
        data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32)
        for _ in xrange(num_queues)
    ]
    # Enqueue/Dequeue into a dynamically selected queue
    for _ in xrange(20):
      selected = np.random.randint(num_queues)
      q = data_flow_ops.RandomShuffleQueue.from_list(selected, queues)
      q.enqueue((10.,)).run()
      self.assertEqual(q.dequeue().eval(), 10.0)
def testSelectQueueOutOfRange(self):
  """Using from_list with an out-of-range index must raise on use."""
  with self.test_session():
    first = data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32)
    second = data_flow_ops.RandomShuffleQueue(15, 0, dtypes_lib.float32)
    selected = data_flow_ops.RandomShuffleQueue.from_list(3, [first, second])
    with self.assertRaisesOpError("is not in"):
      selected.dequeue().eval()
def _blockingDequeue(self, sess, dequeue_op):
  """Runs a dequeue op, expecting cancellation when the session closes."""
  with self.assertRaisesOpError("was cancelled"):
    sess.run(dequeue_op)
def _blockingDequeueMany(self, sess, dequeue_many_op):
  """Runs a dequeue_many op, expecting cancellation on session close."""
  with self.assertRaisesOpError("was cancelled"):
    sess.run(dequeue_many_op)
def _blockingDequeueUpTo(self, sess, dequeue_up_to_op):
  """Runs a dequeue_up_to op, expecting cancellation on session close."""
  with self.assertRaisesOpError("was cancelled"):
    sess.run(dequeue_up_to_op)
def _blockingEnqueue(self, sess, enqueue_op):
  """Runs an enqueue op, expecting cancellation on session close."""
  with self.assertRaisesOpError("was cancelled"):
    sess.run(enqueue_op)
def _blockingEnqueueMany(self, sess, enqueue_many_op):
  """Runs an enqueue_many op, expecting cancellation on session close."""
  with self.assertRaisesOpError("was cancelled"):
    sess.run(enqueue_many_op)
def testResetOfBlockingOperation(self):
  """Closing the session cancels every pending blocking queue op."""
  with self.test_session() as sess:
    q_empty = data_flow_ops.RandomShuffleQueue(5, 0, dtypes_lib.float32, (
        (),))
    dequeue_op = q_empty.dequeue()
    dequeue_many_op = q_empty.dequeue_many(1)
    dequeue_up_to_op = q_empty.dequeue_up_to(1)

    q_full = data_flow_ops.RandomShuffleQueue(5, 0, dtypes_lib.float32, ((),))
    sess.run(q_full.enqueue_many(([1.0, 2.0, 3.0, 4.0, 5.0],)))
    enqueue_op = q_full.enqueue((6.0,))
    enqueue_many_op = q_full.enqueue_many(([6.0],))

    # Each helper below expects its op to fail with "was cancelled".
    threads = [
        self.checkedThread(
            self._blockingDequeue, args=(sess, dequeue_op)),
        self.checkedThread(
            self._blockingDequeueMany, args=(sess, dequeue_many_op)),
        self.checkedThread(
            self._blockingDequeueUpTo, args=(sess, dequeue_up_to_op)),
        self.checkedThread(
            self._blockingEnqueue, args=(sess, enqueue_op)),
        self.checkedThread(
            self._blockingEnqueueMany, args=(sess, enqueue_many_op))
    ]
    for t in threads:
      t.start()
    time.sleep(0.1)
    sess.close()  # Will cancel the blocked operations.
    for t in threads:
      t.join()
def testDequeueManyInDifferentOrders(self):
  """Different seeds yield different dequeue_many shuffle orders."""
  with self.test_session():
    # Specify seeds to make the test deterministic
    # (https://en.wikipedia.org/wiki/Taxicab_number).
    q1 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.int32, ((),), seed=1729)
    q2 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.int32, ((),), seed=87539319)
    enq1 = q1.enqueue_many(([1, 2, 3, 4, 5],))
    enq2 = q2.enqueue_many(([1, 2, 3, 4, 5],))
    deq1 = q1.dequeue_many(5)
    deq2 = q2.dequeue_many(5)

    # Each queue holds two copies of 1..5 (ten elements).
    enq1.run()
    enq1.run()
    enq2.run()
    enq2.run()

    results = [[], [], [], []]

    results[0].extend(deq1.eval())
    results[1].extend(deq2.eval())

    # Closing drops the min_after_dequeue constraint for the second batch.
    q1.close().run()
    q2.close().run()

    results[2].extend(deq1.eval())
    results[3].extend(deq2.eval())

    # No two should match
    for i in range(1, 4):
      for j in range(i):
        self.assertNotEqual(results[i], results[j])
def testDequeueUpToInDifferentOrders(self):
  """Different seeds yield different dequeue_up_to shuffle orders."""
  with self.test_session():
    # Specify seeds to make the test deterministic
    # (https://en.wikipedia.org/wiki/Taxicab_number).
    q1 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.int32, ((),), seed=1729)
    q2 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.int32, ((),), seed=87539319)
    enq1 = q1.enqueue_many(([1, 2, 3, 4, 5],))
    enq2 = q2.enqueue_many(([1, 2, 3, 4, 5],))
    deq1 = q1.dequeue_up_to(5)
    deq2 = q2.dequeue_up_to(5)

    # Each queue holds two copies of 1..5 (ten elements).
    enq1.run()
    enq1.run()
    enq2.run()
    enq2.run()

    results = [[], [], [], []]

    results[0].extend(deq1.eval())
    results[1].extend(deq2.eval())

    q1.close().run()
    q2.close().run()

    results[2].extend(deq1.eval())
    results[3].extend(deq2.eval())

    # No two should match
    for i in range(1, 4):
      for j in range(i):
        self.assertNotEqual(results[i], results[j])
def testDequeueInDifferentOrders(self):
  """Different seeds yield different single-element dequeue orders."""
  with self.test_session():
    # Specify seeds to make the test deterministic
    # (https://en.wikipedia.org/wiki/Taxicab_number).
    q1 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.int32, ((),), seed=1729)
    q2 = data_flow_ops.RandomShuffleQueue(
        10, 5, dtypes_lib.int32, ((),), seed=87539319)
    enq1 = q1.enqueue_many(([1, 2, 3, 4, 5],))
    enq2 = q2.enqueue_many(([1, 2, 3, 4, 5],))
    deq1 = q1.dequeue()
    deq2 = q2.dequeue()

    # Each queue holds two copies of 1..5 (ten elements).
    enq1.run()
    enq1.run()
    enq2.run()
    enq2.run()

    results = [[], [], [], []]

    for _ in range(5):
      results[0].append(deq1.eval())
      results[1].append(deq2.eval())

    # Closing drops the min_after_dequeue constraint for the second half.
    q1.close().run()
    q2.close().run()

    for _ in range(5):
      results[2].append(deq1.eval())
      results[3].append(deq2.eval())

    # No two should match
    for i in range(1, 4):
      for j in range(i):
        self.assertNotEqual(results[i], results[j])
def testBigEnqueueMany(self):
  """An enqueue_many larger than capacity drip-feeds as space appears."""
  with self.test_session() as sess:
    q = data_flow_ops.RandomShuffleQueue(5, 0, dtypes_lib.int32, ((),))
    elem = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    enq = q.enqueue_many((elem,))
    deq = q.dequeue()
    size_op = q.size()

    enq_done = []

    def blocking_enqueue():
      enq_done.append(False)
      # This will fill the queue and then block until enough dequeues happen.
      sess.run(enq)
      enq_done.append(True)

    thread = self.checkedThread(target=blocking_enqueue)
    thread.start()

    # The enqueue should start and then block.
    results = []
    results.append(deq.eval())  # Will only complete after the enqueue starts.
    self.assertEqual(len(enq_done), 1)
    self.assertEqual(sess.run(size_op), 5)

    for _ in range(3):
      results.append(deq.eval())

    time.sleep(0.1)
    self.assertEqual(len(enq_done), 1)
    self.assertEqual(sess.run(size_op), 5)

    # This dequeue will unblock the thread.
    results.append(deq.eval())
    time.sleep(0.1)
    self.assertEqual(len(enq_done), 2)
    thread.join()

    # Drain the remaining five elements, checking the size at each step.
    for i in range(5):
      self.assertEqual(size_op.eval(), 5 - i)
      results.append(deq.eval())
      self.assertEqual(size_op.eval(), 5 - i - 1)

    self.assertItemsEqual(elem, results)
def testBigDequeueMany(self):
  """A dequeue_many larger than capacity waits for enough enqueues."""
  with self.test_session() as sess:
    q = data_flow_ops.RandomShuffleQueue(2, 0, dtypes_lib.int32, ((),))
    elem = np.arange(4, dtype=np.int32)
    enq_list = [q.enqueue((e,)) for e in elem]
    deq = q.dequeue_many(4)

    results = []

    def blocking_dequeue():
      # Will only complete after 4 enqueues complete.
      results.extend(sess.run(deq))

    thread = self.checkedThread(target=blocking_dequeue)
    thread.start()
    # The dequeue should start and then block.
    for enq in enq_list:
      # TODO(mrry): Figure out how to do this without sleeping.
      time.sleep(0.1)
      self.assertEqual(len(results), 0)
      sess.run(enq)

    # Enough enqueued to unblock the dequeue
    thread.join()
    self.assertItemsEqual(elem, results)
# Run the queue test suite when executed directly.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
vmx/perfrunner | perfrunner/tests/index.py | 7 | 4990 | from time import sleep
from logger import logger
from perfrunner.helpers.cbmonitor import with_stats
from perfrunner.tests import PerfTest
from perfrunner.workloads.viewgen import ViewGen, ViewGenDev
class IndexTest(PerfTest):
"""
The test measures time it takes to build index (views). This is just a base
class, actual measurements happen in initial and incremental indexing tests.
It doesn't differentiate index types and basically benchmarks dumb/bulk
indexing.
"""
def __init__(self, *args):
super(IndexTest, self).__init__(*args)
index_settings = self.test_config.index_settings
if index_settings.disabled_updates:
options = {'updateMinChanges': 0, 'replicaUpdateMinChanges': 0}
else:
options = None
self.ddocs = ViewGen().generate_ddocs(index_settings.views, options)
def define_ddocs(self):
for master in self.cluster_spec.yield_masters():
for bucket in self.test_config.buckets:
for ddoc_name, ddoc in self.ddocs.iteritems():
self.rest.create_ddoc(master, bucket, ddoc_name, ddoc)
def build_index(self):
for master in self.cluster_spec.yield_masters():
for bucket in self.test_config.buckets:
for ddoc_name, ddoc in self.ddocs.iteritems():
for view_name in ddoc['views']:
self.rest.query_view(master, bucket,
ddoc_name, view_name,
params={'limit': 10})
sleep(self.MONITORING_DELAY)
for master in self.cluster_spec.yield_masters():
self.monitor.monitor_task(master, 'indexer')
def compact_index(self):
for master in self.cluster_spec.yield_masters():
for bucket in self.test_config.buckets:
for ddoc_name in self.ddocs:
self.rest.trigger_index_compaction(master, bucket,
ddoc_name)
for master in self.cluster_spec.yield_masters():
self.monitor.monitor_task(master, 'view_compaction')
class InitialIndexTest(IndexTest):

    """
    The test measures time it takes to build index for the first time. Scenario
    is pretty straightforward, there are only two phases:
    -- Initial data load
    -- Index building
    """

    @with_stats
    def build_index(self):
        # Same as the base implementation, wrapped so stats are collected
        # for the duration of the build.
        super(InitialIndexTest, self).build_index()

    def run(self):
        self.load()
        self.wait_for_persistence()
        self.compact_bucket()

        self.define_ddocs()
        # NOTE(review): the (from_ts, to_ts) pair is supplied by the
        # with_stats decorator, not by build_index itself — confirm against
        # perfrunner.helpers.cbmonitor.
        from_ts, to_ts = self.build_index()
        time_elapsed = (to_ts - from_ts) / 1000.0  # ms -> s

        time_elapsed = self.reporter.finish('Initial index', time_elapsed)
        self.reporter.post_to_sf(time_elapsed)
class InitialAndIncrementalIndexTest(IndexTest):

    """
    Extended version of initial indexing test which also has access phase for
    data/index mutation. It is critical to disable automatic index updates so
    that we can control index building.
    """

    @with_stats
    def build_init_index(self):
        # Initial (from-scratch) build, with stats collection around it.
        return super(InitialAndIncrementalIndexTest, self).build_index()

    @with_stats
    def build_incr_index(self):
        # Incremental build after the access phase has mutated the data.
        super(InitialAndIncrementalIndexTest, self).build_index()

    def run(self):
        self.load()
        self.wait_for_persistence()
        self.compact_bucket()

        self.reporter.start()
        self.define_ddocs()
        # NOTE(review): (from_ts, to_ts) comes from the with_stats decorator.
        from_ts, to_ts = self.build_init_index()
        time_elapsed = (to_ts - from_ts) / 1000.0  # ms -> s

        time_elapsed = self.reporter.finish('Initial index', time_elapsed)
        self.reporter.post_to_sf(
            *self.metric_helper.get_indexing_meta(value=time_elapsed,
                                                  index_type='Initial')
        )

        # Mutate data, then measure the incremental rebuild.
        self.access()
        self.wait_for_persistence()
        self.compact_bucket()

        from_ts, to_ts = self.build_incr_index()
        time_elapsed = (to_ts - from_ts) / 1000.0  # ms -> s

        time_elapsed = self.reporter.finish('Incremental index', time_elapsed)
        self.reporter.post_to_sf(
            *self.metric_helper.get_indexing_meta(value=time_elapsed,
                                                  index_type='Incremental')
        )
class DevIndexTest(IndexTest):

    """
    Unlike base test this one introduces measurements per different index type.
    It only used as a base class for view query tests (in order to get separate
    measurements for different types of queries).
    """

    def __init__(self, *args):
        # NOTE: deliberately super(IndexTest, ...), i.e. PerfTest.__init__,
        # skipping IndexTest.__init__ — the ddocs here come from index_type
        # rather than from index_settings.views.
        super(IndexTest, self).__init__(*args)

        index_type = self.test_config.index_settings.index_type
        if index_type is None:
            logger.interrupt('Missing index_type param')
        self.ddocs = ViewGenDev().generate_ddocs(index_type)
class DevInitialIndexTest(DevIndexTest, InitialIndexTest):
    # Combines per-index-type ddoc generation (DevIndexTest) with the
    # initial-build measurement scenario (InitialIndexTest) via the MRO.
    pass
| apache-2.0 |
ehingant/StoBeDo | stobedo.py | 1 | 8786 | #!/usr/bin/python
import math as m
import numpy as np
import random
# Rate function of coagulation.
def a(i):
    """Return the coagulation rate of a cluster of size i (linear kernel)."""
    return i
# Rate function of fragmentation.
def b(i):
    """Return the fragmentation rate of a cluster of size i (constant kernel)."""
    return 1.
def CalculDistribution(M,T):
r = random.seed()
t = np.float64(0.0)
increment = m.ceil( m.sqrt(M) + 2 )
I_max = increment
C = np.zeros(I_max,dtype=np.int64)
I = np.zeros(I_max,dtype=np.int64)
NP = 1
I[0] = 1
C[0] = M
Ac = np.zeros(I_max,dtype=np.float64) # Propensity Ac[k] of coagulation of a cluster of size I[k] to size I[k]+1
Tc = np.zeros(I_max,dtype=np.float64) # Next time of coagulation (associate to popensity Ac[k])
Ab = np.zeros(I_max,dtype=np.float64) # Propenstiy Ab[k] of break-up of a cluster of size I[k] to size I[k]-1
Tb = np.zeros(I_max,dtype=np.float64) # Next time of break-up (associate to popensity Ab[k])
# Init clock and propensity of each coagulation reactions.
for i in range(0,NP):
Ac[i] = np.float64(a(I[i])*C[i]*C[0])
Tc[i] = UpdateNextTime(Ac[i],Ac[i],np.float64('inf'),r,t)
# Init break-up of cluster size 1 to "impossible"
Ab[0] = np.float64('nan')
Tb[0] = np.float64('Inf')
# Init clock and propensity of each break-up reactions.
for i in range(1,NP):
Ab[i] = np.float64(b(I[i])*C[i])
Tb[i] = UpdateNextTime(Ab[i],Ab[i],np.float64('inf'),r,t)
# Stop the loop when a cluster of size N is created
while t < T:
i1 = np.argmin(Tc[0:NP])
t1 = Tc[i1]
i2 = np.argmin(Tb[0:NP])
t2 = Tb[i2]
if t1<= t2:
t = t1
k = i1
dp = 0
else:
t = t2
k = i2
dp = 1
if t == float('inf'):
print 'stop'
return t
if t <= 0:
print 'problem negative value'
return
NP = UpdateReaction(C,I,Ac,Tc,Ab,Tb,NP,k,dp,r,t)
if C[k] == 0 and k != 0:
NP = ZeroReorder(C,I,Ac,Tc,Ab,Tb,NP,k)
if NP > I_max-2:
C = np.concatenate((C,np.zeros(increment)))
I = np.concatenate((I,np.zeros(increment)))
Ac = np.concatenate((Ac,np.zeros(increment)))
Tc = np.concatenate((Tc,np.zeros(increment)))
Ab = np.concatenate((Ab,np.zeros(increment)))
Tb = np.concatenate((Tb,np.zeros(increment)))
imax = np.max(I[0:NP])
X = np.zeros(imax+1)
Y = np.zeros(imax+1)
for i in range(0,imax+1):
X[i] = i
for k in range(0,NP):
Y[I[k]] = C[k]
return X,Y
def CalculPath(M,S,T):
x=[]
y=[]
r = random.seed()
t = np.float64(0.0)
increment = m.ceil( m.sqrt(M) + 2 )
I_max = increment
C = np.zeros(I_max,dtype=np.int64)
I = np.zeros(I_max,dtype=np.int64)
NP = 1
I[0] = 1
C[0] = M
Ac = np.zeros(I_max,dtype=np.float64) # Propensity Ac[k] of coagulation of a cluster of size I[k] to size I[k]+1
Tc = np.zeros(I_max,dtype=np.float64) # Next time of coagulation (associate to popensity Ac[k])
Ab = np.zeros(I_max,dtype=np.float64) # Propenstiy Ab[k] of break-up of a cluster of size I[k] to size I[k]-1
Tb = np.zeros(I_max,dtype=np.float64) # Next time of break-up (associate to popensity Ab[k])
# Init clock and propensity of each coagulation reactions.
for i in range(0,NP):
Ac[i] = np.float64(a(I[i])*C[i]*C[0])
Tc[i] = UpdateNextTime(Ac[i],Ac[i],np.float64('inf'),r,t)
# Init break-up of cluster size 1 to "impossible"
Ab[0] = np.float64('nan')
Tb[0] = np.float64('Inf')
# Init clock and propensity of each break-up reactions.
for i in range(1,NP):
Ab[i] = np.float64(b(I[i])*C[i])
Tb[i] = UpdateNextTime(Ab[i],Ab[i],np.float64('inf'),r,t)
# Stop the loop when the time T is reached
while t < T:
i1 = np.argmin(Tc[0:NP])
t1 = Tc[i1]
i2 = np.argmin(Tb[0:NP])
t2 = Tb[i2]
if t1<= t2:
t = t1
k = i1
dp = 0
else:
t = t2
k = i2
dp = 1
if t == float('inf'):
print 'stop'
return t
if t <= 0:
print 'problem negative value'
return
NP = UpdateReaction(C,I,Ac,Tc,Ab,Tb,NP,k,dp,r,t)
if C[k] == 0 and k != 0:
NP = ZeroReorder(C,I,Ac,Tc,Ab,Tb,NP,k)
if NP > I_max-2:
C = np.concatenate((C,np.zeros(increment)))
I = np.concatenate((I,np.zeros(increment)))
Ac = np.concatenate((Ac,np.zeros(increment)))
Tc = np.concatenate((Tc,np.zeros(increment)))
Ab = np.concatenate((Ab,np.zeros(increment)))
Tb = np.concatenate((Tb,np.zeros(increment)))
# output x = time, y = C_i(t) (i=S specified size)
ok = 0
for i in range(0,NP):
if I[i] == S:
y = np.concatenate((y,[C[i]]))
x = np.concatenate((x,[t]))
ok = 1
if ok == 0:
y = np.concatenate((y,[0]))
x = np.concatenate((x,[t]))
return x,y
def CalculNucleationTime(M,N):
r = random.seed()
t = np.float64(0.0)
increment = m.ceil( m.sqrt(M) + 2 )
I_max = increment
C = np.zeros(I_max,dtype=np.int64)
I = np.zeros(I_max,dtype=np.int64)
NP = 1
I[0] = 1
C[0] = M
Ac = np.zeros(I_max,dtype=np.float64) # Propensity Ac[k] of coagulation of a cluster of size I[k] to size I[k]+1
Tc = np.zeros(I_max,dtype=np.float64) # Next time of coagulation (associate to popensity Ac[k])
Ab = np.zeros(I_max,dtype=np.float64) # Propenstiy Ab[k] of break-up of a cluster of size I[k] to size I[k]-1
Tb = np.zeros(I_max,dtype=np.float64) # Next time of break-up (associate to popensity Ab[k])
# Init clock and propensity of each coagulation reactions.
for i in range(0,NP):
Ac[i] = np.float64(a(I[i])*C[i]*C[0])
Tc[i] = UpdateNextTime(Ac[i],Ac[i],np.float64('inf'),r,t)
# Init break-up of cluster size 1 to "impossible"
Ab[0] = np.float64('nan')
Tb[0] = np.float64('Inf')
# Init clock and propensity of each break-up reactions.
for i in range(1,NP):
Ab[i] = np.float64(b(I[i])*C[i])
Tb[i] = UpdateNextTime(Ab[i],Ab[i],np.float64('inf'),r,t)
# Stop the loop when a cluster of size N is created
while I[NP-1] != N:
i1 = np.argmin(Tc[0:NP])
t1 = Tc[i1]
i2 = np.argmin(Tb[0:NP])
t2 = Tb[i2]
if t1<= t2:
t = t1
k = i1
dp = 0
else:
t = t2
k = i2
dp = 1
if t == np.float64('inf'):
print 'stop'
return t
if t <= 0:
print 'problem negative value'
return
NP = UpdateReaction(C,I,Ac,Tc,Ab,Tb,NP,k,dp,r,t)
if C[k] == 0 and k != 0:
NP = ZeroReorder(C,I,Ac,Tc,Ab,Tb,NP,k)
if NP > I_max-2:
C = np.concatenate((C,np.zeros(increment,dtype=np.int64)))
I = np.concatenate((I,np.zeros(increment,dtype=np.int64)))
Ac = np.concatenate((Ac,np.zeros(increment,dtype=np.float64)))
Tc = np.concatenate((Tc,np.zeros(increment,dtype=np.float64)))
Ab = np.concatenate((Ab,np.zeros(increment,dtype=np.float64)))
Tb = np.concatenate((Tb,np.zeros(increment,dtype=np.float64)))
return t
def UpdateNextTime(A_new, A_old, T, r, t):
    """Rescale or redraw the next firing time of one reaction channel.

    A_new/A_old are the new and previous propensities, T the previously
    scheduled firing time (inf meaning "not scheduled"), t the current
    time. The r argument is unused but kept for caller compatibility.
    """
    # A dead channel (zero propensity) never fires.
    if A_new == 0.:
        return np.float64('inf')
    # No pending clock: draw a fresh exponential waiting time from now.
    if T == np.float64('inf'):
        u = random.random()
        return -1. / A_new * np.float64(m.log(u)) + t
    # Otherwise rescale the remaining waiting time by the propensity ratio.
    return A_old / A_new * (T - t) + t
def UpdateCluster(C,I,NP,k,dp):
    """Apply one reaction to the population arrays, in place.

    C[i] is the count of clusters of size I[i]; NP is the number of live
    entries; k indexes the reacting cluster; dp selects the reaction
    (0 = coagulation with a monomer, 1 = loss of a monomer).

    Returns ik, the index of the entry holding the product size; ik == NP
    means a brand-new size entry was appended (the caller must bump NP).
    """
    ok = 0;  # becomes 1 once an existing entry for the product size is found
    # Update clusters after coagulation of index k (dp=0)
    if dp == 0:
        C[0] -= 1  # one monomer consumed (C[0] drops by 2 in total if k == 0)
        C[k] -= 1
        for i in range(1,NP):
            if I[i] == I[k]+1:
                C[i] += 1
                ik = i
                ok = 1
        if ok == 0:
            # No entry of size I[k]+1 yet: append one after the live range.
            I[NP] = I[k] + 1
            C[NP] = 1
            ik = NP
    # Update clusters after fragmentation of index k (dp=1)
    elif dp == 1:
        C[0] += 1  # one monomer released
        C[k] -= 1
        for i in range(0,NP):
            if I[i] == I[k]-1:
                C[i] += 1
                ik = i
                ok = 1
        if ok == 0:
            # No entry of size I[k]-1 yet: append one after the live range.
            I[NP] = I[k]-1
            C[NP] = 1
            ik = NP
    return ik
def UpdateReaction(C,I,Ac,Tc,Ab,Tb,NP,k,dp,r,t):
    """Apply the fired reaction and refresh every affected clock, in place.

    k is the index of the reacting entry, dp the reaction type (0 =
    coagulation, 1 = fragmentation), t the current time. Returns the
    (possibly incremented) number of live entries NP.
    """
    ik = UpdateCluster(C,I,NP,k,dp)
    if ik == NP:
        # A new size entry was appended; start it with blank propensities
        # and unscheduled clocks.
        NP += 1
        Ac[ik] = 0.
        Tc[ik] = np.float64('inf')
        Ab[ik] = 0.
        Tb[ik] = np.float64('inf')
    if dp == 0:
        # Setting the clock to inf forces UpdateNextTime to redraw it below.
        Tc[k] = np.float64('inf')
        # Monomer-monomer coagulation: C[0]*(C[0]-1)/2 distinct pairs.
        Ac_new = np.float64(a(I[0])*C[0]*(C[0]-1)/2)
        Tc[0] = UpdateNextTime(Ac_new,Ac[0],Tc[0],r,t)
        Ac[0] = Ac_new
        # Every coagulation propensity depends on the monomer count C[0],
        # so all coagulation clocks must be rescaled.
        for i in range(1,NP):
            Ac_new = np.float64(a(I[i])*C[i]*C[0])
            Tc[i] = UpdateNextTime(Ac_new,Ac[i],Tc[i],r,t)
            Ac[i] = Ac_new
        if k != 0:
            # Break-up rate of the shrunken entry k (entry 0 has none).
            Ab_new = np.float64(b(I[k])*C[k])
            Tb[k] = UpdateNextTime(Ab_new,Ab[k],Tb[k],r,t)
            Ab[k] = Ab_new
        # Break-up rate of the product entry ik.
        Ab_new = np.float64(b(I[ik])*C[ik])
        Tb[ik] = UpdateNextTime(Ab_new,Ab[ik],Tb[ik],r,t)
        Ab[ik] = Ab_new
    elif dp == 1:
        # All coagulation clocks rescale with the new monomer count.
        Ac_new = np.float64(a(I[0])*C[0]*(C[0]-1)/2)
        Tc[0] = UpdateNextTime(Ac_new,Ac[0],Tc[0],r,t)
        Ac[0] = Ac_new
        for i in range(1,NP):
            Ac_new = np.float64(a(I[i])*C[i]*C[0])
            Tc[i] = UpdateNextTime(Ac_new,Ac[i],Tc[i],r,t)
            Ac[i] = Ac_new
        if ik != 0:
            # Break-up rate of the product entry (skip the monomer entry).
            Ab_new = np.float64(b(I[ik])*C[ik])
            Tb[ik] = UpdateNextTime(Ab_new,Ab[ik],Tb[ik],r,t)
            Ab[ik] = Ab_new
        # Force a fresh draw for the fragmented entry's next break-up.
        Tb[k] = np.float64('inf')
        Ab_new = np.float64(b(I[k])*C[k])
        Tb[k] = UpdateNextTime(Ab_new,Ab[k],Tb[k],r,t)
        Ab[k] = Ab_new
    return NP
def ZeroReorder(C, I, Ac, Tc, Ab, Tb, NP, k):
    """Drop the (now empty) entry k by shifting entries k+1..NP-1 left.

    The slot at index NP-1 keeps its stale contents; only the first NP-1
    entries are live after the call. Returns the new live-entry count.
    """
    for table in (C, I, Ac, Tc, Ab, Tb):
        table[k:NP - 1] = table[k + 1:NP]
    return NP - 1
| gpl-3.0 |
IllusionRom-deprecated/android_platform_external_chromium_org_testing_gtest | scripts/upload.py | 2511 | 51024 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import cookielib
import getpass
import logging
import md5
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
try:
import readline
except ImportError:
pass
# The logging verbosity:
#  0: Errors only.
#  1: Status messages.
#  2: Info logs.
#  3: Debug logs.
verbosity = 1

# Max size of patch or base file, in bytes.
MAX_UPLOAD_SIZE = 900 * 1024
def GetEmail(prompt):
  """Prompts the user for their email address and returns it.
  The last used email address is saved to a file and offered up as a suggestion
  to the user. If the user presses enter without typing in anything the last
  used email address is used. If the user enters a new address, it is saved
  for next time we prompt.
  """
  last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
  last_email = ""
  if os.path.exists(last_email_file_name):
    try:
      last_email_file = open(last_email_file_name, "r")
      last_email = last_email_file.readline().strip("\n")
      last_email_file.close()
      prompt += " [%s]" % last_email
    except IOError, e:
      # Best effort only: an unreadable cache file just means no suggestion.
      pass
  email = raw_input(prompt + ": ").strip()
  if email:
    try:
      last_email_file = open(last_email_file_name, "w")
      last_email_file.write(email)
      last_email_file.close()
    except IOError, e:
      # Failing to persist the address is not fatal; skip the cache.
      pass
  else:
    # Empty input means "accept the suggested last-used address".
    email = last_email
  return email
def StatusUpdate(msg):
  """Print a status message to stdout.
  If 'verbosity' is greater than 0, print the message.
  Args:
    msg: The string to print.
  """
  # 'verbosity' is the module-level level selected via -q/-v/--noisy.
  if verbosity > 0:
    print msg
def ErrorExit(msg):
  """Print an error message to stderr and exit."""
  print >>sys.stderr, msg
  # Exit status 1 signals failure to the calling shell.
  sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""
  def __init__(self, url, code, msg, headers, args):
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    self.args = args
    # ClientLogin reports the failure kind in the "Error" field (e.g.
    # "BadAuthentication", "CaptchaRequired"); _Authenticate switches on it.
    self.reason = args["Error"]
class AbstractRpcServer(object):
  """Provides a common interface for a simple RPC server."""
  # NOTE(review): extra_headers={} is a mutable default argument shared
  # across instances; safe only as long as no caller mutates it — confirm.
  def __init__(self, host, auth_function, host_override=None, extra_headers={},
               save_cookies=False):
    """Creates a new HttpRpcServer.
    Args:
      host: The host to send requests to.
      auth_function: A function that takes no arguments and returns an
        (email, password) tuple when called. Will be called if authentication
        is required.
      host_override: The host header to send to the server (defaults to host).
      extra_headers: A dict of extra headers to append to every request.
      save_cookies: If True, save the authentication cookies to local disk.
        If False, use an in-memory cookiejar instead. Subclasses must
        implement this functionality. Defaults to False.
    """
    self.host = host
    self.host_override = host_override
    self.auth_function = auth_function
    self.authenticated = False
    self.extra_headers = extra_headers
    self.save_cookies = save_cookies
    # Subclasses build the opener; see HttpRpcServer._GetOpener.
    self.opener = self._GetOpener()
    if self.host_override:
      logging.info("Server: %s; Host: %s", self.host, self.host_override)
    else:
      logging.info("Server: %s", self.host)
  def _GetOpener(self):
    """Returns an OpenerDirector for making HTTP requests.
    Returns:
      A urllib2.OpenerDirector object.
    """
    raise NotImplementedError()
  def _CreateRequest(self, url, data=None):
    """Creates a new urllib request, applying host override and extra headers."""
    logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
    req = urllib2.Request(url, data=data)
    if self.host_override:
      req.add_header("Host", self.host_override)
    for key, value in self.extra_headers.iteritems():
      req.add_header(key, value)
    return req
  def _GetAuthToken(self, email, password):
    """Uses ClientLogin to authenticate the user, returning an auth token.
    Args:
      email: The user's email address
      password: The user's password
    Raises:
      ClientLoginError: If there was an error authenticating with ClientLogin.
      HTTPError: If there was some other form of HTTP error.
    Returns:
      The authentication token returned by ClientLogin.
    """
    account_type = "GOOGLE"
    if self.host.endswith(".google.com"):
      # Needed for use inside Google.
      account_type = "HOSTED"
    req = self._CreateRequest(
        url="https://www.google.com/accounts/ClientLogin",
        data=urllib.urlencode({
            "Email": email,
            "Passwd": password,
            "service": "ah",
            "source": "rietveld-codereview-upload",
            "accountType": account_type,
        }),
    )
    try:
      response = self.opener.open(req)
      response_body = response.read()
      # ClientLogin replies with newline-separated "key=value" pairs.
      response_dict = dict(x.split("=")
                           for x in response_body.split("\n") if x)
      return response_dict["Auth"]
    except urllib2.HTTPError, e:
      if e.code == 403:
        # 403 carries a parseable error body; surface it as ClientLoginError.
        body = e.read()
        response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
        raise ClientLoginError(req.get_full_url(), e.code, e.msg,
                               e.headers, response_dict)
      else:
        raise
  def _GetAuthCookie(self, auth_token):
    """Fetches authentication cookies for an authentication token.
    Args:
      auth_token: The authentication token returned by ClientLogin.
    Raises:
      HTTPError: If there was an error fetching the authentication cookies.
    """
    # This is a dummy value to allow us to identify when we're successful.
    continue_location = "http://localhost/"
    args = {"continue": continue_location, "auth": auth_token}
    req = self._CreateRequest("http://%s/_ah/login?%s" %
                              (self.host, urllib.urlencode(args)))
    try:
      response = self.opener.open(req)
    except urllib2.HTTPError, e:
      # The opener has no redirect handler, so a successful login surfaces
      # here as an HTTPError carrying the 302 to continue_location.
      response = e
    if (response.code != 302 or
        response.info()["location"] != continue_location):
      raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
                              response.headers, response.fp)
    self.authenticated = True
  def _Authenticate(self):
    """Authenticates the user.
    The authentication process works as follows:
     1) We get a username and password from the user
     2) We use ClientLogin to obtain an AUTH token for the user
        (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
     3) We pass the auth token to /_ah/login on the server to obtain an
        authentication cookie. If login was successful, it tries to redirect
        us to the URL we provided.
    If we attempt to access the upload API without first obtaining an
    authentication cookie, it returns a 401 response and directs us to
    authenticate ourselves with ClientLogin.
    """
    # Up to three attempts for a mistyped password ("BadAuthentication");
    # every other ClientLogin failure is terminal.
    for i in range(3):
      credentials = self.auth_function()
      try:
        auth_token = self._GetAuthToken(credentials[0], credentials[1])
      except ClientLoginError, e:
        if e.reason == "BadAuthentication":
          print >>sys.stderr, "Invalid username or password."
          continue
        if e.reason == "CaptchaRequired":
          print >>sys.stderr, (
              "Please go to\n"
              "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
              "and verify you are a human. Then try again.")
          break
        if e.reason == "NotVerified":
          print >>sys.stderr, "Account not verified."
          break
        if e.reason == "TermsNotAgreed":
          print >>sys.stderr, "User has not agreed to TOS."
          break
        if e.reason == "AccountDeleted":
          print >>sys.stderr, "The user account has been deleted."
          break
        if e.reason == "AccountDisabled":
          print >>sys.stderr, "The user account has been disabled."
          break
        if e.reason == "ServiceDisabled":
          print >>sys.stderr, ("The user's access to the service has been "
                               "disabled.")
          break
        if e.reason == "ServiceUnavailable":
          print >>sys.stderr, "The service is not available; try again later."
          break
        raise
      self._GetAuthCookie(auth_token)
      return
  def Send(self, request_path, payload=None,
           content_type="application/octet-stream",
           timeout=None,
           **kwargs):
    """Sends an RPC and returns the response.
    Args:
      request_path: The path to send the request to, eg /api/appversion/create.
      payload: The body of the request, or None to send an empty request.
      content_type: The Content-Type header to use.
      timeout: timeout in seconds; default None i.e. no timeout.
        (Note: for large requests on OS X, the timeout doesn't work right.)
      kwargs: Any keyword arguments are converted into query string parameters.
    Returns:
      The response body, as a string.
    """
    # TODO: Don't require authentication.  Let the server say
    # whether it is necessary.
    if not self.authenticated:
      self._Authenticate()
    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
      tries = 0
      while True:
        tries += 1
        args = dict(kwargs)
        url = "http://%s%s" % (self.host, request_path)
        if args:
          url += "?" + urllib.urlencode(args)
        req = self._CreateRequest(url=url, data=payload)
        req.add_header("Content-Type", content_type)
        try:
          f = self.opener.open(req)
          response = f.read()
          f.close()
          return response
        except urllib2.HTTPError, e:
          if tries > 3:
            raise
          elif e.code == 401:
            # Stale/expired cookie: re-authenticate and retry (max 3 tries).
            self._Authenticate()
          ## elif e.code >= 500 and e.code < 600:
          ##   # Server Error - try again.
          ##   continue
          else:
            raise
    finally:
      # Restore the process-wide socket timeout we overrode above.
      socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""
  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    super(HttpRpcServer, self)._Authenticate()
    if self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
      self.cookie_jar.save()
  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.
    Returns:
      A urllib2.OpenerDirector object.
    """
    # Handlers are added individually (instead of build_opener) so that no
    # HTTPRedirectHandler is installed; _GetAuthCookie relies on seeing the
    # raw 302 from /_ah/login.
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file so the stored credentials stay private.
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
# Module-level command-line interface: the option groups below populate the
# 'options' object consumed by GetRpcServer and the VCS classes.
parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
parser.add_option("-y", "--assume_yes", action="store_true",
                  dest="assume_yes", default=False,
                  help="Assume that the answer to yes/no questions is 'yes'.")
# Logging
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
                 dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
                 dest="verbose", default=1,
                 help="Print info level logs (default).")
group.add_option("--noisy", action="store_const", const=3,
                 dest="verbose", help="Print all logs.")
# Review server
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
                 default="codereview.appspot.com",
                 metavar="SERVER",
                 help=("The server to upload to. The format is host[:port]. "
                       "Defaults to 'codereview.appspot.com'."))
group.add_option("-e", "--email", action="store", dest="email",
                 metavar="EMAIL", default=None,
                 help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
                 metavar="HOST", default=None,
                 help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
                 dest="save_cookies", default=True,
                 help="Do not save authentication cookies to local disk.")
# Issue
group = parser.add_option_group("Issue options")
group.add_option("-d", "--description", action="store", dest="description",
                 metavar="DESCRIPTION", default=None,
                 help="Optional description when creating an issue.")
group.add_option("-f", "--description_file", action="store",
                 dest="description_file", metavar="DESCRIPTION_FILE",
                 default=None,
                 help="Optional path of a file that contains "
                      "the description when creating an issue.")
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                 metavar="REVIEWERS", default=None,
                 help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
                 metavar="CC", default=None,
                 help="Add CC (comma separated email addresses).")
# Upload options
group = parser.add_option_group("Patch options")
group.add_option("-m", "--message", action="store", dest="message",
                 metavar="MESSAGE", default=None,
                 help="A message to identify the patch. "
                      "Will prompt if omitted.")
group.add_option("-i", "--issue", type="int", action="store",
                 metavar="ISSUE", default=None,
                 help="Issue number to which to add. Defaults to new issue.")
group.add_option("--download_base", action="store_true",
                 dest="download_base", default=False,
                 help="Base files will be downloaded by the server "
                      "(side-by-side diffs may not work on files with CRs).")
group.add_option("--rev", action="store", dest="revision",
                 metavar="REV", default=None,
                 help="Branch/tree/revision to diff against (used by DVCS).")
group.add_option("--send_mail", action="store_true",
                 dest="send_mail", default=False,
                 help="Send notification email to reviewers.")
def GetRpcServer(options):
  """Returns an instance of an AbstractRpcServer.
  Returns:
    A new AbstractRpcServer, on which RPC calls can be made.
  """
  rpc_server_class = HttpRpcServer
  def GetUserCredentials():
    """Prompts the user for a username and password."""
    email = options.email
    if email is None:
      email = GetEmail("Email (login for uploading to %s)" % options.server)
    password = getpass.getpass("Password for %s: " % email)
    return (email, password)
  # If this is the dev_appserver, use fake authentication.
  host = (options.host or options.server).lower()
  if host == "localhost" or host.startswith("localhost:"):
    email = options.email
    if email is None:
      email = "test@example.com"
      logging.info("Using debug user %s. Override with --email" % email)
    # The dev_appserver accepts a login cookie directly; no ClientLogin.
    server = rpc_server_class(
        options.server,
        lambda: (email, "password"),
        host_override=options.host,
        extra_headers={"Cookie":
                       'dev_appserver_login="%s:False"' % email},
        save_cookies=options.save_cookies)
    # Don't try to talk to ClientLogin.
    server.authenticated = True
    return server
  # Production server: real credentials, prompted lazily on first 401.
  return rpc_server_class(options.server, GetUserCredentials,
                          host_override=options.host,
                          save_cookies=options.save_cookies)
def EncodeMultipartFormData(fields, files):
  """Encode form fields for multipart/form-data.
  Args:
    fields: A sequence of (name, value) elements for regular form fields.
    files: A sequence of (name, filename, value) elements for data to be
      uploaded as files.
  Returns:
    (content_type, body) ready for httplib.HTTP instance.
  Source:
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  """
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'
  parts = []
  add = parts.append
  # Plain form fields first, each in its own boundary-delimited section.
  for (name, value) in fields:
    add('--' + BOUNDARY)
    add('Content-Disposition: form-data; name="%s"' % name)
    add('')
    add(value)
  # File attachments carry a filename and a guessed Content-Type as well.
  for (name, filename, value) in files:
    add('--' + BOUNDARY)
    add('Content-Disposition: form-data; name="%s"; filename="%s"' %
        (name, filename))
    add('Content-Type: %s' % GetContentType(filename))
    add('')
    add(value)
  # Closing boundary, plus a trailing CRLF produced by the final empty part.
  add('--' + BOUNDARY + '--')
  add('')
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, CRLF.join(parts)
def GetContentType(filename):
  """Helper to guess the content-type from the filename."""
  guessed, _encoding = mimetypes.guess_type(filename)
  if guessed:
    return guessed
  # Unknown extensions fall back to the generic binary type.
  return 'application/octet-stream'
# Use a shell for subcommands on Windows to get a PATH search.
# (Passed as the 'shell' flag to every subprocess.Popen call below.)
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True):
  """Executes a command and returns the output from stdout and the return code.
  Args:
    command: Command to execute.
    print_output: If True, the output is printed to stdout.
      If False, both stdout and stderr are ignored.
    universal_newlines: Use universal_newlines flag (default: True).
  Returns:
    Tuple (output, return code)
  """
  logging.info("Running %s", command)
  p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       shell=use_shell, universal_newlines=universal_newlines)
  if print_output:
    # Echo each stdout line as it arrives while also accumulating it.
    output_array = []
    while True:
      line = p.stdout.readline()
      if not line:
        break
      print line.strip("\n")
      output_array.append(line)
    output = "".join(output_array)
  else:
    output = p.stdout.read()
  p.wait()
  # NOTE(review): stderr is only drained after stdout is fully read; a child
  # that fills the stderr pipe first could deadlock here — confirm commands
  # used stay small on stderr.
  errout = p.stderr.read()
  if print_output and errout:
    print >>sys.stderr, errout
  p.stdout.close()
  p.stderr.close()
  return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False):
  """Run *command* and return its stdout, aborting the program on failure.

  Exits via ErrorExit when the command returns a non-zero status, or when
  it produces no output (unless silent_ok is set).
  """
  output, returncode = RunShellWithReturnCode(command, print_output,
                                              universal_newlines)
  if returncode:
    ErrorExit("Got error status from %s:\n%s" % (command, output))
  if not silent_ok and not output:
    ErrorExit("No output from %s" % command)
  return output
class VersionControlSystem(object):
  """Abstract base class providing an interface to the VCS."""
  def __init__(self, options):
    """Constructor.
    Args:
      options: Command line options.
    """
    self.options = options
  def GenerateDiff(self, args):
    """Return the current diff as a string.
    Args:
      args: Extra arguments to pass to the diff command.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)
  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)
  def CheckForUnknownFiles(self):
    """Show an "are you sure?" prompt if there are unknown files."""
    unknown_files = self.GetUnknownFiles()
    if unknown_files:
      print "The following files are not added to version control:"
      for line in unknown_files:
        print line
      prompt = "Are you sure to continue?(y/N) "
      answer = raw_input(prompt).strip()
      if answer != "y":
        ErrorExit("User aborted")
  def GetBaseFile(self, filename):
    """Get the content of the upstream version of a file.
    Returns:
      A tuple (base_content, new_content, is_binary, status)
        base_content: The contents of the base file.
        new_content: For text files, this is empty. For binary files, this is
          the contents of the new file, since the diff output won't contain
          information to reconstruct the current file.
        is_binary: True iff the file is binary.
        status: The status of the file.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)
  def GetBaseFiles(self, diff):
    """Helper that calls GetBase file for each file in the patch.
    Returns:
      A dictionary that maps from filename to GetBaseFile's tuple. Filenames
      are retrieved based on lines that start with "Index:" or
      "Property changes on:".
    """
    files = {}
    for line in diff.splitlines(True):
      if line.startswith('Index:') or line.startswith('Property changes on:'):
        unused, filename = line.split(':', 1)
        # On Windows if a file has property changes its filename uses '\'
        # instead of '/'.
        filename = filename.strip().replace('\\', '/')
        files[filename] = self.GetBaseFile(filename)
    return files
  def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                      files):
    """Uploads the base files (and if necessary, the current ones as well)."""
    def UploadFile(filename, file_id, content, is_binary, status, is_base):
      """Uploads a file to the server."""
      file_too_large = False
      # NOTE(review): 'type' shadows the builtin here; local to this closure.
      if is_base:
        type = "base"
      else:
        type = "current"
      if len(content) > MAX_UPLOAD_SIZE:
        # Oversized files are announced to the server via "file_too_large"
        # with an empty body instead of being uploaded.
        print ("Not uploading the %s file for %s because it's too large." %
               (type, filename))
        file_too_large = True
        content = ""
      checksum = md5.new(content).hexdigest()
      if options.verbose > 0 and not file_too_large:
        print "Uploading %s file for %s" % (type, filename)
      url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
      form_fields = [("filename", filename),
                     ("status", status),
                     ("checksum", checksum),
                     ("is_binary", str(is_binary)),
                     ("is_current", str(not is_base)),
                    ]
      if file_too_large:
        form_fields.append(("file_too_large", "1"))
      if options.email:
        form_fields.append(("user", options.email))
      ctype, body = EncodeMultipartFormData(form_fields,
                                            [("data", filename, content)])
      response_body = rpc_server.Send(url, body,
                                      content_type=ctype)
      if not response_body.startswith("OK"):
        StatusUpdate("  --> %s" % response_body)
        sys.exit(1)
    # Invert patch_list from (file_id, filename) pairs into a
    # filename -> file_id map; the listcomp is used only for its side effect.
    patches = dict()
    [patches.setdefault(v, k) for k, v in patch_list]
    for filename in patches.keys():
      base_content, new_content, is_binary, status = files[filename]
      file_id_str = patches.get(filename)
      # A "nobase" marker in the id means the server does not want the base
      # file; strip the marker to recover the numeric id.
      if file_id_str.find("nobase") != -1:
        base_content = None
        file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
      file_id = int(file_id_str)
      if base_content != None:
        UploadFile(filename, file_id, base_content, is_binary, status, True)
      if new_content != None:
        UploadFile(filename, file_id, new_content, is_binary, status, False)
  def IsImage(self, filename):
    """Returns true if the filename has an image extension."""
    mimetype = mimetypes.guess_type(filename)[0]
    if not mimetype:
      return False
    return mimetype.startswith("image/")
class SubversionVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Subversion."""
  def __init__(self, options):
    super(SubversionVCS, self).__init__(options)
    if self.options.revision:
      # --rev accepts either "N" or "N:M"; group 3 (the end rev) is optional.
      match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
      if not match:
        ErrorExit("Invalid Subversion revision %s." % self.options.revision)
      self.rev_start = match.group(1)
      self.rev_end = match.group(3)
    else:
      self.rev_start = self.rev_end = None
    # Cache output from "svn list -r REVNO dirname".
    # Keys: dirname, Values: 2-tuple (ouput for start rev and end rev).
    self.svnls_cache = {}
    # SVN base URL is required to fetch files deleted in an older revision.
    # Result is cached to not guess it over and over again in GetBaseFile().
    required = self.options.download_base or self.options.revision is not None
    self.svn_base = self._GuessBase(required)
  def GuessBase(self, required):
    """Wrapper for _GuessBase."""
    return self.svn_base
  def _GuessBase(self, required):
    """Returns the SVN base URL.
    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    info = RunShell(["svn", "info"])
    for line in info.splitlines():
      words = line.split()
      if len(words) == 2 and words[0] == "URL:":
        url = words[1]
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
        username, netloc = urllib.splituser(netloc)
        if username:
          logging.info("Removed username from base URL")
        # Known hosting sites get hand-built viewvc-style checkout URLs;
        # everything else falls through to the generic reconstruction.
        if netloc.endswith("svn.python.org"):
          if netloc == "svn.python.org":
            if path.startswith("/projects/"):
              # Slice keeps the leading '/' of the remainder.
              path = path[9:]
          elif netloc != "pythondev@svn.python.org":
            ErrorExit("Unrecognized Python URL: %s" % url)
          base = "http://svn.python.org/view/*checkout*%s/" % path
          logging.info("Guessed Python base = %s", base)
        elif netloc.endswith("svn.collab.net"):
          if path.startswith("/repos/"):
            path = path[6:]
          base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
          logging.info("Guessed CollabNet base = %s", base)
        elif netloc.endswith(".googlecode.com"):
          path = path + "/"
          base = urlparse.urlunparse(("http", netloc, path, params,
                                      query, fragment))
          logging.info("Guessed Google Code base = %s", base)
        else:
          path = path + "/"
          base = urlparse.urlunparse((scheme, netloc, path, params,
                                      query, fragment))
          logging.info("Guessed base = %s", base)
        return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None
  def GenerateDiff(self, args):
    """Run "svn diff", failing loudly when the output contains no patches."""
    cmd = ["svn", "diff"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]
    cmd.extend(args)
    data = RunShell(cmd)
    count = 0
    for line in data.splitlines():
      if line.startswith("Index:") or line.startswith("Property changes on:"):
        count += 1
        logging.info(line)
    if not count:
      ErrorExit("No valid patches found in output from svn diff")
    return data
  def _CollapseKeywords(self, content, keyword_str):
    """Collapses SVN keywords."""
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior patching.PatchChunks() fails with a chunk mismatch error.
    # This part was originally written by the Review Board development team
    # who had the same problem (http://reviews.review-board.org/r/276/).
    # Mapping of keywords to known aliases
    svn_keywords = {
      # Standard keywords
      'Date': ['Date', 'LastChangedDate'],
      'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
      'Author': ['Author', 'LastChangedBy'],
      'HeadURL': ['HeadURL', 'URL'],
      'Id': ['Id'],
      # Aliases
      'LastChangedDate': ['LastChangedDate', 'Date'],
      'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
      'LastChangedBy': ['LastChangedBy', 'Author'],
      'URL': ['URL', 'HeadURL'],
    }
    def repl(m):
      # Fixed-width keywords ("$Kw:: value $") keep their padding as spaces;
      # plain expanded keywords collapse back to "$Kw$".
      if m.group(2):
        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
      return "$%s$" % m.group(1)
    keywords = [keyword
                for name in keyword_str.split(" ")
                for keyword in svn_keywords.get(name, [])]
    return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
  def GetUnknownFiles(self):
    """Return the "?"-status lines reported by svn status."""
    status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
    unknown_files = []
    for line in status.split("\n"):
      if line and line[0] == "?":
        unknown_files.append(line)
    return unknown_files
  def ReadFile(self, filename):
    """Returns the contents of a file."""
    file = open(filename, 'rb')
    result = ""
    try:
      result = file.read()
    finally:
      file.close()
    return result
  def GetStatus(self, filename):
    """Returns the status of a file."""
    if not self.options.revision:
      status = RunShell(["svn", "status", "--ignore-externals", filename])
      if not status:
        ErrorExit("svn status returned no output for %s" % filename)
      status_lines = status.splitlines()
      # If file is in a cl, the output will begin with
      # "\n--- Changelist 'cl_name':\n". See
      # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
      if (len(status_lines) == 3 and
          not status_lines[0] and
          status_lines[1].startswith("--- Changelist")):
        status = status_lines[2]
      else:
        status = status_lines[0]
    # If we have a revision to diff against we need to run "svn list"
    # for the old and the new revision and compare the results to get
    # the correct status for a file.
    else:
      dirname, relfilename = os.path.split(filename)
      if dirname not in self.svnls_cache:
        # Populate the cache with one listing per revision endpoint so
        # repeated files in the same directory avoid extra svn calls.
        cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to get status for %s." % filename)
        old_files = out.splitlines()
        args = ["svn", "list"]
        if self.rev_end:
          args += ["-r", self.rev_end]
        cmd = args + [dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to run command %s" % cmd)
        self.svnls_cache[dirname] = (old_files, out.splitlines())
      old_files, new_files = self.svnls_cache[dirname]
      # Presence in old vs. new listing determines Deleted/Modified/Added.
      if relfilename in old_files and relfilename not in new_files:
        status = "D   "
      elif relfilename in old_files and relfilename in new_files:
        status = "M   "
      else:
        status = "A   "
    return status
  def GetBaseFile(self, filename):
    status = self.GetStatus(filename)
    base_content = None
    new_content = None
    # If a file is copied its status will be "A  +", which signifies
    # "addition-with-history". See "svn st" for more information. We need to
    # upload the original file or else diff parsing will fail if the file was
    # edited.
    if status[0] == "A" and status[3] != "+":
      # We'll need to upload the new content if we're adding a binary file
      # since diff's output won't contain it.
      mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
                          silent_ok=True)
      base_content = ""
      is_binary = mimetype and not mimetype.startswith("text/")
      if is_binary and self.IsImage(filename):
        new_content = self.ReadFile(filename)
    elif (status[0] in ("M", "D", "R") or
          (status[0] == "A" and status[3] == "+") or  # Copied file.
          (status[0] == " " and status[1] == "M")):  # Property change.
      args = []
      if self.options.revision:
        url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
      else:
        # Don't change filename, it's needed later.
        url = filename
        args += ["-r", "BASE"]
      cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
      mimetype, returncode = RunShellWithReturnCode(cmd)
      if returncode:
        # File does not exist in the requested revision.
        # Reset mimetype, it contains an error message.
        mimetype = ""
      get_base = False
      is_binary = mimetype and not mimetype.startswith("text/")
      if status[0] == " ":
        # Empty base content just to force an upload.
        base_content = ""
      elif is_binary:
        if self.IsImage(filename):
          get_base = True
          if status[0] == "M":
            if not self.rev_end:
              new_content = self.ReadFile(filename)
            else:
              url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
              new_content = RunShell(["svn", "cat", url],
                                     universal_newlines=True, silent_ok=True)
        else:
          base_content = ""
      else:
        get_base = True
      if get_base:
        # Binary bases must be fetched without newline translation so the
        # bytes match the repository exactly.
        if is_binary:
          universal_newlines = False
        else:
          universal_newlines = True
        if self.rev_start:
          # "svn cat -r REV delete_file.txt" doesn't work. cat requires
          # the full URL with "@REV" appended instead of using "-r" option.
          url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          base_content = RunShell(["svn", "cat", url],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        else:
          base_content = RunShell(["svn", "cat", filename],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        if not is_binary:
          args = []
          if self.rev_start:
            url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          else:
            url = filename
            args += ["-r", "BASE"]
          cmd = ["svn"] + args + ["propget", "svn:keywords", url]
          keywords, returncode = RunShellWithReturnCode(cmd)
          if keywords and not returncode:
            # Undo svn cat's keyword expansion so diffs apply cleanly.
            base_content = self._CollapseKeywords(base_content, keywords)
    else:
      StatusUpdate("svn status returned unexpected output: %s" % status)
      sys.exit(1)
    return base_content, new_content, is_binary, status[0:5]
class GitVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Git."""
  def __init__(self, options):
    super(GitVCS, self).__init__(options)
    # Map of filename -> hash of base file.
    self.base_hashes = {}
  def GenerateDiff(self, extra_args):
    """Return the git diff rewritten into an svn-like format.
    Each file is introduced by an "Index:" line, and the left-hand ("base")
    blob hash of every file is recorded in self.base_hashes so the base
    contents can be fetched later with "git show".
    """
    if self.options.revision:
      extra_args = [self.options.revision] + extra_args
    raw_diff = RunShell(["git", "diff", "--full-index"] + extra_args)
    converted = []
    num_files = 0
    current_file = None
    for line in raw_diff.splitlines():
      header = re.match(r"diff --git a/(.*) b/.*$", line)
      if header:
        # Replace git's per-file header with an svn-style Index: line.
        num_files += 1
        current_file = header.group(1)
        converted.append("Index: %s\n" % current_file)
        continue
      # The "index" line in a git diff looks like this (long hashes elided):
      #   index 82c0d44..b2cee3f 100755
      # The left hash identifies the base file for the current file.
      index_line = re.match(r"index (\w+)\.\.", line)
      if index_line:
        self.base_hashes[current_file] = index_line.group(1)
      converted.append(line + "\n")
    if not num_files:
      ErrorExit("No valid patches found in output from git diff")
    return "".join(converted)
  def GetUnknownFiles(self):
    """Return files in the working tree that git does not track."""
    listing = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
                       silent_ok=True)
    return listing.splitlines()
  def GetBaseFile(self, filename):
    """Fetch the base version of filename via its recorded blob hash."""
    blob_hash = self.base_hashes[filename]
    new_content = None
    is_binary = False
    if blob_hash == "0" * 40:
      # An all-zero hash means the file did not exist in the base revision.
      return ("", new_content, is_binary, "A")
    base_content, returncode = RunShellWithReturnCode(
        ["git", "show", blob_hash])
    if returncode:
      ErrorExit("Got error status from 'git show %s'" % blob_hash)
    return (base_content, new_content, is_binary, "M")
class MercurialVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Mercurial."""
  def __init__(self, options, repo_dir):
    super(MercurialVCS, self).__init__(options)
    # Absolute path to repository (we can be in a subdir)
    self.repo_dir = os.path.normpath(repo_dir)
    # Compute the subdir
    cwd = os.path.normpath(os.getcwd())
    assert cwd.startswith(self.repo_dir)
    self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
    if self.options.revision:
      self.base_rev = self.options.revision
    else:
      # "hg parent -q" prints "rev:node"; keep the node part — TODO confirm
      # the exact output format across hg versions.
      self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()
  def _GetRelPath(self, filename):
    """Get relative path of a file according to the current directory,
    given its logical path in the repo."""
    assert filename.startswith(self.subdir), filename
    return filename[len(self.subdir):].lstrip(r"\/")
  def GenerateDiff(self, extra_args):
    # If no file specified, restrict to the current subdir
    extra_args = extra_args or ["."]
    cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
    data = RunShell(cmd, silent_ok=True)
    svndiff = []
    filecount = 0
    for line in data.splitlines():
      m = re.match("diff --git a/(\S+) b/(\S+)", line)
      if m:
        # Modify line to make it look like as it comes from svn diff.
        # With this modification no changes on the server side are required
        # to make upload.py work with Mercurial repos.
        # NOTE: for proper handling of moved/copied files, we have to use
        # the second filename.
        filename = m.group(2)
        svndiff.append("Index: %s" % filename)
        svndiff.append("=" * 67)
        filecount += 1
        logging.info(line)
      else:
        svndiff.append(line)
    if not filecount:
      ErrorExit("No valid patches found in output from hg diff")
    return "\n".join(svndiff) + "\n"
  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    # NOTE(review): 'args' is unused here.
    args = []
    status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
                      silent_ok=True)
    unknown_files = []
    for line in status.splitlines():
      st, fn = line.split(" ", 1)
      if st == "?":
        unknown_files.append(fn)
    return unknown_files
  def GetBaseFile(self, filename):
    # "hg status" and "hg cat" both take a path relative to the current subdir
    # rather than to the repo root, but "hg diff" has given us the full path
    # to the repo root.
    base_content = ""
    new_content = None
    is_binary = False
    oldrelpath = relpath = self._GetRelPath(filename)
    # "hg status -C" returns two lines for moved/copied files, one otherwise
    out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
    out = out.splitlines()
    # HACK: strip error message about missing file/directory if it isn't in
    # the working copy
    if out[0].startswith('%s: ' % relpath):
      out = out[1:]
    if len(out) > 1:
      # Moved/copied => considered as modified, use old filename to
      # retrieve base contents
      oldrelpath = out[1].strip()
      status = "M"
    else:
      status, _ = out[0].split(' ', 1)
    if status != "A":
      # Anything but a pure addition has base contents in the base revision.
      base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
                              silent_ok=True)
      is_binary = "\0" in base_content  # Mercurial's heuristic
    if status != "R":
      new_content = open(relpath, "rb").read()
      is_binary = is_binary or "\0" in new_content
    if is_binary and base_content:
      # Fetch again without converting newlines
      base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
                              silent_ok=True, universal_newlines=False)
    if not is_binary or not self.IsImage(relpath):
      # Only binary images need the current content uploaded (see
      # GetBaseFile's contract in VersionControlSystem).
      new_content = None
    return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
  """Splits a patch into separate pieces for each file.

  Args:
    data: A string containing the output of svn diff.

  Returns:
    A list of 2-tuple (filename, text) where text is the svn diff output
      pertaining to filename.
  """
  patches = []
  filename = None
  diff = []
  for line in data.splitlines(True):
    new_filename = None
    if line.startswith('Index:'):
      # An "Index: <path>" line opens the diff for a new file.
      unused, new_filename = line.split(':', 1)
      new_filename = new_filename.strip()
    elif line.startswith('Property changes on:'):
      unused, temp_filename = line.split(':', 1)
      # When a file is modified, paths use '/' between directories, however
      # when a property is modified '\' is used on Windows. Make them the same
      # otherwise the file shows up twice.
      temp_filename = temp_filename.strip().replace('\\', '/')
      if temp_filename != filename:
        # File has property changes but no modifications, create a new diff.
        new_filename = temp_filename
    if new_filename:
      # Starting a new file: flush the accumulated diff of the previous one.
      if filename and diff:
        patches.append((filename, ''.join(diff)))
      filename = new_filename
      diff = [line]
      continue
    if diff is not None:
      diff.append(line)
  # Flush the trailing file, if any.
  if filename and diff:
    patches.append((filename, ''.join(diff)))
  return patches
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
  """Uploads a separate patch for each file in the diff output.

  Returns a list of [patch_key, filename] for each file.
  """
  patches = SplitPatch(data)
  rv = []
  for patch in patches:
    # Skip files whose individual patch exceeds the server's upload limit.
    if len(patch[1]) > MAX_UPLOAD_SIZE:
      print ("Not uploading the patch for " + patch[0] +
             " because the file is too large.")
      continue
    form_fields = [("filename", patch[0])]
    if not options.download_base:
      form_fields.append(("content_upload", "1"))
    files = [("data", "data.diff", patch[1])]
    ctype, body = EncodeMultipartFormData(form_fields, files)
    url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
    print "Uploading patch for " + patch[0]
    response_body = rpc_server.Send(url, body, content_type=ctype)
    # On success the server replies "OK\n<patch key>".
    lines = response_body.splitlines()
    if not lines or lines[0] != "OK":
      StatusUpdate(" --> %s" % response_body)
      sys.exit(1)
    rv.append([lines[1], patch[0]])
  return rv
def GuessVCS(options):
  """Helper to guess the version control system.

  This examines the current directory, guesses which VersionControlSystem
  we're using, and returns an instance of the appropriate class. Exit with an
  error if we can't figure it out.

  Returns:
    A VersionControlSystem instance. Exits if the VCS can't be guessed.
  """
  # Mercurial has a command to get the base directory of a repository
  # Try running it, but don't die if we don't have hg installed.
  # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
  try:
    out, returncode = RunShellWithReturnCode(["hg", "root"])
    if returncode == 0:
      return MercurialVCS(options, out.strip())
  except OSError, (errno, message):
    if errno != 2: # ENOENT -- they don't have hg installed.
      raise
  # Subversion has a .svn in all working directories.
  if os.path.isdir('.svn'):
    logging.info("Guessed VCS = Subversion")
    return SubversionVCS(options)
  # Git has a command to test if you're in a git tree.
  # Try running it, but don't die if we don't have git installed.
  try:
    out, returncode = RunShellWithReturnCode(["git", "rev-parse",
                                              "--is-inside-work-tree"])
    if returncode == 0:
      return GitVCS(options)
  except OSError, (errno, message):
    if errno != 2: # ENOENT -- they don't have git installed.
      raise
  ErrorExit(("Could not guess version control system. "
             "Are you in a working copy directory?"))
def RealMain(argv, data=None):
  """The real main function.

  Args:
    argv: Command line arguments.
    data: Diff contents. If None (default) the diff is generated by
      the VersionControlSystem implementation returned by GuessVCS().

  Returns:
    A 2-tuple (issue id, patchset id).
    The patchset id is None if the base files are not uploaded by this
    script (applies only to SVN checkouts).
  """
  logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
                              "%(lineno)s %(message)s "))
  # Force a predictable locale so VCS command output parses reliably.
  os.environ['LC_ALL'] = 'C'
  options, args = parser.parse_args(argv[1:])
  global verbosity
  verbosity = options.verbose
  if verbosity >= 3:
    logging.getLogger().setLevel(logging.DEBUG)
  elif verbosity >= 2:
    logging.getLogger().setLevel(logging.INFO)
  vcs = GuessVCS(options)
  if isinstance(vcs, SubversionVCS):
    # base field is only allowed for Subversion.
    # Note: Fetching base files may become deprecated in future releases.
    base = vcs.GuessBase(options.download_base)
  else:
    base = None
  # NOTE(review): this assignment looks like a no-op normalization of
  # download_base to a real bool -- confirm against upstream upload.py.
  if not base and options.download_base:
    options.download_base = True
    logging.info("Enabled upload of base file")
  if not options.assume_yes:
    vcs.CheckForUnknownFiles()
  if data is None:
    data = vcs.GenerateDiff(args)
  files = vcs.GetBaseFiles(data)
  if verbosity >= 1:
    print "Upload server:", options.server, "(change with -s/--server)"
  if options.issue:
    prompt = "Message describing this patch set: "
  else:
    prompt = "New issue subject: "
  message = options.message or raw_input(prompt).strip()
  if not message:
    ErrorExit("A non-empty message is required")
  rpc_server = GetRpcServer(options)
  # Build the multipart form describing the issue/patchset.
  form_fields = [("subject", message)]
  if base:
    form_fields.append(("base", base))
  if options.issue:
    form_fields.append(("issue", str(options.issue)))
  if options.email:
    form_fields.append(("user", options.email))
  if options.reviewers:
    # Sanity-check reviewer/cc addresses: exactly one dot in the domain part.
    for reviewer in options.reviewers.split(','):
      if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
        ErrorExit("Invalid email address: %s" % reviewer)
    form_fields.append(("reviewers", options.reviewers))
  if options.cc:
    for cc in options.cc.split(','):
      if "@" in cc and not cc.split("@")[1].count(".") == 1:
        ErrorExit("Invalid email address: %s" % cc)
    form_fields.append(("cc", options.cc))
  description = options.description
  if options.description_file:
    if options.description:
      ErrorExit("Can't specify description and description_file")
    file = open(options.description_file, 'r')
    description = file.read()
    file.close()
  if description:
    form_fields.append(("description", description))
  # Send a hash of all the base file so the server can determine if a copy
  # already exists in an earlier patchset.
  base_hashes = ""
  for file, info in files.iteritems():
    if not info[0] is None:
      checksum = md5.new(info[0]).hexdigest()
      if base_hashes:
        base_hashes += "|"
      base_hashes += checksum + ":" + file
  form_fields.append(("base_hashes", base_hashes))
  # If we're uploading base files, don't send the email before the uploads, so
  # that it contains the file status.
  if options.send_mail and options.download_base:
    form_fields.append(("send_mail", "1"))
  if not options.download_base:
    form_fields.append(("content_upload", "1"))
  if len(data) > MAX_UPLOAD_SIZE:
    print "Patch is large, so uploading file patches separately."
    uploaded_diff_file = []
    form_fields.append(("separate_patches", "1"))
  else:
    uploaded_diff_file = [("data", "data.diff", data)]
  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
  response_body = rpc_server.Send("/upload", body, content_type=ctype)
  patchset = None
  if not options.download_base or not uploaded_diff_file:
    # Response format: "<message>\n<patchset id>\n<patch key> <filename>..."
    lines = response_body.splitlines()
    if len(lines) >= 2:
      msg = lines[0]
      patchset = lines[1].strip()
      patches = [x.split(" ", 1) for x in lines[2:]]
    else:
      msg = response_body
  else:
    msg = response_body
  StatusUpdate(msg)
  if not response_body.startswith("Issue created.") and \
     not response_body.startswith("Issue updated."):
    sys.exit(0)
  # The issue id is the last path component of the URL in the message.
  issue = msg[msg.rfind("/")+1:]
  if not uploaded_diff_file:
    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
    if not options.download_base:
      patches = result
  if not options.download_base:
    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
  if options.send_mail:
    rpc_server.Send("/" + issue + "/mail", payload="")
  return issue, patchset
def main():
  """Command-line entry point: run RealMain, exiting with status 1 on Ctrl-C."""
  try:
    RealMain(sys.argv)
  except KeyboardInterrupt:
    print
    StatusUpdate("Interrupted.")
    sys.exit(1)
if __name__ == "__main__":
  main()
| bsd-3-clause |
d-das/pyethapp | examples/importblock.py | 4 | 1976 | from pyethapp.leveldb_service import LevelDB
from pyethapp.config import default_data_dir
from pyethapp.eth_protocol import TransientBlock
from ethereum.chain import Chain
from ethereum.slogging import configure
import rlp
import os
configure(':trace')
def get_chain(data_dir=default_data_dir):
    """
    returns an ethereum.chain.Chain instance
    """
    leveldb_path = os.path.join(data_dir, 'leveldb')
    return Chain(LevelDB(leveldb_path))
# block # 447361 / 0x9c496f3bdfd428d19c8ae87fc8f653cac3278e0b07528bd0a065b1878dc56ca6
rlp_data = """f90201f901fca03a3ce492dd9865a0baeafbf4df006aff84e0b4ae39c8fbecfd994f9a7c0af748a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d493479482b27dbe07d34fb96309d2306e2729b6c5d155ffa0b9e10a7b297a3e5fb57bb222dc1b5df6211457271a37ebe2710b63da47d7904aa056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000850177ffb17e8306d381832fefd880845564a4f280a089a6165f905f48436103b7a6ee6c216530029e01378883672d2ccb46e3ce478f888b11ce88aed597f2c0c0""".decode('hex')
def import_block(chain, rlp_data):
    """Lazily decode an RLP-encoded block and write it into chain.db."""
    lazy_block = rlp.decode_lazy(rlp_data)
    TransientBlock(lazy_block, 0).to_block(chain.db)
if __name__ == '__main__':
    chain = get_chain()
    print '\nIMPORTING BLOCK'
    # h = chain.index.get_block_by_number(447360)
    # b = chain.get(h)
    # rlp_data = rlp.encode(b)
    # Import the hard-coded sample block (rlp_data above) into the local chain.
    import_block(chain, rlp_data)
| mit |
za-creature/puls | puls/htmlcompress.py | 1 | 6349 | # -*- coding: utf-8 -*-
"""
jinja2htmlcompress
~~~~~~~~~~~~~~~~~~
A Jinja2 extension that eliminates useless whitespace at template
compilation time without extra overhead.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import re
from jinja2.ext import Extension
from jinja2.lexer import Token, describe_token
from jinja2 import TemplateSyntaxError
_tag_re = re.compile(r'(?:<(/?)([a-zA-Z0-9_-]+)\s*|(>\s*))(?s)')
_ws_normalize_re = re.compile(r'[ \t\r\n]+')
class StreamProcessContext(object):
    """Mutable processing state: the token stream being filtered, the token
    currently under inspection, and the stack of open HTML tags."""

    def __init__(self, stream):
        self.stream, self.token, self.stack = stream, None, []

    def fail(self, message):
        """Raise a TemplateSyntaxError positioned at the current token."""
        raise TemplateSyntaxError(message, self.token.lineno,
                                  self.stream.name, self.stream.filename)
def _make_dict_from_listing(listing):
rv = {}
for keys, value in listing:
for key in keys:
rv[key] = value
return rv
class HTMLCompress(Extension):
    """Jinja2 extension that strips redundant whitespace from template HTML
    at compile time by rewriting ``data`` tokens in the token stream.

    A stack of open tags (with HTML implicit-close rules) tracks position in
    the document; content inside isolated elements such as <script>, <style>,
    <noscript> and <textarea> is left untouched.
    """
    isolated_elements = set(['script', 'style', 'noscript', 'textarea'])
    void_elements = set(['br', 'img', 'area', 'hr', 'param', 'input',
                         'embed', 'col'])
    block_elements = set(['div', 'p', 'form', 'ul', 'ol', 'li', 'table', 'tr',
                          'tbody', 'thead', 'tfoot', 'tr', 'td', 'th', 'dl',
                          'dt', 'dd', 'blockquote', 'h1', 'h2', 'h3', 'h4',
                          'h5', 'h6', 'pre'])
    # Maps a tag to the set of open tags it implicitly closes ('#block'
    # stands for any member of block_elements).
    breaking_rules = _make_dict_from_listing([
        (['p'], set(['#block'])),
        (['li'], set(['li'])),
        (['td', 'th'], set(['td', 'th', 'tr', 'tbody', 'thead', 'tfoot'])),
        (['tr'], set(['tr', 'tbody', 'thead', 'tfoot'])),
        (['thead', 'tbody', 'tfoot'], set(['thead', 'tbody', 'tfoot'])),
        (['dd', 'dt'], set(['dl', 'dt', 'dd']))
    ])

    def is_isolated(self, stack):
        """Return True if any currently-open tag forbids normalization."""
        for tag in reversed(stack):
            if tag in self.isolated_elements:
                return True
        return False

    def is_breaking(self, tag, other_tag):
        """Return True if opening *tag* implicitly closes *other_tag*."""
        breaking = self.breaking_rules.get(other_tag)
        return breaking and (tag in breaking or
            ('#block' in breaking and tag in self.block_elements))

    def enter_tag(self, tag, ctx):
        """Push *tag*, first closing any tags it implicitly breaks."""
        while ctx.stack and self.is_breaking(tag, ctx.stack[-1]):
            self.leave_tag(ctx.stack[-1], ctx)
        if tag not in self.void_elements:
            ctx.stack.append(tag)

    def leave_tag(self, tag, ctx):
        """Pop *tag* from the stack, unwinding implicitly-closed tags."""
        if not ctx.stack:
            ctx.fail('Tried to leave "%s" but something closed '
                     'it already' % tag)
        if tag == ctx.stack[-1]:
            ctx.stack.pop()
            return
        for idx, other_tag in enumerate(reversed(ctx.stack)):
            if other_tag == tag:
                # Pop the tag itself plus everything it implicitly closed.
                # FIX: range() replaces the Python-2-only xrange() (equivalent
                # for iteration on Python 2 as well).
                for num in range(idx + 1):
                    ctx.stack.pop()
            elif not self.breaking_rules.get(other_tag):
                break

    def normalize(self, ctx):
        """Collapse whitespace in the current data token, updating the tag
        stack as tags are encountered; return the rewritten token value."""
        pos = 0
        buffer = []
        def write_data(value):
            # Only collapse runs of whitespace outside isolated elements.
            if not self.is_isolated(ctx.stack):
                value = _ws_normalize_re.sub(' ', value)
            buffer.append(value)
        for match in _tag_re.finditer(ctx.token.value):
            closes, tag, sole = match.groups()
            preamble = ctx.token.value[pos:match.start()]
            write_data(preamble)
            if sole:
                # A bare '>' plus trailing whitespace: treat as data.
                write_data(sole)
            else:
                buffer.append(match.group())
                (closes and self.leave_tag or self.enter_tag)(tag, ctx)
            pos = match.end()
        write_data(ctx.token.value[pos:])
        return u''.join(buffer)

    def filter_stream(self, stream):
        """Yield the token stream with every data token normalized."""
        ctx = StreamProcessContext(stream)
        for token in stream:
            if token.type != 'data':
                yield token
                continue
            ctx.token = token
            value = self.normalize(ctx)
            yield Token(token.lineno, 'data', value)
class SelectiveHTMLCompress(HTMLCompress):
    """Variant of HTMLCompress that only normalizes whitespace between
    explicit ``{% strip %}`` / ``{% endstrip %}`` template tags."""

    def filter_stream(self, stream):
        """Yield the token stream, tracking {% strip %} nesting depth and
        normalizing data tokens only while inside at least one block."""
        ctx = StreamProcessContext(stream)
        strip_depth = 0
        while 1:
            if stream.current.type == 'block_begin':
                if stream.look().test('name:strip') or \
                   stream.look().test('name:endstrip'):
                    stream.skip()
                    if stream.current.value == 'strip':
                        strip_depth += 1
                    else:
                        strip_depth -= 1
                        if strip_depth < 0:
                            ctx.fail('Unexpected tag endstrip')
                    stream.skip()
                    # The strip/endstrip tags take no arguments.
                    if stream.current.type != 'block_end':
                        ctx.fail('expected end of block, got %s' %
                                 describe_token(stream.current))
                    stream.skip()
            if strip_depth > 0 and stream.current.type == 'data':
                ctx.token = stream.current
                value = self.normalize(ctx)
                yield Token(stream.current.lineno, 'data', value)
            else:
                yield stream.current
            # FIX: builtin next() works on both Python 2 and 3; the old
            # stream.next() call is Python-2-only.
            next(stream)
def test():
    """Manual smoke test: render sample templates with both extensions and
    print the compressed output (run via ``python htmlcompress.py``)."""
    from jinja2 import Environment
    env = Environment(extensions=[HTMLCompress])
    tmpl = env.from_string('''
    <html>
      <head>
        <title>{{ title }}</title>
      </head>
    <script type=text/javascript>
      if (foo < 42) {
        document.write('Foo < Bar');
      }
    </script>
    <body>
      <li><a href="{{ href }}">{{ title }}</a><br>Test Foo
      <li><a href="{{ href }}">{{ title }}</a><img src=test.png>
    </body>
    </html>
    ''')
    print(tmpl.render(title=42, href='index.html'))
    env = Environment(extensions=[SelectiveHTMLCompress])
    tmpl = env.from_string('''
    Normal <span> unchanged </span> stuff
    {% strip %}Stripped <span class=foo > test </span>
    <a href="foo"> test </a> {{ foo }}
    Normal <stuff> again {{ foo }} </stuff>
    <p>
      Foo<br>Bar
      Baz
    <p>
      Moep <span>Test</span> Moep
    </p>
    {% endstrip %}
    ''')
    print(tmpl.render(foo=42))
if __name__ == '__main__':
    test()
| mit |
clstl/servo | components/script/dom/bindings/codegen/parser/tests/test_conditional_dictionary_member.py | 120 | 3162 | def WebIDLTest(parser, harness):
parser.parse("""
dictionary Dict {
any foo;
[ChromeOnly] any bar;
};
""")
results = parser.finish()
harness.check(len(results), 1, "Should have a dictionary")
members = results[0].members;
harness.check(len(members), 2, "Should have two members")
# Note that members are ordered lexicographically, so "bar" comes
# before "foo".
harness.ok(members[0].getExtendedAttribute("ChromeOnly"),
"First member is not ChromeOnly")
harness.ok(not members[1].getExtendedAttribute("ChromeOnly"),
"Second member is ChromeOnly")
parser = parser.reset()
parser.parse("""
dictionary Dict {
any foo;
any bar;
};
interface Iface {
[Constant, Cached] readonly attribute Dict dict;
};
""")
results = parser.finish()
harness.check(len(results), 2, "Should have a dictionary and an interface")
parser = parser.reset()
exception = None
try:
parser.parse("""
dictionary Dict {
any foo;
[ChromeOnly] any bar;
};
interface Iface {
[Constant, Cached] readonly attribute Dict dict;
};
""")
results = parser.finish()
except Exception, exception:
pass
harness.ok(exception, "Should have thrown.")
harness.check(exception.message,
"[Cached] and [StoreInSlot] must not be used on an attribute "
"whose type contains a [ChromeOnly] dictionary member",
"Should have thrown the right exception")
parser = parser.reset()
exception = None
try:
parser.parse("""
dictionary ParentDict {
[ChromeOnly] any bar;
};
dictionary Dict : ParentDict {
any foo;
};
interface Iface {
[Constant, Cached] readonly attribute Dict dict;
};
""")
results = parser.finish()
except Exception, exception:
pass
harness.ok(exception, "Should have thrown (2).")
harness.check(exception.message,
"[Cached] and [StoreInSlot] must not be used on an attribute "
"whose type contains a [ChromeOnly] dictionary member",
"Should have thrown the right exception (2)")
parser = parser.reset()
exception = None
try:
parser.parse("""
dictionary GrandParentDict {
[ChromeOnly] any baz;
};
dictionary ParentDict : GrandParentDict {
any bar;
};
dictionary Dict : ParentDict {
any foo;
};
interface Iface {
[Constant, Cached] readonly attribute Dict dict;
};
""")
results = parser.finish()
except Exception, exception:
pass
harness.ok(exception, "Should have thrown (3).")
harness.check(exception.message,
"[Cached] and [StoreInSlot] must not be used on an attribute "
"whose type contains a [ChromeOnly] dictionary member",
"Should have thrown the right exception (3)")
| mpl-2.0 |
rafalo1333/kivy | kivy/uix/anchorlayout.py | 22 | 3608 | '''
Anchor Layout
=============
.. only:: html
.. image:: images/anchorlayout.gif
:align: right
.. only:: latex
.. image:: images/anchorlayout.png
:align: right
The :class:`AnchorLayout` aligns its children to a border (top, bottom,
left, right) or center.
To draw a button in the lower-right corner::
layout = AnchorLayout(
anchor_x='right', anchor_y='bottom')
btn = Button(text='Hello World')
layout.add_widget(btn)
'''
__all__ = ('AnchorLayout', )
from kivy.uix.layout import Layout
from kivy.properties import OptionProperty, VariableListProperty
class AnchorLayout(Layout):
    '''Anchor layout class. See the module documentation for more information.
    '''
    padding = VariableListProperty([0, 0, 0, 0])
    '''Padding between the widget box and its children, in pixels:
    [padding_left, padding_top, padding_right, padding_bottom].
    padding also accepts a two argument form [padding_horizontal,
    padding_vertical] and a one argument form [padding].
    :attr:`padding` is a :class:`~kivy.properties.VariableListProperty` and
    defaults to [0, 0, 0, 0].
    '''
    anchor_x = OptionProperty('center', options=(
        'left', 'center', 'right'))
    '''Horizontal anchor.
    :attr:`anchor_x` is an :class:`~kivy.properties.OptionProperty` and
    defaults to 'center'. It accepts values of 'left', 'center' or
    'right'.
    '''
    anchor_y = OptionProperty('center', options=(
        'top', 'center', 'bottom'))
    '''Vertical anchor.
    :attr:`anchor_y` is an :class:`~kivy.properties.OptionProperty` and
    defaults to 'center'. It accepts values of 'top', 'center' or
    'bottom'.
    '''
    def __init__(self, **kwargs):
        super(AnchorLayout, self).__init__(**kwargs)
        fbind = self.fbind
        update = self._trigger_layout
        # Re-run the layout whenever anything affecting placement changes.
        fbind('children', update)
        fbind('parent', update)
        fbind('padding', update)
        fbind('anchor_x', update)
        fbind('anchor_y', update)
        fbind('size', update)
        fbind('pos', update)
    def do_layout(self, *largs):
        # Position and size every child according to the anchors, the padding
        # and the child's size_hint / size_hint_min / size_hint_max.
        _x, _y = self.pos
        width = self.width
        height = self.height
        anchor_x = self.anchor_x
        anchor_y = self.anchor_y
        pad_left, pad_top, pad_right, pad_bottom = self.padding
        for c in self.children:
            x, y = _x, _y
            cw, ch = c.size
            shw, shh = c.size_hint
            shw_min, shh_min = c.size_hint_min
            shw_max, shh_max = c.size_hint_max
            if shw is not None:
                # size_hint is relative to the padded box; clamp to min/max.
                cw = shw * (width - pad_left - pad_right)
                if shw_min is not None and cw < shw_min:
                    cw = shw_min
                elif shw_max is not None and cw > shw_max:
                    cw = shw_max
            if shh is not None:
                ch = shh * (height - pad_top - pad_bottom)
                if shh_min is not None and ch < shh_min:
                    ch = shh_min
                elif shh_max is not None and ch > shh_max:
                    ch = shh_max
            if anchor_x == 'left':
                x = x + pad_left
            elif anchor_x == 'right':
                x = x + width - (cw + pad_right)
            else:
                # Center within the padded box.
                x = x + (width - pad_right + pad_left - cw) / 2
            if anchor_y == 'bottom':
                y = y + pad_bottom
            elif anchor_y == 'top':
                y = y + height - (ch + pad_top)
            else:
                y = y + (height - pad_top + pad_bottom - ch) / 2
            c.pos = x, y
            c.size = cw, ch
| mit |
lz1988/django-web | build/lib/django/contrib/localflavor/hr/forms.py | 100 | 9127 | # -*- coding: utf-8 -*-
"""
HR-specific Form helpers
"""
from __future__ import absolute_import, unicode_literals
import datetime
import re
from django.contrib.localflavor.hr.hr_choices import (
HR_LICENSE_PLATE_PREFIX_CHOICES, HR_COUNTY_CHOICES,
HR_PHONE_NUMBER_PREFIX_CHOICES)
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, Select, RegexField
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
jmbg_re = re.compile(r'^(?P<dd>\d{2})(?P<mm>\d{2})(?P<yyy>\d{3})' + \
r'(?P<rr>\d{2})(?P<bbb>\d{3})(?P<k>\d{1})$')
oib_re = re.compile(r'^\d{11}$')
plate_re = re.compile(r'^(?P<prefix>[A-ZČŠŽ]{2})' + \
r'(?P<number>\d{3,4})(?P<suffix>[ABCDEFGHIJKLMNOPRSTUVZ]{1,2})$')
postal_code_re = re.compile(r'^\d{5}$')
phone_re = re.compile(r'^(\+385|00385|0)(?P<prefix>\d{2})(?P<number>\d{6,7})$')
jmbag_re = re.compile(r'^601983(?P<copy>\d{1})1(?P<jmbag>\d{10})(?P<k>\d{1})$')
class HRCountySelect(Select):
    """
    A Select widget that uses a list of counties of Croatia as its choices.

    Choices come from hr_choices.HR_COUNTY_CHOICES.
    """
    def __init__(self, attrs=None):
        super(HRCountySelect, self).__init__(attrs, choices=HR_COUNTY_CHOICES)
class HRLicensePlatePrefixSelect(Select):
    """
    A Select widget that uses a list of vehicle license plate prefixes of
    Croatia as its choices.

    Choices come from hr_choices.HR_LICENSE_PLATE_PREFIX_CHOICES.
    """
    def __init__(self, attrs=None):
        super(HRLicensePlatePrefixSelect, self).__init__(attrs,
            choices=HR_LICENSE_PLATE_PREFIX_CHOICES)
class HRPhoneNumberPrefixSelect(Select):
    """
    A Select widget that uses a list of phone number prefixes of Croatia as its
    choices.

    Choices come from hr_choices.HR_PHONE_NUMBER_PREFIX_CHOICES.
    """
    def __init__(self, attrs=None):
        super(HRPhoneNumberPrefixSelect, self).__init__(attrs,
            choices=HR_PHONE_NUMBER_PREFIX_CHOICES)
class HRJMBGField(Field):
    """
    Unique Master Citizen Number (JMBG) field.

    The number is still in use in Croatia, but it is being replaced by OIB.

    Source: http://en.wikipedia.org/wiki/Unique_Master_Citizen_Number

    For who might be reimplementing:
    The "area" regular expression group is used to calculate the region where a
    person was registered. Additional validation can be implemented in
    accordance with it, however this could result in exclusion of legit
    immigrated citizens. Therefore, this field works for any ex-Yugoslavia
    country.
    """
    default_error_messages = {
        'invalid': _('Enter a valid 13 digit JMBG'),
        'date': _('Error in date segment'),
    }
    def clean(self, value):
        """Validate the 13-digit JMBG and return it as a string ('' if empty)."""
        super(HRJMBGField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        value = value.strip()
        matches = jmbg_re.search(value)
        if matches is None:
            raise ValidationError(self.error_messages['invalid'])
        # Make sure the date part is correct.
        # Note: yyy is only a 3-digit year fragment, so this check mainly
        # rejects impossible day/month combinations.
        dd = int(matches.group('dd'))
        mm = int(matches.group('mm'))
        yyy = int(matches.group('yyy'))
        try:
            datetime.date(yyy, mm, dd)
        except ValueError:
            raise ValidationError(self.error_messages['date'])
        # Validate checksum.
        # Weighted sum over symmetric digit pairs; k is the check digit.
        k = matches.group('k')
        checksum = 0
        for i, j in zip(range(7, 1, -1), range(6)):
            checksum += i * (int(value[j]) + int(value[13 - i]))
        m = 11 - checksum % 11
        if m == 10:
            raise ValidationError(self.error_messages['invalid'])
        if m == 11 and k != '0':
            raise ValidationError(self.error_messages['invalid'])
        if not str(m) == k:
            raise ValidationError(self.error_messages['invalid'])
        return '%s' % (value, )
class HROIBField(RegexField):
    """
    Personal Identification Number of Croatia (OIB) field.

    Accepts exactly 11 digits (enforced by the regex and the length limits).

    http://www.oib.hr/
    """
    default_error_messages = {
        'invalid': _('Enter a valid 11 digit OIB'),
    }
    def __init__(self, min_length=11, max_length=11, *args, **kwargs):
        super(HROIBField, self).__init__(r'^\d{11}$',
            min_length, max_length, *args, **kwargs)
    def clean(self, value):
        """Return the validated OIB as a string ('' for empty values)."""
        super(HROIBField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        return '%s' % (value, )
class HRLicensePlateField(Field):
    """
    Vehicle license plate of Croatia field. Normalizes to the specific format
    below. Suffix is constructed from the shared letters of the Croatian and
    English alphabets.

    Format examples:
        SB 123-A
        (but also supports more characters)
        ZG 1234-AA

    Used for standardized license plates only.
    """
    default_error_messages = {
        'invalid': _('Enter a valid vehicle license plate number'),
        'area': _('Enter a valid location code'),
        'number': _('Number part cannot be zero'),
    }
    def clean(self, value):
        """Normalize and validate; returns e.g. 'ZG 1234-AA' ('' if empty)."""
        super(HRLicensePlateField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        # Strip whitespace and dashes and uppercase before matching.
        value = re.sub(r'[\s\-]+', '', smart_text(value.strip())).upper()
        matches = plate_re.search(value)
        if matches is None:
            raise ValidationError(self.error_messages['invalid'])
        # Make sure the prefix is in the list of known codes.
        prefix = matches.group('prefix')
        if prefix not in [choice[0] for choice in HR_LICENSE_PLATE_PREFIX_CHOICES]:
            raise ValidationError(self.error_messages['area'])
        # Make sure the number portion is not zero.
        number = matches.group('number')
        if int(number) == 0:
            raise ValidationError(self.error_messages['number'])
        return '%s %s-%s' % (prefix,number,matches.group('suffix'), )
class HRPostalCodeField(Field):
    """
    Postal code of Croatia field.

    It consists of exactly five digits ranging from 10000 to possibly less than
    60000.

    http://www.posta.hr/main.aspx?id=66
    """
    default_error_messages = {
        'invalid': _('Enter a valid 5 digit postal code'),
    }
    def clean(self, value):
        """Return the validated five-digit code as a string ('' if empty)."""
        super(HRPostalCodeField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        value = value.strip()
        if not postal_code_re.search(value):
            raise ValidationError(self.error_messages['invalid'])
        # Make sure the number is in valid range.
        if not 9999<int(value)<60000:
            raise ValidationError(self.error_messages['invalid'])
        return '%s' % (value, )
class HRPhoneNumberField(Field):
    """
    Phone number of Croatia field.

    Format: Complete country code or leading zero, area code prefix, 6 or 7
    digit number.

    Validates fixed, mobile and FGSM numbers. Normalizes to a full number with
    country code (+385 prefix).
    """
    default_error_messages = {
        'invalid': _('Enter a valid phone number'),
        'area': _('Enter a valid area or mobile network code'),
        # FIX: corrected typo "nubmer" -> "number" in the user-facing message
        # (note: this changes the gettext msgid, so translations need updating).
        'number': _('The phone number is too long'),
    }
    def clean(self, value):
        """Validate *value* and normalize it to '+385<prefix><number>'."""
        super(HRPhoneNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        # Strip common separators (dashes, spaces, parentheses) before matching.
        value = re.sub(r'[\-\s\(\)]', '', smart_text(value))
        matches = phone_re.search(value)
        if matches is None:
            raise ValidationError(self.error_messages['invalid'])
        # Make sure the prefix is in the list of known codes.
        prefix = matches.group('prefix')
        number = matches.group('number')
        if prefix[0] == '1':
            # Zagreb's area code is the single digit '1'; the regex captured
            # two digits, so push the second one back onto the number.
            number = prefix[1] + number
            prefix = prefix[0]
        if prefix not in [choice[0] for choice in HR_PHONE_NUMBER_PREFIX_CHOICES]:
            raise ValidationError(self.error_messages['area'])
        # Make sure the number is of adequate length.
        if prefix == '1' and len(number) != 7:
            raise ValidationError(self.error_messages['number'])
        return '%s%s%s' % ('+385', prefix, number)
class HRJMBAGField(Field):
    """
    Unique Master Academic Citizen Number of Croatia (JMBAG) field.

    This number is used by college students and professors in Croatia.

    http://www.cap.srce.hr/IzgledX.aspx
    """
    default_error_messages = {
        'invalid': _('Enter a valid 19 digit JMBAG starting with 601983'),
        'copy': _('Card issue number cannot be zero'),
    }
    def clean(self, value):
        """Validate the 19-digit JMBAG and return it as a string ('' if empty)."""
        super(HRJMBAGField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        # Strip dashes and whitespace before matching.
        value = re.sub(r'[\-\s]', '', value.strip())
        matches = jmbag_re.search(value)
        if matches is None:
            raise ValidationError(self.error_messages['invalid'])
        # Make sure the issue number is not zero.
        if matches.group('copy')=='0':
            raise ValidationError(self.error_messages['copy'])
        # Validate checksum using Luhn algorithm.
        num = [int(x) for x in value]
        if not sum(num[::-2] + [sum(divmod(d * 2, 10)) for d in num[-2::-2]]) % 10 == 0:
            raise ValidationError(self.error_messages['invalid'])
        return '%s' % (value, )
| apache-2.0 |
melodous/designate | designate/api/v1/extensions/sync.py | 1 | 1777 | # Copyright 2012 Hewlett-Packard Development Company, L.P. All Rights Reserved.
#
# Author: Kiall Mac Innes <kiall@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import flask
from designate.openstack.common import log as logging
from designate.central import rpcapi as central_rpcapi
LOG = logging.getLogger(__name__)
central_api = central_rpcapi.CentralAPI()
blueprint = flask.Blueprint('sync', __name__)
@blueprint.route('/domains/sync', methods=['POST'])
def sync_domains():
    """POST handler: ask the central service to re-sync every domain."""
    context = flask.request.environ.get('context')
    central_api.sync_domains(context)
    return flask.Response(status=200)
@blueprint.route('/domains/<uuid:domain_id>/sync', methods=['POST'])
def sync_domain(domain_id):
    """POST handler: ask the central service to re-sync a single domain."""
    context = flask.request.environ.get('context')
    central_api.sync_domain(context, domain_id)
    return flask.Response(status=200)
@blueprint.route('/domains/<uuid:domain_id>/records/<uuid:record_id>/sync',
                 methods=['POST'])
def sync_record(domain_id, record_id):
    """POST handler: ask the central service to re-sync a single record."""
    context = flask.request.environ.get('context')
    # The central API needs the record's recordset id, which the URL does not
    # carry, so look the record up first.
    record = central_api.find_record(context, {'id': record_id})
    central_api.sync_record(context, domain_id, record['recordset_id'],
                            record_id)
    return flask.Response(status=200)
| apache-2.0 |
SCPR/calif-earthquakes | migrations/versions/30a0d4b22212_.py | 1 | 1081 | """empty message
Revision ID: 30a0d4b22212
Revises: 55464a272758
Create Date: 2014-06-27 10:39:14.030414
"""
# revision identifiers, used by Alembic.
revision = '30a0d4b22212'
down_revision = '55464a272758'
from alembic import op
import sqlalchemy as sa
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Forward migration: drop four secondary indexes on the earthquake table.
    op.drop_index('index_on_code', 'earthquake')
    op.drop_index('index_on_date_time', 'earthquake')
    op.drop_index('index_on_date_time_raw', 'earthquake')
    op.drop_index('index_on_date_time_raw_and_mag', 'earthquake')
    ### end Alembic commands ###
def downgrade():
    """Re-create the indexes removed by upgrade(), in the original order."""
    recreate = (
        ('index_on_date_time_raw_and_mag', [u'date_time_raw', u'mag']),
        ('index_on_date_time_raw', [u'date_time_raw']),
        ('index_on_date_time', [u'date_time']),
        ('index_on_code', [u'code']),
    )
    for name, columns in recreate:
        op.create_index(name, 'earthquake', columns, unique=False)
| gpl-2.0 |
Cognexa/cxflow | cxflow/entry_point.py | 1 | 3175 | """
This module is **cxflow** framework entry point.
The entry point shall be accessed from command line via `cxflow` command.
At the moment **cxflow** allows to
- train a model with ``cxflow train ...``
- resume training with ``cxflow resume ...``
- generate model predictions with ``cxflow predict ...``
- invoke dataset method with ``cxflow dataset <method> ...``
Run `cxflow -h` for details.
"""
import logging
import os
import sys
from cxflow.cli import train, resume, predict, evaluate, grid_search, get_cxflow_arg_parser, invoke_dataset_method, \
list_train_dirs
from cxflow.cli.prune import prune_train_dirs
from .constants import CXF_LOG_FORMAT, CXF_LOG_DATE_FORMAT
def entry_point() -> None:
    """**cxflow** entry point.

    Parses the command line, configures root logging to STDERR, and
    dispatches to the sub-command handler (train / resume / predict /
    eval / dataset / gridsearch / ls / prune).  Unknown CLI arguments
    are forwarded to the handlers as ``cl_arguments``.
    """
    # make sure the path contains the current working directory
    sys.path.insert(0, os.getcwd())
    parser = get_cxflow_arg_parser(True)
    # parse CLI arguments
    known_args, unknown_args = parser.parse_known_args()
    # show help if no subcommand was specified.
    if not hasattr(known_args, 'subcommand'):
        parser.print_help()
        quit(1)
    # set up global logger
    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG if known_args.verbose else logging.INFO)
    logger.handlers = [] # remove default handlers
    # set up STDERR handler
    stderr_handler = logging.StreamHandler(sys.stderr)
    stderr_handler.setFormatter(logging.Formatter(CXF_LOG_FORMAT, datefmt=CXF_LOG_DATE_FORMAT))
    logger.addHandler(stderr_handler)
    # Dispatch: each branch delegates to a handler imported from cxflow.cli.
    if known_args.subcommand == 'train':
        train(config_path=known_args.config_file, cl_arguments=unknown_args, output_root=known_args.output_root)
    elif known_args.subcommand == 'resume':
        resume(config_path=known_args.config_path, restore_from=known_args.restore_from, cl_arguments=unknown_args,
               output_root=known_args.output_root)
    elif known_args.subcommand == 'predict':
        # kept for backwards compatibility; superseded by ``cxflow eval predict``
        logging.warning('Predict command is deprecated and will be removed, use ``cxflow eval predict ...`` instead')
        predict(config_path=known_args.config_path, restore_from=known_args.restore_from, cl_arguments=unknown_args,
                output_root=known_args.output_root)
    elif known_args.subcommand == 'eval':
        evaluate(model_path=known_args.model_path, stream_name=known_args.stream_name,
                 config_path=known_args.config, cl_arguments=unknown_args, output_root=known_args.output_root)
    elif known_args.subcommand == 'dataset':
        invoke_dataset_method(config_path=known_args.config_file, method_name=known_args.method,
                              cl_arguments=unknown_args, output_root=known_args.output_root)
    elif known_args.subcommand == 'gridsearch':
        grid_search(script=known_args.script, params=known_args.params, dry_run=known_args.dry_run)
    elif known_args.subcommand == 'ls':
        list_train_dirs(known_args.dir, known_args.recursive, known_args.all, known_args.long, known_args.verbose)
    elif known_args.subcommand == 'prune':
        prune_train_dirs(known_args.dir, known_args.epochs, known_args.subdirs)
# Allow running this module directly as well as via the installed script.
if __name__ == '__main__':
    entry_point()
| mit |
ybayle/ReproducibleResearchIEEE2017 | src/bayle.py | 1 | 21016 | # -*- coding: utf-8 -*-
#!/usr/bin/python
#
# Author Yann Bayle
# E-mail bayle.yann@live.fr
# License MIT
# Created 01/12/2016
# Updated 01/12/2016
# Version 1.0.0
#
"""
Description of bayle.py
======================
0 Input the local extracted features from YAAFE
13 MFCC per frame
186 musical pieces as train set
1 Computes delta and double delta (39 features per frame)
2 Gather global mean (39 features per musical pieces)
3 train on mfcc & deltas (39 feat/frame) to output global predictions
4 Use global preds to compute song and instru n-grams and histogramm
which add 70 feat/track
lead to a total of 109 feat/track
5 Fit on 109x186
6 predict (or predict_proba) on 41491 track
:Example:
source activate py27
ipython
run bayle.py -d /media/sf_github/yann/train/
..todo::
"""
import multiprocessing
import webbrowser
import utils
import numpy as np
from sklearn.svm import SVC
from sklearn import linear_model
import sys
from functools import partial
import time
from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score
import time
import numpy as np
import matplotlib.pyplot as plt
import math
import re
import os
import sys
import csv
import time
import utils
import argparse
from datetime import date
from collections import Counter
from matplotlib.cm import ScalarMappable
from matplotlib.colors import Normalize
from matplotlib.colorbar import ColorbarBase
import matplotlib.pyplot as plt
import numpy as np
import joblib
from sklearn.ensemble import RandomForestClassifier
import librosa
import os
import sys
import json
import math
import utils
import random
import joblib
from pprint import pprint
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import precision_score, recall_score, f1_score
from sklearn.model_selection import train_test_split, StratifiedKFold
from sklearn.neural_network import MLPClassifier
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import RBF
from sklearn import datasets
from sklearn import svm
from sklearn.ensemble import RandomForestClassifier
from sklearn.cross_validation import KFold, cross_val_score
from statistics import mean, stdev
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, ExtraTreesClassifier, GradientBoostingClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis, LinearDiscriminantAnalysis
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import precision_recall_curve, precision_score, recall_score, classification_report, f1_score, accuracy_score
from sklearn import linear_model
from sklearn.tree import DecisionTreeClassifier
import classify
# import reproduce
def arr2str(data, separator=","):
    """Join the items of *data* into one string, separated by *separator*."""
    return separator.join(map(str, data))
def str2arr(data):
    """Convert a sequence of numeric strings into a float64 numpy array.

    Bug fix: the original used ``np.float``, a deprecated alias removed
    in NumPy 1.24, which raises ``AttributeError`` on modern NumPy.  The
    builtin ``float`` is the documented replacement and is equivalent
    (both map to float64).
    """
    return np.array(data).astype(float)
def read_gts(filename, separator="\t"):
    """Parse a groundtruth file into a ``{track_name: tag}`` dict.

    Each line must look like ``<track><separator><tag>\\n``; the trailing
    newline character of the tag field is stripped (last char removed).
    """
    groundtruths = {}
    with open(filename, "r") as filep:
        for row in filep:
            fields = row.split(separator)
            groundtruths[fields[0]] = fields[1][:-1]
    return groundtruths
def match_feat_with_song_gt(dir_feat, dir_gts):
    """Append per-frame song/instrumental tags to YAAFE feature CSVs.

    Uses the Jamendo segment groundtruth created by Ramona
    (http://www.mathieuramona.com/wp/data/jamendo/) to label each MFCC
    frame in *dir_feat* with ``,s`` (sung) or ``,i`` (instrumental),
    rewriting each feature file in place.

    YAAFE feature definitions: http://yaafe.sourceforge.net/features.html
    """
    utils.print_success("Matching local feat to song/instru groundtruths")
    dir_feat = utils.abs_path_dir(dir_feat)
    dir_gts = utils.abs_path_dir(dir_gts)
    # YAAFE analysis parameters used to map frame index -> time.
    block_size = 1024.
    step_size = 512.
    fech = 22050.
    # NOTE(review): despite the name, 1024/22050 is in SECONDS (~0.046 s),
    # not milliseconds; the groundtruth end times below are also seconds,
    # so the comparison is consistent.
    frame_size_ms = block_size / fech
    filenames = [fn for fn in os.listdir(dir_gts)]
    for index, filename in enumerate(filenames):
        utils.print_progress_start(str(index) + "/" + str(len(filenames)) + " " + filename)
        # gather groundtruths: list of [segment_end_time, ",i\n" or ",s\n"]
        groundtruths = []
        with open(dir_gts + filename, "r") as filep:
            for row in filep:
                line = row.split(" ")
                end = float(line[1])
                # "nosing" segments are instrumental, everything else sung
                if "no" in line[2]:
                    tag = ",i\n"
                else:
                    tag = ",s\n"
                groundtruths.append([end, tag])
        gt_len = len(groundtruths)
        # overflow / cpt are never used below (kept for byte-compat).
        overflow = False
        gt_index = 0
        cpt = 0
        # Write features & groundtruths to file
        str_to_write = ""
        feat_fn = filename.split(".")[0]
        feat_fn += ".wav.mfcc.csv"
        with open(dir_feat + feat_fn, "r") as filep:
            for index, line in enumerate(filep):
                # todo cleanup
                # advance to the next segment once the frame time passes
                # the current segment's end; frames past the last segment
                # are silently dropped.
                if gt_index < gt_len:
                    if frame_size_ms * index > groundtruths[gt_index][0]:
                        gt_index += 1
                    if gt_index < gt_len:
                        str_to_write += line[:-1] + groundtruths[gt_index][1]
        with open(dir_feat + feat_fn, "w") as filep:
            filep.write(str_to_write)
        utils.print_progress_end()
def match_feat_with_instru_gt(indir, outdir):
    """Tag every feature frame as instrumental.

    Applies the "instrumental" groundtruth to the CCMixter and MedleyDB
    local-feature files: every line of every CSV in *indir* is copied to
    *outdir* with `` i`` appended (the final newline is replaced).

    Args:
        indir: directory containing the local feature CSV files.
        outdir: directory where the annotated copies are written.
    """
    utils.print_success("Matching local features to instrumental groundtruths")
    indir = utils.abs_path_dir(indir) + "/"
    outdir = utils.abs_path_dir(outdir) + "/"
    for filename in os.listdir(indir):
        # Bug fix: the original opened the output file without a context
        # manager and leaked the handle if reading raised; ``with`` now
        # guarantees both files are closed.
        with open(indir + filename, "r") as infile, \
                open(outdir + filename, "w") as outfile:
            for line in infile:
                outfile.write(line[:-1] + " i\n")
def process_local_feat(indir, file_gts_track, outdir_local, out_feat_global, train):
    """Augment per-frame MFCCs with delta and double-delta coefficients.

    Reads each 13-MFCC YAAFE CSV in *indir* that has a track groundtruth
    in *file_gts_track*, computes librosa deltas, and writes 39-feature
    frames (plus a per-frame tag) into *outdir_local*.  In train mode the
    frame tag comes from the input file's last column; otherwise
    instrumental tracks get ``,i`` and song tracks are annotated later by
    match_feat_with_song_gt().  *out_feat_global* is currently unused
    (the global-feature write is commented out below).
    """
    utils.print_success("Processing local features")
    # Preprocess arg
    indir = utils.abs_path_dir(indir)
    file_gts_track = utils.abs_path_file(file_gts_track)
    filelist = os.listdir(indir)
    outdir_local = utils.abs_path_dir(outdir_local)
    # Map feature filename -> track-level tag ('s'/'i').
    track_gts = {}
    with open(file_gts_track, "r") as filep:
        for line in filep:
            line = line.split(",")
            if train:
                index = line[0]
            else:
                index = line[0] + ".wav.mfcc.csv"
            track_gts[index] = line[1][:-1]
    for index, filename in enumerate(filelist):
        utils.print_progress_start(str(index) + "/" + str(len(filelist)) + " " + filename)
        if filename in track_gts:
            mfccs = []
            groundtruths = []
            with open(indir + filename, "r") as filep:
                # Skip the 5-line YAAFE CSV header.
                next(filep)
                next(filep)
                next(filep)
                next(filep)
                next(filep)
                for line in filep:
                    line = line.split(",")
                    mfccs.append(str2arr(line[:-1]))
                    if train:
                        groundtruths.append(line[-1][:-1])
            mfccs = np.array(mfccs)
            delta_mfcc = librosa.feature.delta(mfccs)
            delta2_mfcc = librosa.feature.delta(mfccs, order=2)
            # Write local features in outdir_local
            with open(outdir_local + filename, "w") as filep:
                gt_to_write = ""
                if "i" in track_gts[filename]:
                    gt_to_write = ",i"
                elif "s" in track_gts[filename]:
                    # postpone frame groundtruth annotationa to another function later in the code
                    gt_to_write = ""
                else:
                    utils.print_warning("bayle.py line 231 local frame groundtruth undefined")
                if train:
                    for a, b, c, d in zip(mfccs, delta_mfcc, delta2_mfcc, groundtruths):
                        filep.write(arr2str(a) + "," + arr2str(b) + "," + arr2str(c) + "," + d + "\n")
                else:
                    for a, b, c in zip(mfccs, delta_mfcc, delta2_mfcc):
                        filep.write(arr2str(a) + "," + arr2str(b) + "," + arr2str(c) + gt_to_write + "\n")
            # # Write global features in out_feat_global
            # with open(out_feat_global, "a") as filep:
            #     filep.write(filename + "," +
            #         arr2str(np.mean(mfccs, axis=0)) + "," +
            #         arr2str(np.mean(delta_mfcc, axis=0)) + "," +
            #         arr2str(np.mean(delta2_mfcc, axis=0)) + "," +
            #         track_gts[filename] + "\n")
        utils.print_progress_end()
    utils.print_success("Adding local groundtruths to Songs in Jamendo thanks to Ramona annotations")
    match_feat_with_song_gt(dir_feat=outdir_local, dir_gts="groundtruths/frame_annot_jamendo_ramona/")
    utils.print_success("Done")
def column(matrix, i):
    """Return column *i* of *matrix* (a sequence of indexable rows) as a list."""
    return list(map(lambda row: row[i], matrix))
def ngram_proba(local_pred, threshold=0.5, above_threshold=True):
    """Build a normalized run-length histogram from frame probabilities.

    Scans *local_pred*; each time a frame fails the threshold test, the
    length of the preceding run of passing frames is recorded (capped at
    bin 29).  The 30-bin histogram is normalized by its total count and
    returned as a comma-separated string.  A run still open at the end of
    the stream is deliberately NOT recorded (same as the original).
    """
    nb_bins = 30
    histogram = [0] * nb_bins
    run_length = 0
    for pred in local_pred:
        passes = (pred > threshold) if above_threshold else (pred <= threshold)
        if passes:
            run_length += 1
        else:
            histogram[min(run_length, nb_bins - 1)] += 1
            run_length = 0
    total = float(sum(histogram))
    if total > 0.:
        histogram = [count / total for count in histogram]
    return ','.join(str(x) for x in histogram)
def ngram(preds, tag):
    """Run-length histogram of consecutive *tag* predictions.

    Same scheme as ngram_proba(), but a frame "passes" when *tag* is a
    substring of the prediction label.  Returns the normalized 30-bin
    histogram as a comma-separated string.
    """
    nb_bins = 30
    histogram = [0] * nb_bins
    run_length = 0
    for pred in preds:
        if tag in pred:
            run_length += 1
        else:
            histogram[min(run_length, nb_bins - 1)] += 1
            run_length = 0
    total = float(sum(histogram))
    if total > 0.:
        histogram = [count / total for count in histogram]
    return ','.join(str(x) for x in histogram)
def create_track_feat_testset(folder, infile, outfile, model_file, train=False):
    """Build the 109-feature track-level vectors and append them to *outfile*.

    For each track listed in *infile*: load its frame MFCCs, compute
    deltas in memory, run the frame classifier from *model_file*
    (predict and predict_proba), derive a 10-bin probability histogram
    plus song/instrumental n-gram features, and append one CSV line of
    ``mean MFCC+deltas (39) + histogram (10) + 2x30 n-grams`` per track.

    Args:
        folder: directory holding the per-track frame feature files.
        infile: CSV of ``track,tag`` groundtruths (read via read_gts).
        outfile: output CSV, opened in append mode.
        model_file: joblib-pickled sklearn frame classifier.
        train: selects the train-set file naming/format conventions.
    """
    utils.print_success("Create track feat testset")
    folder = utils.abs_path_dir(folder)
    infile = utils.abs_path_file(infile)
    clf = joblib.load(model_file)
    track_gts = read_gts(infile, separator=",")
    for index, filename in enumerate(track_gts):
        utils.print_progress_start(str(index+1) + "/" + str(len(track_gts)) + " " + filename)
        mfccs = []
        mfccs_1 = []
        # Test-set files carry an audio-conversion suffix; train files don't.
        extension = ""
        if train:
            extension = ""
        else:
            extension += "_audio_full_mono_22k"
        extension += ".wav.mfcc.csv"
        with open(folder + filename + extension, "r") as filep:
            if train:
                # Skip the 5-line YAAFE CSV header (train format only).
                next(filep)
                next(filep)
                next(filep)
                next(filep)
                next(filep)
            for line in filep:
                # train files are comma-separated, test files space-separated
                if train:
                    line = line.split(",")
                else:
                    line = line.split(" ")
                mfccs_1.append(str2arr(line[:-1]))
                # if train:
                #     mfccs.append(str2arr(line[:-1]))
                # else:
                #     mfccs.append(str2arr(line[0:]))
        mfccs = np.array(mfccs_1)
        delta_mfcc = librosa.feature.delta(mfccs)
        delta2_mfcc = librosa.feature.delta(mfccs, order=2)
        tmp = np.append(mfccs, delta_mfcc, axis=1)
        features = np.append(tmp, delta2_mfcc, axis=1)
        preds_proba = clf.predict_proba(features)
        # Histogramm of the first class' frame probabilities (10 bins).
        nb_hist_class = 10
        numbers = column(preds_proba, 0)
        hist_pred = np.histogram(numbers, nb_hist_class)
        hist_pred_norm = hist_pred[0] / float(sum(hist_pred[0]))
        ngram_threshold = 0.5
        song_ngram_proba = ngram_proba(local_pred=numbers, threshold=ngram_threshold, above_threshold=True)
        instru_ngram_proba = ngram_proba(local_pred=numbers, threshold=ngram_threshold, above_threshold=False)
        preds = clf.predict(features)
        song_ngram = ngram(preds, "s")
        instru_ngram = ngram(preds, "i")
        # One line per track: truncated name, means, histogram, n-grams, tag.
        with open(outfile, "a") as filep:
            filep.write(filename[:12] + "," +
                        arr2str(np.mean(mfccs, axis=0)) + "," +
                        arr2str(np.mean(delta_mfcc, axis=0)) + "," +
                        arr2str(np.mean(delta2_mfcc, axis=0)) + "," +
                        arr2str(hist_pred_norm) + "," +
                        song_ngram_proba + "," +
                        instru_ngram_proba + "," +
                        song_ngram + "," +
                        instru_ngram + "," +
                        track_gts[filename] + "\n")
        utils.print_progress_end()
def figures1bd(indir, file_gts_track):
    """Reproduce figures 1b/1d: the full frame->track pipeline.

    Pipeline (steps 1-4 are currently commented out below and were run
    once offline):
    0. Input the local YAAFE features (13 MFCC per frame, 186 train tracks).
    1. Compute delta and double delta (39 features per frame).
    2. Gather global means (39 features per track).
    3. Train on MFCC + deltas to output frame-level predictions.
    4. Use those predictions to build song/instru n-grams and a histogram
       (adds 70 features per track, 109 total), fit on 109x186, then
       predict on the 41491-track test set.

    NOTE(review): the final line calls ``reproduce.plot_results`` but the
    ``import reproduce`` at the top of this module is commented out, so
    running this function as-is raises NameError — confirm before use.
    """
    # Preprocess arg
    indir = utils.abs_path_dir(indir)
    file_gts_track = utils.abs_path_file(file_gts_track)
    feat_frame_train = "feat_frame_train/"
    utils.create_dir(feat_frame_train)
    feat_frame_test = "feat_frame_test/"
    utils.create_dir(feat_frame_test)
    outdir_global = "feat_track/"
    utils.create_dir(outdir_global)
    feat_train = outdir_global + "train.csv"
    feat_test = outdir_global + "test.csv"
    models_dir = "models/"
    utils.create_dir(models_dir)
    loc_feat_testset_dirpath = "/media/sf_DATA/Datasets/Simbals/yaafe/results/processed/"
    filelist_test = "filelist_test.tsv"
    filelist_train = "filelist_train.tsv"
    models_global = "models_track/"
    utils.create_dir(models_global)
    # process_local_feat(indir, file_gts_track, feat_frame_train, feat_train, train=True)
    # classify.create_models(outdir=models_dir, train_dir=feat_frame_train, separator=",")
    # create_track_feat_testset(indir, filelist_train, feat_train, train=True)
    # 15h28m44s to 19h08m28s Done in 13184117ms
    # create_track_feat_testset(loc_feat_testset_dirpath, filelist_test, feat_test)
    # classify.create_models(outdir=models_global, train_file=feat_train)
    # classify.test_models_parallel(
    #     models_dir=models_global,
    #     out_dir="results/",
    #     test_file=feat_test)
    # Display results
    reproduce.plot_results("results/")
def figure1a(file_gts_track):
    """Reproduce figure 1a: 5-fold cross-validation on the track features.

    NOTE(review): *file_gts_track* is only referenced by the commented-out
    call below, so the parameter is effectively unused; the function just
    cross-validates feat_track/train.csv, which must already exist.
    """
    outdir_global = "feat_track/"
    utils.create_dir(outdir_global)
    feat_train = outdir_global + "train.csv"
    # process_local_feat(indir, file_gts_track, feat_frame_train, feat_train, train=True)
    classify.cross_validation(feat_train, n_folds=5)
def figure2(indir, file_gts_track):
    """Reproduce figure 2 (not implemented).

    Intended method: maintain 100 percent precision while maximizing
    recall.  Currently a stub that does nothing.
    """
    pass
def read_file_bayle(filename):
    """Read a track-level feature CSV into parallel lists.

    Each line is expected to look like ``filename,feat1,...,featn,tag``.

    Args:
        filename: path to the CSV file (validated by utils.abs_path_file).

    Returns:
        A ``(filenames, features, groundtruths)`` tuple of parallel lists;
        *features* is a list of float lists, *groundtruths* a list of tags.
    """
    filename = utils.abs_path_file(filename)
    filenames = []
    groundtruths = []
    features = []
    with open(filename, "r") as filep:
        for row in filep:
            line = row.split(",")
            filenames.append(line[0])
            features.append([float(i) for i in line[1:-1]])
            # rstrip replaces the original char-by-char while loop that
            # removed trailing CR/LF characters from the tag one at a time.
            groundtruths.append(line[-1].rstrip("\r\n"))
    return filenames, features, groundtruths
def column(matrix, i):
    """Return column *i* of *matrix* as a list.

    NOTE(review): duplicate of the ``column`` defined earlier in this
    module; this later definition is the one in effect at runtime.
    """
    return [entries[i] for entries in matrix]
def process_results(train, test):
    """Train AdaBoost(DecisionTree) on *train*, evaluate and dump predictions.

    Prints accuracy / precision / recall / F-measure on the test set,
    then writes per-track song probabilities (1 - P(class 0)) to
    ``predictions/Bayle.csv``.

    Args:
        train: path to the track-level train CSV (read_file_bayle format).
        test: path to the track-level test CSV.
    """
    train_fn, train_features, train_groundtruths = read_file_bayle(train)
    test_fn, test_features, test_groundtruths = read_file_bayle(test)
    step = 0.1
    # for weight in np.arange(0.0, 1.0, step):
    # inside_clf = RandomForestClassifier(random_state=2)
    # Fixed random_state keeps the experiment reproducible.
    inside_clf = DecisionTreeClassifier(random_state=2)
        # class_weight={"i":weight, "s":1-weight})
    clf = AdaBoostClassifier(
        random_state=2,#with 4 98%precision song class
        base_estimator=inside_clf)
    clf.fit(train_features, train_groundtruths)
    predictions = clf.predict(test_features)
    print("Accuracy " + str(accuracy_score(test_groundtruths, predictions)))
    print("F-Measure " + str(f1_score(test_groundtruths, predictions, average="weighted")))
    print("Precision " + str(precision_score(test_groundtruths, predictions, average=None)))
    print("Recall " + str(recall_score(test_groundtruths, predictions, average=None)))
    print("F-Measure " + str(f1_score(test_groundtruths, predictions, average=None)))
    # predictions = [1.0 if i=="s" else 0.0 for i in predictions]
    # Column 0 is the probability of the first class; 1-p is written out.
    predictions = column(clf.predict_proba(test_features), 0)
    outdir = "predictions/"
    with open(outdir + "Bayle.csv", "w") as filep:
        for name, pred in zip(test_fn, predictions):
            filep.write(name + "," + str(1.0 - float(pred)) + "\n")
def new_algo_final(indir, file_gts_track):
    """Run the complete Bayle et al. pipeline end to end (~6 hours).

    Steps: frame-level features + groundtruths -> frame RandomForest ->
    track-level 109-feature vectors for train (database1) and test
    (database2) -> track-level model -> process_results().

    Args:
        indir: directory of local YAAFE features for the train database.
        file_gts_track: CSV of track groundtruths for the train database.
    """
    utils.print_success("Approx. time ~6 hours.")
    # Preprocess arg
    indir = utils.abs_path_dir(indir)
    file_gts_track = utils.abs_path_file(file_gts_track)
    dir_tmp = utils.create_dir(utils.create_dir("src/tmp") + "bayle")
    feat_frame_train = utils.create_dir(dir_tmp + "feat_frame_train")
    feat_frame_test = utils.create_dir(dir_tmp + "feat_frame_test")
    outdir_global = utils.create_dir(dir_tmp + "feat_track")
    feat_train = outdir_global + "train.csv"
    feat_test = outdir_global + "test.csv"
    models_dir = utils.create_dir(dir_tmp + "models")
    loc_feat_testset_dirpath = "features/database2/"
    filelist_train = "groundtruths/database1.csv"
    filelist_test = "groundtruths/database2.csv"
    models_global = utils.create_dir(dir_tmp + "models_track")
    process_local_feat(indir, file_gts_track, outdir_local=feat_frame_train, out_feat_global=feat_train, train=False)
    classify.create_models(outdir=models_dir, train_dir=feat_frame_train, separator=",", classifiers="RandomForest")
    """
    Create features at track scale for the train set
    Features: MFCC + Delta + Double Delta + ngrams + hist
    """
    # NOTE(review): the second assignment immediately overrides the first
    # with a machine-specific absolute path — confirm which one is wanted.
    model_file = "src/tmp/bayle/models/RandomForest/RandomForest.pkl"
    model_file = "/media/sf_DATA/ReproducibleResearchIEEE2017/src/tmp/bayle/models/RandomForest/RandomForest.pkl"
    create_track_feat_testset(indir, filelist_train, feat_train, model_file, train=True)
    # # 15h28m44s to 19h08m28s Done in 13184117ms
    create_track_feat_testset(loc_feat_testset_dirpath, filelist_test, feat_test, model_file)
    classify.create_models(outdir=models_global, train_file=feat_train, classifiers="RandomForest")
    process_results(feat_train, feat_test)
def main():
    """CLI entry point: run the final algorithm and report elapsed time.

    NOTE(review): the argparse arguments defined below are never parsed
    (parse_args is only referenced in commented-out code); the input
    paths are hardcoded just before new_algo_final() is called.
    """
    begin = int(round(time.time() * 1000))
    PARSER = argparse.ArgumentParser(description="Bayle et al. (2017) algorithm")
    PARSER.add_argument(
        "-d",
        "--indir",
        help="input dir containing all local features extracted by YAAFE",
        type=str,
        default="/media/sf_github/yann/train/",
        metavar="indir")
    PARSER.add_argument(
        "-i",
        "--gts",
        help="input file containing all track groundtruths",
        type=str,
        default="filelist_train.tsv")
    # Hardcoded inputs override the (unparsed) CLI options above.
    indir = "features/database1/"
    file_gts_track = "groundtruths/database1.csv"
    new_algo_final(indir, file_gts_track)
    # figure1a(PARSER.parse_args().gts)
    # figures1bd(PARSER.parse_args().indir, PARSER.parse_args().gts)
    # figure2(PARSER.parse_args().indir, PARSER.parse_args().gts)
    # Local feat processing
    # Global feat processing
    # bayle_fig3()
    utils.print_success("Done in " + str(int(round(time.time() * 1000)) - begin) + "ms")
# Script entry point.
if __name__ == "__main__":
    main()
| mit |
WhySoGeeky/DroidPot | venv/lib/python2.7/site-packages/django/conf/locale/pl/formats.py | 115 | 1147 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Display formats (Django date-format syntax) for the Polish locale.
DATE_FORMAT = 'j E Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j E Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd-m-Y'
SHORT_DATETIME_FORMAT = 'd-m-Y H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
    '%y-%m-%d', # '06-10-25'
    # '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
)
DATETIME_INPUT_FORMATS = (
    '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M', # '25.10.2006 14:30'
    '%d.%m.%Y', # '25.10.2006'
)
# Number formatting: comma decimal separator, space thousands separator.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' '
NUMBER_GROUPING = 3
| mit |
johnboiles/wicd | gtk/gui.py | 2 | 37524 | #!/usr/bin/python
""" gui -- The main wicd GUI module.
Module containing the code for the main wicd GUI.
"""
#
# Copyright (C) 2007-2009 Adam Blackburn
# Copyright (C) 2007-2009 Dan O'Reilly
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License Version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import time
import gobject
import gtk
from itertools import chain
from dbus import DBusException
from wicd import misc
from wicd import wpath
from wicd import dbusmanager
from wicd.misc import noneToString
from wicd.translations import _, language
import prefs
from prefs import PreferencesDialog
import netentry
from netentry import WiredNetworkEntry, WirelessNetworkEntry
from guiutil import error, LabelEntry
# When run directly, switch to this module's directory for relative paths.
if __name__ == '__main__':
    wpath.chdir(__file__)
# D-Bus proxies, populated by setup_dbus(); None until a connection exists.
proxy_obj = daemon = wireless = wired = bus = None
DBUS_AVAIL = False
def setup_dbus(force=True):
""" Initialize DBus. """
global bus, daemon, wireless, wired, DBUS_AVAIL
try:
dbusmanager.connect_to_dbus()
except DBusException:
if force:
print "Can't connect to the daemon, ' + \
'trying to start it automatically..."
if not misc.PromptToStartDaemon():
print "Failed to find a graphical sudo program, ' + \
'cannot continue."
return False
try:
dbusmanager.connect_to_dbus()
except DBusException:
error(
None,
_("Could not connect to wicd's D-Bus interface. "
"Check the wicd log for error messages.")
)
return False
else:
return False
prefs.setup_dbus()
netentry.setup_dbus()
bus = dbusmanager.get_bus()
dbus_ifaces = dbusmanager.get_dbus_ifaces()
daemon = dbus_ifaces['daemon']
wireless = dbus_ifaces['wireless']
wired = dbus_ifaces['wired']
DBUS_AVAIL = True
return True
def handle_no_dbus(from_tray=False):
""" Handle the case where no DBus is available. """
global DBUS_AVAIL
DBUS_AVAIL = False
if from_tray:
return False
print "Wicd daemon is shutting down!"
error(
None,
_('The wicd daemon has shut down. The UI will not function '
'properly until it is restarted.'),
block=False
)
return False
class WiredProfileChooser:
    """ Class for displaying the wired profile chooser.

    Modal pop-up shown when a wired connection is detected: the user
    picks (or creates) a wired profile and connects, or dismisses the
    dialog, optionally suppressing future autoconnect pop-ups.
    """
    def __init__(self):
        """ Initializes and runs the wired profile chooser. """
        # Import and init WiredNetworkEntry to steal some of the
        # functions and widgets it uses.
        wired_net_entry = WiredNetworkEntry()
        # Response ids: 1 = Connect, 2 = Cancel.
        dialog = gtk.Dialog(
            title=_('Wired connection detected'),
            flags=gtk.DIALOG_MODAL,
            buttons=(gtk.STOCK_CONNECT, 1, gtk.STOCK_CANCEL, 2)
        )
        dialog.set_has_separator(False)
        dialog.set_size_request(400, 150)
        instruct_label = gtk.Label(
            _('Select or create a wired profile to connect with') + ':\n'
        )
        stoppopcheckbox = gtk.CheckButton(
            _('Stop Showing Autoconnect pop-up temporarily')
        )
        wired_net_entry.is_full_gui = False
        instruct_label.set_alignment(0, 0)
        stoppopcheckbox.set_active(False)
        # Remove widgets that were added to the normal WiredNetworkEntry
        # so that they can be added to the pop-up wizard.
        wired_net_entry.vbox_top.remove(wired_net_entry.hbox_temp)
        wired_net_entry.vbox_top.remove(wired_net_entry.profile_help)
        # pylint: disable-msg=E1101
        dialog.vbox.pack_start(instruct_label, fill=False, expand=False)
        # pylint: disable-msg=E1101
        dialog.vbox.pack_start(wired_net_entry.profile_help, False, False)
        # pylint: disable-msg=E1101
        dialog.vbox.pack_start(wired_net_entry.hbox_temp, False, False)
        # pylint: disable-msg=E1101
        dialog.vbox.pack_start(stoppopcheckbox, False, False)
        dialog.show_all()
        wired_profiles = wired_net_entry.combo_profile_names
        wired_net_entry.profile_help.hide()
        # Show the help text only when no profile exists yet.
        if wired_net_entry.profile_list is not None:
            wired_profiles.set_active(0)
            print "wired profiles found"
        else:
            print "no wired profiles found"
            wired_net_entry.profile_help.show()
        response = dialog.run()
        if response == 1:
            # Connect using the selected profile.
            print 'reading profile ', wired_profiles.get_active_text()
            wired.ReadWiredNetworkProfile(wired_profiles.get_active_text())
            wired.ConnectWired()
        else:
            # Optionally suppress future autoconnect pop-ups.
            if stoppopcheckbox.get_active():
                daemon.SetForcedDisconnect(True)
        dialog.destroy()
def get_wireless_prop(net_id, prop):
    """ Get wireless property *prop* of network *net_id* via the daemon. """
    return wireless.GetWirelessProperty(net_id, prop)
class appGui(object):
""" The main wicd GUI class. """
    def __init__(self, standalone=False, tray=None):
        """ Initializes everything needed for the GUI.

        Connects to the daemon over D-Bus (exiting with an error dialog
        if unavailable), builds the window from wicd.ui, wires the menu
        and button callbacks, registers D-Bus signal receivers, and
        kicks off the first network scan.
        """
        setup_dbus()
        # Without the daemon the GUI cannot do anything useful: bail out.
        if not daemon:
            errmsg = _("Error connecting to wicd service via D-Bus. "
                       "Please ensure the wicd service is running.")
            d = gtk.MessageDialog(parent=None,
                                  flags=gtk.DIALOG_MODAL,
                                  type=gtk.MESSAGE_ERROR,
                                  buttons=gtk.BUTTONS_OK,
                                  message_format=errmsg)
            d.run()
            sys.exit(1)
        self.tray = tray
        gladefile = os.path.join(wpath.gtk, "wicd.ui")
        self.wTree = gtk.Builder()
        self.wTree.set_translation_domain('wicd')
        self.wTree.add_from_file(gladefile)
        self.window = self.wTree.get_object("window1")
        # Cap the window at 530px wide or half the screen, whichever is less.
        width = int(gtk.gdk.screen_width() / 2)
        if width > 530:
            width = 530
        self.window.resize(width, int(gtk.gdk.screen_height() / 1.7))
        # Map Glade signal handler names to methods defined on this class.
        dic = {
            "refresh_clicked": self.refresh_clicked,
            "quit_clicked": self.exit,
            "rfkill_clicked": self.switch_rfkill,
            "disconnect_clicked": self.disconnect_all,
            "main_exit": self.exit,
            "cancel_clicked": self.cancel_connect,
            "hidden_clicked": self.connect_hidden,
            "preferences_clicked": self.settings_dialog,
            "about_clicked": self.about_dialog,
            "create_adhoc_clicked": self.create_adhoc_network,
            "forget_network_clicked": self.forget_network,
        }
        self.wTree.connect_signals(dic)
        # Set some strings in the GUI - they may be translated
        label_instruct = self.wTree.get_object("label_instructions")
        label_instruct.set_label(_('Choose from the networks below:'))
        probar = self.wTree.get_object("progressbar")
        probar.set_text(_('Connecting'))
        self.disconnect_all_button = self.wTree.get_object('disconnect_button')
        self.rfkill_button = self.wTree.get_object("rfkill_button")
        self.all_network_list = self.wTree.get_object("network_list_vbox")
        self.all_network_list.show_all()
        # Wired networks are shown above the (scrolling) wireless list.
        self.wired_network_box = gtk.VBox(False, 0)
        self.wired_network_box.show_all()
        self.network_list = gtk.VBox(False, 0)
        self.all_network_list.pack_start(self.wired_network_box, False, False)
        self.all_network_list.pack_start(self.network_list, True, True)
        self.network_list.show_all()
        self.status_area = self.wTree.get_object("connecting_hbox")
        self.status_bar = self.wTree.get_object("statusbar")
        menu = self.wTree.get_object("menu1")
        self.status_area.hide_all()
        self.window.set_icon_name('wicd-gtk')
        # GUI state flags used by the rest of the class.
        self.statusID = None
        self.first_dialog_load = True
        self.is_visible = True
        self.pulse_active = False
        self.pref = None
        self.standalone = standalone
        self.wpadrivercombo = None
        self.connecting = False
        self.refreshing = False
        self.prev_state = None
        self.update_cb = None
        self._wired_showing = False
        # Show a placeholder label until the first scan finishes.
        self.network_list.set_sensitive(False)
        label = gtk.Label("%s..." % _('Scanning'))
        self.network_list.pack_start(label)
        label.show()
        self.wait_for_events(0.2)
        self.window.connect('delete_event', self.exit)
        self.window.connect('key-release-event', self.key_event)
        daemon.SetGUIOpen(True)
        # React to daemon-side events (scan lifecycle, status, results).
        bus.add_signal_receiver(self.dbus_scan_finished, 'SendEndScanSignal',
                                'org.wicd.daemon.wireless')
        bus.add_signal_receiver(self.dbus_scan_started, 'SendStartScanSignal',
                                'org.wicd.daemon.wireless')
        bus.add_signal_receiver(self.update_connect_buttons, 'StatusChanged',
                                'org.wicd.daemon')
        bus.add_signal_receiver(self.handle_connection_results,
                                'ConnectResultsSent', 'org.wicd.daemon')
        bus.add_signal_receiver(lambda: setup_dbus(force=False),
                                "DaemonStarting", "org.wicd.daemon")
        bus.add_signal_receiver(self._do_statusbar_update, 'StatusChanged',
                                'org.wicd.daemon')
        if standalone:
            bus.add_signal_receiver(handle_no_dbus, "DaemonClosing",
                                    "org.wicd.daemon")
        self._do_statusbar_update(*daemon.GetConnectionStatus())
        self.wait_for_events(0.1)
        # Refresh the statusbar every 2 seconds and start the first scan.
        self.update_cb = misc.timeout_add(2, self.update_statusbar)
        self.refresh_clicked()
def handle_connection_results(self, results):
""" Handle connection results. """
if results not in ['success', 'aborted'] and self.is_visible:
error(self.window, language[results], block=False)
    def create_adhoc_network(self, widget=None):
        """ Shows a dialog that creates a new adhoc network.

        Collects ESSID, IP, channel and optional WEP key, then asks the
        daemon to create the network.  *widget* is the menu item that
        triggered the callback (unused).
        """
        print "Starting the Ad-Hoc Network Creation Process..."
        # Response ids: 1 = OK, 2 = Cancel.
        dialog = gtk.Dialog(
            title=_('Create an Ad-Hoc Network'),
            flags=gtk.DIALOG_MODAL,
            buttons=(gtk.STOCK_CANCEL, 2, gtk.STOCK_OK, 1)
        )
        dialog.set_has_separator(False)
        dialog.set_size_request(400, -1)
        self.chkbox_use_encryption = \
            gtk.CheckButton(_('Use Encryption (WEP only)'))
        self.chkbox_use_encryption.set_active(False)
        ip_entry = LabelEntry(_('IP') + ':')
        essid_entry = LabelEntry(_('ESSID') + ':')
        channel_entry = LabelEntry(_('Channel') + ':')
        self.key_entry = LabelEntry(_('Key') + ':')
        self.key_entry.set_auto_hidden(True)
        # Key entry stays disabled until encryption is checked
        # (see toggle_encrypt_check).
        self.key_entry.set_sensitive(False)
        chkbox_use_ics = \
            gtk.CheckButton(_('Activate Internet Connection Sharing'))
        self.chkbox_use_encryption.connect("toggled",
                                           self.toggle_encrypt_check)
        channel_entry.entry.set_text('3')
        essid_entry.entry.set_text('My_Adhoc_Network')
        ip_entry.entry.set_text('169.254.12.10') # Just a random IP
        vbox_ah = gtk.VBox(False, 0)
        self.wired_network_box = gtk.VBox(False, 0)
        vbox_ah.pack_start(self.chkbox_use_encryption, False, False)
        vbox_ah.pack_start(self.key_entry, False, False)
        vbox_ah.show()
        # pylint: disable-msg=E1101
        dialog.vbox.pack_start(essid_entry)
        # pylint: disable-msg=E1101
        dialog.vbox.pack_start(ip_entry)
        # pylint: disable-msg=E1101
        dialog.vbox.pack_start(channel_entry)
        # pylint: disable-msg=E1101
        dialog.vbox.pack_start(chkbox_use_ics)
        # pylint: disable-msg=E1101
        dialog.vbox.pack_start(vbox_ah)
        # pylint: disable-msg=E1101
        dialog.vbox.set_spacing(5)
        dialog.show_all()
        response = dialog.run()
        if response == 1:
            # ICS flag is hardcoded off; the checkbox value is ignored.
            wireless.CreateAdHocNetwork(
                essid_entry.entry.get_text(),
                channel_entry.entry.get_text(),
                ip_entry.entry.get_text().strip(),
                "WEP",
                self.key_entry.entry.get_text(),
                self.chkbox_use_encryption.get_active(),
                False # chkbox_use_ics.get_active())
            )
        dialog.destroy()
    def forget_network(self, widget=None):
        """
        Shows a dialog that lists saved wireless networks, and lets the user
        delete them.

        The list comes from the daemon's saved-network config; selecting
        rows and pressing Delete (response 1) removes the settings after
        a yes/no confirmation.
        """
        wireless.ReloadConfig()
        dialog = gtk.Dialog(
            title=_('List of saved networks'),
            flags=gtk.DIALOG_MODAL,
            buttons=(gtk.STOCK_DELETE, 1, gtk.STOCK_OK, 2)
        )
        dialog.set_has_separator(True)
        dialog.set_size_request(400, 200)
        # Two text columns: ESSID and BSSID.
        networks = gtk.ListStore(str, str)
        for entry in wireless.GetSavedWirelessNetworks():
            if entry[1] != 'None':
                networks.append(entry)
            else:
                # A BSSID of 'None' marks the per-ESSID global profile.
                networks.append((entry[0], _('Global settings for this ESSID')))
        tree = gtk.TreeView(model=networks)
        tree.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
        cell = gtk.CellRendererText()
        column = gtk.TreeViewColumn(_('ESSID'), cell, text=0)
        tree.append_column(column)
        column = gtk.TreeViewColumn(_('BSSID'), cell, text=1)
        tree.append_column(column)
        scroll = gtk.ScrolledWindow()
        scroll.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        scroll.add(tree)
        # pylint: disable-msg=E1101
        dialog.vbox.pack_start(scroll)
        # pylint: disable-msg=E1101
        dialog.vbox.set_spacing(5)
        dialog.show_all()
        response = dialog.run()
        if response == 1:
            model, pathlist = tree.get_selection().get_selected_rows()
            to_remove = dict(essid=[], bssid=[])
            if pathlist:
                for row in pathlist:
                    it = model.get_iter(path=row)
                    to_remove['essid'].append(
                        misc.noneToString(model.get_value(it, 0))
                    )
                    to_remove['bssid'].append(model.get_value(it, 1))
                confirm = gtk.MessageDialog(
                    flags=gtk.DIALOG_MODAL,
                    type=gtk.MESSAGE_INFO,
                    buttons=gtk.BUTTONS_YES_NO,
                    message_format=_('Are you sure you want to discard' +
                                     ' settings for the selected networks?')
                )
                # Show the affected ESSIDs so the user knows what goes away.
                confirm.format_secondary_text('\n'.join(to_remove['essid']))
                response = confirm.run()
                if response == gtk.RESPONSE_YES:
                    # Deletion is keyed by BSSID; reload config afterwards
                    # so the daemon's state matches what's on disk.
                    for x in to_remove['bssid']:
                        wireless.DeleteWirelessNetwork(x)
                    wireless.ReloadConfig()
                confirm.destroy()
        dialog.destroy()
def toggle_encrypt_check(self, widget=None):
""" Toggles the encryption key entry box for the ad-hoc dialog. """
self.key_entry.set_sensitive(self.chkbox_use_encryption.get_active())
def switch_rfkill(self, widget=None):
""" Switches wifi card on/off. """
wireless.SwitchRfKill()
if wireless.GetRfKillEnabled():
self.rfkill_button.set_stock_id(gtk.STOCK_MEDIA_PLAY)
self.rfkill_button.set_label(_('Switch On Wi-Fi'))
else:
self.rfkill_button.set_stock_id(gtk.STOCK_MEDIA_STOP)
self.rfkill_button.set_label(_('Switch Off Wi-Fi'))
def disconnect_all(self, widget=None):
""" Disconnects from any active network. """
def handler(*args):
gobject.idle_add(self.all_network_list.set_sensitive, True)
self.all_network_list.set_sensitive(False)
daemon.Disconnect(reply_handler=handler, error_handler=handler)
def about_dialog(self, widget, event=None):
""" Displays an about dialog. """
dialog = gtk.AboutDialog()
dialog.set_name("Wicd")
dialog.set_version(daemon.Hello())
dialog.set_authors([
"Tom Van Braeckel",
"Adam Blackburn",
"Dan O'Reilly",
"Andrew Psaltis",
"David Paleino"
])
dialog.set_website("http://launchpad.net/wicd")
dialog.run()
dialog.destroy()
def key_event(self, widget, event=None):
""" Handle key-release-events. """
if event.state & gtk.gdk.CONTROL_MASK and \
gtk.gdk.keyval_name(event.keyval) in ["w", "q"]:
self.exit()
def settings_dialog(self, widget, event=None):
""" Displays a general settings dialog. """
if not self.pref:
self.pref = PreferencesDialog(self, self.wTree)
else:
self.pref.load_preferences_diag()
if self.pref.run() == 1:
self.pref.save_results()
self.pref.hide()
def connect_hidden(self, widget):
""" Prompts the user for a hidden network, then scans for it. """
dialog = gtk.Dialog(
title=('Hidden Network'),
flags=gtk.DIALOG_MODAL,
buttons=(gtk.STOCK_CONNECT, 1, gtk.STOCK_CANCEL, 2)
)
dialog.set_has_separator(False)
lbl = gtk.Label(_('Hidden Network ESSID'))
textbox = gtk.Entry()
# pylint: disable-msg=E1101
dialog.vbox.pack_start(lbl)
# pylint: disable-msg=E1101
dialog.vbox.pack_start(textbox)
dialog.show_all()
button = dialog.run()
if button == 1:
answer = textbox.get_text()
dialog.destroy()
self.refresh_networks(None, True, answer)
else:
dialog.destroy()
def cancel_connect(self, widget):
""" Alerts the daemon to cancel the connection process. """
#should cancel a connection if there
#is one in progress
cancel_button = self.wTree.get_object("cancel_button")
cancel_button.set_sensitive(False)
daemon.CancelConnect()
# Prevents automatic reconnecting if that option is enabled
daemon.SetForcedDisconnect(True)
def pulse_progress_bar(self):
""" Pulses the progress bar while connecting to a network. """
if not self.pulse_active:
return False
if not self.is_visible:
return True
try:
gobject.idle_add(self.wTree.get_object("progressbar").pulse)
except:
pass
return True
def update_statusbar(self):
""" Triggers a status update in wicd-monitor. """
if not self.is_visible:
return True
daemon.UpdateState()
if self.connecting:
# If we're connecting, don't wait for the monitor to send
# us a signal, since it won't until the connection is made.
self._do_statusbar_update(*daemon.GetConnectionStatus())
return True
def _do_statusbar_update(self, state, info):
""" Actually perform the statusbar update. """
if not self.is_visible:
return True
if state == misc.WIRED:
return self.set_wired_state(info)
elif state == misc.WIRELESS:
return self.set_wireless_state(info)
elif state == misc.CONNECTING:
return self.set_connecting_state(info)
elif state in (misc.SUSPENDED, misc.NOT_CONNECTED):
return self.set_not_connected_state(info)
return True
def set_wired_state(self, info):
""" Set wired state. """
if self.connecting:
# Adjust our state from connecting->connected.
self._set_not_connecting_state()
self.set_status(
_('Connected to wired network (IP: $A)').replace('$A', info[0])
)
return True
def set_wireless_state(self, info):
""" Set wireless state. """
if self.connecting:
# Adjust our state from connecting->connected.
self._set_not_connecting_state()
self.set_status(_('Connected to $A at $B (IP: $C)').replace
('$A', info[1]).replace
('$B', daemon.FormatSignalForPrinting(info[2])).replace
('$C', info[0]))
return True
def set_not_connected_state(self, info):
""" Set not connected state. """
if self.connecting:
# Adjust our state from connecting->not-connected.
self._set_not_connecting_state()
self.set_status(_('Not connected'))
return True
    def _set_not_connecting_state(self):
        """ Set not-connecting state.

        Switches the statusbar back to the slow (2 second) update timer,
        stops the progress-bar pulsing, re-enables the network list, and
        clears any leftover statusbar message.
        """
        if self.connecting:
            # Replace the fast connecting-mode timer with the normal one.
            if self.update_cb:
                gobject.source_remove(self.update_cb)
            self.update_cb = misc.timeout_add(2, self.update_statusbar)
            self.connecting = False
        if self.pulse_active:
            # pulse_progress_bar() checks this flag and cancels itself.
            self.pulse_active = False
            gobject.idle_add(self.all_network_list.set_sensitive, True)
            gobject.idle_add(self.status_area.hide_all)
        if self.statusID:
            gobject.idle_add(self.status_bar.remove_message, 1, self.statusID)
    def set_connecting_state(self, info):
        """ Set connecting state.

        Switches the statusbar to a fast (500 ms) update timer, starts
        the progress-bar pulse, disables the network list, and shows the
        current connecting-status message for the wired or wireless
        attempt described by `info`.
        """
        if not self.connecting:
            # Swap the slow timer for a fast one while connecting.
            if self.update_cb:
                gobject.source_remove(self.update_cb)
            self.update_cb = misc.timeout_add(500, self.update_statusbar,
                                              milli=True)
            self.connecting = True
        if not self.pulse_active:
            self.pulse_active = True
            misc.timeout_add(100, self.pulse_progress_bar, milli=True)
            gobject.idle_add(self.all_network_list.set_sensitive, False)
            gobject.idle_add(self.status_area.show_all)
        if self.statusID:
            gobject.idle_add(self.status_bar.remove_message, 1, self.statusID)
        # info[0] is the connection type; info[1] the network name.
        if info[0] == "wireless":
            stat = wireless.CheckWirelessConnectingMessage()
            gobject.idle_add(self.set_status, "%s: %s" % (info[1], stat))
        elif info[0] == "wired":
            gobject.idle_add(self.set_status, _('Wired Network') + ': '
                             + wired.CheckWiredConnectingMessage())
        return True
    def update_connect_buttons(self, state=None, x=None, force_check=False):
        """ Updates the connect/disconnect buttons for the GUI.

        If force_check is given, update the buttons even if the
        current network state is the same as the previous.
        If state is not given it is fetched from the daemon.
        """
        if not DBUS_AVAIL:
            return
        if not state:
            state, x = daemon.GetConnectionStatus()
        # "Disconnect all" only makes sense when something is connected.
        self.disconnect_all_button.set_sensitive(
            state in [misc.WIRED, misc.WIRELESS]
        )
        if self.prev_state != state or force_check:
            apbssid = wireless.GetApBssid()
            # Walk both the wireless and wired entry lists.
            for entry in chain(self.network_list, self.wired_network_box):
                if hasattr(entry, "update_connect_button"):
                    entry.update_connect_button(state, apbssid)
        self.prev_state = state
def set_status(self, msg):
""" Sets the status bar message for the GUI. """
self.statusID = self.status_bar.push(1, msg)
def dbus_scan_finished(self):
""" Calls for a non-fresh update of the gui window.
This method is called after a wireless scan is completed.
"""
if not DBUS_AVAIL:
return
gobject.idle_add(self.refresh_networks, None, False, None)
def dbus_scan_started(self):
""" Called when a wireless scan starts. """
if not DBUS_AVAIL:
return
self.network_list.set_sensitive(False)
    def _remove_items_from_vbox(self, vbox):
        """ Remove and destroy every child widget of a VBox.

        NOTE(review): this removes children while iterating the
        container directly; if the container's iterator is live rather
        than a snapshot, children could be skipped -- confirm, or
        iterate over vbox.get_children() instead.
        """
        for z in vbox:
            vbox.remove(z)
            z.destroy()
            del z
    def refresh_clicked(self, widget=None):
        """ Kick off an asynchronous wireless scan.

        Clears both network lists, shows a "Scanning..." placeholder,
        re-adds the wired entry when appropriate, and asks the daemon to
        scan.  Results arrive later via dbus_scan_finished().
        """
        if not DBUS_AVAIL or self.connecting:
            return
        self.refreshing = True
        # Remove stuff already in there.
        self._remove_items_from_vbox(self.wired_network_box)
        self._remove_items_from_vbox(self.network_list)
        label = gtk.Label("%s..." % _('Scanning'))
        self.network_list.pack_start(label)
        self.network_list.show_all()
        if wired.CheckPluggedIn() or daemon.GetAlwaysShowWiredInterface():
            printLine = True # In this case we print a separator.
            wirednet = WiredNetworkEntry()
            self.wired_network_box.pack_start(wirednet, False, False)
            # Wire the entry's buttons back to this window's handlers.
            wirednet.connect_button.connect("clicked", self.connect,
                                            "wired", 0, wirednet)
            wirednet.disconnect_button.connect("clicked", self.disconnect,
                                               "wired", 0, wirednet)
            wirednet.advanced_button.connect("clicked",
                                             self.edit_advanced, "wired", 0,
                                             wirednet)
            state, x = daemon.GetConnectionStatus()
            wirednet.update_connect_button(state)
            self._wired_showing = True
        else:
            self._wired_showing = False
        wireless.Scan(False)
    def refresh_networks(self, widget=None, fresh=True, hidden=None):
        """ Refreshes the network list.

        If fresh=True, scans for wireless networks and displays the results.
        If a ethernet connection is available, or the user has chosen to,
        displays a Wired Network entry as well.
        If hidden isn't None, will scan for networks after running
        iwconfig <wireless interface> essid <hidden>.
        """
        if fresh:
            if hidden:
                wireless.SetHiddenNetworkESSID(noneToString(hidden))
            # A fresh refresh delegates to the scan path; the list is
            # rebuilt when the scan-finished signal arrives.
            self.refresh_clicked()
            return
        print "refreshing..."
        self.network_list.set_sensitive(False)
        self._remove_items_from_vbox(self.network_list)
        self.wait_for_events()
        printLine = False # We don't print a separator by default.
        if self._wired_showing:
            printLine = True
        num_networks = wireless.GetNumberOfNetworks()
        instruct_label = self.wTree.get_object("label_instructions")
        if num_networks > 0:
            skip_never_connect = not daemon.GetShowNeverConnect()
            instruct_label.show()
            for x in xrange(0, num_networks):
                # Optionally hide networks flagged "never connect".
                if skip_never_connect and \
                  misc.to_bool(get_wireless_prop(x, 'never')):
                    continue
                if printLine:
                    # Separator between the wired entry / previous rows.
                    sep = gtk.HSeparator()
                    self.network_list.pack_start(sep, padding=10, fill=False,
                                                 expand=False)
                    sep.show()
                else:
                    printLine = True
                tempnet = WirelessNetworkEntry(x)
                self.network_list.pack_start(tempnet, False, False)
                tempnet.connect_button.connect("clicked",
                                               self.connect, "wireless", x,
                                               tempnet)
                tempnet.disconnect_button.connect("clicked",
                                                  self.disconnect, "wireless",
                                                  x, tempnet)
                tempnet.advanced_button.connect("clicked",
                                                self.edit_advanced, "wireless",
                                                x, tempnet)
        else:
            instruct_label.hide()
            if wireless.GetKillSwitchEnabled():
                label = gtk.Label(_('Wireless Kill Switch Enabled') + ".")
            else:
                label = gtk.Label(_('No wireless networks found.'))
            self.network_list.pack_start(label)
            label.show()
        self.update_connect_buttons(force_check=True)
        self.network_list.set_sensitive(True)
        self.refreshing = False
    def save_settings(self, nettype, networkid, networkentry):
        """ Verifies and saves the settings for the network entry.

        Validates every address field of the entry's advanced dialog
        first; returns False (after showing an error) on the first bad
        field, otherwise delegates to the entry's save method.
        """
        entry = networkentry.advanced_dialog
        opt_entlist = []
        req_entlist = []
        # First make sure all the Addresses entered are valid.
        if entry.chkbox_static_ip.get_active():
            req_entlist = [entry.txt_ip, entry.txt_netmask]
            opt_entlist = [entry.txt_gateway]
        if entry.chkbox_static_dns.get_active() and \
          not entry.chkbox_global_dns.get_active():
            for ent in [entry.txt_dns_1, entry.txt_dns_2, entry.txt_dns_3]:
                opt_entlist.append(ent)
        # Required entries.
        for lblent in req_entlist:
            lblent.set_text(lblent.get_text().strip())
            if not misc.IsValidIP(lblent.get_text()):
                error(self.window, _('Invalid address in $A entry.').
                      replace('$A', lblent.label.get_label()))
                return False
        # Optional entries, only check for validity if they're entered.
        for lblent in opt_entlist:
            lblent.set_text(lblent.get_text().strip())
            if lblent.get_text() and not misc.IsValidIP(lblent.get_text()):
                error(self.window, _('Invalid address in $A entry.').
                      replace('$A', lblent.label.get_label()))
                return False
        # Now save the settings.
        if nettype == "wireless":
            if not networkentry.save_wireless_settings(networkid):
                return False
        elif nettype == "wired":
            if not networkentry.save_wired_settings():
                return False
        return True
def edit_advanced(self, widget, ttype, networkid, networkentry):
""" Display the advanced settings dialog.
Displays the advanced settings dialog and saves any changes made.
If errors occur in the settings, an error message will be displayed
and the user won't be able to save the changes until the errors
are fixed.
"""
dialog = networkentry.advanced_dialog
dialog.set_values()
dialog.show_all()
while True:
if self.run_settings_dialog(dialog, ttype, networkid, networkentry):
break
dialog.hide()
def run_settings_dialog(self, dialog, nettype, networkid, networkentry):
""" Runs the settings dialog.
Runs the settings dialog and returns True if settings are saved
successfully, and false otherwise.
"""
result = dialog.run()
if result == gtk.RESPONSE_ACCEPT:
if self.save_settings(nettype, networkid, networkentry):
return True
else:
return False
return True
    def check_encryption_valid(self, networkid, entry):
        """ Make sure that encryption settings are properly filled in.

        Returns False (after showing an error dialog) when a required
        encryption field is blank, or when the network requires
        encryption but the encryption checkbox is not checked.
        """
        # Make sure no entries are left blank
        if entry.chkbox_encryption.get_active():
            encryption_info = entry.encryption_info
            for entry_info in encryption_info.itervalues():
                # entry_info is (LabelEntry, requirement-flag).
                if entry_info[0].entry.get_text() == "" and \
                  entry_info[1] == 'required':
                    error(
                        self.window,
                        "%s (%s)" %
                        (_('Required encryption information is missing.'),
                         entry_info[0].label.get_label())
                    )
                    return False
        # Make sure the checkbox is checked when it should be
        elif not entry.chkbox_encryption.get_active() and \
          wireless.GetWirelessProperty(networkid, "encryption"):
            error(
                self.window,
                _('This network requires encryption to be enabled.')
            )
            return False
        return True
def _wait_for_connect_thread_start(self):
""" Wait for the connect thread to start. """
self.wTree.get_object("progressbar").pulse()
if not self._connect_thread_started:
return True
else:
misc.timeout_add(2, self.update_statusbar)
self.update_statusbar()
return False
    def connect(self, widget, nettype, networkid, networkentry):
        """ Initiates the connection process in the daemon.

        For wireless entries, encryption settings are validated first;
        invalid settings re-open the advanced dialog instead of
        connecting.  The actual connect call is asynchronous; `handler`
        fires when the daemon's connect thread has started.
        """
        def handler(*args):
            # Reply/error handler for the async ConnectWireless/Wired call.
            self._connect_thread_started = True
        def setup_interface_for_connection():
            """ Initialize interface for connection. """
            cancel_button = self.wTree.get_object("cancel_button")
            cancel_button.set_sensitive(True)
            self.all_network_list.set_sensitive(False)
            if self.statusID:
                gobject.idle_add(
                    self.status_bar.remove_message, 1, self.statusID)
            gobject.idle_add(
                self.set_status, _('Disconnecting active connections...'))
            gobject.idle_add(self.status_area.show_all)
            self.wait_for_events()
            self._connect_thread_started = False
        if nettype == "wireless":
            if not self.check_encryption_valid(networkid,
                                               networkentry.advanced_dialog):
                self.edit_advanced(None, nettype, networkid, networkentry)
                return False
            setup_interface_for_connection()
            wireless.ConnectWireless(networkid, reply_handler=handler,
                                     error_handler=handler)
        elif nettype == "wired":
            setup_interface_for_connection()
            wired.ConnectWired(reply_handler=handler, error_handler=handler)
        # Stop the normal statusbar timer and poll for thread start instead.
        gobject.source_remove(self.update_cb)
        misc.timeout_add(100, self._wait_for_connect_thread_start, milli=True)
def disconnect(self, widget, nettype, networkid, networkentry):
""" Disconnects from the given network.
Keyword arguments:
widget -- The disconnect button that was pressed.
event -- unused
nettype -- "wired" or "wireless", depending on the network entry type.
networkid -- unused
networkentry -- The NetworkEntry containing the disconnect button.
"""
def handler(*args):
gobject.idle_add(self.all_network_list.set_sensitive, True)
gobject.idle_add(self.network_list.set_sensitive, True)
widget.hide()
networkentry.connect_button.show()
daemon.SetForcedDisconnect(True)
self.network_list.set_sensitive(False)
if nettype == "wired":
wired.DisconnectWired(reply_handler=handler, error_handler=handler)
else:
wireless.DisconnectWireless(reply_handler=handler,
error_handler=handler)
def wait_for_events(self, amt=0):
""" Wait for any pending gtk events to finish before moving on.
Keyword arguments:
amt -- a number specifying the number of ms to wait before checking
for pending events.
"""
time.sleep(amt)
while gtk.events_pending():
gtk.main_iteration()
    def exit(self, widget=None, event=None):
        """ Hide the wicd GUI.

        This method hides the wicd GUI and writes the current window size
        to disc for later use. This method normally does NOT actually
        destroy the GUI, it just hides it.  In standalone mode the
        process exits instead.
        """
        self.window.hide()
        # Stop the statusbar timer and detach from daemon status signals
        # while hidden; show_win() re-attaches them.
        gobject.source_remove(self.update_cb)
        bus.remove_signal_receiver(self._do_statusbar_update, 'StatusChanged',
                                   'org.wicd.daemon')
        [width, height] = self.window.get_size()
        try:
            daemon.SetGUIOpen(False)
        except DBusException:
            # Daemon may already be gone; hiding should still succeed.
            pass
        if self.standalone:
            sys.exit(0)
        self.is_visible = False
        return True
    def show_win(self):
        """ Brings the GUI out of the hidden state.

        Method to show the wicd GUI, alert the daemon that it is open,
        and refresh the network list.  Re-attaches the D-Bus status
        signal and restarts the statusbar timer removed by exit().
        """
        self.window.present()
        self.window.deiconify()
        self.wait_for_events()
        self.is_visible = True
        daemon.SetGUIOpen(True)
        self.wait_for_events(0.1)
        gobject.idle_add(self.refresh_clicked)
        # Prime the statusbar immediately instead of waiting for a signal.
        self._do_statusbar_update(*daemon.GetConnectionStatus())
        bus.add_signal_receiver(self._do_statusbar_update, 'StatusChanged',
                                'org.wicd.daemon')
        self.update_cb = misc.timeout_add(2, self.update_statusbar)
if __name__ == '__main__':
    # Standalone entry point: connect to the daemon over D-Bus, build
    # the GUI window, and spin the glib main loop until the user quits.
    setup_dbus()
    app = appGui(standalone=True)
    mainloop = gobject.MainLoop()
    mainloop.run()
| gpl-2.0 |
aarchiba/scipy | scipy/sparse/linalg/isolve/lgmres.py | 6 | 8990 | # Copyright (C) 2009, Pauli Virtanen <pav@iki.fi>
# Distributed under the same license as SciPy.
from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from numpy.linalg import LinAlgError
from scipy._lib.six import xrange
from scipy.linalg import get_blas_funcs, get_lapack_funcs
from .utils import make_system
from ._gcrotmk import _fgmres
__all__ = ['lgmres']
def lgmres(A, b, x0=None, tol=1e-5, maxiter=1000, M=None, callback=None,
           inner_m=30, outer_k=3, outer_v=None, store_outer_Av=True,
           prepend_outer_v=False, atol=None):
    """
    Solve a matrix equation using the LGMRES algorithm.
    The LGMRES algorithm [1]_ [2]_ is designed to avoid some problems
    in the convergence in restarted GMRES, and often converges in fewer
    iterations.
    Parameters
    ----------
    A : {sparse matrix, dense matrix, LinearOperator}
        The real or complex N-by-N matrix of the linear system.
        Alternatively, ``A`` can be a linear operator which can
        produce ``Ax`` using, e.g.,
        ``scipy.sparse.linalg.LinearOperator``.
    b : {array, matrix}
        Right hand side of the linear system. Has shape (N,) or (N,1).
    x0 : {array, matrix}
        Starting guess for the solution.
    tol, atol : float, optional
        Tolerances for convergence, ``norm(residual) <= max(tol*norm(b), atol)``.
        The default for ``atol`` is `tol`.
        .. warning::
           The default value for `atol` will be changed in a future release.
           For future compatibility, specify `atol` explicitly.
    maxiter : int, optional
        Maximum number of iterations.  Iteration will stop after maxiter
        steps even if the specified tolerance has not been achieved.
    M : {sparse matrix, dense matrix, LinearOperator}, optional
        Preconditioner for A.  The preconditioner should approximate the
        inverse of A.  Effective preconditioning dramatically improves the
        rate of convergence, which implies that fewer iterations are needed
        to reach a given error tolerance.
    callback : function, optional
        User-supplied function to call after each iteration.  It is called
        as callback(xk), where xk is the current solution vector.
    inner_m : int, optional
        Number of inner GMRES iterations per each outer iteration.
    outer_k : int, optional
        Number of vectors to carry between inner GMRES iterations.
        According to [1]_, good values are in the range of 1...3.
        However, note that if you want to use the additional vectors to
        accelerate solving multiple similar problems, larger values may
        be beneficial.
    outer_v : list of tuples, optional
        List containing tuples ``(v, Av)`` of vectors and corresponding
        matrix-vector products, used to augment the Krylov subspace, and
        carried between inner GMRES iterations. The element ``Av`` can
        be `None` if the matrix-vector product should be re-evaluated.
        This parameter is modified in-place by `lgmres`, and can be used
        to pass "guess" vectors in and out of the algorithm when solving
        similar problems.
    store_outer_Av : bool, optional
        Whether LGMRES should store also A*v in addition to vectors `v`
        in the `outer_v` list. Default is True.
    prepend_outer_v : bool, optional
        Whether to put outer_v augmentation vectors before Krylov iterates.
        In standard LGMRES, prepend_outer_v=False.
    Returns
    -------
    x : array or matrix
        The converged solution.
    info : int
        Provides convergence information:
            - 0  : successful exit
            - >0 : convergence to tolerance not achieved, number of iterations
            - <0 : illegal input or breakdown
    Notes
    -----
    The LGMRES algorithm [1]_ [2]_ is designed to avoid the
    slowing of convergence in restarted GMRES, due to alternating
    residual vectors. Typically, it often outperforms GMRES(m) of
    comparable memory requirements by some measure, or at least is not
    much worse.
    Another advantage in this algorithm is that you can supply it with
    'guess' vectors in the `outer_v` argument that augment the Krylov
    subspace. If the solution lies close to the span of these vectors,
    the algorithm converges faster. This can be useful if several very
    similar matrices need to be inverted one after another, such as in
    Newton-Krylov iteration where the Jacobian matrix often changes
    little in the nonlinear steps.
    References
    ----------
    .. [1] A.H. Baker and E.R. Jessup and T. Manteuffel, "A Technique for
             Accelerating the Convergence of Restarted GMRES", SIAM J. Matrix
             Anal. Appl. 26, 962 (2005).
    .. [2] A.H. Baker, "On Improving the Performance of the Linear Solver
             restarted GMRES", PhD thesis, University of Colorado (2003).
    Examples
    --------
    >>> from scipy.sparse import csc_matrix
    >>> from scipy.sparse.linalg import lgmres
    >>> A = csc_matrix([[3, 2, 0], [1, -1, 0], [0, 5, 1]], dtype=float)
    >>> b = np.array([2, 4, -1], dtype=float)
    >>> x, exitCode = lgmres(A, b)
    >>> print(exitCode)            # 0 indicates successful convergence
    0
    >>> np.allclose(A.dot(x), b)
    True
    """
    # Wrap A and M as operators and derive the initial iterate from x0.
    A,M,x,b,postprocess = make_system(A,M,x0,b)
    if not np.isfinite(b).all():
        raise ValueError("RHS must contain only finite numbers")
    if atol is None:
        warnings.warn("scipy.sparse.linalg.lgmres called without specifying `atol`. "
                      "The default value will change in the future. To preserve "
                      "current behavior, set ``atol=tol``.",
                      category=DeprecationWarning, stacklevel=2)
        atol = tol
    matvec = A.matvec
    psolve = M.matvec
    if outer_v is None:
        outer_v = []
    # BLAS routines are resolved lazily once the working dtype is known.
    axpy, dot, scal = None, None, None
    nrm2 = get_blas_funcs('nrm2', [b])
    b_norm = nrm2(b)
    ptol_max_factor = 1.0
    # Outer (restart) loop.
    for k_outer in xrange(maxiter):
        r_outer = matvec(x) - b
        # -- callback
        if callback is not None:
            callback(x)
        # -- determine input type routines
        if axpy is None:
            if np.iscomplexobj(r_outer) and not np.iscomplexobj(x):
                x = x.astype(r_outer.dtype)
            axpy, dot, scal, nrm2 = get_blas_funcs(['axpy', 'dot', 'scal', 'nrm2'],
                                                   (x, r_outer))
        # -- check stopping condition
        r_norm = nrm2(r_outer)
        if r_norm <= max(atol, tol * b_norm):
            break
        # -- inner LGMRES iteration
        v0 = -psolve(r_outer)
        inner_res_0 = nrm2(v0)
        if inner_res_0 == 0:
            rnorm = nrm2(r_outer)
            raise RuntimeError("Preconditioner returned a zero vector; "
                               "|v| ~ %.1g, |M v| = 0" % rnorm)
        # Normalize the starting vector for the inner iteration.
        v0 = scal(1.0/inner_res_0, v0)
        ptol = min(ptol_max_factor, max(atol, tol*b_norm)/r_norm)
        try:
            Q, R, B, vs, zs, y, pres = _fgmres(matvec,
                                               v0,
                                               inner_m,
                                               lpsolve=psolve,
                                               atol=ptol,
                                               outer_v=outer_v,
                                               prepend_outer_v=prepend_outer_v)
            y *= inner_res_0
            if not np.isfinite(y).all():
                # Overflow etc. in computation. There's no way to
                # recover from this, so we have to bail out.
                raise LinAlgError()
        except LinAlgError:
            # Floating point over/underflow, non-finite result from
            # matmul etc. -- report failure.
            return postprocess(x), k_outer + 1
        # Inner loop tolerance control
        if pres > ptol:
            ptol_max_factor = min(1.0, 1.5 * ptol_max_factor)
        else:
            ptol_max_factor = max(1e-16, 0.25 * ptol_max_factor)
        # -- GMRES terminated: eval solution
        dx = zs[0]*y[0]
        for w, yc in zip(zs[1:], y[1:]):
            dx = axpy(w, dx, dx.shape[0], yc) # dx += w*yc
        # -- Store LGMRES augmentation vectors
        nx = nrm2(dx)
        if nx > 0:
            if store_outer_Av:
                # Recover A*dx from the inner factorization: A Z = Q R.
                q = Q.dot(R.dot(y))
                ax = vs[0]*q[0]
                for v, qc in zip(vs[1:], q[1:]):
                    ax = axpy(v, ax, ax.shape[0], qc)
                outer_v.append((dx/nx, ax/nx))
            else:
                outer_v.append((dx/nx, None))
        # -- Retain only a finite number of augmentation vectors
        while len(outer_v) > outer_k:
            del outer_v[0]
        # -- Apply step
        x += dx
    else:
        # didn't converge ...
        return postprocess(x), maxiter
    return postprocess(x), 0
| bsd-3-clause |
hmpf/nav | python/nav/smidumps/CPQPOWER-MIB.py | 2 | 550336 | # python version 1.0 DO NOT EDIT
#
# Generated by smidump version 0.4.8:
#
# smidump -f python CPQPOWER-MIB
FILENAME = "./CPQPOWER-MIB"
MIB = {
"moduleName" : "CPQPOWER-MIB",
"CPQPOWER-MIB" : {
"nodetype" : "module",
"language" : "SMIv1",
},
"imports" : (
{"module" : "CPQHOST-MIB", "name" : "compaq"},
{"module" : "RFC1155-SMI", "name" : "Counter"},
{"module" : "RFC1213-MIB", "name" : "DisplayString"},
{"module" : "RFC1213-MIB", "name" : "ifIndex"},
{"module" : "RFC1213-MIB", "name" : "ifDescr"},
{"module" : "RFC1213-MIB", "name" : "sysName"},
{"module" : "RFC1213-MIB", "name" : "sysDescr"},
{"module" : "RFC1213-MIB", "name" : "sysContact"},
{"module" : "RFC1213-MIB", "name" : "sysLocation"},
{"module" : "RFC-1212", "name" : "OBJECT-TYPE"},
{"module" : "RFC-1215", "name" : "TRAP-TYPE"},
),
"nodes" : {
"cpqPower" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165",
}, # node
"powerDevice" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1",
}, # node
"trapInfo" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.1",
}, # node
"trapCode" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A number identifying the event for the trap that was sent.
Mapped unique trap code per unique event to be used by ISEE's
decoder ring.""",
}, # scalar
"trapDescription" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""A string identifying the event for that last trap that was sent.""",
}, # scalar
"trapDeviceMgmtUrl" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""A string contains the URL for the management software.""",
}, # scalar
"trapDeviceDetails" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""A string details information about the UPS such as SN (serial number),
PN (part number), and MN (Model Name).""",
}, # scalar
"trapDeviceName" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""A string contains the name of the HP Management Module.""",
}, # scalar
"managementModuleIdent" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.2",
}, # node
"deviceManufacturer" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's manufacturer.""",
}, # scalar
"deviceModel" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.2.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's model.""",
}, # scalar
"deviceFirmwareVersion" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.2.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's firmware version(s).""",
}, # scalar
"deviceHardwareVersion" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.2.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's hardware version.""",
}, # scalar
"deviceIdentName" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.2.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""A string identifying the device. it came from sysName entered by user""",
}, # scalar
"devicePartNumber" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.2.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's part number.""",
}, # scalar
"deviceSerialNumber" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.2.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's serial number.""",
}, # scalar
"deviceMACAddress" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.1.2.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's MAC address.""",
}, # scalar
"pdu" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2",
}, # node
"pduIdent" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1",
}, # node
"numOfPdu" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The number of PDUs.""",
}, # scalar
"pduIdentTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.2",
"status" : "current",
"description" :
"""The Aggregate Object with number of entries equal to
NumOfPdu and including the PduIdent group.""",
}, # table
"pduIdentEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.2.1",
"status" : "current",
"linkage" : [
"pduIdentIndex",
],
"description" :
"""The ident table entry containing the name,
model, manufacturer, firmware version, part number, etc.""",
}, # row
"pduIdentIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the PduIdentEntry table.""",
}, # column
"pduName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readwrite",
"description" :
"""The string identify the device.""",
}, # column
"pduModel" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The Device Model.""",
}, # column
"pduManufacturer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The Device Manufacturer Name (e.g. Hewlett-Packard).""",
}, # column
"pduFirmwareVersion" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The firmware revision level of the device.""",
}, # column
"pduPartNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The device part number.""",
}, # column
"pduSerialNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The device serial number.""",
}, # column
"pduStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.2.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"other" : {
"nodetype" : "namednumber",
"number" : "1"
},
"ok" : {
"nodetype" : "namednumber",
"number" : "2"
},
"degraded" : {
"nodetype" : "namednumber",
"number" : "3"
},
"failed" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
"""The overall status of the device. A value of OK(2) indicates the device is operating normally.
A value of degraded(3) indicates the device is operating with warning indicators. A value of
failed(4) indicates the device is operating with critical indicators.""",
}, # column
"pduControllable" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.1.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"yes" : {
"nodetype" : "namednumber",
"number" : "1"
},
"no" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""This object indicates whether or not the device is controllable.""",
}, # column
"pduInput" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.2",
}, # node
"pduInputTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.2.1",
"status" : "current",
"description" :
"""The Aggregate Object with number of entries equal to
NumOfPdu and including the PduInput group.""",
}, # table
"pduInputEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.2.1.1",
"status" : "current",
"linkage" : [
"pduInputIndex",
],
"description" :
"""The input table entry containing the voltage and
current for the PDU""",
}, # row
"pduInputIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.2.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the PduInputEntry table.""",
}, # column
"inputVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.2.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured input voltage from the PDU meters in volts.""",
}, # column
"inputCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.2.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured input current from the PDU meters in hundredths of Amp.""",
}, # column
"pduOutput" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3",
}, # node
"pduOutputTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.1",
"status" : "current",
"description" :
"""The Aggregate Object with number of entries equal to
NumOfPdu and including the PduInput group.""",
}, # table
"pduOutputEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.1.1",
"status" : "current",
"linkage" : [
"pduOutputIndex",
],
"description" :
"""The input table entry containing the name,
heat load, current load, power load, firmware, etc.""",
}, # row
"pduOutputIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the PduOutputEntry table.""",
}, # column
"pduOutputLoad" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "200"
},
],
"range" : {
"min" : "0",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The device output load in percent of rated capacity. A value of -1 will be
returned if the heat load is unable to be measured.""",
}, # column
"pduOutputHeat" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The total heat load measured on the PDU in BTUs. A value of -1 will be
returned if the heat load is unable to be measured.""",
}, # column
"pduOutputPower" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The total power load measured on the PDU in watts. A value of -1 will be
returned if the power load is unable to be measured.""",
}, # column
"pduOutputNumBreakers" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The number of breakers for the device. This variable indicates the number
of rows in the breakers table.""",
}, # column
"pduOutputBreakerTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.2",
"status" : "current",
"description" :
"""List of breaker table entries. The number of entries is given by pduOutputNumBreakers .""",
}, # table
"pduOutputBreakerEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.2.1",
"status" : "current",
"linkage" : [
"pduOutputIndex",
"breakerIndex",
],
"description" :
"""An entry containing information applicable to an breaker.""",
}, # row
"breakerIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""The breaker identifier.""",
}, # column
"breakerVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The breaker voltage in volts.""",
}, # column
"breakerCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The breaker current draw in hundredths of Amp.""",
}, # column
"breakerPercentLoad" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The breaker load in percent.""",
}, # column
"breakerStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.2.3.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"normal" : {
"nodetype" : "namednumber",
"number" : "1"
},
"overloadWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"overloadCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"voltageRangeWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"voltageRangeCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""This object indicates the status of the breaker. A value of normal(1)
indicates the breaker is operating normally. A value of overloadWarning(2)
indicates the breaker has an overload warning. A value of overloadCritical(3)
indicates the breaker is overloaded. A value of voltageRangeWarning(4)
indicates the breaker voltage is out of tolerance by 10-20%. A value of
voltageRangeCritical(5) indicates the breaker voltage is out of tolerance
by more than 20%. Note: Overload status has priority over voltage tolerance
status.""",
}, # column
"ups" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3",
}, # node
"upsIdent" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.1",
}, # node
"upsIdentManufacturer" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "31"
},
],
"range" : {
"min" : "0",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""The UPS Manufacturer Name (e.g. Hewlett-Packard).""",
}, # scalar
"upsIdentModel" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The UPS Model;Part number;Serial number (e.g. HP R5500 XR;204451-B21;B00123456W).""",
}, # scalar
"upsIdentSoftwareVersions" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The firmware revision level(s) of the UPS microcontroller(s).""",
}, # scalar
"upsIdentOemCode" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""A binary code indicating vendor. This should be a ?0x0c? for HP""",
}, # scalar
"upsBattery" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.2",
}, # node
"upsBatTimeRemaining" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.2.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""Battery run time in seconds before UPS turns off due
to low battery.""",
}, # scalar
"upsBatVoltage" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.2.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""Battery voltage as reported by the UPS meters.""",
}, # scalar
"upsBatCurrent" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.2.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-2147483648",
"max" : "2147483647"
},
],
"range" : {
"min" : "-2147483648",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""Battery Current as reported by the UPS metering.
Current is positive when discharging, negative
when recharging the battery.""",
}, # scalar
"upsBatCapacity" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.2.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Battery percent charge.""",
}, # scalar
"upsBatteryAbmStatus" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.2.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"batteryCharging" : {
"nodetype" : "namednumber",
"number" : "1"
},
"batteryDischarging" : {
"nodetype" : "namednumber",
"number" : "2"
},
"batteryFloating" : {
"nodetype" : "namednumber",
"number" : "3"
},
"batteryResting" : {
"nodetype" : "namednumber",
"number" : "4"
},
"unknown" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Gives the status of the Advanced Battery Management;
batteryFloating(3) status means that the charger is temporarily
charging the battery to its float voltage; batteryResting(4) is the
state when the battery is fully charged and none of the other actions
(charging/discharging/floating) is being done.""",
}, # scalar
"upsInput" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.3",
}, # node
"upsInputFrequency" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.3.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The utility line frequency in tenths of Hz.""",
}, # scalar
"upsInputLineBads" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.3.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1155-SMI", "name" : "Counter"},
},
"access" : "readonly",
"description" :
"""The number of times the Input was out of tolerance
in voltage or frequency.""",
}, # scalar
"upsInputNumPhases" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.3.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "6"
},
],
"range" : {
"min" : "0",
"max" : "6"
},
},
},
"access" : "readonly",
}, # scalar
"upsInputTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.3.4",
"status" : "current",
"description" :
"""The Aggregate Object with number of entries equal to
NumPhases and including the UpsInput group.""",
}, # table
"upsInputEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.3.4.1",
"status" : "current",
"linkage" : [
"upsInputPhase",
],
"description" :
"""The input table entry containing the current, voltage, etc.""",
}, # row
"upsInputPhase" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.3.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "6"
},
],
"range" : {
"min" : "0",
"max" : "6"
},
},
},
"access" : "readonly",
"description" :
"""The number of the phase. Serves as index for input table.""",
}, # column
"upsInputVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.3.4.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured input voltage from the UPS meters in volts.""",
}, # column
"upsInputCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.3.4.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured input current from the UPS meters in amps.""",
}, # column
"upsInputWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.3.4.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured input real power in watts. Most UPSs do not report
this XCP data""",
}, # column
"upsInputSource" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.3.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"other" : {
"nodetype" : "namednumber",
"number" : "1"
},
"none" : {
"nodetype" : "namednumber",
"number" : "2"
},
"primaryUtility" : {
"nodetype" : "namednumber",
"number" : "3"
},
"bypassFeed" : {
"nodetype" : "namednumber",
"number" : "4"
},
"secondaryUtility" : {
"nodetype" : "namednumber",
"number" : "5"
},
"generator" : {
"nodetype" : "namednumber",
"number" : "6"
},
"flywheel" : {
"nodetype" : "namednumber",
"number" : "7"
},
"fuelcell" : {
"nodetype" : "namednumber",
"number" : "8"
},
},
},
"access" : "readonly",
"description" :
"""The present external source of input power. The enumeration
none(2) indicates that there is no external source of
power, for example, the UPS is On Battery (an internal source).
The bypassFeed(4) can only be used when the Bypass source is known
to be a separate utility feed than the primaryUtility(3).""",
}, # scalar
"upsOutput" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.4",
}, # node
"upsOutputLoad" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.4.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "200"
},
],
"range" : {
"min" : "0",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The UPS output load in percent of rated capacity.""",
}, # scalar
"upsOutputFrequency" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.4.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured UPS output frequency in tenths of Hz.""",
}, # scalar
"upsOutputNumPhases" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.4.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "6"
},
],
"range" : {
"min" : "0",
"max" : "6"
},
},
},
"access" : "readonly",
"description" :
"""The number of metered output phases, serves as the table
index.""",
}, # scalar
"upsOutputTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.4.4",
"status" : "current",
"description" :
"""The Aggregate Object with number of entries equal to NumPhases
and including the UpsOutput group.""",
}, # table
"upsOutputEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.4.4.1",
"status" : "current",
"linkage" : [
"upsOutputPhase",
],
"description" :
"""Output Table Entry containing voltage, current, etc.""",
}, # row
"upsOutputPhase" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.4.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "6"
},
],
"range" : {
"min" : "0",
"max" : "6"
},
},
},
"access" : "readonly",
"description" :
"""The number {1..3} of the output phase.""",
}, # column
"upsOutputVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.4.4.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured output voltage from the UPS metering in volts.""",
}, # column
"upsOutputCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.4.4.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured UPS output current in amps.""",
}, # column
"upsOutputWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.4.4.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured real output power in watts.""",
}, # column
"upsOutputSource" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.4.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"other" : {
"nodetype" : "namednumber",
"number" : "1"
},
"none" : {
"nodetype" : "namednumber",
"number" : "2"
},
"normal" : {
"nodetype" : "namednumber",
"number" : "3"
},
"bypass" : {
"nodetype" : "namednumber",
"number" : "4"
},
"battery" : {
"nodetype" : "namednumber",
"number" : "5"
},
"booster" : {
"nodetype" : "namednumber",
"number" : "6"
},
"reducer" : {
"nodetype" : "namednumber",
"number" : "7"
},
"parallelCapacity" : {
"nodetype" : "namednumber",
"number" : "8"
},
"parallelRedundant" : {
"nodetype" : "namednumber",
"number" : "9"
},
"highEfficiencyMode" : {
"nodetype" : "namednumber",
"number" : "10"
},
},
},
"access" : "readonly",
"description" :
"""The present source of output power. The enumeration
none(2) indicates that there is no source of output
power (and therefore no output power), for example,
the system has opened the output breaker.""",
}, # scalar
"upsBypass" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.5",
}, # node
"upsBypassFrequency" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.5.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The bypass frequency in tenths of Hz.""",
}, # scalar
"upsBypassNumPhases" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.5.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "6"
},
],
"range" : {
"min" : "0",
"max" : "6"
},
},
},
"access" : "readonly",
"description" :
"""The number of lines in the UPS bypass table.""",
}, # scalar
"upsBypassTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.5.3",
"status" : "current",
}, # table
"upsBypassEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.5.3.1",
"status" : "current",
"linkage" : [
"upsBypassPhase",
],
"description" :
"""Entry in the UpsBypassTable.""",
}, # row
"upsBypassPhase" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.5.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "6"
},
],
"range" : {
"min" : "0",
"max" : "6"
},
},
},
"access" : "readonly",
"description" :
"""The Bypass Phase, index for the table.""",
}, # column
"upsBypassVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.5.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured UPS bypass voltage in volts.""",
}, # column
"upsEnvironment" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6",
}, # node
"upsEnvAmbientTemp" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "200"
},
],
"range" : {
"min" : "-100",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The reading of the ambient temperature in the vicinity of the
UPS or SNMP agent.""",
}, # scalar
"upsEnvAmbientLowerLimit" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "200"
},
],
"range" : {
"min" : "-100",
"max" : "200"
},
},
},
"access" : "readwrite",
"description" :
"""The Lower Limit of the ambient temperature; if UpsEnvAmbientTemp
falls below this value, the UpsAmbientTempBad alarm will occur.""",
}, # scalar
"upsEnvAmbientUpperLimit" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "200"
},
],
"range" : {
"min" : "-100",
"max" : "200"
},
},
},
"access" : "readwrite",
"description" :
"""The Upper Limit of the ambient temperature; if UpsEnvAmbientTemp
rises above this value, the UpsAmbientTempBad alarm will occur.
This value should be greater than UpsEnvAmbientLowerLimit.""",
}, # scalar
"upsEnvAmbientHumidity" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""The reading of the ambient humidity in the vicinity of the
UPS or SNMP agent.""",
}, # scalar
"upsEnvRemoteTemp" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "200"
},
],
"range" : {
"min" : "-100",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The reading of a remote temperature sensor connected to the
UPS or SNMP agent.""",
}, # scalar
"upsEnvRemoteHumidity" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""The reading of a remote humidity sensor connected to the
UPS or SNMP agent.""",
}, # scalar
"upsEnvNumContacts" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "1024"
},
],
"range" : {
"min" : "1",
"max" : "1024"
},
},
},
"access" : "readonly",
"description" :
"""The number of Contacts in the UpsContactsTable.
This object indicates the number of rows in the
UpsContactsTable.""",
}, # scalar
"upsContactsTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.8",
"status" : "current",
"description" :
"""A list of Contact Sensing table entries.
The number of entries is given by the value of
UpsEnvNumContacts.""",
}, # table
"upsContactsTableEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.8.1",
"status" : "current",
"linkage" : [
"upsContactIndex",
],
"description" :
"""An entry containing information applicable
to a particular Contact input.""",
}, # row
"upsContactIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.8.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "1024"
},
],
"range" : {
"min" : "1",
"max" : "1024"
},
},
},
"access" : "readonly",
"description" :
"""The Contact identifier; identical to the Contact Number.""",
}, # column
"upsContactType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.8.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"normallyOpen" : {
"nodetype" : "namednumber",
"number" : "1"
},
"normallyClosed" : {
"nodetype" : "namednumber",
"number" : "2"
},
"anyChange" : {
"nodetype" : "namednumber",
"number" : "3"
},
"notUsed" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readwrite",
"description" :
"""The normal state for this contact. The 'other'
state is the Active state for generating the UpstdContactActiveNotice
trap. If anyChange(3) is selected, then this trap is sent
any time the contact changes to either Open or Closed.
No traps are sent if the Contact is set to notUsed(4).
In many cases, the configuration for Contacts may be done by other
means, so this object may be read-only.""",
}, # column
"upsContactState" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.8.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"open" : {
"nodetype" : "namednumber",
"number" : "1"
},
"closed" : {
"nodetype" : "namednumber",
"number" : "2"
},
"openWithNotice" : {
"nodetype" : "namednumber",
"number" : "3"
},
"closedWithNotice" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
"""The current state of the Contact input;
the value is based on the open/closed input state
and the setting for UpsContactType.
When entering the openWithNotice(3) and closedWithNotice(4)
states, no entries added to the UpsAlarmTable, but
the UpstdContactActiveNotice trap is sent.""",
}, # column
"upsContactDescr" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.8.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readwrite",
"description" :
"""A label identifying the Contact. This object should be
set by the administrator.""",
}, # column
"upsEnvRemoteTempLowerLimit" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "200"
},
],
"range" : {
"min" : "-100",
"max" : "200"
},
},
},
"access" : "readwrite",
"description" :
"""The Lower Limit of the remote temperature; if UpsEnvRemoteTemp
falls below this value, the UpsRemoteTempBad alarm will occur.""",
}, # scalar
"upsEnvRemoteTempUpperLimit" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "200"
},
],
"range" : {
"min" : "-100",
"max" : "200"
},
},
},
"access" : "readwrite",
"description" :
"""The Upper Limit of the remote temperature; if UpsEnvRemoteTemp
rises above this value, the UpsRemoteTempBad alarm will occur.
This value should be greater than UpsEnvRemoteTempLowerLimit.""",
}, # scalar
"upsEnvRemoteHumidityLowerLimit" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readwrite",
"description" :
"""The Lower Limit of the remote humidity reading; if UpsEnvRemoteHumidity
falls below this value, the UpsRemoteHumidityBad alarm will occur.""",
}, # scalar
"upsEnvRemoteHumidityUpperLimit" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.6.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readwrite",
"description" :
"""The Upper Limit of the remote humidity reading; if UpsEnvRemoteHumidity
rises above this value, the UpsRemoteHumidityBad alarm will occur.
This value should be greater than UpsEnvRemoteHumidityLowerLimit.""",
}, # scalar
"upsTest" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.7",
}, # node
"upsTestBattery" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.7.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"startTest" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""Setting this variable to startTest initiates the
battery test. All other set values are invalid.""",
}, # scalar
"upsTestBatteryStatus" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.7.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"unknown" : {
"nodetype" : "namednumber",
"number" : "1"
},
"passed" : {
"nodetype" : "namednumber",
"number" : "2"
},
"failed" : {
"nodetype" : "namednumber",
"number" : "3"
},
"inProgress" : {
"nodetype" : "namednumber",
"number" : "4"
},
"notSupported" : {
"nodetype" : "namednumber",
"number" : "5"
},
"inhibited" : {
"nodetype" : "namednumber",
"number" : "6"
},
"scheduled" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readonly",
"description" :
"""Reading this enumerated value gives an indication of the
UPS Battery test status.""",
}, # scalar
"upsTestTrap" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.7.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"startTestTrap" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""Setting startTestTrap to 1 will initiate a TrapTest
is sent out from HPMM. All other set values are invalid.""",
}, # scalar
"upsControl" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.8",
}, # node
"upsControlOutputOffDelay" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.8.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""Setting this value to other than zero will cause the UPS
output to turn off after the number of seconds.
Setting it to 0 will cause an attempt to abort a pending
shutdown.""",
}, # scalar
"upsControlOutputOnDelay" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.8.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""Setting this value to other than zero will cause the UPS
output to turn on after the number of seconds.
Setting it to 0 will cause an attempt to abort a pending
startup.""",
}, # scalar
"upsControlOutputOffTrapDelay" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.8.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""When UpsControlOutputOffDelay reaches this value, a trap will
be sent.""",
}, # scalar
"upsControlOutputOnTrapDelay" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.8.4",
"status" : "obsolete",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""When UpsControlOutputOnDelay reaches this value, a
UpsOutputOff trap will be sent.""",
}, # scalar
"upsControlToBypassDelay" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.8.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""Setting this value to other than zero will cause the UPS
output to go to Bypass after the number of seconds.
If the Bypass is unavailable, this may cause the UPS
to not supply power to the load.
Setting it to 0 will cause an attempt to abort a pending
shutdown.""",
}, # scalar
"upsLoadShedSecsWithRestart" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.8.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""Setting this value will cause the UPS output to turn off
after the set number of seconds, then restart (after a UPS-defined
'down time') when the utility is again available.
Unlike UpsControlOutputOffDelay, which might or might not,
this object always maps to the XCP 0x8A Load Dump & Restart command,
so the desired shutdown and restart behavior is guaranteed to happen.
Once set, this command cannot be aborted.
This is the preferred Control object to use when performing an On
Battery OS Shutdown.""",
}, # scalar
"upsConfig" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.9",
}, # node
"upsConfigOutputVoltage" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.9.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The nominal UPS Output voltage per phase in volts.""",
}, # scalar
"upsConfigInputVoltage" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.9.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The nominal UPS Input voltage per phase in volts.""",
}, # scalar
"upsConfigOutputWatts" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.9.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The nominal UPS available real power output in watts.""",
}, # scalar
"upsConfigOutputFreq" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.9.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The nominal output frequency in tenths of Hz.""",
}, # scalar
"upsConfigDateAndTime" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.9.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "22"
},
],
"range" : {
"min" : "0",
"max" : "22"
},
},
},
"access" : "readwrite",
"description" :
"""Date and time information for the UPS. Setting this variable
will initiate a set UPS date and time to this value. Reading
this variable will return the UPS time and date. This value
is not referenced to sysUpTime. It is simply the clock value
from the UPS real time clock.
Format is as follows: MM/DD/YYYY:HH:MM:SS.""",
}, # scalar
"upsConfigLowOutputVoltageLimit" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.9.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The Lower limit for acceptable Output Voltage, per the UPS
specifications.""",
}, # scalar
"upsConfigHighOutputVoltageLimit" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.9.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The Upper limit for acceptable Output Voltage, per the UPS
specifications.""",
}, # scalar
"upsRecep" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.10",
}, # node
"upsNumReceptacles" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.10.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "64"
},
],
"range" : {
"min" : "0",
"max" : "64"
},
},
},
"access" : "readonly",
"description" :
"""The number of independently controllable Receptacles, as described in the
UpsRecepTable.""",
}, # scalar
"upsRecepTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.10.2",
"status" : "current",
"description" :
"""The Aggregate Object with number of entries equal to
NumReceptacles and including the UpsRecep group.""",
}, # table
"upsRecepEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.10.2.1",
"status" : "current",
"linkage" : [
"upsRecepIndex",
],
"description" :
"""The Recep table entry, etc.""",
}, # row
"upsRecepIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.10.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "64"
},
],
"range" : {
"min" : "1",
"max" : "64"
},
},
},
"access" : "readonly",
"description" :
"""The number of the Receptacle. Serves as index for Receptacle table.""",
}, # column
"upsRecepStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.10.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"on" : {
"nodetype" : "namednumber",
"number" : "1"
},
"off" : {
"nodetype" : "namednumber",
"number" : "2"
},
"pendingOff" : {
"nodetype" : "namednumber",
"number" : "3"
},
"pendingOn" : {
"nodetype" : "namednumber",
"number" : "4"
},
"unknown" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""The Recep Status 1=On/Close, 2=Off/Open, 3=On w/Pending Off,
4=Off w/Pending ON, 5=Unknown.""",
}, # column
"upsRecepOffDelaySecs" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.10.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "2147483647"
},
],
"range" : {
"min" : "-1",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""The Delay until the Receptacle is turned Off. Setting
this value to other than -1 will cause the UPS output to
turn off after the number of seconds (0 is immediately).
Setting it to -1 will cause an attempt to abort a pending shutdown.
When this object is set while the UPS is On Battery, it is not necessary
to set UpsRecepOnDelaySecs, since the outlet will turn back on
automatically when power is available again.""",
}, # column
"upsRecepOnDelaySecs" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.10.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "2147483647"
},
],
"range" : {
"min" : "-1",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
""" The Delay until the Receptacle is turned On. Setting
this value to other than -1 will cause the UPS output to
turn on after the number of seconds (0 is immediately).
Setting it to -1 will cause an attempt to abort a pending restart.""",
}, # column
"upsRecepAutoOffDelay" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.10.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "32767"
},
],
"range" : {
"min" : "-1",
"max" : "32767"
},
},
},
"access" : "readwrite",
"description" :
"""The delay after going On Battery until the Receptacle is
automatically turned Off. A value of -1 means that this Output should
never be turned Off automatically, but must be turned Off only by command.
Values from 0 to 30 are valid, but probably innappropriate.
The AutoOffDelay can be used to prioritize loads in the event of a prolonged
power outage; less critical loads will turn off earlier to extend battery
time for the more critical loads. If the utility power is restored before the
AutoOff delay counts down to 0 on an outlet, that outlet will not turn Off.""",
}, # column
"upsRecepAutoOnDelay" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.10.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "32767"
},
],
"range" : {
"min" : "-1",
"max" : "32767"
},
},
},
"access" : "readwrite",
"description" :
"""Seconds delay after the Outlet is signaled to turn On before the Output is
Automatically turned ON. A value of -1 means that this Output should never
be turned On automatically, but only when specifically commanded to do so.
A value of 0 means that the Receptacle should come On immediately
at power-up or for an On command.""",
}, # column
"upsRecepShedSecsWithRestart" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.10.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""Setting this value will cause the UPS output to turn off
after the set number of seconds, then restart (after a UPS-defined
'down time') when the utility is again available.
Unlike UpsRecepOffDelaySecs, which might or might not,
this object always maps to the XCP 0x8A Load Dump & Restart command,
so the desired shutdown and restart behavior is guaranteed to happen.
Once set, this command cannot be aborted.""",
}, # column
"upsTopology" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.11",
}, # node
"upsTopologyType" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.11.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "32767"
},
],
"range" : {
"min" : "0",
"max" : "32767"
},
},
},
"access" : "readonly",
"description" :
"""Value which denotes the type of UPS by its power topology. Values are the
same as those described in the XCP Topology block's Overall Topology field.""",
}, # scalar
"upsTopoMachineCode" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.11.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "32767"
},
],
"range" : {
"min" : "0",
"max" : "32767"
},
},
},
"access" : "readonly",
"description" :
"""ID Value which denotes the Compaq/HP model of the UPS for software. Values
are the same as those described in the XCP Configuration block's Machine Code
field.""",
}, # scalar
"upsTopoUnitNumber" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.11.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "64"
},
],
"range" : {
"min" : "0",
"max" : "64"
},
},
},
"access" : "readonly",
"description" :
"""Identifies which unit and what type of data is being reported.
A value of 0 means that this MIB information comes from the top-level system
view (eg, manifold module or system bypass cabinet reporting total system
output). Standalone units also use a value of 0, since they are the 'full
system' view.
A value of 1 or higher indicates the number of the module in the system
which is reporting only its own data in the HP MIB objects.""",
}, # scalar
"upsTopoPowerStrategy" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.3.11.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"highAlert" : {
"nodetype" : "namednumber",
"number" : "1"
},
"standard" : {
"nodetype" : "namednumber",
"number" : "2"
},
"enableHighEfficiency" : {
"nodetype" : "namednumber",
"number" : "3"
},
"immediateHighEfficiency" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readwrite",
"description" :
"""Value which denotes which Power Strategy is currently set for the UPS.
The values are:
highAlert(1) - The UPS shall optimize its operating state to maximize its
power-protection levels. This mode will be held for at most 24 hours.
standard(2) - Balanced, normal power protection strategy. UPS will not enter
HE operating mode from this setting.
enableHighEfficiency(3) - The UPS is enabled to enter HE operating mode to
optimize its operating state to maximize its efficiency, when
conditions change to permit it (as determined by the UPS).
forceHighEfficiency(4) - If this value is permitted to be Set for this UPS,
and if conditions permit, requires the UPS to enter High Efficiency
mode now, without delay (for as long as utility conditions permit).
After successfully set to forceHighEfficiency(4),
UpsTopoPowerStrategy changes to value enableHighEfficiency(3).
UpsOutputSource will indicate if the UPS status is actually operating in
High Efficiency mode.""",
}, # scalar
"pdr" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4",
}, # node
"pdrIdent" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1",
}, # node
"pdrName" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readwrite",
"description" :
"""The string identify the device.""",
}, # scalar
"pdrModel" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The Device Model.""",
}, # scalar
"pdrManufacturer" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The Device Manufacturer Name (e.g. Hewlett-Packard).""",
}, # scalar
"pdrFirmwareVersion" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The firmware revision level of the device.""",
}, # scalar
"pdrPartNumber" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The device part number.""",
}, # scalar
"pdrSerialNumber" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The PDR's serial number.""",
}, # scalar
"pdrVARating" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The VA Rating of this PDR (all phases)""",
}, # scalar
"pdrNominalOutputVoltage" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The nominal Output Voltage may differ from the nominal
Input Voltage if the PDR has an input transformer""",
}, # scalar
"pdrNumPhases" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "3"
},
],
"range" : {
"min" : "1",
"max" : "3"
},
},
},
"access" : "readonly",
"description" :
"""The number of phases for this PDR""",
}, # scalar
"pdrNumPanels" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""The number of panels or subfeeds in this PDR""",
}, # scalar
"pdrNumBreakers" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.1.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""The number of breakers in this PDR""",
}, # scalar
"pdrPanel" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2",
}, # node
"pdrPanelTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to
pdrNumPanels""",
}, # table
"pdrPanelEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1",
"status" : "current",
"linkage" : [
"pdrPanelIndex",
],
"description" :
"""The panel table entry containing all power parameters
for each panel.""",
}, # row
"pdrPanelIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "10"
},
],
"range" : {
"min" : "0",
"max" : "10"
},
},
},
"access" : "readonly",
"description" :
"""Index for the pdrPanelEntry table.""",
}, # column
"pdrPanelFrequency" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The present frequency reading for the panel voltage.""",
}, # column
"pdrPanelPower" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The present power of the panel.""",
}, # column
"pdrPanelRatedCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The present rated current of the panel.""",
}, # column
"pdrPanelMonthlyKWH" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The accumulated KWH for this panel since the beginning of this
calendar month or since the last reset.""",
}, # column
"pdrPanelYearlyKWH" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The accumulated KWH for this panel since the beginning of this
calendar year or since the last reset.""",
}, # column
"pdrPanelTotalKWH" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The accumulated KWH for this panel since it was put into service
or since the last reset.""",
}, # column
"pdrPanelVoltageA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured panel output voltage.""",
}, # column
"pdrPanelVoltageB" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured panel output voltage.""",
}, # column
"pdrPanelVoltageC" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured panel output voltage.""",
}, # column
"pdrPanelCurrentA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured panel output current.""",
}, # column
"pdrPanelCurrentB" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured panel output current.""",
}, # column
"pdrPanelCurrentC" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured panel output current.""",
}, # column
"pdrPanelLoadA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "200"
},
],
"range" : {
"min" : "0",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The percentage of load is the ratio of each output
current to the rated output current to the panel.""",
}, # column
"pdrPanelLoadB" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "200"
},
],
"range" : {
"min" : "0",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The percentage of load is the ratio of each output
current to the rated output current to the panel.""",
}, # column
"pdrPanelLoadC" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.2.1.1.16",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "200"
},
],
"range" : {
"min" : "0",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The percentage of load is the ratio of each output
current to the rated output current to the panel.""",
}, # column
"pdrBreaker" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3",
}, # node
"pdrBreakerTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1",
"status" : "current",
"description" :
"""List of breaker table entries. The number of entries
is given by pdrNumBreakers for this panel.""",
}, # table
"pdrBreakerEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1",
"status" : "current",
"linkage" : [
"pdrPanelIndex",
"pdrBreakerIndex",
],
"description" :
"""An entry containing information applicable to a
particular output breaker of a particular panel.""",
}, # row
"pdrBreakerIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "200"
},
],
"range" : {
"min" : "0",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The index of breakers. 42 breakers in each panel,
arranged in odd and even columns""",
}, # column
"pdrBreakerPanel" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The index of panel that these breakers are installed on.""",
}, # column
"pdrBreakerNumPosition" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The position of this breaker in the panel, 1-phase breaker
or n-m breaker for 2-phase or n-m-k breaker for 3-phase.""",
}, # column
"pdrBreakerNumPhases" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The number of phase for this particular breaker.""",
}, # column
"pdrBreakerNumSequence" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The sequence of this breaker. i.e. 1 for single phase
1,2 for 2-phase or 1,2,3 for 3-phase.""",
}, # column
"pdrBreakerRatedCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The rated current in Amps for this particular breaker.""",
}, # column
"pdrBreakerMonthlyKWH" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The accumulated KWH for this breaker since the beginning of this
calendar month or since the last reset.""",
}, # column
"pdrBreakerYearlyKWH" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The accumulated KWH for this breaker since the beginning of this
calendar year or since the last reset.""",
}, # column
"pdrBreakerTotalKWH" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The accumulated KWH for this breaker since it was put into service
or since the last reset.""",
}, # column
"pdrBreakerCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured output current for this breaker Current.""",
}, # column
"pdrBreakerCurrentPercent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "200"
},
],
"range" : {
"min" : "0",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The ratio of output current over rated
current for each breaker.""",
}, # column
"pdrBreakerPower" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The power for this breaker in Watts.""",
}, # column
"pdrBreakerPercentWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "200"
},
],
"range" : {
"min" : "0",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The percentage of Warning set for this breaker.""",
}, # column
"pdrBreakerPercentOverload" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.4.3.1.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The percentage of Overload set for this breaker.""",
}, # column
"mpdu" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5",
}, # node
"mpduIdent" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1",
}, # node
"mpduNumMPDU" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The number of Modular PDUs (Core) detected.""",
}, # scalar
"mpduIdentTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2",
"status" : "current",
"description" :
"""Indexed by mpduNumMPDU. So aggregate objects for each entry equal to mpduNumMPDU
before go to the next entry. i.e if 4 MPDU detected, display 4 indexes, 4
Manufacturers, 4 Models..., start with 1.""",
}, # table
"mpduIdentEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1",
"status" : "current",
"linkage" : [
"mpduIdentIndex",
],
"description" :
"""The table entry containing the identifications of each MPDU as well
as names of PDR panel, location of MPDU in a Data Center.""",
}, # row
"mpduIdentIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the MpduIdentEntry table. Start with 1""",
}, # column
"mpduManufacturer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
""" The Core PDU manufacturer. StringLength=[21]""",
}, # column
"mpduModel" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU model, marketing name. StringLength=[40].""",
}, # column
"mpduName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU name, descriptive name or hostname. StringLength=[16].""",
}, # column
"mpduFirmwareVersion" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU firmware version(s). StringLength=[12].""",
}, # column
"mpduHardwareVersion" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU hardware version. StringLength=[8].""",
}, # column
"mpduPartNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU part number. StringLength=[21].""",
}, # column
"mpduSerialNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU external serial number. StringLength=[21].""",
}, # column
"mpduUUID" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU's UUID equals PN add SN then canonical conversion. StringLength=[37].""",
}, # column
"mpduIP" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU's assigned IP. StringLength=[16].""",
}, # column
"mpduMACAddress" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU MAC address. StringLength=[18].""",
}, # column
"mpduControlStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU Control Status: master or slave units.
StringLength=[31].""",
}, # column
"mpduRegion" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU's Region: NA, Intl, Japan... StringLength=[7].""",
}, # column
"mpduType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU's Type: Delta, Wye, single, Wye+lowline.
StringLength=[24].""",
}, # column
"mpduPowerRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU's Power Rating in KVA (Name Plate). Value is presented
in 1/100 of the KVA. i.e. 1750 means 17.50 KVA""",
}, # column
"mpduInputRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU's Input Current Rating in A (Name Plate)""",
}, # column
"mpduInputPlug" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU's Input Plug: IEC60309 516P6,NEMA L15-30P...
StringLength=[16].""",
}, # column
"mpduNumBreakers" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The number of breakers used in each MPDU (6 by default).""",
}, # column
"mpduNumOutlet" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The number of outlets of each MPDU (6 by default).""",
}, # column
"mpduUHeight" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The MPDU U Height 1U or 2 U (1U by default).""",
}, # column
"mpduRedundantStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Status of Core PDU in paired or not: Stand-alone or Redundant.
StringLength=[12].""",
}, # column
"mpduNumSmartExtBar" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.22",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""total number of detected Smart Sticks connecting to each PDU.""",
}, # column
"mpduPanelName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.23",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Name of Panel that was entered by customer in GUI.
StringLength=[50].""",
}, # column
"mpduPanelBreakerName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Name of breakers or Panel Position entered by customer in GUI.
StringLength=[50].""",
}, # column
"mpduPanelBreakerRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.25",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Breaker rating of the PDR Panel.""",
}, # column
"mpduACFeedName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.26",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""it would be A, B, Red, or White...
StringLength=[50].""",
}, # column
"mpduFloorName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.27",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Name of building floor entered by customer in GUI.
StringLength=[50].""",
}, # column
"mpduRoomName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.28",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Name of the room entered by customer in GUI. StringLength=[50].""",
}, # column
"mpduRow" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.29",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Data Center Row entered by customer in GUI.
StringLength=[11].""",
}, # column
"mpduRowPosition" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.30",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Position in Row entered by customer in GUI. Null will be returned
if not detected. StringLength=[11].""",
}, # column
"mpduRackName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.31",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Name of the Rack entered by customer in GUI. Null will be returned
if not detected. StringLength=[50].""",
}, # column
"mpduRackHeight" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.32",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Rack Type (22U, 42U, 50U...). Null will be returned if not detected.
StringLength=[3].""",
}, # column
"mpduRackID" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.33",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""An unique ID to identify a rack, i.e. Rack SN. Null will be returned if
not detected. StringLength=[50].""",
}, # column
"mpduUPosition" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.34",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""PDU Rack bottom U Start position, core U position. Null will be returned if not detected.""",
}, # column
"mpduPairedPDUUUID" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.35",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""UUID of the PDU paired with this one. Null will be returned if not detected.
StringLength=[50].""",
}, # column
"mpduPairedPDUIP" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.36",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IP address of the PDU paired with this one. Null will be returned if not detected.
StringLength=[16].""",
}, # column
"mpduInstalledLocation" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.37",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""0=Left 0U side area or rack frame, 1= Right 0U side area or rack frame, 2= front RETMA rail,
3=back RETMA rail) - spatial location. A value of -1 will be returned if not detected.""",
}, # column
"mpduTotalPowerWatt" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.38",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The total power for the whole iPDU in Watts. A value
of -1 will be returned if the power is unable to be calculated.""",
}, # column
"mpduTotalPowerVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.39",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The total power for the whole iPDU in VA. A value
of -1 will be returned if the power is unable to be calculated.""",
}, # column
"mpduTotalPercentLoad" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.40",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The whole iPDU's load in percent of rated capacity. Value is
presented in 1/10 of the percentage i.e. 125 is 12.5%. A value
of -1 will be returned if the current is unable to be measured.""",
}, # column
"mpduRegionalNominalVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.1.2.1.41",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""value is either America(208V), Japan(202V) or blank for International Model""",
}, # column
"mpduOutput" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2",
}, # node
"mpduOutputTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1",
"status" : "current",
"description" :
"""Indexed by mpduNumOutlet. So aggregate objects for each entry equal to (mpduNumMPDU
multiplies mpduNumOutlet) before go to the next entry. i.e if 4 MPDU detected,
display 6x4 indexes, 6x4 OutputStatus, 6x4 OutputBreakerRating...start with 1.""",
}, # table
"mpduOutputEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1",
"status" : "current",
"linkage" : [
"mpduIdentIndex",
"mpduOutputIndex",
],
"description" :
"""The output table entry containing the voltage, current, PF,
percent load, VA, Watts for the Modular PDU.""",
}, # row
"mpduOutputIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the MpduOutputEntry table.""",
}, # column
"mpduOutputStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The overall status of the device: off, on, problem. StringLength=[7].""",
}, # column
"mpduOutputBreakerRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The rating of each MPDU Output in Ampere.""",
}, # column
"mpduOutputSmartDevice" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""A smart cord device detected: False, True. StringLength=[5].""",
}, # column
"mpduOutputPercentLoad" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The MPDU's each output load in percent of rated capacity. Value is
presented in 1/10 of the percentage i.e. 125 is 12.5%. A value
of -1 will be returned if the current is unable to be measured.""",
}, # column
"mpduOutputVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The measured output voltage from the PDU meters in volts. Value is
presented in 1/10 of the Volt i.e. 2095 is 209.5 V. A value
of -1 will be returned if the voltage is unable to be measured.""",
}, # column
"mpduOutputCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The measured output current from the PDU meters in amps. Value is
presented in 1/100 of the Ampere i.e. 175 is 1.75 A. A value
of -1 will be returned if the current is unable to be measured.""",
}, # column
"mpduOutputPowerVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The total VA load (VI) measured on the PDU in VA. A value
of -1 will be returned if the power is unable to be calculated.""",
}, # column
"mpduOutputPowerWatt" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The total power load measured on the PDU in Watts. A value
of -1 will be returned if the power is unable to be calculated.""",
}, # column
"mpduOutputPowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The power factor is presented in 1/100 of the unit i.e.
98 is 0.98. A value of -1 will be returned if the power factor is unable
to be measured.""",
}, # column
"mpduOutputWarningThreshold" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Report the User-set threshhold limit for Warning in percentage""",
}, # column
"mpduOutputCriticalThreshold" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Report the User-set threshhold limit for Critical in percentage""",
}, # column
"mpduOutputPowerWattHour" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.2.1.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Support Billing Grade Energy Metering in Watt-Hour. A value
of -1 will be returned if the power is unable to be calculated.""",
}, # column
"mpduDeviceIdent" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3",
}, # node
"mpduDeviceIdentTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1",
"status" : "current",
"description" :
"""Indexed by mpduNumOutlet. So aggregate objects for each entry equal
to (mpduNumMPDU multiplies mpduNumOutlet) before go to the next entry.
i.e if 4 MPDU detected, display 6x4 indexes, 6x4 mpduDeviceStatus, 6x4
DeviceUIDStatus...start with 1.""",
}, # table
"mpduDeviceIdentEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1",
"status" : "current",
"linkage" : [
"mpduIdentIndex",
"mpduDeviceIdentIndex",
],
"description" :
"""The ident table entry containing the name, model,
manufacturer, firmware version, part number... for
each smart device, null value for unknown device.""",
}, # row
"mpduDeviceIdentIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index is based on the total number of Smart Extension Bars plus
smart devices (with smart power cord i.e. blades) detected.""",
}, # column
"mpduDeviceStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The overall status of the device: off, on, problem. StringLength=[8].""",
}, # column
"mpduDeviceUIDStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The overall status of the device UID: off, on, blink. StringLength=[8].""",
}, # column
"mpduDeviceNumOutlet" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The number of outlets of detected the device. This variable
indicates the number of rows in the breakers table. Default is 5""",
}, # column
"mpduDeviceUHeight" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The Unit height of ext bar or device. Some device supports non-integer.
Present value x100, when read back, divided by 100 to get 2 decimal points.""",
}, # column
"mpduDevicePowerRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's Power Rating in VA. Null will be returned if not detected.
StringLength=[8].""",
}, # column
"mpduDeviceManufacturer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Device Manufacturer Name (e.g. Hewlett-Packard).
Null will be returned if not detected. StringLength=[16].""",
}, # column
"mpduDeviceType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Device's Type: Ext Bar, server, or network device...
StringLength=[16].""",
}, # column
"mpduDeviceModel" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Device Model is Managed or Non-managed Ext Bar.
Null will be returned if not detected. StringLength=[24].""",
}, # column
"mpduDeviceName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The string identify the Device. i.e. 'BAR#1' (descriptive name)
Null will be returned if not detected. StringLength=[32].""",
}, # column
"mpduDeviceFirmwareVersion" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The firmware revision level of Sm Ext Bar. Null will be returned if
not detected. StringLength=[8].""",
}, # column
"mpduDeviceHardwareVersion" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The hardware version of Sm Ext Bar. Null will be returned if not
detected. StringLength=[8].""",
}, # column
"mpduDevicePartNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's part number. Null will be returned if not detected.
StringLength=[21].""",
}, # column
"mpduDeviceSerialNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's serial number. Null will be returned if not detected.
StringLength=[21].""",
}, # column
"mpduDeviceUUID" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's UUID. Null will be returned if not detected.
StringLength=[37].""",
}, # column
"mpduDeviceIP" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device's IP Address. Null will be returned if not detected.
StringLength=[16].""",
}, # column
"mpduDeviceMAC" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The device mgmt processor MAC Address. Null will be returned if
not detected. StringLength=[18].""",
}, # column
"mpduDevicePSUSlotNo" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Device power supply logical slot number.""",
}, # column
"mpduDeviceUPosition" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Device U start position.""",
}, # column
"mpduDeviceDetectionThreshold" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.3.1.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Read device detection threshold between 10 and 50mA.""",
}, # column
"mpduSmExtBarOutlet" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4",
}, # node
"mpduSmExtBarOutletTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1",
"status" : "current",
"description" :
"""Indexed by mpduDeviceNumOutlet. So aggregate objects for each entry equal to
(mpduNumMPDU multiplies mpduNumOutlet multiplies mpduDeviceNumOutlet) before
go to the next entry. i.e if 4 MPDU detected, display 5x6x4 indexes, 5x6x4
mpduSmExtBarOutletStatus, 5x6x4 mpduSmExtBarOutletUIDStatus...start with 1.""",
}, # table
"mpduSmExtBarOutletEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1",
"status" : "current",
"linkage" : [
"mpduIdentIndex",
"mpduOutputIndex",
"mpduSmExtBarOutletIndex",
],
"description" :
"""Entries contain status, measurements for each outlet (no PowerVA)
Display null if it can't be measured or no device detected.""",
}, # row
"mpduSmExtBarOutletIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""The Index of Smart Extension Bar.""",
}, # column
"mpduSmExtBarOutletStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The overall status of the Smart Extension Bar's outlet: off, on, problem...
StringLength=[3].""",
}, # column
"mpduSmExtBarOutletUIDStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The overall status of the Smart Extension Bar's UID: off, on, blink...
StringLength=[8].""",
}, # column
"mpduSmExtBarOutletRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Sm Ext Bar Outlet's Rating in Amp. A value of -1 will be returned
if the stick is unable to detect. StringLength=[8].""",
}, # column
"mpduSmExtBarOutletVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The SM Ext Bar voltage in volts. Value is presented 1/10 of the Volt
i.e. 2098 means 209.8 V. A value of -1 will be returned if the voltage is unable
to be measured.""",
}, # column
"mpduSmExtBarOutletCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The SM Ext Bar current draw in Amps. Value is presented in 1/100 of the Ampere
i.e. 127 means 1.27 A. A value of -1 will be returned if the current is unable
to be measured.""",
}, # column
"mpduSmExtBarOutletPowerWatt" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The Sm Ext Bar's Power measured in Watts. A value of -1 will be returned if the
power is unable to be measured.""",
}, # column
"mpduSmExtBarOutletPowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The outlet's power factor in 0.01 unit. It is presented in 1/100 of the unit.
i.e. 98 means 0.98. A value of -1 will be returned if the voltage is unable
to be measured.""",
}, # column
"mpduSmExtBarOutletDeviceName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Device (Blade/Server/Storage) Name that connects to this
Sm Ext Bar Outlet. Null will be returned if not detected. StringLength=[50].""",
}, # column
"mpduSmExtBarOutletDeviceUUID" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Device (Blade/Server/Storage) UUID that connects to this
Sm Ext Bar Outlet. Null will be returned if not detected. StringLength=[37].""",
}, # column
"mpduSmExtBarOutletDeviceProduct" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Device (Blade/Server/Storage) Model that connects to this
Sm Ext Bar Outlet. Null will be returned if not detected. StringLength=[40].""",
}, # column
"mpduSmExtBarOutletDeviceIP" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Device (Blade/Server/Storage) IP of iLO that connects to this
Sm Ext Bar Outlet. Null will be returned if not detected. StringLength=[16].""",
}, # column
"mpduSmExtBarOutletAutoDiscovered" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Boolean values of 0 or 1. 1 means this Sm Ext Bar Outlet is able to get information
from a blade with smart power supply (auto discovery is enabled)...""",
}, # column
"mpduSmExtBarOutletDeviceMAC" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Device (Blade/Server/Storage) MAC address that connects to this
Sm Ext Bar Outlet. Null will be returned if not detected. StringLength=[18].""",
}, # column
"mpduSmExtBarOutletDeviceSN" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Device (Blade/Server/Storage) Serial Number that connects to this
Sm Ext Bar Outlet. Null will be returned if not detected. StringLength=[21].""",
}, # column
"mpduSmExtBarOutletDevicePSSlotNo" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The Device (Blade/Server/Storage) Power Supply' Slot Number that connects to this
Sm Ext Bar Outlet. A value of -1 will be returned if not detected.""",
}, # column
"mpduSmExtBarOutletDeviceUPosition" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Device U start position. A value of -1 will be returned if not detected.""",
}, # column
"mpduSmExtBarOutletDeviceUHeight" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Device U height. Enter -1 if not detected. Some device supports non-integer.
Present value x100, when read back, divided by 100 to get 2 decimal points.""",
}, # column
"mpduSmExtBarOutletDeviceInstalledLocation" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""0=Left 0U side area or rack frame, 1= Right 0U side area or rack frame, 2= front RETMA rail,
3=back RETMA rail) - spatial location. A value of -1 will be returned if not detected.""",
}, # column
"mpduSmExtBarOutletPowerWattHour" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.5.4.1.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The Sm Ext Bar's Power measured in Watt-Hour support Billing Grade Energy Metering.
A value of -1 will be returned if the power is unable to be measured.""",
}, # column
"oups" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6",
}, # node
"oupsIdent" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.1",
}, # node
"oupsIdentManufacturer" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "31"
},
],
"range" : {
"min" : "0",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""The UPS Manufacturer Name (e.g. Hewlett-Packard).""",
}, # scalar
"oupsIdentModel" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The UPS Model such as R-1500 G3.""",
}, # scalar
"oupsIdentSystemFWVersion" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Display System firmware version.""",
}, # scalar
"oupsIdentPowerModuleFWVersion" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Display Power Module firmware version.""",
}, # scalar
"oupsIdentOemCode" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""A binary code indicating vendor. This should be a ?0x0c? for HP""",
}, # scalar
"oupsIdentSerialNumber" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Get the UPS Serial Number (i.e. B00123456W).""",
}, # scalar
"oupsIdentPartNumber" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Get the UPS Part Number (i.e. 204451-B21).""",
}, # scalar
"oupsBattery" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2",
}, # node
"oupsBatTimeRemaining" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""Battery run time in seconds before UPS turns off due
to low battery.""",
}, # scalar
"oupsBatVoltage" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""Read the Total Battery voltage.""",
}, # scalar
"oupsBatCapacity" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Battery percent charge.""",
}, # scalar
"oupsBatAbmStatus" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"batteryCharging" : {
"nodetype" : "namednumber",
"number" : "1"
},
"batteryDischarging" : {
"nodetype" : "namednumber",
"number" : "2"
},
"batteryResting" : {
"nodetype" : "namednumber",
"number" : "3"
},
"batteryTesting" : {
"nodetype" : "namednumber",
"number" : "4"
},
"notAvailable" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Gives the status of the Advanced Battery Management; example:
batteryResting(3) is the state when the battery is fully charged and none of
the other actions (charging/discharging/floating) is being done.""",
}, # scalar
"oupsBatTestStatus" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"unknown" : {
"nodetype" : "namednumber",
"number" : "1"
},
"passed" : {
"nodetype" : "namednumber",
"number" : "2"
},
"failed" : {
"nodetype" : "namednumber",
"number" : "3"
},
"inProgress" : {
"nodetype" : "namednumber",
"number" : "4"
},
"notSupported" : {
"nodetype" : "namednumber",
"number" : "5"
},
"inhibited" : {
"nodetype" : "namednumber",
"number" : "6"
},
},
},
"access" : "readonly",
"description" :
"""Battery Test Status. UPS will support schedule testing only
7, 14, 30, or 60 days. Default is 14 days.""",
}, # scalar
"oupsBatLatestTestDate" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Get time and date of the latest battery test.""",
}, # scalar
"oupsBatReplacementDateBP1" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Get the battery replacement date of BP1.""",
}, # scalar
"oupsBatReplacementDateBP2" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Get the battery replacement date of BP2.""",
}, # scalar
"oupsBatToACDelay" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "150"
},
],
"range" : {
"min" : "0",
"max" : "150"
},
},
},
"access" : "readonly",
"description" :
"""Get delay time switching from Battery mode to AC mode in ms.
Default is 0ms, unit is increased per 100ms""",
}, # scalar
"oupsBatChargeDelay" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "9999"
},
],
"range" : {
"min" : "0",
"max" : "9999"
},
},
},
"access" : "readonly",
"description" :
"""Get battery charge delay in second. Default is 0 (random)
unit is increased per 10sec.""",
}, # scalar
"oupsBatNumModules" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2"
},
],
"range" : {
"min" : "0",
"max" : "2"
},
},
},
"access" : "readonly",
"description" :
"""Number of battery modules detected by power unit.""",
}, # scalar
"oupsBatModel" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Report 1U, 2U, or 3U Battery Pack, both BPs must be the same model.""",
}, # scalar
"oupsBatChargingPowerLevelUtility" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""3 charging power levels: 1100W, 555W, 190W.""",
}, # scalar
"oupsBatChargingPowerLevelGenerator" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""4 charging power levels: 1100W, 555W, 190W, No Charge.""",
}, # scalar
"oupsBatSharedConfig" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""expect 3 values for shared BP configuration: PU single, PU master, PU slave.""",
}, # scalar
"oupsBatPackFWVerBP1" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.16",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Display Battery Pack 1 FW version.""",
}, # scalar
"oupsBatPackFWVerBP2" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.2.17",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Display Battery Pack 2 FW version.""",
}, # scalar
"oupsInput" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3",
}, # node
"oupsInputFrequency" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The utility line frequency in tenths of Hz.""",
}, # scalar
"oupsInputLineBads" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "6"
},
],
"range" : {
"min" : "0",
"max" : "6"
},
},
},
"access" : "readonly",
"description" :
"""The number of times the Input was out of tolerance
in voltage or frequency.""",
}, # scalar
"oupsInputNumPhases" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "6"
},
],
"range" : {
"min" : "0",
"max" : "6"
},
},
},
"access" : "readonly",
}, # scalar
"oupsInputTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.4",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to
NumPhases and including the UpsInput group.""",
}, # table
"oupsInputEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.4.1",
"status" : "current",
"linkage" : [
"oupsInputPhase",
],
"description" :
"""The input table entry containing the current, voltage, etc.""",
}, # row
"oupsInputPhase" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "6"
},
],
"range" : {
"min" : "0",
"max" : "6"
},
},
},
"access" : "readonly",
"description" :
"""The number of the phase. Serves as index for input table.""",
}, # column
"oupsInputVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.4.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured input voltage from the UPS meters in volts.""",
}, # column
"oupsInputCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.4.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured input current from the UPS meters in amps.""",
}, # column
"oupsInputWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.4.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured input power in watts.""",
}, # column
"oupsInputPowerFactor" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured real Input power factor with 2 decimals.""",
}, # scalar
"oupsInputDBType" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Read the input Distribution Box type: 1- 480V-Y (US-5W NEMA plugs),
2- 480V-V (US-5W IEC plugs), 3- 415V-Y (EURO-5W IEC plugs),
4- 415V-Y (US/INTL-5W IEC plugs), 5- 380V-Y (China-5W hardwired).""",
}, # scalar
"oupsInputUpperVoltage" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "999"
},
],
"range" : {
"min" : "0",
"max" : "999"
},
},
},
"access" : "readonly",
"description" :
"""Read the upper voltage range, 5 to 10 percent above the nominal voltage.
Factory default is 10 percent.""",
}, # scalar
"oupsInputLowerVoltage" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "999"
},
],
"range" : {
"min" : "0",
"max" : "999"
},
},
},
"access" : "readonly",
"description" :
"""Read the lower voltage range, 5 to 15 percent below the nominal voltage.
Factory default is -15 percent.""",
}, # scalar
"oupsGeneratorDetection" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"onGenerator" : {
"nodetype" : "namednumber",
"number" : "1"
},
"offGenerator" : {
"nodetype" : "namednumber",
"number" : "2"
},
"noAction" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""ATS sets a value of 1 to signal UPS that a generator is started so the UPS will
charge Battery Pack using a different setting while it's on generator (intent to
save all power to back up for the load).""",
}, # scalar
"oupsInputWithGenerator" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.3.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"woGenerator" : {
"nodetype" : "namednumber",
"number" : "1"
},
"withGenerator" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""A value of 1 when user known for sure Input has no Generator backup; a value
of 2 tells Input Source could be from either Utility or Generator.""",
}, # scalar
"oupsOutput" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4",
}, # node
"oupsOutputLoad" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "200"
},
],
"range" : {
"min" : "0",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The total UPS output load in percentage of rated capacity.""",
}, # scalar
"oupsOutputFrequency" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured UPS output frequency in tenths of Hz.""",
}, # scalar
"oupsOutputNumPhases" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "6"
},
],
"range" : {
"min" : "0",
"max" : "6"
},
},
},
"access" : "readonly",
"description" :
"""The number of metered output phases, serves as the table index.""",
}, # scalar
"oupsOutputTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.4",
"status" : "current",
"description" :
"""The Aggregate Object with number of entries equal to NumPhases
and including the UpsOutput group.""",
}, # table
"oupsOutputEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.4.1",
"status" : "current",
"linkage" : [
"oupsOutputPhase",
],
"description" :
"""Output Table Entry containing voltage, current, etc.""",
}, # row
"oupsOutputPhase" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "6"
},
],
"range" : {
"min" : "0",
"max" : "6"
},
},
},
"access" : "readonly",
"description" :
"""The number {1..3} of the output phase.""",
}, # column
"oupsOutputVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.4.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured output voltage from the UPS metering in volts.""",
}, # column
"oupsOutputCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.4.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured UPS output current in amps.""",
}, # column
"oupsOutputWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.4.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured real output power in watts.""",
}, # column
"oupsOutputLoadPerPhase" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.4.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "200"
},
],
"range" : {
"min" : "0",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The UPS output load per phase in percentage of rated capacity.""",
}, # column
"oupsOutputPowerFactor" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured real output power factor with 2 decimals.""",
}, # scalar
"oupsOutputSource" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.4.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"acSource" : {
"nodetype" : "namednumber",
"number" : "1"
},
"battery" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""Indicate the source that UPS operating from when providing output.""",
}, # scalar
"oupsMonitor" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5",
}, # node
"oupsMonitorAmbientTemp" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "200"
},
],
"range" : {
"min" : "-100",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The reading of the internal PU Ambient temperature.""",
}, # scalar
"oupsMonitorBypassSCRTemp" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "200"
},
],
"range" : {
"min" : "-100",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The reading of the internal PU Bypass SCR temperature.""",
}, # scalar
"oupsMonitorDDTemp" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "200"
},
],
"range" : {
"min" : "-100",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The reading of the internal PU DD temperature.""",
}, # scalar
"oupsMonitorInverterTemp" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "200"
},
],
"range" : {
"min" : "-100",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The reading of the internal PU DD temperature.""",
}, # scalar
"oupsMonitorChargerTemp" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "200"
},
],
"range" : {
"min" : "-100",
"max" : "200"
},
},
},
"access" : "readonly",
"description" :
"""The reading of the internal PU Charger temperature.""",
}, # scalar
"oupsMonitorBP1Temp" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Display Battery Pack 1 Temeprature in F. If BP1 not connected the PU will
return 255 and displayed as -not connect-""",
}, # scalar
"oupsMonitorBP2Temp" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Display Battery Pack 2 Temperature in F. If BP2 not connected the PU will
return 255 and displayed as -not connect-""",
}, # scalar
"oupsMonitorRestartDelay" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "150"
},
],
"range" : {
"min" : "1",
"max" : "150"
},
},
},
"access" : "readonly",
"description" :
"""The reading of the AC Turn On Delay (Restart Delay). The value can be
changed via front LCD Menu or inserted Module GUI. Default is 0ms, unit is
increased per 100ms.""",
}, # scalar
"oupsMonitorACCLoadLevel" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Read the set load level that ACC would be active from 0 to this percentage.
Default is 20 percent. Select 0 to set it as Always On""",
}, # scalar
"oupsMonitorOperatingMode" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Check if UPS is normal op, or on bypass, on inverter or ACC Active, """,
}, # scalar
"oupsMonitorOperationType" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Check if UPS is operating as an UPS or ACC (PU only, no BP).""",
}, # scalar
"oupsTestTrap" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"startTestTrap" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""Setting startTestTrap to 1 will initiate a TrapTest
is sent out from Management Module. All other set values are invalid.""",
}, # scalar
"oupsOnGenDuration" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"for30Min" : {
"nodetype" : "namednumber",
"number" : "1"
},
"for1Hr" : {
"nodetype" : "namednumber",
"number" : "2"
},
"for2Hr" : {
"nodetype" : "namednumber",
"number" : "3"
},
"for4Hr" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readwrite",
"description" :
"""When read, show the current On-Generator Duration set by user on UPS LCD or GUI .""",
}, # scalar
"oupsRuntimeLimitation" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.5.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"disable" : {
"nodetype" : "namednumber",
"number" : "1"
},
"enable" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""show the current setting for Runtime Limitation. When enable, it can last for 70sec or 3min.""",
}, # scalar
"oupsRackDiscovery" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6",
}, # node
"oupsRackTagVersion" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "10"
},
],
"range" : {
"min" : "0",
"max" : "10"
},
},
},
"access" : "readonly",
"description" :
"""Get the 1-wire contact FW version from EEPROM's Tag Version. Initial tag is 01""",
}, # scalar
"oupsRackID" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Get 10-digit Serial Number from the Location Discovery Strip mount in the front of the rack""",
}, # scalar
"oupsRackPartNumber" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Get 10 or 12-digit Part Number xxxxxx-001 from the Location Discovery Strip mount in the front of the rack""",
}, # scalar
"oupsRackProductDescription" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Get spatial data of rack: description of the Rack Name.""",
}, # scalar
"oupsRackEncULocation" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""Tell where the Location Discovery Strip is mounted in the rack. B for Back, F for Front, L for Left
and R for Right.""",
}, # scalar
"oupsRackUHeight" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "50"
},
],
"range" : {
"min" : "0",
"max" : "50"
},
},
},
"access" : "readonly",
"description" :
"""Get spatial data of rack: U-height of the rack can be 36U, 42U, or 47U model.""",
}, # scalar
"oupsRackPUUPosition" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "50"
},
],
"range" : {
"min" : "0",
"max" : "50"
},
},
},
"access" : "readonly",
"description" :
"""Get the U-position of the PU where it is installed in the rack. The U position range is from 1-50.""",
}, # scalar
"oupsRackPUUHeight" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "10"
},
],
"range" : {
"min" : "0",
"max" : "10"
},
},
},
"access" : "readonly",
"description" :
"""Get the U-Height of the Power Unit. PU height is either 1U or 2U.""",
}, # scalar
"oupsRackBP1UPosition" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "50"
},
],
"range" : {
"min" : "0",
"max" : "50"
},
},
},
"access" : "readonly",
"description" :
"""Get the U-Position of the Battery Pack 1, installed right below the PU.
Range is from 1 to 50.""",
}, # scalar
"oupsRackBP1UHeight" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "10"
},
],
"range" : {
"min" : "0",
"max" : "10"
},
},
},
"access" : "readonly",
"description" :
"""Get the U-Height of the BP 1. BP is either 1U, 2U, and 3U""",
}, # scalar
"oupsRackBP2UPosition" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "50"
},
],
"range" : {
"min" : "0",
"max" : "50"
},
},
},
"access" : "readonly",
"description" :
"""Get the U-Position of the Battery Pack 2, installed right below the BP1.
Range is from 1 to 50.""",
}, # scalar
"oupsRackBP2UHeight" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.6.6.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "10"
},
],
"range" : {
"min" : "0",
"max" : "10"
},
},
},
"access" : "readonly",
"description" :
"""Get the U-Height of the BP 2. BP is either 1U, 2U, and 3U""",
}, # scalar
"pdu2" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7",
}, # node
"pdu2Ident" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1",
}, # node
"pdu2NumberPDU" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "2"
},
],
"range" : {
"min" : "1",
"max" : "2"
},
},
},
"access" : "readonly",
"description" :
"""The number of PDUs detected (in its daisy chain).""",
}, # scalar
"pdu2IdentTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to pdu2NumberPDU .""",
}, # table
"pdu2IdentEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1",
"status" : "current",
"linkage" : [
"pdu2IdentIndex",
],
"description" :
"""The ident table entry containing the name, model, manufacturer,
firmware version, part number, etc.""",
}, # row
"pdu2IdentIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the Pdu2IdentEntry table.""",
}, # column
"pdu2Name" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The string identify the device in daisy chain. Example PDU A.""",
}, # column
"pdu2Model" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The Device Model.""",
}, # column
"pdu2Manufacturer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The Device Manufacturer Name (e.g. Hewlett-Packard).""",
}, # column
"pdu2FirmwareVersion" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The firmware revision level of the device.""",
}, # column
"pdu2PartNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The device part number. Example AF400A.""",
}, # column
"pdu2SerialNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The device serial number, 10- or 16-digit.""",
}, # column
"pdu2Status" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"other" : {
"nodetype" : "namednumber",
"number" : "1"
},
"ok" : {
"nodetype" : "namednumber",
"number" : "2"
},
"degraded" : {
"nodetype" : "namednumber",
"number" : "3"
},
"failed" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
"""The overall status of the device. A value of OK(2) indicates the device is operating normally.
A value of degraded(3) indicates the device is operating with warning indicators. A value of
failed(4) indicates the device is operating with critical indicators.""",
}, # column
"pdu2Controllable" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"yes" : {
"nodetype" : "namednumber",
"number" : "1"
},
"no" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""This object indicates whether or not the device is controllable.""",
}, # column
"pdu2InputPhaseCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The number of input phase on this pdu 1,2,3.""",
}, # column
"pdu2GroupCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The number of groups (breakers) on this pdu.""",
}, # column
"pdu2OutletCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.1.2.1.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The number of outlets in this PDU.""",
}, # column
"pdu2Input" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2",
}, # node
"pdu2InputTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.1",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to pdu2NumberPDU .""",
}, # table
"pdu2InputEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.1.1",
"status" : "current",
"linkage" : [
"pdu2IdentIndex",
],
"description" :
"""The ident table entry containing the VA, Watts, WH, etc.""",
}, # row
"pdu2InputType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"singlePhase" : {
"nodetype" : "namednumber",
"number" : "1"
},
"splitPhase" : {
"nodetype" : "namednumber",
"number" : "2"
},
"threePhaseDelta" : {
"nodetype" : "namednumber",
"number" : "3"
},
"threePhaseWye" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
"""Type of input - single phase, split phase, three phase delta, or three
phase wye.""",
}, # column
"pdu2InputFrequency" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured input frequency from the PDU meters in tenths of Hz.""",
}, # column
"pdu2InputFrequencyStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"outOfRange" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured input frequency relative to the nominal frequency and the admitted tolerance.""",
}, # column
"pdu2InputPowerVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A total input VA of all phases. Units are VA. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPowerWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A total input Watts of all phases. Units are Watts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPowerWattHour" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A total input Watt-Hour value for all phases. Units are WH. This value can be reset to 0
using GUI. In that case, the pdu2InputPowerWattHourTimer will be reset as well
A negative value indicates that this object is not available.""",
}, # column
"pdu2InputPowerWattHourTimer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.1.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "22"
},
],
"range" : {
"min" : "0",
"max" : "22"
},
},
},
"access" : "readonly",
"description" :
"""A Timestamp of when the Total Input WH was last reset.""",
}, # column
"pdu2InputPowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input PF value. Units are in thousandths, for example a power factor
of 0.958 would be returned as 958, and 0.92 would be returned as 920.
A negative value indicates that this object is not available.""",
}, # column
"pdu2InputPowerVAR" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.1.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input VAR value. Units are VAR. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhaseTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to number of PDUs (pdu2NumberPDU) and
number of input phase (pdu2InputPhaseCount).""",
}, # table
"pdu2InputPhaseEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1",
"status" : "current",
"linkage" : [
"pdu2IdentIndex",
"pdu2InputPhaseIndex",
],
"description" :
"""The input table entry containing the voltage, current, frequency, power for each phase.
Entries are given with number of pdu and number of input phase 1, 2, or 3.""",
}, # row
"pdu2InputPhaseIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the Pdu2InputEntry table.""",
}, # column
"pdu2InputPhaseVoltageMeasType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"singlePhase" : {
"nodetype" : "namednumber",
"number" : "1"
},
"phase1toN" : {
"nodetype" : "namednumber",
"number" : "2"
},
"phase2toN" : {
"nodetype" : "namednumber",
"number" : "3"
},
"phase3toN" : {
"nodetype" : "namednumber",
"number" : "4"
},
"phase1to2" : {
"nodetype" : "namednumber",
"number" : "5"
},
"phase2to3" : {
"nodetype" : "namednumber",
"number" : "6"
},
"phase3to1" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readonly",
"description" :
"""Value indicates what input voltage is being measured in this table row - single phase
voltage, phase 1 to neutral, phase 2 to neutral, phase 3 to neutral, phase 1 to phase 2,
phase 2 to phase 3, or phase 3 to phase 1.""",
}, # column
"pdu2InputPhaseVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input voltage measurement value. Units are millivolts.""",
}, # column
"pdu2InputPhaseVoltageThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured input voltage relative to the configured thresholds.""",
}, # column
"pdu2InputPhaseVoltageThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Units are millivolts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhaseVoltageThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Units are millivolts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhaseVoltageThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Units are millivolts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhaseVoltageThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Units are millivolts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhaseCurrentMeasType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"singlePhase" : {
"nodetype" : "namednumber",
"number" : "1"
},
"neutral" : {
"nodetype" : "namednumber",
"number" : "2"
},
"phase1" : {
"nodetype" : "namednumber",
"number" : "3"
},
"phase2" : {
"nodetype" : "namednumber",
"number" : "4"
},
"phase3" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Which input wire is being measured in this table row - single phase, neutral, phase 1,
phase 2, or phase 3.""",
}, # column
"pdu2InputPhaseCurrentRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Rated current capacity of the input. A negative value indicates that
the hardware current capacity is unknown. Units are milliamps.""",
}, # column
"pdu2InputPhaseCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input current measurement value. Units are milliamps.""",
}, # column
"pdu2InputPhaseCurrentThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured input current relative to the configured thresholds.""",
}, # column
"pdu2InputPhaseCurrentThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhaseCurrentThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhaseCurrentThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhaseCurrentThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.16",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhaseCurrentCrestFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Current crest factor. Units are in milli, for example a crest factor of
1.414 will be returned as 1414. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhaseCurrentPercentLoad" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Current percent load, based on the rated current capacity. Units are
percentage, for example 80% will be returned as 80. A negative
value indicates that this object is not available.""",
}, # column
"pdu2InputPhasePowerMeasType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.19",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"singlePhase" : {
"nodetype" : "namednumber",
"number" : "1"
},
"neutral" : {
"nodetype" : "namednumber",
"number" : "2"
},
"phase1" : {
"nodetype" : "namednumber",
"number" : "3"
},
"phase2" : {
"nodetype" : "namednumber",
"number" : "4"
},
"phase3" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Which input wire is being measured in this table row - single phase, neutral, phase 1,
phase 2, or phase 3.""",
}, # column
"pdu2InputPhasePowerVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input VA value. Units are VA. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhasePowerWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input Watts value. Units are Watts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2InputPhasePowerWattHour" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.22",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A Watt-Hour value for each Input phase. Units are WH. This value can be reset to 0
using GUI. In that case, the pdu2InputPhasePowerWattHourTimer will be reset as well
A negative value indicates that this object is not available.""",
}, # column
"pdu2InputPhasePowerWattHourTimer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.23",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "22"
},
],
"range" : {
"min" : "0",
"max" : "22"
},
},
},
"access" : "readonly",
"description" :
"""Timestamp of when input Watt-hours (inputWh) was last reset.""",
}, # column
"pdu2InputPhasePowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input PF value. Units are in thousandths, for example a power factor
of 0.958 would be returned as 958, and 0.92 would be returned
as 920. A negative value indicates that this object is not available.""",
}, # column
"pdu2InputPhasePowerVAR" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.2.2.1.25",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input VAR value. Units are VAR. A negative value indicates
that this object is not available.""",
}, # column
"pdu2Group" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3",
}, # node
"pdu2GroupTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to number of PDUs
and number of breakers (pdu2GroupCount).""",
}, # table
"pdu2GroupEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1",
"status" : "current",
"linkage" : [
"pdu2IdentIndex",
"pdu2GroupIndex",
],
"description" :
"""The input table entry containing the name, voltages, currents, power, etc.""",
}, # row
"pdu2GroupIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the Pdu2GroupEntry table.""",
}, # column
"pdu2GroupName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "1",
"max" : "31"
},
],
"range" : {
"min" : "1",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""A descriptive name for the group.""",
}, # column
"pdu2GroupType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"breaker1pole" : {
"nodetype" : "namednumber",
"number" : "2"
},
"breaker2pole" : {
"nodetype" : "namednumber",
"number" : "3"
},
"breaker3pole" : {
"nodetype" : "namednumber",
"number" : "4"
},
"outletSection" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""The type of the group. (5) has no breaker""",
}, # column
"pdu2GroupVoltageMeasType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"singlePhase" : {
"nodetype" : "namednumber",
"number" : "1"
},
"phase1toN" : {
"nodetype" : "namednumber",
"number" : "2"
},
"phase2toN" : {
"nodetype" : "namednumber",
"number" : "3"
},
"phase3toN" : {
"nodetype" : "namednumber",
"number" : "4"
},
"phase1to2" : {
"nodetype" : "namednumber",
"number" : "5"
},
"phase2to3" : {
"nodetype" : "namednumber",
"number" : "6"
},
"phase3to1" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readonly",
"description" :
"""Value indicates what input voltage is being measured in this table row - single phase
voltage, phase 1 to neutral, phase 2 to neutral, phase 3 to neutral, phase 1 to phase 2,
phase 2 to phase 3, or phase 3 to phase 1.""",
}, # column
"pdu2GroupVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Units are millivolts.""",
}, # column
"pdu2GroupVoltageThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured group voltage relative to the configured thresholds.""",
}, # column
"pdu2GroupVoltageThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Units are millivolts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2GroupVoltageThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Units are millivolts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2GroupVoltageThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Units are millivolts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2GroupVoltageThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Units are millivolts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2groupCurrentRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Rated current capacity of the group. Units are milliamps. A negative
value indicates that the hardware current capacity is unknown (it
will always be unknown for custom groups).""",
}, # column
"pdu2GroupCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A group current measurement value. Units are milliamps.""",
}, # column
"pdu2GroupCurrentThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured group current relative to the configured thresholds.""",
}, # column
"pdu2GroupCurrentThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2GroupCurrentThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2GroupCurrentThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.16",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2GroupCurrentThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.17",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2GroupCurrentCrestFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Current crest factor. Units are in milli, for example a crest factor of
1.414 will be returned as 1414. A negative value indicates
that this object is not available.""",
}, # column
"pdu2GroupCurrentPercentLoad" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Current percent load, based on the rated current capacity. Units are
percentage, for example 80% will be returned as 80. A negative
value indicates that this object is not available.""",
}, # column
"pdu2GroupPowerVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A group VA value. Units are VA. A negative value indicates
that this object is not available.""",
}, # column
"pdu2GroupPowerWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A group Watts value. Units are Watts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2GroupPowerWattHour" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.22",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A Watt-Hour value for each section. Units are WH. This value can be reset to 0
using GUI. In that case, the pdu2GroupPowerWattHourTimer will be reset as well.
A negative value indicates that this object is not available.""",
}, # column
"pdu2GroupPowerWattHourTimer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.23",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "22"
},
],
"range" : {
"min" : "0",
"max" : "22"
},
},
},
"access" : "readonly",
"description" :
"""Timestamp of when group Watt-hours (groupWh) was last reset.""",
}, # column
"pdu2GroupPowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A group PF value. Units are in thousandths, for example a power factor
of 0.958 would be returned as 958, and 0.92 would be returned as 920.
A negative value indicates that this object is not available.""",
}, # column
"pdu2GroupPowerVAR" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.25",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A group VAR value. Units are VAR. A negative value indicates
that this object is not available.""",
}, # column
"pdu2GroupOutletCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.26",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The number of outlets in each group (breaker).""",
}, # column
"pdu2GroupBreakerStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.3.1.1.27",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"notApplicable" : {
"nodetype" : "namednumber",
"number" : "1"
},
"breakerOn" : {
"nodetype" : "namednumber",
"number" : "2"
},
"breakerOff" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""Only applicable to groups with breaker. Indicates whether a breaker is turned
off or on.""",
}, # column
"pdu2Environment" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4",
}, # node
"pdu2EnvProbeTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.1",
"status" : "current",
"description" :
"""A list of environment probe (1 per PDU) for temperature, humidity, and contacts.
The number of entries is given by number of PDUs in daisy chain.""",
}, # table
"pdu2EnvProbeEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.1.1",
"status" : "current",
"linkage" : [
"pdu2IdentIndex",
],
"description" :
"""Aggregate entries equal to number of PDUs.""",
}, # row
"pdu2TemperatureScale" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"celsius" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fahrenheit" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""Scale used to return temperature objects.""",
}, # column
"pdu2TemperatureCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Max number of temperature measurements (1 per probe).""",
}, # column
"pdu2HumidityCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Max number of humidity measurements (1 per probe).""",
}, # column
"pdu2ContactCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Max number of contact sensors (1 per probe).""",
}, # column
"pdu2TemperatureTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.2",
"status" : "current",
"description" :
"""A list of temperature probe measurements. The number of entries are
given by number of pdu and pdu2TemperatureCount.""",
}, # table
"pdu2TemperatureEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.2.1",
"status" : "current",
"linkage" : [
"pdu2IdentIndex",
"pdu2TemperatureIndex",
],
"description" :
"""An entry for a temperature measurement.""",
}, # row
"pdu2TemperatureIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "2"
},
],
"range" : {
"min" : "1",
"max" : "2"
},
},
},
"access" : "readonly",
"description" :
"""A unique value for each temperature probe measurement. Its value
ranges from 1 to temperatureCount.""",
}, # column
"pdu2TemperatureName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "1",
"max" : "31"
},
],
"range" : {
"min" : "1",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""A descriptive name for the temperature probe.""",
}, # column
"pdu2TemperatureProbeStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"disconnected" : {
"nodetype" : "namednumber",
"number" : "1"
},
"connected" : {
"nodetype" : "namednumber",
"number" : "2"
},
"bad" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""Indicates whether a probe is connected or not.""",
}, # column
"pdu2TemperatureValue" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Units are in tenths of a degree (either Fahrenheit or Celsius).
Divide by ten to get degrees.""",
}, # column
"pdu2TemperatureThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured temperature relative to the configured thresholds.""",
}, # column
"pdu2TemperatureThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "150000"
},
],
"range" : {
"min" : "-1",
"max" : "150000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Units are tenths of a degree. A negative value
indicates that this object is not available.""",
}, # column
"pdu2TemperatureThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "150000"
},
],
"range" : {
"min" : "-1",
"max" : "150000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Units are tenths of a degree. A negative value
indicates that this object is not available.""",
}, # column
"pdu2TemperatureThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.2.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "150000"
},
],
"range" : {
"min" : "-1",
"max" : "150000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Units are tenths of a degree. A negative value
indicates that this object is not available.""",
}, # column
"pdu2TemperatureThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "150000"
},
],
"range" : {
"min" : "-1",
"max" : "150000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Units are tenths of a degree. A negative value
indicates that this object is not available.""",
}, # column
"pdu2HumidityTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.3",
"status" : "current",
"description" :
"""A list of humidity probe measurements. The number of entries are
given by number of pdu and pdu2HumidityCount.""",
}, # table
"pdu2HumidityEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.3.1",
"status" : "current",
"linkage" : [
"pdu2IdentIndex",
"pdu2HumidityIndex",
],
"description" :
"""An entry for a humidity measurement.""",
}, # row
"pdu2HumidityIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "2"
},
],
"range" : {
"min" : "1",
"max" : "2"
},
},
},
"access" : "readonly",
"description" :
"""A unique value for each humidity probe measurement. Its value
ranges from 1 to pdu2HumidityCount per pdu.""",
}, # column
"pdu2HumidityName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "1",
"max" : "31"
},
],
"range" : {
"min" : "1",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""A descriptive name for the humidity probe.""",
}, # column
"pdu2HumidityProbeStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.3.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"disconnected" : {
"nodetype" : "namednumber",
"number" : "1"
},
"connected" : {
"nodetype" : "namednumber",
"number" : "2"
},
"bad" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""Indicates whether a probe is connected or not.""",
}, # column
"pdu2HumidityValue" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Units are tenths of a percent relative humidity. Divide the value by 10 to get %RH.""",
}, # column
"pdu2HumidityThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.3.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured humidity relative to the configured thresholds.""",
}, # column
"pdu2HumidityThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.3.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "1000"
},
],
"range" : {
"min" : "-1",
"max" : "1000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Units are 0.1 %RH. A negative value
indicates that this object is not available.""",
}, # column
"pdu2HumidityThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.3.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "1000"
},
],
"range" : {
"min" : "-1",
"max" : "1000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Units are 0.1 %RH. A negative value
indicates that this object is not available.""",
}, # column
"pdu2HumidityThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.3.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "1000"
},
],
"range" : {
"min" : "-1",
"max" : "1000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Units are 0.1 %RH. A negative value
indicates that this object is not available.""",
}, # column
"pdu2HumidityThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.3.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "1000"
},
],
"range" : {
"min" : "-1",
"max" : "1000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Units are 0.1 %RH. A negative value
indicates that this object is not available.""",
}, # column
"pdu2ContactTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.4",
"status" : "current",
"description" :
"""A list of contact sensors. The number of entries are
given by number of pdu and pdu2ContactCount.""",
}, # table
"pdu2ContactEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.4.1",
"status" : "current",
"linkage" : [
"pdu2IdentIndex",
"pdu2ContactIndex",
],
"description" :
"""An entry for a contact sensor""",
}, # row
"pdu2ContactIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "3"
},
],
"range" : {
"min" : "1",
"max" : "3"
},
},
},
"access" : "readonly",
"description" :
"""A unique value for each contact sensor. Its value ranges from 1 to
contactCount.""",
}, # column
"pdu2ContactName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.4.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "1",
"max" : "31"
},
],
"range" : {
"min" : "1",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""A descriptive name for the contact sensor.""",
}, # column
"pdu2ContactProbeStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.4.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"disconnected" : {
"nodetype" : "namednumber",
"number" : "1"
},
"connected" : {
"nodetype" : "namednumber",
"number" : "2"
},
"bad" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""Indicates whether a probe is connected or not.
Will not be returned if the contact sensor is internal to the ePDU,
in that case only contactState should be read.""",
}, # column
"pdu2ContactState" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.4.4.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"contactOpen" : {
"nodetype" : "namednumber",
"number" : "1"
},
"contactClosed" : {
"nodetype" : "namednumber",
"number" : "2"
},
"contactBad" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""The state of the contact sensor.""",
}, # column
"pdu2Outlet" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5",
}, # node
"pdu2OutletTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to Number of PDU (pdu2IdentIndex)
and Number of outlet per PDU (pdu2OutletIndex).""",
}, # table
"pdu2OutletEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1",
"status" : "current",
"linkage" : [
"pdu2IdentIndex",
"pdu2OutletIndex",
],
"description" :
"""The Outlet table entry containing the type, voltage, current etc.""",
}, # row
"pdu2OutletIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for each outlet, value from 1 to the number of outlets per PDU.""",
}, # column
"pdu2OutletName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "1",
"max" : "31"
},
],
"range" : {
"min" : "1",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""A descriptive name for the outlet.""",
}, # column
"pdu2OutletType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"iecC13" : {
"nodetype" : "namednumber",
"number" : "1"
},
"iecC19" : {
"nodetype" : "namednumber",
"number" : "2"
},
"uk" : {
"nodetype" : "namednumber",
"number" : "10"
},
"french" : {
"nodetype" : "namednumber",
"number" : "11"
},
"schuko" : {
"nodetype" : "namednumber",
"number" : "12"
},
"nema515" : {
"nodetype" : "namednumber",
"number" : "20"
},
"nema51520" : {
"nodetype" : "namednumber",
"number" : "21"
},
"nema520" : {
"nodetype" : "namednumber",
"number" : "22"
},
"nemaL520" : {
"nodetype" : "namednumber",
"number" : "23"
},
"nemaL530" : {
"nodetype" : "namednumber",
"number" : "24"
},
"nema615" : {
"nodetype" : "namednumber",
"number" : "25"
},
"nema620" : {
"nodetype" : "namednumber",
"number" : "26"
},
"nemaL620" : {
"nodetype" : "namednumber",
"number" : "27"
},
"nemaL630" : {
"nodetype" : "namednumber",
"number" : "28"
},
"nemaL715" : {
"nodetype" : "namednumber",
"number" : "29"
},
"rf203p277" : {
"nodetype" : "namednumber",
"number" : "30"
},
},
},
"access" : "readonly",
"description" :
"""Type of this outlet - C13, C19.""",
}, # column
"pdu2OutletCurrentRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Rated current capacity of this outlet. Units are milliamps. A negative
value indicates that the hardware current capacity is unknown.""",
}, # column
"pdu2OutletCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A outlet current measurement value. Units are milliamps.""",
}, # column
"pdu2OutletCurrentThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured outlet current relative to the configured thresholds.""",
}, # column
"pdu2OutletCurrentThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2OutletCurrentThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2OutletCurrentThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2OutletCurrentThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Units are milliamps. A negative value indicates
that this object is not available.""",
}, # column
"pdu2OutletCurrentCrestFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Current crest factor. Units are in milli, for example a crest factor of
1.414 will be returned as 1414. A negative value indicates
that this object is not available.""",
}, # column
"pdu2OutletCurrentPercentLoad" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Current percent load, based on the rated current capacity. Units are
percentage, for example 80% will be returned as 80. A negative
value indicates that this object is not available.""",
}, # column
"pdu2OutletVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A outlet VA value. Units are VA. A negative value indicates
that this object is not available.""",
}, # column
"pdu2OutletWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A outlet Watts value. Units are Watts. A negative value indicates
that this object is not available.""",
}, # column
"pdu2OutletWh" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A Watt-Hour value for each outlet. Units are WH.
This object is writable so that it can be reset to 0. When it is
written to, the pdu2OutletWhTimer will be reset updated as well
A negative value indicates that this object is not available.""",
}, # column
"pdu2OutletWhTimer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.16",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "22"
},
],
"range" : {
"min" : "0",
"max" : "22"
},
},
},
"access" : "readonly",
"description" :
"""Timestamp (date and time) of outlet Watt-hours was last reset.""",
}, # column
"pdu2OutletPowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An outlet PF value. Units are in thousandths, for example a power factor
of 0.958 would be returned as 958, and 0.92 would be returned
as 920. A negative value indicates that this object is not available.""",
}, # column
"pdu2OutletVAR" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.1.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An outlet VAR value. Units are VAR. A negative value indicates
that this object is not available.""",
}, # column
"pdu2OutletControlTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.2",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to Number of PDU (pdu2IdentIndex)
and Number of outlets per PDU (pdu2OutletIndex).""",
}, # table
"pdu2OutletControlEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.2.1",
"status" : "current",
"linkage" : [
"pdu2IdentIndex",
"pdu2OutletIndex",
],
"description" :
"""The Outlet table entry containing the type, voltage, current etc.""",
}, # row
"pdu2OutletControlStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"off" : {
"nodetype" : "namednumber",
"number" : "1"
},
"on" : {
"nodetype" : "namednumber",
"number" : "2"
},
"pendingOff" : {
"nodetype" : "namednumber",
"number" : "3"
},
"pendingOn" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
"""current state of a control outlet.""",
}, # column
"pdu2OutletControlOffCmd" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""When write, once issued, the outlet will turn Off immediately.
0-n: Time in seconds until the outlet command is issued
-1: Cancel a pending outlet Off command
When read, returns -1 if no command is pending, or the current downcount in
seconds of a pending command.""",
}, # column
"pdu2OutletControlOnCmd" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""When write, once issued, the outlet will turn On immediately.
0-n: Time in seconds until the outlet command is issued
-1: Cancel a pending outlet On command
When read, returns -1 if no command is pending, or the current downcount in
seconds of a pending command.""",
}, # column
"pdu2OutletControlRebootCmd" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""When write, for outlets that are On prior to this Reboot command, they will
switch Off immediately when the command is issued, remain Off for
outletControlRebootOffTime seconds, and then turn back On.
For outlets that are Off prior to the Reboot command, they will turn On after
a delay of outletControlRebootOffTime seconds from when the command is issued.
0-n : Time in seconds until the Reboot command is issued
-1 : Cancel a pending outlet Reboot command
When read, returns -1 if no command is pending, or the current downcount in
seconds of a pending command.""",
}, # column
"pdu2OutletControlPowerOnState" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"off" : {
"nodetype" : "namednumber",
"number" : "1"
},
"on" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lastState" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""Determines the outlet state when power is applied to the unit.
1 : not restart at device startup
2 : should sequence back ON in line with outletControlSequenceTime
3 : should take the state the outlet had when power was lost.
If the state was ON, should sequence back ON in line with outletControlSequenceTime.""",
}, # column
"pdu2OutletControlSequenceDelay" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""Time delay in seconds from when a Global Sequence On command is issued to
when the command is executed on this outlet. This delay is also used as a power-on
delay. Set to -1 to exclude this outlet from Global Sequence On commands.""",
}, # column
"pdu2OutletControlRebootOffTime" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""Time delay in seconds that the outlet should remain in the Off state when executing a Reboot command.""",
}, # column
"pdu2OutletControlSwitchable" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.2.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"switchable" : {
"nodetype" : "namednumber",
"number" : "1"
},
"notSwitchable" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""Determines the outlet capability to be controlled On/Off from the communication channels.
1 : control On/Off enabled
2 : control On/Off disabled.""",
}, # column
"pdu2OutletControlShutoffDelay" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.7.5.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""Time delay in seconds that could be taken in account before shutting of the outlet.
An application which need to shutoff properly an outlet will read this parameter first
then write it to the command pdu2OutletControlOffCmd.""",
}, # column
"hpdu" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9",
}, # node
"hpduIdent" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1",
}, # node
"hpduNumPhase" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""for 3 phases""",
}, # scalar
"hpduIdentTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2",
"status" : "current",
"description" :
"""Indexed by 1 Hpdu unit.""",
}, # table
"hpduIdentEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1",
"status" : "current",
"linkage" : [
"hpduIdentIndex",
],
"description" :
"""The table entry containing the identifications of HPDU""",
}, # row
"hpduIdentIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the MpduIdentEntry table. Start with 1""",
}, # column
"hpduManufacturer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
""" The Core PDU manufacturer. StringLength=[21]""",
}, # column
"hpduModel" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU model, marketing name. StringLength=[40].""",
}, # column
"hpduName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU name, descriptive name or hostname. StringLength=[16].""",
}, # column
"hpduFirmwareVersion" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU firmware version(s). StringLength=[12].""",
}, # column
"hpduHardwareVersion" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU hardware version. StringLength=[8].""",
}, # column
"hpduPartNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The Core PDU part number. StringLength=[21].""",
}, # column
"hpduSerialNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU external serial number. StringLength=[21].""",
}, # column
"hpduUUID" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU's UUID equals PN add SN, canonical conversion. StringLength=[37].""",
}, # column
"hpduType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU's Type: Delta, Wye, single, Wye+lowline.
StringLength=[24].""",
}, # column
"hpduPowerRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU's Power Rating in KVA (Name Plate). Value is presented
in 1/100 of the KVA. i.e. 1750 means 17.50 KVA""",
}, # column
"hpduInputRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU's Input Current Rating in A (Name Plate)""",
}, # column
"hpduRegionalNominalVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""value is either America(208V), Japan(202V) or blank for International Model""",
}, # column
"hpduNumOutputBreakers" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "10"
},
],
"range" : {
"min" : "0",
"max" : "10"
},
},
},
"access" : "readonly",
"description" :
"""The number of outlets with breakers of each hpdu.""",
}, # column
"hpduNumMonitoredOutlet" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "10"
},
],
"range" : {
"min" : "0",
"max" : "10"
},
},
},
"access" : "readonly",
"description" :
"""The number of measured outlets (with CT, VT) of each hpdu.""",
}, # column
"hpduFanStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.1.2.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Report fan status: OK or FAIL""",
}, # column
"hpduInput" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2",
}, # node
"hpduInputTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1",
"status" : "current",
"description" :
"""Indexed by hpduNumPhase.""",
}, # table
"hpduInputEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1",
"status" : "current",
"linkage" : [
"hpduInputIndex",
],
"description" :
"""The Input table entry containing the voltage, current, PF,
percent load, VA, Watts for the HPDU.""",
}, # row
"hpduInputIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the hpduInputEntry table.""",
}, # column
"hpduInputStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The overall status of the Input: off, on, problem. StringLength=[7].""",
}, # column
"hpduInputBreakerRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The rating of each hpdu Input in Ampere.""",
}, # column
"hpduInputVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The measured Input voltage from the PDU meters in volts. Value is
presented in 1/10 of the Volt i.e. 2095 is 209.5 V. A value
of -1 will be returned if the voltage is unable to be measured.""",
}, # column
"hpduInputCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The measured Input current from the PDU meters in amps. Value is
presented in 1/100 of the Ampere i.e. 175 is 1.75 A. A value
of -1 will be returned if the current is unable to be measured.""",
}, # column
"hpduInputPowerVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The total Input Power measured in VA. A value
of -1 will be returned if the power is unable to be calculated.""",
}, # column
"hpduInputPowerWatt" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The total Input power measured in Watts. A value
of -1 will be returned if the power is unable to be calculated.""",
}, # column
"hpduInputPowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The Input power factor is presented in 1/100 of the unit i.e.
98 is 0.98. A value of -1 will be returned if the power factor is unable
to be measured.""",
}, # column
"hpduInputWarningThreshold" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Report the User-setting Input threshhold limit for Warning in percentage""",
}, # column
"hpduInputCriticalThreshold" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Report the User-setting Input threshhold limit for Critical in percentage""",
}, # column
"hpduInputPowerWattHour" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Support Billing Grade Energy Metering in Watt-Hour. A value
of -1 will be returned if the power is unable to be calculated.""",
}, # column
"hpduInputTotalEnergySince" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Display the date and time that starts to accumulate the per Load segment energy in WH.
Date and Time can be set by user on GUI.""",
}, # column
"hpduInputEnergyMeteringTotalHours" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.2.1.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total hours of collecting measurement data. A value
of -1 will be returned if the value is unable to be calculated.""",
}, # column
"hpduOutlet" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3",
}, # node
"hpduOutletTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1",
"status" : "current",
"description" :
"""Indexed by hpduNumMonitoredOutlet and hpduNumPhase . So aggregate objects for each entry
equal to (hpduNumMonitoredOutlet x hpduNumPhase). Start with 1.""",
}, # table
"hpduOutletEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1",
"status" : "current",
"linkage" : [
"hpduNumMonitoredOutlet",
"hpduNumPhase",
"hpduOutletIndex",
],
"description" :
"""The Outlet table entry containing the voltage, current, PF,
percent load, VA, Watts for the Modular PDU.""",
}, # row
"hpduOutletIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the hpduOutletEntry table.""",
}, # column
"hpduOutletStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The overall status of the breaker: off, on, problem. StringLength=[7].""",
}, # column
"hpduOutletBreakerRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The rating of each hpdu Outlet in Ampere.""",
}, # column
"hpduOutletPercentLoad" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The hpdu's each Outlet load in percent of rated capacity. Value is
presented in 1/10 of the percentage i.e. 125 is 12.5%. A value
of -1 will be returned if the current is unable to be measured.""",
}, # column
"hpduOutletVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The measured Outlet voltage from the PDU meters in volts. Value is
presented in 1/10 of the Volt i.e. 2095 is 209.5 V. A value
of -1 will be returned if the voltage is unable to be measured.""",
}, # column
"hpduOutletCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The measured Outlet current from the PDU meters in amps. Value is
presented in 1/100 of the Ampere i.e. 175 is 1.75 A. A value
of -1 will be returned if the current is unable to be measured.""",
}, # column
"hpduOutletPowerVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The total VA load (VI) measured on the PDU in VA. A value
of -1 will be returned if the power is unable to be calculated.""",
}, # column
"hpduOutletPowerWatt" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The total power load measured on the PDU in Watts. A value
of -1 will be returned if the power is unable to be calculated.""",
}, # column
"hpduOutletPowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The power factor is presented in 1/100 of the unit i.e.
98 is 0.98. A value of -1 will be returned if the power factor is unable
to be measured.""",
}, # column
"hpduOutletWarningThreshold" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Report the User-set threshhold limit for Warning in percentage""",
}, # column
"hpduOutletCriticalThreshold" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Report the User-set threshhold limit for Critical in percentage""",
}, # column
"hpduOutletPowerWattHour" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Support Billing Grade Energy Metering in Watt-Hour. A value
of -1 will be returned if the power is unable to be calculated.""",
}, # column
"hpduOutletTotalEnergySince" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Display the date and time that starts to accumulate the per Load segment energy in WH.
Date and Time can be set by user on GUI.""",
}, # column
"hpduOutletEnergyMeteringTotalHours" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.9.3.1.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total hours of collecting measurement data. A value
of -1 will be returned if the value is unable to be calculated.""",
}, # column
"pdu3" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11",
}, # node
"pdu3Ident" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1",
}, # node
"pdu3NumberPDU" : {
"nodetype" : "scalar",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "2"
},
],
"range" : {
"min" : "1",
"max" : "2"
},
},
},
"access" : "readonly",
"description" :
"""The number of PDUs detected (in its daisy chain).""",
}, # scalar
"pdu3IdentTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to pdu3NumberPDU .""",
}, # table
"pdu3IdentEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1",
"status" : "current",
"linkage" : [
"pdu3IdentIndex",
],
"description" :
"""The ident table entry containing the name, model, manufacturer,
firmware version, part number, etc.""",
}, # row
"pdu3IdentIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the Pdu3IdentEntry table.""",
}, # column
"pdu3Name" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The string identify the device in daisy chain. Example PDU A.""",
}, # column
"pdu3Model" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The Device Model.""",
}, # column
"pdu3Manufacturer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The Device Manufacturer Name (e.g. Hewlett-Packard).""",
}, # column
"pdu3FirmwareVersion" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The firmware revision level of the device.""",
}, # column
"pdu3FirmwareVersionTimeStamp" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Timestamp of when the PDU firmware was last updated""",
}, # column
"pdu3PartNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The device part number. Example AF400A.""",
}, # column
"pdu3SerialNumber" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The device serial number, 10- or 16-digit.""",
}, # column
"pdu3Status" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"other" : {
"nodetype" : "namednumber",
"number" : "1"
},
"ok" : {
"nodetype" : "namednumber",
"number" : "2"
},
"degraded" : {
"nodetype" : "namednumber",
"number" : "3"
},
"failed" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
"""The overall status of the device. A value of OK(2) indicates the device is operating normally.
A value of degraded(3) indicates the device is operating with warning indicators. A value of
failed(4) indicates the device is operating with critical indicators.""",
}, # column
"pdu3Controllable" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"yes" : {
"nodetype" : "namednumber",
"number" : "1"
},
"no" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""This object indicates whether or not the device is controllable.""",
}, # column
"pdu3InputPhaseCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The number of input phase on this pdu 1,2,3.""",
}, # column
"pdu3GroupCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The number of groups (breakers) on this pdu.""",
}, # column
"pdu3OutletCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The number of outlets in this PDU.""",
}, # column
"pdu3MACAddress" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The assigned MAC address for this PDU.""",
}, # column
"pdu3IPv4Address" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The current IPv4 Address.
A value of 0.0.0.0 indicates an error or an unset option.""",
}, # column
"pdu3IPv6Address" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.2.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The current IPv6 Address.
A vale of 0.0.0.0.0.0 indicates an error or an unset option.""",
}, # column
"pdu3ConfigTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to pdu3NumberPDU .""",
}, # table
"pdu3ConfigEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1",
"status" : "current",
"linkage" : [
"pdu3ConfigIndex",
],
"description" :
"""Read Configuration data from the PDU being queried.""",
}, # row
"pdu3ConfigIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "20"
},
],
"range" : {
"min" : "0",
"max" : "20"
},
},
},
"access" : "readonly",
"description" :
"""The index of PDU configuration entry.""",
}, # column
"pdu3ConfigSsh" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"off" : {
"nodetype" : "namednumber",
"number" : "0"
},
"on" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readonly",
"description" :
"""Setting this OID to off(0) will disable the SSH conneciton.
Setting this OID to on(1) will enable the SSH connection.""",
}, # column
"pdu3ConfigFtps" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"off" : {
"nodetype" : "namednumber",
"number" : "0"
},
"on" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readonly",
"description" :
"""Setting this OID to off(0) will disable the FTPS connection.
Setting this OID to on(1) will enable the FTPS connection.""",
}, # column
"pdu3ConfigHttp" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"off" : {
"nodetype" : "namednumber",
"number" : "0"
},
"on" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readonly",
"description" :
"""Setting this OID to off(0) will disable the HTTP connection.
Setting this OID to on(1) will enable the HTTP connection.""",
}, # column
"pdu3ConfigHttps" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"off" : {
"nodetype" : "namednumber",
"number" : "0"
},
"on" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readonly",
"description" :
"""Setting this OID to off(0) will disable the HTTPS connection.
Setting this OID to on(1) will enable the HTTPS connection.""",
}, # column
"pdu3ConfigIPv4IPv6Switch" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"iPv4" : {
"nodetype" : "namednumber",
"number" : "1"
},
"iPv6" : {
"nodetype" : "namednumber",
"number" : "2"
},
"iPv4IPv6" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""Setting this OID to IPv4(1) will enable the IPv4 configuration.
Setting this OID to IPv6(2) will enable the IPv6 configuration.
Setting this OID to IPv4&IPv6(3) will enable both IPv4 and IPv6 configuration.""",
}, # column
"pdu3ConfigRedfishAPI" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"off" : {
"nodetype" : "namednumber",
"number" : "0"
},
"on" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""Setting this OID to off(0) will disable the Redfish API.
Setting this OID to on(1) will enable the Redfish API.""",
}, # column
"pdu3ConfigOledDispalyOrientation" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"displayNormal" : {
"nodetype" : "namednumber",
"number" : "1"
},
"displayReverse" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""Indicates the intended physical orientation of OLED display.
displayNormal(1) indicates normal orientation.
displayReverse(2) indicates upside down orientation.""",
}, # column
"pdu3ConfigEnergyReset" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"noOperation" : {
"nodetype" : "namednumber",
"number" : "1"
},
"reset" : {
"nodetype" : "namednumber",
"number" : "2"
},
"notSupported" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""Setting this OID to reset(2) will cause the device energy meter value
to be reset to zero. Getting this OID in models that support this feature
will do nothing and return the noOperation(1) value. Models that do not
supported this feature will respond to this OID with a value of notSupported(3).
Attempts to set this OID in these models will fail.""",
}, # column
"pdu3ConfigNetworkManagementCardReset" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"noOperation" : {
"nodetype" : "namednumber",
"number" : "0"
},
"reset" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""Getting this OID will return noOperation(0).
Getting this OID to reset(1) will reset NMC.""",
}, # column
"pdu3ConfigDaisyChainStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.1.3.1.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"daisychain" : {
"nodetype" : "namednumber",
"number" : "0"
},
},
},
"access" : "readwrite",
"description" :
"""Setting this OID to daisychain(0) will enable daisychain mode.""",
}, # column
"pdu3Input" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2",
}, # node
"pdu3InputTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to pdu3NumberPDU .""",
}, # table
"pdu3InputEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1.1",
"status" : "current",
"linkage" : [
"pdu3IdentIndex",
],
"description" :
"""The ident table entry containing the VA, Watts, WH, etc.""",
}, # row
"pdu3InputType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"singlePhase" : {
"nodetype" : "namednumber",
"number" : "1"
},
"splitPhase" : {
"nodetype" : "namednumber",
"number" : "2"
},
"threePhaseDelta" : {
"nodetype" : "namednumber",
"number" : "3"
},
"threePhaseWye" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
"""Type of input - single phase, split phase, three phase delta, or three
phase wye.""",
}, # column
"pdu3InputFrequency" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readonly",
"description" :
"""The measured input frequency from the PDU meters in tenths of Hz.""",
}, # column
"pdu3InputFrequencyStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"outOfRange" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured input frequency relative to the nominal frequency and the admitted tolerance.""",
}, # column
"pdu3InputPowerVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A total input VA of all phases. Units are 0.001 VA. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPowerWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A total input Watts of all phases. Units are 0.001 Watts. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputTotalEnergy" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A total input Watt-Hour value for all phases. Units are KWh. This value is accumulated since PDU in service.
A negative value indicates that this object is not available.""",
}, # column
"pdu3InputPowerWattHourTimer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "22"
},
],
"range" : {
"min" : "0",
"max" : "22"
},
},
},
"access" : "readonly",
"description" :
"""A Timestamp of when the Total Input WH was last reset.""",
}, # column
"pdu3InputResettableEnergy" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A total input Watt-Hour value for all phases. Units are 0.001 Wh. This value can be reset to 0
using GUI. In that case, the pdu3InputPowerWattHourTimer will be reset as well
A negative value indicates that this object is not available.""",
}, # column
"pdu3InputPowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input PF value. Units are in thousandths, for example a power factor
of 0.958 would be returned as 958, and 0.92 would be returned as 920.
A negative value indicates that this object is not available.""",
}, # column
"pdu3InputPowerVAR" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.1.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input VAR value. Units are 0.001 VAR. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPhaseTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to number of PDUs (pdu3NumberPDU) and
number of input phase (pdu3InputPhaseCount).""",
}, # table
"pdu3InputPhaseEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1",
"status" : "current",
"linkage" : [
"pdu3IdentIndex",
"pdu3InputPhaseIndex",
],
"description" :
"""The input table entry containing the voltage, current, frequency, power for each phase.
Entries are given with number of pdu and number of input phase 1, 2, or 3.""",
}, # row
"pdu3InputPhaseIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the Pdu3InputEntry table.""",
}, # column
"pdu3InputPhaseVoltageMeasType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"singlePhase" : {
"nodetype" : "namednumber",
"number" : "1"
},
"phase1toN" : {
"nodetype" : "namednumber",
"number" : "2"
},
"phase2toN" : {
"nodetype" : "namednumber",
"number" : "3"
},
"phase3toN" : {
"nodetype" : "namednumber",
"number" : "4"
},
"phase1to2" : {
"nodetype" : "namednumber",
"number" : "5"
},
"phase2to3" : {
"nodetype" : "namednumber",
"number" : "6"
},
"phase3to1" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readonly",
"description" :
"""Value indicates what input voltage is being measured in this table row - single phase
voltage, phase 1 to neutral, phase 2 to neutral, phase 3 to neutral, phase 1 to phase 2,
phase 2 to phase 3, or phase 3 to phase 1.""",
}, # column
"pdu3InputPhaseVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input voltage measurement value. Return value is divided by 10.""",
}, # column
"pdu3InputPhaseVoltageThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured input voltage relative to the configured thresholds.""",
}, # column
"pdu3InputPhaseVoltageThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Return value is divided by 10. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPhaseVoltageThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Return value is divided by 10. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPhaseVoltageThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Return value is divided by 10. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPhaseVoltageThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Return value is divided by 10. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPhaseCurrentMeasType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"singlePhase" : {
"nodetype" : "namednumber",
"number" : "1"
},
"neutral" : {
"nodetype" : "namednumber",
"number" : "2"
},
"phase1" : {
"nodetype" : "namednumber",
"number" : "3"
},
"phase2" : {
"nodetype" : "namednumber",
"number" : "4"
},
"phase3" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Which input wire is being measured in this table row - single phase, neutral, phase 1,
phase 2, or phase 3.""",
}, # column
"pdu3InputPhaseCurrentRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Rated current capacity of the input. A negative value indicates that
the hardware current capacity is unknown. Return value is divided by 100.""",
}, # column
"pdu3InputPhaseCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input current measurement value. Return value is divided by 100.""",
}, # column
"pdu3InputPhaseCurrentThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured input current relative to the configured thresholds.""",
}, # column
"pdu3InputPhaseCurrentThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Return value is divided by 100. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPhaseCurrentThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Return value is divided by 100. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPhaseCurrentThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Return value is divided by 100. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPhaseCurrentThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.16",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Return value is divided by 100. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPhaseCurrentPercentLoad" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Current percent load, based on the rated current capacity. Units are
percentage, for example 80.9 % will be returned as 809. A negative
value indicates that this object is not available.""",
}, # column
"pdu3InputPhasePowerMeasType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.18",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"singlePhase" : {
"nodetype" : "namednumber",
"number" : "1"
},
"neutral" : {
"nodetype" : "namednumber",
"number" : "2"
},
"phase1" : {
"nodetype" : "namednumber",
"number" : "3"
},
"phase2" : {
"nodetype" : "namednumber",
"number" : "4"
},
"phase3" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Which input wire is being measured in this table row - single phase, neutral, phase 1,
phase 2, or phase 3.""",
}, # column
"pdu3InputPhasePowerVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input VA value. Units are VA. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPhasePowerWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input Watts value. Units are Watts. A negative value indicates
that this object is not available.""",
}, # column
"pdu3InputPhasePowerWattHour" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A kWatt-Hour value for each Input phase. Units are kWh. This value can be reset to 0
using GUI. In that case, the pdu3InputPhasePowerWattHourTimer will be reset as well
A negative value indicates that this object is not available.""",
}, # column
"pdu3InputPhasePowerWattHourTimer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.22",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "22"
},
],
"range" : {
"min" : "0",
"max" : "22"
},
},
},
"access" : "readonly",
"description" :
"""Timestamp of when input phase kWatt-hours (kWh) was last reset.""",
}, # column
"pdu3InputPhasePowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.23",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input PF value. Return value is divided by 100, for example a power factor
of 0.95 would be returned as 95. A negative value indicates that this object is not available.""",
}, # column
"pdu3InputPhasePowerVAR" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.2.2.1.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An input VA Reactive value. Units are in VAR. A negative value indicates
that this object is not available.""",
}, # column
"pdu3Group" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3",
}, # node
"pdu3GroupTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to number of PDUs
and number of breakers (pdu3GroupCount).""",
}, # table
"pdu3GroupEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1",
"status" : "current",
"linkage" : [
"pdu3IdentIndex",
"pdu3GroupIndex",
],
"description" :
"""The input table entry containing the name, voltages, currents, power, etc.""",
}, # row
"pdu3GroupIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for the Pdu3GroupEntry table.""",
}, # column
"pdu3GroupName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "1",
"max" : "31"
},
],
"range" : {
"min" : "1",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""A descriptive name for the group.""",
}, # column
"pdu3GroupType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"breaker1pole" : {
"nodetype" : "namednumber",
"number" : "2"
},
"breaker2pole" : {
"nodetype" : "namednumber",
"number" : "3"
},
"breaker3pole" : {
"nodetype" : "namednumber",
"number" : "4"
},
"outletSection" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""The type of the group. (5) has no breaker""",
}, # column
"pdu3GroupVoltageMeasType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"singlePhase" : {
"nodetype" : "namednumber",
"number" : "1"
},
"phase1toN" : {
"nodetype" : "namednumber",
"number" : "2"
},
"phase2toN" : {
"nodetype" : "namednumber",
"number" : "3"
},
"phase3toN" : {
"nodetype" : "namednumber",
"number" : "4"
},
"phase1to2" : {
"nodetype" : "namednumber",
"number" : "5"
},
"phase2to3" : {
"nodetype" : "namednumber",
"number" : "6"
},
"phase3to1" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readonly",
"description" :
"""Value indicates what input voltage is being measured in this table row - single phase
voltage, phase 1 to neutral, phase 2 to neutral, phase 3 to neutral, phase 1 to phase 2,
phase 2 to phase 3, or phase 3 to phase 1.""",
}, # column
"pdu3GroupVoltage" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Return value is divided by 10 to get one decimal point, unit is in Volt.""",
}, # column
"pdu3GroupVoltageThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured group voltage relative to the configured thresholds.""",
}, # column
"pdu3GroupVoltageThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Return value is divided by 10 to get one decimal point,
unit is in Volt. A negative value indicates that this object is not available.""",
}, # column
"pdu3GroupVoltageThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Return value is divided by 10 to get one decimal point,
unit is in Volt. A negative value indicates that this object is not available.""",
}, # column
"pdu3GroupVoltageThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Return value is divided by 10 to get one decimal point,
unit is in Volt. A negative value indicates that this object is not available.""",
}, # column
"pdu3GroupVoltageThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "500000"
},
],
"range" : {
"min" : "-1",
"max" : "500000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Return value is divided by 10 to get one decimal point,
unit is in Volt. A negative value indicates that this object is not available.""",
}, # column
"pdu3groupCurrentRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Rated current capacity of the group. Return value is divided by 100 to get two decimal points,
unit is in Amps. A negative value indicates that the hardware current capacity is unknown (it
will always be unknown for custom groups).""",
}, # column
"pdu3GroupCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A group current measurement value. Return value is divided by 100 to get two decimal points.""",
}, # column
"pdu3GroupCurrentThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured group current relative to the configured thresholds.""",
}, # column
"pdu3GroupCurrentThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Return value is divided by 100 to get two decimal points. A negative value indicates
that this object is not available.""",
}, # column
"pdu3GroupCurrentThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Return value is divided by 100 to get two decimal points. A negative value indicates
that this object is not available.""",
}, # column
"pdu3GroupCurrentThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.16",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Return value is divided by 100 to get two decimal points. A negative value indicates
that this object is not available.""",
}, # column
"pdu3GroupCurrentThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.17",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "100000"
},
],
"range" : {
"min" : "-1",
"max" : "100000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Return value is divided by 100 to get two decimal points. A negative value indicates
that this object is not available.""",
}, # column
"pdu3GroupCurrentPercentLoad" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Current percent load, based on the rated current capacity. Units are
percentage, for example 80.9 % will be returned as 809. A negative
value indicates that this object is not available.""",
}, # column
"pdu3GroupPowerVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A group VA value. Units are VA. A negative value indicates
that this object is not available.""",
}, # column
"pdu3GroupPowerWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A group power value. Units are Watts. A negative value indicates
that this object is not available.""",
}, # column
"pdu3GroupPowerWattHour" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An energy consumption value for each load segment. Units are kWh. This value can be reset to 0
using GUI. In that case, the pdu3GroupPowerWattHourTimer will be reset as well.
A negative value indicates that this object is not available.""",
}, # column
"pdu3GroupPowerWattHourTimer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.22",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "22"
},
],
"range" : {
"min" : "0",
"max" : "22"
},
},
},
"access" : "readonly",
"description" :
"""Timestamp when group kilo Watt-hours (group kWh) is reset.""",
}, # column
"pdu3GroupPowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.23",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A group PF value. Return value is divided by 100, for example a power factor of 0.95 would be returned as 95.
A negative value indicates that this object is not available.""",
}, # column
"pdu3GroupPowerVAR" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A measurement for group/load segment power reactive. Units are VAR. A negative value indicates
that this object is not available.""",
}, # column
"pdu3GroupOutletCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.25",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "63"
},
],
"range" : {
"min" : "0",
"max" : "63"
},
},
},
"access" : "readonly",
"description" :
"""The number of outlets in each group (breaker).""",
}, # column
"pdu3GroupBreakerStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.3.1.1.26",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"notApplicable" : {
"nodetype" : "namednumber",
"number" : "1"
},
"breakerOn" : {
"nodetype" : "namednumber",
"number" : "2"
},
"breakerOff" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""Only applicable to groups with breaker. Indicates whether a breaker is turned
off or on.""",
}, # column
"pdu3Environment" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4",
}, # node
"pdu3EnvProbeTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.1",
"status" : "current",
"description" :
"""A list of environment probe (1 per PDU) for temperature, humidity, and contacts.
The number of entries is given by number of PDUs in daisy chain.""",
}, # table
"pdu3EnvProbeEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.1.1",
"status" : "current",
"linkage" : [
"pdu3IdentIndex",
],
"description" :
"""Aggregate entries equal to number of PDUs.""",
}, # row
"pdu3TemperatureScale" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"celsius" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fahrenheit" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""Scale used to return temperature objects.""",
}, # column
"pdu3TemperatureCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Max number of temperature measurements (1 per probe).""",
}, # column
"pdu3HumidityCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Max number of humidity measurements (1 per probe).""",
}, # column
"pdu3ContactCount" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Max number of contact sensors (1 per probe).""",
}, # column
"pdu3TemperatureTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.2",
"status" : "current",
"description" :
"""A list of temperature probe measurements. The number of entries are
given by number of pdu and pdu3TemperatureCount.""",
}, # table
"pdu3TemperatureEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.2.1",
"status" : "current",
"linkage" : [
"pdu3IdentIndex",
"pdu3TemperatureIndex",
],
"description" :
"""An entry for a temperature measurement.""",
}, # row
"pdu3TemperatureIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "2"
},
],
"range" : {
"min" : "1",
"max" : "2"
},
},
},
"access" : "readonly",
"description" :
"""A unique value for each temperature probe measurement. Its value
ranges from 1 to temperatureCount.""",
}, # column
"pdu3TemperatureName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "1",
"max" : "31"
},
],
"range" : {
"min" : "1",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""A descriptive name for the temperature probe.""",
}, # column
"pdu3TemperatureProbeStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"disconnected" : {
"nodetype" : "namednumber",
"number" : "1"
},
"connected" : {
"nodetype" : "namednumber",
"number" : "2"
},
"bad" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""Indicates whether a probe is connected or not.""",
}, # column
"pdu3TemperatureValue" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Units are in tenths of a degree (either Fahrenheit or Celsius).
Return value is divided by 10 to get degrees.""",
}, # column
"pdu3TemperatureThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured temperature relative to the configured thresholds.""",
}, # column
"pdu3TemperatureThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "150000"
},
],
"range" : {
"min" : "-1",
"max" : "150000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Return value is divided by 10 to get degree. A negative value
indicates that this object is not available.""",
}, # column
"pdu3TemperatureThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "150000"
},
],
"range" : {
"min" : "-1",
"max" : "150000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Return value is divided by 10 to get degree. A negative value
indicates that this object is not available.""",
}, # column
"pdu3TemperatureThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.2.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "150000"
},
],
"range" : {
"min" : "-1",
"max" : "150000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Return value is divided by 10 to get degree. A negative value
indicates that this object is not available.""",
}, # column
"pdu3TemperatureThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "150000"
},
],
"range" : {
"min" : "-1",
"max" : "150000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Return value is divided by 10 to get degree. A negative value
indicates that this object is not available.""",
}, # column
"pdu3HumidityTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.3",
"status" : "current",
"description" :
"""A list of humidity probe measurements. The number of entries are
given by number of pdu and pdu3HumidityCount.""",
}, # table
"pdu3HumidityEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.3.1",
"status" : "current",
"linkage" : [
"pdu3IdentIndex",
"pdu3HumidityIndex",
],
"description" :
"""An entry for a humidity measurement.""",
}, # row
"pdu3HumidityIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "2"
},
],
"range" : {
"min" : "1",
"max" : "2"
},
},
},
"access" : "readonly",
"description" :
"""A unique value for each humidity probe measurement. Its value
ranges from 1 to pdu3HumidityCount per pdu.""",
}, # column
"pdu3HumidityName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "1",
"max" : "31"
},
],
"range" : {
"min" : "1",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""A descriptive name for the humidity probe.""",
}, # column
"pdu3HumidityProbeStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.3.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"disconnected" : {
"nodetype" : "namednumber",
"number" : "1"
},
"connected" : {
"nodetype" : "namednumber",
"number" : "2"
},
"bad" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""Indicates whether a probe is connected or not.""",
}, # column
"pdu3HumidityValue" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Units are tenths of a percent relative humidity. Return value is divided by 10 to get %RH.""",
}, # column
"pdu3HumidityThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.3.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured humidity relative to the configured thresholds.""",
}, # column
"pdu3HumidityThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.3.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "1000"
},
],
"range" : {
"min" : "-1",
"max" : "1000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Return value is divided by 10 to get %RH. A negative value
indicates that this object is not available.""",
}, # column
"pdu3HumidityThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.3.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "1000"
},
],
"range" : {
"min" : "-1",
"max" : "1000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Return value is divided by 10 to get %RH. A negative value
indicates that this object is not available.""",
}, # column
"pdu3HumidityThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.3.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "1000"
},
],
"range" : {
"min" : "-1",
"max" : "1000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Return value is divided by 10 to get %RH. A negative value
indicates that this object is not available.""",
}, # column
"pdu3HumidityThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.3.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "1000"
},
],
"range" : {
"min" : "-1",
"max" : "1000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Return value is divided by 10 to get %RH. A negative value
indicates that this object is not available.""",
}, # column
"pdu3ContactTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.4",
"status" : "current",
"description" :
"""A list of contact sensors. The number of entries are
given by number of pdu and pdu3ContactCount.""",
}, # table
"pdu3ContactEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.4.1",
"status" : "current",
"linkage" : [
"pdu3IdentIndex",
"pdu3ContactIndex",
],
"description" :
"""An entry for a contact sensor""",
}, # row
"pdu3ContactIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "3"
},
],
"range" : {
"min" : "1",
"max" : "3"
},
},
},
"access" : "readonly",
"description" :
"""A unique value for each contact sensor. Its value ranges from 1 to
contactCount.""",
}, # column
"pdu3ContactName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.4.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "1",
"max" : "31"
},
],
"range" : {
"min" : "1",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""A descriptive name for the contact sensor.""",
}, # column
"pdu3ContactProbeStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.4.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"disconnected" : {
"nodetype" : "namednumber",
"number" : "1"
},
"connected" : {
"nodetype" : "namednumber",
"number" : "2"
},
"bad" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""Indicates whether a probe is connected or not.
Will not be returned if the contact sensor is internal to the ePDU,
in that case only contactState should be read.""",
}, # column
"pdu3ContactState" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.4.4.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"contactOpen" : {
"nodetype" : "namednumber",
"number" : "1"
},
"contactClosed" : {
"nodetype" : "namednumber",
"number" : "2"
},
"contactBad" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""The state of the contact sensor.""",
}, # column
"pdu3Outlet" : {
"nodetype" : "node",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5",
}, # node
"pdu3OutletTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to Number of PDU (pdu3IdentIndex)
and Number of outlet per PDU (pdu3OutletIndex).""",
}, # table
"pdu3OutletEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1",
"status" : "current",
"linkage" : [
"pdu3IdentIndex",
"pdu3OutletIndex",
],
"description" :
"""The Outlet table entry containing the type, voltage, current etc.""",
}, # row
"pdu3OutletIndex" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Index for each outlet, value from 1 to the number of outlets per PDU.""",
}, # column
"pdu3OutletName" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "1",
"max" : "31"
},
],
"range" : {
"min" : "1",
"max" : "31"
},
},
},
"access" : "readonly",
"description" :
"""A descriptive name for the outlet.""",
}, # column
"pdu3OutletType" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"iecC13" : {
"nodetype" : "namednumber",
"number" : "1"
},
"iecC19" : {
"nodetype" : "namednumber",
"number" : "2"
},
"uk" : {
"nodetype" : "namednumber",
"number" : "10"
},
"french" : {
"nodetype" : "namednumber",
"number" : "11"
},
"schuko" : {
"nodetype" : "namednumber",
"number" : "12"
},
"nema515" : {
"nodetype" : "namednumber",
"number" : "20"
},
"nema51520" : {
"nodetype" : "namednumber",
"number" : "21"
},
"nema520" : {
"nodetype" : "namednumber",
"number" : "22"
},
"nemaL520" : {
"nodetype" : "namednumber",
"number" : "23"
},
"nemaL530" : {
"nodetype" : "namednumber",
"number" : "24"
},
"nema615" : {
"nodetype" : "namednumber",
"number" : "25"
},
"nema620" : {
"nodetype" : "namednumber",
"number" : "26"
},
"nemaL620" : {
"nodetype" : "namednumber",
"number" : "27"
},
"nemaL630" : {
"nodetype" : "namednumber",
"number" : "28"
},
"nemaL715" : {
"nodetype" : "namednumber",
"number" : "29"
},
"rf203p277" : {
"nodetype" : "namednumber",
"number" : "30"
},
},
},
"access" : "readonly",
"description" :
"""Type of this outlet - C13, C19.""",
}, # column
"pdu3OutletCurrentRating" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Rated current capacity of this outlet. Return value is divided by 100 to get amps. A negative
value indicates that the hardware current capacity is unknown.""",
}, # column
"pdu3OutletCurrent" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A outlet current measurement value. Return value is divided by 100 to get amps.""",
}, # column
"pdu3OutletActivePowerThStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"good" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lowWarning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lowCritical" : {
"nodetype" : "namednumber",
"number" : "3"
},
"highWarning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"highCritical" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readonly",
"description" :
"""Status of the measured outlet active power relative to the configured thresholds.""",
}, # column
"pdu3OutletActivePowerThLowerWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "10000"
},
],
"range" : {
"min" : "-1",
"max" : "10000"
},
},
},
"access" : "readonly",
"description" :
"""Lower warning threshold. Units are Watts(W). A negative value indicates
that this object is not available.""",
}, # column
"pdu3OutletActivePowerThLowerCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "10000"
},
],
"range" : {
"min" : "-1",
"max" : "10000"
},
},
},
"access" : "readonly",
"description" :
"""Lower critical threshold. Units are Watts(W). A negative value indicates
that this object is not available.""",
}, # column
"pdu3OutletActivePowerThUpperWarning" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "10000"
},
],
"range" : {
"min" : "-1",
"max" : "10000"
},
},
},
"access" : "readonly",
"description" :
"""Upper warning threshold. Units are Watts(W). A negative value indicates
that this object is not available.""",
}, # column
"pdu3OutletActivePowerThUpperCritical" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "10000"
},
],
"range" : {
"min" : "-1",
"max" : "10000"
},
},
},
"access" : "readonly",
"description" :
"""Upper critical threshold. Units are Watts(W). A negative value indicates
that this object is not available.""",
}, # column
"pdu3OutletCurrentPercentLoad" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Current percent load, based on the rated current capacity. Units are
percentage, for example 80.5% will be returned as 805. A negative
value indicates that this object is not available.""",
}, # column
"pdu3OutletVA" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A outlet power VA value. Units are VA. A negative value indicates
that this object is not available.""",
}, # column
"pdu3OutletWatts" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""A outlet power in Watts value. Units are Watts. A negative value indicates
that this object is not available.""",
}, # column
"pdu3OutletWh" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An energy in kWatt-Hour value for each outlet. Units are kWh.
This object is writable so that it can be reset to 0. When it is
written to, the pdu3OutletWhTimer will be reset updated as well
A negative value indicates that this object is not available.""",
}, # column
"pdu3OutletWhTimer" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "22"
},
],
"range" : {
"min" : "0",
"max" : "22"
},
},
},
"access" : "readonly",
"description" :
"""Timestamp (date and time) of outlet energy in kWh is reset.""",
}, # column
"pdu3OutletPowerFactor" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An outlet PF value. Return value is divided by 100, for example a power factor
of 0.95 would be returned as 95. A negative value indicates that this object is not available.""",
}, # column
"pdu3OutletVAR" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.1.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""An outlet power reactive in VAR value. Units are VAR. A negative value indicates
that this object is not available.""",
}, # column
"pdu3OutletControlTable" : {
"nodetype" : "table",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.2",
"status" : "current",
"description" :
"""Aggregate Object with number of entries equal to Number of PDU (pdu3IdentIndex)
and Number of outlets per PDU (pdu3OutletIndex).""",
}, # table
"pdu3OutletControlEntry" : {
"nodetype" : "row",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.2.1",
"status" : "current",
"linkage" : [
"pdu3IdentIndex",
"pdu3OutletIndex",
],
"description" :
"""The Outlet table entry containing the type, voltage, current etc.""",
}, # row
"pdu3OutletControlStatus" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"off" : {
"nodetype" : "namednumber",
"number" : "1"
},
"on" : {
"nodetype" : "namednumber",
"number" : "2"
},
"pendingOff" : {
"nodetype" : "namednumber",
"number" : "3"
},
"pendingOn" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
"""current state of a control outlet.""",
}, # column
"pdu3OutletControlOffCmd" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""When write, once issued, the outlet will turn Off immediately.
0-n: Time in seconds until the outlet command is issued
-1: Cancel a pending outlet Off command
When read, returns -1 if no command is pending, or the current downcount in
seconds of a pending command.""",
}, # column
"pdu3OutletControlOnCmd" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""When write, once issued, the outlet will turn On immediately.
0-n: Time in seconds until the outlet command is issued
-1: Cancel a pending outlet On command
When read, returns -1 if no command is pending, or the current downcount in
seconds of a pending command.""",
}, # column
"pdu3OutletControlRebootCmd" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""When write, for outlets that are On prior to this Reboot command, they will
switch Off immediately when the command is issued, remain Off for
outletControlRebootOffTime seconds, and then turn back On.
For outlets that are Off prior to the Reboot command, they will turn On after
a delay of outletControlRebootOffTime seconds from when the command is issued.
0-n : Time in seconds until the Reboot command is issued
-1 : Cancel a pending outlet Reboot command
When read, returns -1 if no command is pending, or the current downcount in
seconds of a pending command.""",
}, # column
"pdu3OutletControlPowerOnState" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"off" : {
"nodetype" : "namednumber",
"number" : "1"
},
"on" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lastState" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""Determines the outlet state when power is applied to the unit.
1 : not restart at device startup
2 : should sequence back ON in line with outletControlSequenceTime
3 : should take the state the outlet had when power was lost.
If the state was ON, should sequence back ON in line with outletControlSequenceTime.""",
}, # column
"pdu3OutletControlSequenceDelay" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""Time delay in seconds from when a Global Sequence On command is issued to
when the command is executed on this outlet. This delay is also used as a power-on
delay. Set to -1 to exclude this outlet from Global Sequence On commands.""",
}, # column
"pdu3OutletControlRebootOffTime" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""Time delay in seconds that the outlet should remain in the Off state when executing a Reboot command.""",
}, # column
"pdu3OutletControlSwitchable" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.2.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"switchable" : {
"nodetype" : "namednumber",
"number" : "1"
},
"notSwitchable" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""Determines the outlet capability to be controlled On/Off from the communication channels.
1 : control On/Off enabled
2 : control On/Off disabled.""",
}, # column
"pdu3OutletControlShutoffDelay" : {
"nodetype" : "column",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.11.5.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-1",
"max" : "99999"
},
],
"range" : {
"min" : "-1",
"max" : "99999"
},
},
},
"access" : "readwrite",
"description" :
"""Time delay in seconds that could be taken in account before shutting of the outlet.
An application which need to shutoff properly an outlet will read this parameter first
then write it to the command pdu3OutletControlOffCmd.""",
}, # column
}, # nodes
"notifications" : {
"trapCritical" : {
"nodetype" : "notification",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.0.1",
"status" : "current",
"objects" : {
"sysName" : {
"nodetype" : "object",
"module" : "RFC1213-MIB"
},
"trapCode" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDescription" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceName" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceDetails" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceMgmtUrl" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
},
"description" :
"""A critical alarm has occurred. Action: Check the Trap Details for more information.""",
}, # notification
"trapWarning" : {
"nodetype" : "notification",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.0.2",
"status" : "current",
"objects" : {
"sysName" : {
"nodetype" : "object",
"module" : "RFC1213-MIB"
},
"trapCode" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDescription" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceName" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceDetails" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceMgmtUrl" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
},
"description" :
"""A warning alarm has occurred. Action: Check the Trap Details for more information.""",
}, # notification
"trapInformation" : {
"nodetype" : "notification",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.0.3",
"status" : "current",
"objects" : {
"sysName" : {
"nodetype" : "object",
"module" : "RFC1213-MIB"
},
"trapCode" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDescription" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceName" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceDetails" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceMgmtUrl" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
},
"description" :
"""An informational alarm has occurred. Action: Check the Trap Details for more information.""",
}, # notification
"trapCleared" : {
"nodetype" : "notification",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.0.4",
"status" : "current",
"objects" : {
"sysName" : {
"nodetype" : "object",
"module" : "RFC1213-MIB"
},
"trapCode" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDescription" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceName" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceDetails" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceMgmtUrl" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
},
"description" :
"""An alarm has cleared. Action: Check the Trap Details for more information.""",
}, # notification
"trapTest" : {
"nodetype" : "notification",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.0.5",
"status" : "current",
"objects" : {
"sysName" : {
"nodetype" : "object",
"module" : "RFC1213-MIB"
},
"trapCode" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDescription" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceName" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceDetails" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
"trapDeviceMgmtUrl" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
},
"description" :
"""Test trap sent to a trap receiver to check proper reception of traps""",
}, # notification
"deviceTrapInitialization" : {
"nodetype" : "notification",
"moduleName" : "CPQPOWER-MIB",
"oid" : "1.3.6.1.4.1.232.165.0.6",
"status" : "current",
"objects" : {
"sysName" : {
"nodetype" : "object",
"module" : "RFC1213-MIB"
},
"deviceIdentName" : {
"nodetype" : "object",
"module" : "CPQPOWER-MIB"
},
},
"description" :
"""This trap is sent each time a power device is initialized.""",
}, # notification
}, # notifications
}
| gpl-3.0 |
julianwang/cinder | cinder/api/contrib/snapshot_actions.py | 5 | 4221 | # Copyright 2013, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import webob
from cinder.api import extensions
from cinder.api.openstack import wsgi
from cinder import db
from cinder.i18n import _, _LI
LOG = logging.getLogger(__name__)
def authorize(context, action_name):
    """Enforce the policy check for a snapshot action.

    Builds the policy target ``snapshot_actions:<action_name>`` and runs
    the extension authorizer against *context*; raises the framework's
    policy-failure exception when the caller is not permitted.
    """
    target = 'snapshot_actions:%s' % action_name
    checker = extensions.extension_authorizer('snapshot', target)
    checker(context)
class SnapshotActionsController(wsgi.Controller):
    """Admin-facing controller exposing snapshot status-update actions."""

    def __init__(self, *args, **kwargs):
        super(SnapshotActionsController, self).__init__(*args, **kwargs)
        LOG.debug("SnapshotActionsController initialized")

    @wsgi.action('os-update_snapshot_status')
    def _update_snapshot_status(self, req, id, body):
        """Update database fields related to status of a snapshot.

        Intended for creation of snapshots, so snapshot state
        must start as 'creating' and be changed to 'available',
        'creating', or 'error'.
        """
        context = req.environ['cinder.context']
        authorize(context, 'update_snapshot_status')
        LOG.debug("body: %s", body)

        # The request body must carry a target status; anything else is
        # a client error.
        try:
            new_status = body['os-update_snapshot_status']['status']
        except KeyError:
            msg = _("'status' must be specified.")
            raise webob.exc.HTTPBadRequest(explanation=msg)

        # Allowed state transitions, keyed by the snapshot's current status.
        allowed = {'creating': ['creating', 'available', 'error'],
                   'deleting': ['deleting', 'error_deleting']}

        snapshot = db.snapshot_get(context, id)
        cur_status = snapshot['status']
        if cur_status not in allowed:
            msg = _("Snapshot status %(cur)s not allowed for "
                    "update_snapshot_status") % {'cur': cur_status}
            raise webob.exc.HTTPBadRequest(explanation=msg)
        if new_status not in allowed[cur_status]:
            msg = _("Provided snapshot status %(provided)s not allowed for "
                    "snapshot with status %(current)s.") % {
                'provided': new_status, 'current': cur_status}
            raise webob.exc.HTTPBadRequest(explanation=msg)

        changes = {'id': id, 'status': new_status}

        progress = body['os-update_snapshot_status'].get('progress', None)
        if progress:
            # Expected to be a string like '73%': strip the trailing
            # percent sign and validate the numeric part.
            msg = _('progress must be an integer percentage')
            try:
                percent = int(progress[:-1])
            except ValueError:
                raise webob.exc.HTTPBadRequest(explanation=msg)
            if percent < 0 or percent > 100 or progress[-1] != '%':
                raise webob.exc.HTTPBadRequest(explanation=msg)
            changes['progress'] = progress

        LOG.info(_LI("Updating snapshot %(id)s with info %(dict)s"),
                 {'id': id, 'dict': changes})
        db.snapshot_update(context, id, changes)
        return webob.Response(status_int=202)
class Snapshot_actions(extensions.ExtensionDescriptor):
    """Enable snapshot manager actions."""

    name = "SnapshotActions"
    alias = "os-snapshot-actions"
    namespace = ("http://docs.openstack.org/volume/ext/"
                 "snapshot-actions/api/v1.1")
    updated = "2013-07-16T00:00:00+00:00"

    def get_controller_extensions(self):
        # Attach the status-update controller to the 'snapshots' resource.
        return [extensions.ControllerExtension(self,
                                               'snapshots',
                                               SnapshotActionsController())]
| apache-2.0 |
tchernomax/ansible | lib/ansible/modules/database/proxysql/proxysql_replication_hostgroups.py | 52 | 13353 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: proxysql_replication_hostgroups
version_added: "2.3"
author: "Ben Mildren (@bmildren)"
short_description: Manages replication hostgroups using the proxysql admin
interface.
description:
- Each row in mysql_replication_hostgroups represent a pair of
writer_hostgroup and reader_hostgroup. ProxySQL will monitor the value of
read_only for all the servers in specified hostgroups, and based on the
value of read_only will assign the server to the writer or reader
hostgroups.
options:
writer_hostgroup:
description:
- Id of the writer hostgroup.
required: True
reader_hostgroup:
description:
- Id of the reader hostgroup.
required: True
comment:
description:
- Text field that can be used for any purposed defined by the user.
state:
description:
- When C(present) - adds the replication hostgroup, when C(absent) -
removes the replication hostgroup.
choices: [ "present", "absent" ]
default: present
extends_documentation_fragment:
- proxysql.managing_config
- proxysql.connectivity
'''
EXAMPLES = '''
---
# This example adds a replication hostgroup, it saves the mysql server config
# to disk, but avoids loading the mysql server config to runtime (this might be
# because several replication hostgroup are being added and the user wants to
# push the config to runtime in a single batch using the
# M(proxysql_manage_config) module). It uses supplied credentials to connect
# to the proxysql admin interface.
- proxysql_replication_hostgroups:
login_user: 'admin'
login_password: 'admin'
writer_hostgroup: 1
reader_hostgroup: 2
state: present
load_to_runtime: False
# This example removes a replication hostgroup, saves the mysql server config
# to disk, and dynamically loads the mysql server config to runtime. It uses
# credentials in a supplied config file to connect to the proxysql admin
# interface.
- proxysql_replication_hostgroups:
config_file: '~/proxysql.cnf'
writer_hostgroup: 3
reader_hostgroup: 4
state: absent
'''
RETURN = '''
stdout:
description: The replication hostgroup modified or removed from proxysql
returned: On create/update will return the newly modified group, on delete
it will return the deleted record.
type: dict
"sample": {
"changed": true,
"msg": "Added server to mysql_hosts",
"repl_group": {
"comment": "",
"reader_hostgroup": "1",
"writer_hostgroup": "2"
},
"state": "present"
}
'''
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.mysql import mysql_connect, mysql_driver, mysql_driver_fail_msg
from ansible.module_utils._text import to_native
# ===========================================
# proxysql module specific support methods.
#
def perform_checks(module):
    """Validate module parameters before touching ProxySQL.

    Fails the module when the admin port is out of range, when either
    hostgroup id is invalid, when the two hostgroups are equal, or when
    no MySQL driver is importable.  ``fail_json`` terminates execution,
    so each check may assume the previous ones passed.
    """
    if module.params["login_port"] < 0 \
            or module.params["login_port"] > 65535:
        module.fail_json(
            msg="login_port must be a valid unix port number (0-65535)"
        )

    if not module.params["writer_hostgroup"] >= 0:
        module.fail_json(
            msg="writer_hostgroup must be an integer greater than or equal"
                " to 0"
        )

    # The two hostgroups must be distinct: ProxySQL routes writes and reads
    # to different groups.
    if module.params["reader_hostgroup"] == \
            module.params["writer_hostgroup"]:
        module.fail_json(
            msg="reader_hostgroup cannot equal writer_hostgroup"
        )

    # Fixed: this message previously (and wrongly) referred to
    # writer_hostgroup, which made the failure impossible to diagnose.
    if not module.params["reader_hostgroup"] > 0:
        module.fail_json(
            msg="reader_hostgroup must be an integer greater than 0"
        )

    if mysql_driver is None:
        module.fail_json(msg=mysql_driver_fail_msg)
def save_config_to_disk(cursor):
    """Persist the in-memory mysql servers config to ProxySQL's disk DB."""
    statement = "SAVE MYSQL SERVERS TO DISK"
    cursor.execute(statement)
    return True
def load_config_to_runtime(cursor):
    """Activate the in-memory mysql servers config in the ProxySQL runtime."""
    statement = "LOAD MYSQL SERVERS TO RUNTIME"
    cursor.execute(statement)
    return True
class ProxySQLReplicationHostgroup(object):
    """One (writer_hostgroup, reader_hostgroup) pair in ProxySQL's
    mysql_replication_hostgroups table, with helpers to check, create,
    update and delete the corresponding row through an admin cursor.
    """

    def __init__(self, module):
        # Copy the relevant module parameters onto the instance so the
        # SQL helpers below do not need to carry the module object around.
        self.state = module.params["state"]
        self.save_to_disk = module.params["save_to_disk"]
        self.load_to_runtime = module.params["load_to_runtime"]
        self.writer_hostgroup = module.params["writer_hostgroup"]
        self.reader_hostgroup = module.params["reader_hostgroup"]
        self.comment = module.params["comment"]

    def check_repl_group_config(self, cursor, keys):
        """Return True when a matching row exists.

        With keys=True only the hostgroup ids are matched; with
        keys=False a configured comment is matched too, so a False
        result then means "row exists but its comment differs".
        """
        query_string = \
            """SELECT count(*) AS `repl_groups`
               FROM mysql_replication_hostgroups
               WHERE writer_hostgroup = %s
                 AND reader_hostgroup = %s"""

        query_data = \
            [self.writer_hostgroup,
             self.reader_hostgroup]

        if self.comment and not keys:
            query_string += "\n AND comment = %s"
            query_data.append(self.comment)

        cursor.execute(query_string, query_data)
        check_count = cursor.fetchone()
        return (int(check_count['repl_groups']) > 0)

    def get_repl_group_config(self, cursor):
        """Fetch the full row for this hostgroup pair (or None)."""
        query_string = \
            """SELECT *
               FROM mysql_replication_hostgroups
               WHERE writer_hostgroup = %s
                 AND reader_hostgroup = %s"""

        query_data = \
            [self.writer_hostgroup,
             self.reader_hostgroup]

        cursor.execute(query_string, query_data)
        repl_group = cursor.fetchone()
        return repl_group

    def create_repl_group_config(self, cursor):
        """Insert the hostgroup row; an unset comment is stored as ''."""
        query_string = \
            """INSERT INTO mysql_replication_hostgroups (
               writer_hostgroup,
               reader_hostgroup,
               comment)
               VALUES (%s, %s, %s)"""

        query_data = \
            [self.writer_hostgroup,
             self.reader_hostgroup,
             self.comment or '']

        cursor.execute(query_string, query_data)
        return True

    def update_repl_group_config(self, cursor):
        """Rewrite the comment of an existing hostgroup row."""
        query_string = \
            """UPDATE mysql_replication_hostgroups
               SET comment = %s
               WHERE writer_hostgroup = %s
                 AND reader_hostgroup = %s"""

        query_data = \
            [self.comment,
             self.writer_hostgroup,
             self.reader_hostgroup]

        cursor.execute(query_string, query_data)
        return True

    def delete_repl_group_config(self, cursor):
        """Remove the row for this hostgroup pair."""
        query_string = \
            """DELETE FROM mysql_replication_hostgroups
               WHERE writer_hostgroup = %s
                 AND reader_hostgroup = %s"""

        query_data = \
            [self.writer_hostgroup,
             self.reader_hostgroup]

        cursor.execute(query_string, query_data)
        return True

    def manage_config(self, cursor, state):
        # Persist and/or activate the config, but only when the preceding
        # operation actually changed something (state is truthy).
        if state:
            if self.save_to_disk:
                save_config_to_disk(cursor)
            if self.load_to_runtime:
                load_config_to_runtime(cursor)

    def create_repl_group(self, check_mode, result, cursor):
        """Create the hostgroup pair, honouring Ansible check mode."""
        if not check_mode:
            result['changed'] = \
                self.create_repl_group_config(cursor)
            result['msg'] = "Added server to mysql_hosts"
            result['repl_group'] = \
                self.get_repl_group_config(cursor)
            self.manage_config(cursor,
                               result['changed'])
        else:
            result['changed'] = True
            result['msg'] = ("Repl group would have been added to" +
                             " mysql_replication_hostgroups, however" +
                             " check_mode is enabled.")

    def update_repl_group(self, check_mode, result, cursor):
        """Update the pair's comment, honouring Ansible check mode."""
        if not check_mode:
            result['changed'] = \
                self.update_repl_group_config(cursor)
            result['msg'] = "Updated server in mysql_hosts"
            result['repl_group'] = \
                self.get_repl_group_config(cursor)
            self.manage_config(cursor,
                               result['changed'])
        else:
            result['changed'] = True
            result['msg'] = ("Repl group would have been updated in" +
                             " mysql_replication_hostgroups, however" +
                             " check_mode is enabled.")

    def delete_repl_group(self, check_mode, result, cursor):
        """Delete the hostgroup pair, honouring Ansible check mode."""
        if not check_mode:
            # Capture the row before deleting it so it can be reported back.
            result['repl_group'] = \
                self.get_repl_group_config(cursor)
            result['changed'] = \
                self.delete_repl_group_config(cursor)
            result['msg'] = "Deleted server from mysql_hosts"
            self.manage_config(cursor,
                               result['changed'])
        else:
            result['changed'] = True
            result['msg'] = ("Repl group would have been deleted from" +
                             " mysql_replication_hostgroups, however" +
                             " check_mode is enabled.")
# ===========================================
# Module execution.
#
def main():
    """Module entry point.

    Parses arguments, connects to the ProxySQL admin interface and
    converges the replication hostgroup pair to the requested state,
    reporting the result via exit_json/fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            login_user=dict(default=None, type='str'),
            login_password=dict(default=None, no_log=True, type='str'),
            login_host=dict(default="127.0.0.1"),
            login_unix_socket=dict(default=None),
            login_port=dict(default=6032, type='int'),
            config_file=dict(default="", type='path'),
            writer_hostgroup=dict(required=True, type='int'),
            reader_hostgroup=dict(required=True, type='int'),
            comment=dict(type='str'),
            state=dict(default='present', choices=['present',
                                                   'absent']),
            save_to_disk=dict(default=True, type='bool'),
            load_to_runtime=dict(default=True, type='bool')
        ),
        supports_check_mode=True
    )

    perform_checks(module)

    login_user = module.params["login_user"]
    login_password = module.params["login_password"]
    config_file = module.params["config_file"]

    cursor = None
    try:
        cursor = mysql_connect(module,
                               login_user,
                               login_password,
                               config_file,
                               cursor_class=mysql_driver.cursors.DictCursor)
    except mysql_driver.Error as e:
        module.fail_json(
            msg="unable to connect to ProxySQL Admin Module.. %s" % to_native(e)
        )

    proxysql_repl_group = ProxySQLReplicationHostgroup(module)
    result = {}

    result['state'] = proxysql_repl_group.state

    if proxysql_repl_group.state == "present":
        try:
            # keys=True matches only on the hostgroup ids: no row means
            # create; otherwise keys=False re-checks with the comment
            # included to decide between update and no-op.
            if not proxysql_repl_group.check_repl_group_config(cursor,
                                                               keys=True):
                proxysql_repl_group.create_repl_group(module.check_mode,
                                                      result,
                                                      cursor)
            else:
                if not proxysql_repl_group.check_repl_group_config(cursor,
                                                                   keys=False):
                    proxysql_repl_group.update_repl_group(module.check_mode,
                                                          result,
                                                          cursor)
                else:
                    result['changed'] = False
                    result['msg'] = ("The repl group already exists in" +
                                     " mysql_replication_hostgroups and" +
                                     " doesn't need to be updated.")
                    result['repl_group'] = \
                        proxysql_repl_group.get_repl_group_config(cursor)
        except mysql_driver.Error as e:
            module.fail_json(
                msg="unable to modify replication hostgroup.. %s" % to_native(e)
            )

    elif proxysql_repl_group.state == "absent":
        try:
            if proxysql_repl_group.check_repl_group_config(cursor,
                                                           keys=True):
                proxysql_repl_group.delete_repl_group(module.check_mode,
                                                      result,
                                                      cursor)
            else:
                result['changed'] = False
                result['msg'] = ("The repl group is already absent from the" +
                                 " mysql_replication_hostgroups memory" +
                                 " configuration")
        except mysql_driver.Error as e:
            module.fail_json(
                msg="unable to delete replication hostgroup.. %s" % to_native(e)
            )

    module.exit_json(**result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
envoyproxy/envoy | test/extensions/filters/network/thrift_proxy/driver/generated/example/Example.py | 8 | 20700 | #
# Autogenerated by Thrift Compiler (0.11.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
class Iface(object):
def ping(self):
pass
def poke(self):
pass
def add(self, a, b):
"""
Parameters:
- a
- b
"""
pass
def execute(self, input):
"""
Parameters:
- input
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def ping(self):
self.send_ping()
self.recv_ping()
def send_ping(self):
self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
args = ping_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_ping(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = ping_result()
result.read(iprot)
iprot.readMessageEnd()
return
def poke(self):
self.send_poke()
def send_poke(self):
self._oprot.writeMessageBegin('poke', TMessageType.ONEWAY, self._seqid)
args = poke_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def add(self, a, b):
"""
Parameters:
- a
- b
"""
self.send_add(a, b)
return self.recv_add()
def send_add(self, a, b):
self._oprot.writeMessageBegin('add', TMessageType.CALL, self._seqid)
args = add_args()
args.a = a
args.b = b
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_add(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = add_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "add failed: unknown result")
def execute(self, input):
"""
Parameters:
- input
"""
self.send_execute(input)
return self.recv_execute()
def send_execute(self, input):
self._oprot.writeMessageBegin('execute', TMessageType.CALL, self._seqid)
args = execute_args()
args.input = input
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_execute(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = execute_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.appex is not None:
raise result.appex
raise TApplicationException(TApplicationException.MISSING_RESULT, "execute failed: unknown result")
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["ping"] = Processor.process_ping
self._processMap["poke"] = Processor.process_poke
self._processMap["add"] = Processor.process_add
self._processMap["execute"] = Processor.process_execute
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_ping(self, seqid, iprot, oprot):
args = ping_args()
args.read(iprot)
iprot.readMessageEnd()
result = ping_result()
try:
self._handler.ping()
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("ping", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_poke(self, seqid, iprot, oprot):
args = poke_args()
args.read(iprot)
iprot.readMessageEnd()
try:
self._handler.poke()
except TTransport.TTransportException:
raise
except Exception:
logging.exception('Exception in oneway handler')
def process_add(self, seqid, iprot, oprot):
args = add_args()
args.read(iprot)
iprot.readMessageEnd()
result = add_result()
try:
result.success = self._handler.add(args.a, args.b)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("add", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_execute(self, seqid, iprot, oprot):
args = execute_args()
args.read(iprot)
iprot.readMessageEnd()
result = execute_result()
try:
result.success = self._handler.execute(args.input)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AppException as appex:
msg_type = TMessageType.REPLY
result.appex = appex
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("execute", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class ping_args(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('ping_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(ping_args)
ping_args.thrift_spec = (
)
class ping_result(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('ping_result')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(ping_result)
ping_result.thrift_spec = (
)
class poke_args(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('poke_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(poke_args)
poke_args.thrift_spec = (
)
class add_args(object):
"""
Attributes:
- a
- b
"""
def __init__(self, a=None, b=None,):
self.a = a
self.b = b
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.a = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.b = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('add_args')
if self.a is not None:
oprot.writeFieldBegin('a', TType.I32, 1)
oprot.writeI32(self.a)
oprot.writeFieldEnd()
if self.b is not None:
oprot.writeFieldBegin('b', TType.I32, 2)
oprot.writeI32(self.b)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(add_args)
add_args.thrift_spec = (
None, # 0
(1, TType.I32, 'a', None, None, ), # 1
(2, TType.I32, 'b', None, None, ), # 2
)
class add_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('add_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(add_result)
add_result.thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
)
class execute_args(object):
"""
Attributes:
- input
"""
def __init__(self, input=None,):
self.input = input
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.input = Param()
self.input.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('execute_args')
if self.input is not None:
oprot.writeFieldBegin('input', TType.STRUCT, 1)
self.input.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(execute_args)
execute_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'input', [Param, None], None, ), # 1
)
class execute_result(object):
"""
Attributes:
- success
- appex
"""
def __init__(self, success=None, appex=None,):
self.success = success
self.appex = appex
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Result()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.appex = AppException()
self.appex.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('execute_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.appex is not None:
oprot.writeFieldBegin('appex', TType.STRUCT, 1)
self.appex.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(execute_result)
execute_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Result, None], None, ), # 0
(1, TType.STRUCT, 'appex', [AppException, None], None, ), # 1
)
fix_spec(all_structs)
del all_structs
| apache-2.0 |
kajarenc/python-twitter | tests/test_filecache.py | 22 | 1408 | import twitter
import unittest
import time
class FileCacheTest(unittest.TestCase):
    """Unit tests for the twitter._FileCache on-disk cache."""

    def testInit(self):
        """Test the twitter._FileCache constructor"""
        cache = twitter._FileCache()
        # assertIsNotNone replaces self.assert_, a deprecated alias that
        # was removed from unittest in Python 3.12.
        self.assertIsNotNone(cache, 'cache is None')

    def testSet(self):
        """Test the twitter._FileCache.Set method"""
        cache = twitter._FileCache()
        cache.Set("foo", 'Hello World!')
        cache.Remove("foo")

    def testRemove(self):
        """Test the twitter._FileCache.Remove method"""
        cache = twitter._FileCache()
        cache.Set("foo", 'Hello World!')
        cache.Remove("foo")
        data = cache.Get("foo")
        # A removed key must read back as None.
        self.assertIsNone(data, 'data is not None')

    def testGet(self):
        """Test the twitter._FileCache.Get method"""
        cache = twitter._FileCache()
        cache.Set("foo", 'Hello World!')
        data = cache.Get("foo")
        self.assertEqual('Hello World!', data)
        cache.Remove("foo")

    def testGetCachedTime(self):
        """Test the twitter._FileCache.GetCachedTime method"""
        now = time.time()
        cache = twitter._FileCache()
        cache.Set("foo", 'Hello World!')
        cached_time = cache.GetCachedTime("foo")
        delta = cached_time - now
        # assertLessEqual gives a useful failure message and avoids the
        # removed assert_ alias.
        self.assertLessEqual(
            delta, 1,
            'Cached time differs from clock time by more than 1 second.')
        cache.Remove("foo")
| apache-2.0 |
goldcoin/gldcoin | share/qt/make_spinner.py | 4415 | 1035 | #!/usr/bin/env python
# W.J. van der Laan, 2011
# Make spinning .mng animation from a .png
# Requires imagemagick 6.7+
from __future__ import division
from os import path
from PIL import Image
from subprocess import Popen
SRC='img/reload_scaled.png'
DST='../../src/qt/res/movies/update_spinner.mng'
TMPDIR='/tmp'
TMPNAME='tmp-%03i.png'
NUMFRAMES=35
FRAMERATE=10.0
CONVERT='convert'
CLOCKWISE=True
DSIZE=(16,16)
im_src = Image.open(SRC)
if CLOCKWISE:
im_src = im_src.transpose(Image.FLIP_LEFT_RIGHT)
def frame_to_filename(frame):
    """Return the temporary PNG path used for animation frame *frame*."""
    filename = TMPNAME % frame
    return path.join(TMPDIR, filename)
frame_files = []
# Render each frame: rotate the source image a fraction of a full turn,
# scale it to the target size and save it as a numbered temp PNG.
for frame in xrange(NUMFRAMES):
    # Sample the rotation at the centre of each frame interval (the +0.5)
    # so the animation loops without a duplicated start/end frame.
    rotation = (frame + 0.5) / NUMFRAMES * 360.0
    if CLOCKWISE:
        rotation = -rotation
    im_new = im_src.rotate(rotation, Image.BICUBIC)
    im_new.thumbnail(DSIZE, Image.ANTIALIAS)
    outfile = frame_to_filename(frame)
    im_new.save(outfile, 'png')
    frame_files.append(outfile)

# Assemble the frames into the target MNG with imagemagick.
# "-dispose 2" restores the background before each frame is drawn.
# NOTE(review): FRAMERATE is passed as imagemagick's -delay (ticks per
# frame), not frames per second — confirm the intended playback speed.
p = Popen([CONVERT, "-delay", str(FRAMERATE), "-dispose", "2"] + frame_files + [DST])
p.communicate()
| mit |
iuliat/nova | nova/network/ldapdns.py | 68 | 13225 | # Copyright 2012 Andrew Bogott for the Wikimedia Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
try:
import ldap
except ImportError:
# This module needs to be importable despite ldap not being a requirement
ldap = None
import time
from oslo_config import cfg
from oslo_log import log as logging
from nova import exception
from nova.i18n import _, _LW
from nova.network import dns_driver
from nova import utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
ldap_dns_opts = [
cfg.StrOpt('ldap_dns_url',
default='ldap://ldap.example.com:389',
help='URL for LDAP server which will store DNS entries'),
cfg.StrOpt('ldap_dns_user',
default='uid=admin,ou=people,dc=example,dc=org',
help='User for LDAP DNS'),
cfg.StrOpt('ldap_dns_password',
default='password',
help='Password for LDAP DNS',
secret=True),
cfg.StrOpt('ldap_dns_soa_hostmaster',
default='hostmaster@example.org',
help='Hostmaster for LDAP DNS driver Statement of Authority'),
cfg.MultiStrOpt('ldap_dns_servers',
default=['dns.example.org'],
help='DNS Servers for LDAP DNS driver'),
cfg.StrOpt('ldap_dns_base_dn',
default='ou=hosts,dc=example,dc=org',
help='Base DN for DNS entries in LDAP'),
cfg.StrOpt('ldap_dns_soa_refresh',
default='1800',
help='Refresh interval (in seconds) for LDAP DNS driver '
'Statement of Authority'),
cfg.StrOpt('ldap_dns_soa_retry',
default='3600',
help='Retry interval (in seconds) for LDAP DNS driver '
'Statement of Authority'),
cfg.StrOpt('ldap_dns_soa_expiry',
default='86400',
help='Expiry interval (in seconds) for LDAP DNS driver '
'Statement of Authority'),
cfg.StrOpt('ldap_dns_soa_minimum',
default='7200',
help='Minimum interval (in seconds) for LDAP DNS driver '
'Statement of Authority'),
]
CONF.register_opts(ldap_dns_opts)
# Importing ldap.modlist breaks the tests for some reason,
# so this is an abbreviated version of a function from
# there.
def create_modlist(newattrs):
    """Build an ldap add-modlist from *newattrs*, utf8-encoding every value.

    Abbreviated stand-in for ldap.modlist (importing ldap.modlist breaks
    the tests).  Mutates *newattrs* in place so its values end up encoded,
    matching the historical behaviour.
    """
    modlist = []
    for attrtype, values in list(newattrs.items()):
        encoded = [utils.utf8(val) for val in values]
        # Keep the caller's dict in sync with what goes on the modlist.
        newattrs[attrtype] = encoded
        modlist.append((attrtype, encoded))
    return modlist
class DNSEntry(object):
    """Thin wrapper around one LDAP (dn, attrs) tuple plus helpers for
    (de)qualifying host names against the entry's domain.
    """

    def __init__(self, ldap_object):
        """ldap_object is an instance of ldap.LDAPObject.

        It should already be initialized and bound before
        getting passed in here.
        """
        self.lobj = ldap_object
        self.ldap_tuple = None
        self.qualified_domain = None

    @classmethod
    def _get_tuple_for_domain(cls, lobj, domain):
        # Look up the entry whose associatedDomain matches; return the
        # first (dn, attrs) tuple found, or None if there is no match.
        entry = lobj.search_s(CONF.ldap_dns_base_dn, ldap.SCOPE_SUBTREE,
                              '(associatedDomain=%s)' % utils.utf8(domain))
        if not entry:
            return None
        if len(entry) > 1:
            # Bug fix: the format string uses named placeholders, so the
            # values must be supplied as a single mapping.  Passing them
            # positionally (as before) raised TypeError whenever this
            # warning was actually rendered.  Now matches _dequalify().
            LOG.warning(_LW("Found multiple matches for domain "
                            "%(domain)s.\n%(entry)s"),
                        {'domain': domain, 'entry': entry})
        return entry[0]

    @classmethod
    def _get_all_domains(cls, lobj):
        """Return the names of all domains that carry an SOA record."""
        entries = lobj.search_s(CONF.ldap_dns_base_dn,
                                ldap.SCOPE_SUBTREE, '(sOARecord=*)')
        domains = []
        for entry in entries:
            domain = entry[1].get('associatedDomain')
            if domain:
                domains.append(domain[0])
        return domains

    def _set_tuple(self, tuple):
        # Stash the raw (dn, attrs) LDAP tuple backing this entry.
        self.ldap_tuple = tuple

    def _qualify(self, name):
        # 'host' -> 'host.<qualified_domain>'
        return '%s.%s' % (name, self.qualified_domain)

    def _dequalify(self, name):
        # 'host.<qualified_domain>' -> 'host'; None if name is outside
        # this entry's domain.
        z = ".%s" % self.qualified_domain
        if name.endswith(z):
            dequalified = name[0:name.rfind(z)]
        else:
            LOG.warning(_LW("Unable to dequalify. %(name)s is not in "
                            "%(domain)s.\n"),
                        {'name': name,
                         'domain': self.qualified_domain})
            dequalified = None

        return dequalified

    def _dn(self):
        return self.ldap_tuple[0]
    dn = property(_dn)

    def _rdn(self):
        # Relative DN: the first component of the full DN.
        return self.dn.partition(',')[0]
    rdn = property(_rdn)
class DomainEntry(DNSEntry):
    """A DNS domain entry: carries the SOA record and child host entries."""

    @classmethod
    def _soa(cls):
        """Build the SOA record string from config with a fresh serial.

        The serial is the current timestamp, so each call produces a
        serial newer than the last.
        """
        date = time.strftime('%Y%m%d%H%M%S')
        soa = '%s %s %s %s %s %s %s' % (
            CONF.ldap_dns_servers[0],
            CONF.ldap_dns_soa_hostmaster,
            date,
            CONF.ldap_dns_soa_refresh,
            CONF.ldap_dns_soa_retry,
            CONF.ldap_dns_soa_expiry,
            CONF.ldap_dns_soa_minimum)
        return utils.utf8(soa)

    @classmethod
    def create_domain(cls, lobj, domain):
        """Create a new domain entry, and return an object that wraps it."""
        entry = cls._get_tuple_for_domain(lobj, domain)
        if entry:
            raise exception.FloatingIpDNSExists(name=domain, domain='')
        newdn = 'dc=%s,%s' % (domain, CONF.ldap_dns_base_dn)
        attrs = {'objectClass': ['domainrelatedobject', 'dnsdomain',
                                 'domain', 'dcobject', 'top'],
                 'sOARecord': [cls._soa()],
                 'associatedDomain': [domain],
                 'dc': [domain]}
        lobj.add_s(newdn, create_modlist(attrs))
        return DomainEntry(lobj, domain)

    def __init__(self, ldap_object, domain):
        """Wrap the existing LDAP entry for *domain*.

        :raises exception.NotFound: if the domain does not exist in LDAP.
        """
        super(DomainEntry, self).__init__(ldap_object)
        entry = self._get_tuple_for_domain(self.lobj, domain)
        if not entry:
            raise exception.NotFound()
        self._set_tuple(entry)
        # NOTE: assert is stripped when Python runs with -O.
        assert(entry[1]['associatedDomain'][0] == domain)
        self.qualified_domain = domain

    def delete(self):
        """Delete the domain that this entry refers to."""
        # Delete every child host (aRecord) entry first, then the domain
        # entry itself; LDAP cannot delete a non-leaf entry directly.
        entries = self.lobj.search_s(self.dn,
                                     ldap.SCOPE_SUBTREE,
                                     '(aRecord=*)')
        for entry in entries:
            self.lobj.delete_s(entry[0])
        self.lobj.delete_s(self.dn)

    def update_soa(self):
        # Rewrite the SOA record, bumping the serial so secondaries
        # notice the change.
        mlist = [(ldap.MOD_REPLACE, 'sOARecord', self._soa())]
        self.lobj.modify_s(self.dn, mlist)

    def subentry_with_name(self, name):
        """Return the HostEntry for *name* under this domain, or None."""
        entry = self.lobj.search_s(self.dn, ldap.SCOPE_SUBTREE,
                                   '(associatedDomain=%s.%s)' %
                                   (utils.utf8(name),
                                    utils.utf8(self.qualified_domain)))
        if entry:
            return HostEntry(self, entry[0])
        else:
            return None

    def subentries_with_ip(self, ip):
        """Return all HostEntry objects whose A record equals *ip*."""
        entries = self.lobj.search_s(self.dn, ldap.SCOPE_SUBTREE,
                                     '(aRecord=%s)' % utils.utf8(ip))
        objs = []
        for entry in entries:
            # Skip malformed entries with no associatedDomain attribute.
            if 'associatedDomain' in entry[1]:
                objs.append(HostEntry(self, entry))
        return objs

    def add_entry(self, name, address):
        """Add an A record mapping *name* to *address* under this domain.

        If another name already maps to *address*, the new name is
        attached to that existing entry instead of creating a duplicate
        record.

        :raises exception.FloatingIpDNSExists: if *name* already exists.
        """
        if self.subentry_with_name(name):
            raise exception.FloatingIpDNSExists(name=name,
                                                domain=self.qualified_domain)
        entries = self.subentries_with_ip(address)
        if entries:
            # We already have an ldap entry for this IP, so we just
            # need to add the new name.
            existingdn = entries[0].dn
            self.lobj.modify_s(existingdn, [(ldap.MOD_ADD,
                                             'associatedDomain',
                                             utils.utf8(self._qualify(name)))])
            return self.subentry_with_name(name)
        else:
            # We need to create an entirely new entry.
            newdn = 'dc=%s,%s' % (name, self.dn)
            attrs = {'objectClass': ['domainrelatedobject', 'dnsdomain',
                                     'domain', 'dcobject', 'top'],
                     'aRecord': [address],
                     'associatedDomain': [self._qualify(name)],
                     'dc': [name]}
            self.lobj.add_s(newdn, create_modlist(attrs))
            return self.subentry_with_name(name)

    def remove_entry(self, name):
        """Remove *name* from this domain and bump the SOA serial."""
        entry = self.subentry_with_name(name)
        if not entry:
            raise exception.NotFound()
        entry.remove_name(name)
        self.update_soa()
class HostEntry(DNSEntry):
    """A single host (A record) entry living under a DomainEntry."""

    def __init__(self, parent, tuple):
        super(HostEntry, self).__init__(parent.lobj)
        self.parent_entry = parent
        self._set_tuple(tuple)
        self.qualified_domain = parent.qualified_domain

    def remove_name(self, name):
        """Remove *name* from this entry.

        Deletes the whole entry when *name* is its last associated
        domain; otherwise just drops the one name (renaming the entry
        if the removed name was its RDN).

        :raises exception.NotFound: if the entry has no names at all.
        """
        names = self.ldap_tuple[1]['associatedDomain']
        if not names:
            raise exception.NotFound()
        if len(names) > 1:
            # We just have to remove the requested domain.
            # utf8-encode after qualifying, consistent with
            # DomainEntry.add_entry.
            self.lobj.modify_s(self.dn, [(ldap.MOD_DELETE, 'associatedDomain',
                                          utils.utf8(self._qualify(name)))])
            # Fix: the old check compared self.rdn[1] (always 'c' in a
            # 'dc=...' RDN) against the name, so the rename branch could
            # never trigger.  Compare against the RDN's value instead.
            if self.rdn.partition('=')[2] == name:
                # We just removed the rdn, so we need to move this entry.
                names.remove(self._qualify(name))
                newrdn = 'dc=%s' % self._dequalify(names[0])
                # Fix: python-ldap's modrdn_s() takes the new RDN as a
                # string, not a list.
                self.lobj.modrdn_s(self.dn, newrdn)
        else:
            # We should delete the entire record.
            self.lobj.delete_s(self.dn)

    def modify_address(self, name, address):
        """Point this entry's A record at *address*.

        If the entry carries several names, only *name* is moved to a
        new/other entry with the new address; the rest keep the old one.

        :raises exception.NotFound: if the entry has no names at all.
        """
        names = self.ldap_tuple[1]['associatedDomain']
        if not names:
            raise exception.NotFound()
        if len(names) == 1:
            self.lobj.modify_s(self.dn, [(ldap.MOD_REPLACE, 'aRecord',
                                          [utils.utf8(address)])])
        else:
            self.remove_name(name)
            self.parent.add_entry(name, address)

    def _names(self):
        # All associated names, stripped of the domain suffix.
        names = []
        for domain in self.ldap_tuple[1]['associatedDomain']:
            names.append(self._dequalify(domain))
        return names
    names = property(_names)

    def _ip(self):
        # The (first) A record value.
        ip = self.ldap_tuple[1]['aRecord'][0]
        return ip
    ip = property(_ip)

    def _parent(self):
        return self.parent_entry
    parent = property(_parent)
class LdapDNS(dns_driver.DNSDriver):
    """Driver for PowerDNS using ldap as a back end.

    This driver assumes ldap-method=strict, with all domains
    in the top-level, aRecords only.
    """

    def __init__(self):
        if not ldap:
            raise ImportError(_('ldap not installed'))
        # Bind once at construction; the connection is reused for the
        # lifetime of the driver.
        self.lobj = ldap.initialize(CONF.ldap_dns_url)
        self.lobj.simple_bind_s(CONF.ldap_dns_user,
                                CONF.ldap_dns_password)

    def get_domains(self):
        """Return all domain names known to the backend."""
        return DomainEntry._get_all_domains(self.lobj)

    def create_entry(self, name, address, type, domain):
        """Create an A record mapping *name* to *address* in *domain*.

        :raises exception.InvalidInput: for any record type other than 'a'.
        """
        if type.lower() != 'a':
            raise exception.InvalidInput(_("This driver only supports "
                                           "type 'a' entries."))
        dEntry = DomainEntry(self.lobj, domain)
        dEntry.add_entry(name, address)

    def delete_entry(self, name, domain):
        """Delete the record *name* from *domain*."""
        dEntry = DomainEntry(self.lobj, domain)
        dEntry.remove_entry(name)

    def get_entries_by_address(self, address, domain):
        """Return all names in *domain* whose A record is *address*."""
        try:
            dEntry = DomainEntry(self.lobj, domain)
        except exception.NotFound:
            return []
        entries = dEntry.subentries_with_ip(address)
        names = []
        for entry in entries:
            names.extend(entry.names)
        return names

    def get_entries_by_name(self, name, domain):
        """Return a (possibly empty) list of addresses for *name*."""
        try:
            dEntry = DomainEntry(self.lobj, domain)
        except exception.NotFound:
            return []
        nEntry = dEntry.subentry_with_name(name)
        # Fix: previously fell through and returned None when no entry
        # matched; return an empty list for consistency with
        # get_entries_by_address(), so callers can always iterate.
        return [nEntry.ip] if nEntry else []

    def modify_address(self, name, address, domain):
        """Change the address that *name* in *domain* resolves to."""
        dEntry = DomainEntry(self.lobj, domain)
        nEntry = dEntry.subentry_with_name(name)
        nEntry.modify_address(name, address)

    def create_domain(self, domain):
        """Create *domain* in the backend."""
        DomainEntry.create_domain(self.lobj, domain)

    def delete_domain(self, domain):
        """Delete *domain* and all of its records."""
        dEntry = DomainEntry(self.lobj, domain)
        dEntry.delete()

    def delete_dns_file(self):
        # No-op for the ldap driver; kept for driver-interface
        # compatibility only.
        LOG.warning(_LW("This shouldn't be getting called except during "
                        "testing."))
| apache-2.0 |
rynomad/CCNx-Federated-Wiki-Prototype | server/express/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py | 231 | 6979 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions for Windows builds.
These functions are executed via gyp-win-tool when using the ninja generator.
"""
from ctypes import windll, wintypes
import os
import shutil
import subprocess
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
def main(args):
  """Run the tool subcommand named by args[0]; exit nonzero on failure."""
  code = WinTool().Dispatch(args)
  if code is not None:
    sys.exit(code)
class LinkLock(object):
  """A flock-style lock to limit the number of concurrent links to one.
  Uses a session-local mutex based on the file's directory.
  """
  def __enter__(self):
    # Derive a per-checkout mutex name from BASE_DIR; backslashes and
    # colons are replaced because they are not valid in kernel object
    # names.
    name = 'Local\\%s' % BASE_DIR.replace('\\', '_').replace(':', '_')
    # NOTE(review): wintypes.c_int / wintypes.create_unicode_buffer rely
    # on ctypes.wintypes re-exporting plain ctypes names (true on
    # Python 2) -- confirm before running this under Python 3.
    self.mutex = windll.kernel32.CreateMutexW(
        wintypes.c_int(0),
        wintypes.c_int(0),
        wintypes.create_unicode_buffer(name))
    assert self.mutex
    # Block indefinitely (0xFFFFFFFF == INFINITE) until we own the mutex.
    result = windll.kernel32.WaitForSingleObject(
        self.mutex, wintypes.c_int(0xFFFFFFFF))
    # 0x80 means another process was killed without releasing the mutex, but
    # that this process has been given ownership. This is fine for our
    # purposes.
    assert result in (0, 0x80), (
        "%s, %s" % (result, windll.kernel32.GetLastError()))

  def __exit__(self, type, value, traceback):
    # Release ownership and close our handle on scope exit.
    windll.kernel32.ReleaseMutex(self.mutex)
    windll.kernel32.CloseHandle(self.mutex)
class WinTool(object):
  """This class performs all the Windows tooling steps. The methods can either
  be executed directly, or dispatched from an argument list.

  NOTE: this is Python 2 code (print statements, etc.); it is executed by
  ninja via gyp-win-tool.
  """

  def Dispatch(self, args):
    """Dispatches a string command to a method."""
    if len(args) < 1:
      raise Exception("Not enough arguments")
    # E.g. 'link-wrapper' dispatches to ExecLinkWrapper(*rest).
    method = "Exec%s" % self._CommandifyName(args[0])
    return getattr(self, method)(*args[1:])

  def _CommandifyName(self, name_string):
    """Transforms a tool name like recursive-mirror to RecursiveMirror."""
    return name_string.title().replace('-', '')

  def _GetEnv(self, arch):
    """Gets the saved environment from a file for a given architecture."""
    # The environment is saved as an "environment block" (see CreateProcess
    # and msvs_emulation for details). We convert to a dict here.
    # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
    pairs = open(arch).read()[:-2].split('\0')
    kvs = [item.split('=', 1) for item in pairs]
    return dict(kvs)

  def ExecStamp(self, path):
    """Simple stamp command."""
    # Touch the file by truncating/creating it.
    open(path, 'w').close()

  def ExecRecursiveMirror(self, source, dest):
    """Emulation of rm -rf out && cp -af in out."""
    if os.path.exists(dest):
      if os.path.isdir(dest):
        shutil.rmtree(dest)
      else:
        os.unlink(dest)
    if os.path.isdir(source):
      shutil.copytree(source, dest)
    else:
      shutil.copy2(source, dest)

  def ExecLinkWrapper(self, arch, *args):
    """Filter diagnostic output from link that looks like:
    '   Creating library ui.dll.lib and object ui.dll.exp'
    This happens when there are exports from the dll or exe.
    """
    # LinkLock serializes concurrent links (see class above).
    with LinkLock():
      env = self._GetEnv(arch)
      popen = subprocess.Popen(args, shell=True, env=env,
                               stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
      out, _ = popen.communicate()
      for line in out.splitlines():
        if not line.startswith('   Creating library '):
          print line
      return popen.returncode

  def ExecManifestWrapper(self, arch, *args):
    """Run manifest tool with environment set. Strip out undesirable warning
    (some XML blocks are recognized by the OS loader, but not the manifest
    tool)."""
    env = self._GetEnv(arch)
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    for line in out.splitlines():
      if line and 'manifest authoring warning 81010002' not in line:
        print line
    return popen.returncode

  def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
                      *flags):
    """Filter noisy filenames output from MIDL compile step that isn't
    quietable via command line flags.
    """
    args = ['midl', '/nologo'] + list(flags) + [
        '/out', outdir,
        '/tlb', tlb,
        '/h', h,
        '/dlldata', dlldata,
        '/iid', iid,
        '/proxy', proxy,
        idl]
    env = self._GetEnv(arch)
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    # Filter junk out of stdout, and write filtered versions. Output we want
    # to filter is pairs of lines that look like this:
    # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
    # objidl.idl
    lines = out.splitlines()
    prefix = 'Processing '
    processing = set(os.path.basename(x) for x in lines if x.startswith(prefix))
    for line in lines:
      if not line.startswith(prefix) and line not in processing:
        print line
    return popen.returncode

  def ExecAsmWrapper(self, arch, *args):
    """Filter logo banner from invocations of asm.exe."""
    env = self._GetEnv(arch)
    # MSVS doesn't assemble x64 asm files.
    if arch == 'environment.x64':
      return 0
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    for line in out.splitlines():
      if (not line.startswith('Copyright (C) Microsoft Corporation') and
          not line.startswith('Microsoft (R) Macro Assembler') and
          not line.startswith(' Assembling: ') and
          line):
        print line
    return popen.returncode

  def ExecRcWrapper(self, arch, *args):
    """Filter logo banner from invocations of rc.exe. Older versions of RC
    don't support the /nologo flag."""
    env = self._GetEnv(arch)
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    for line in out.splitlines():
      if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
          not line.startswith('Copyright (C) Microsoft Corporation') and
          line):
        print line
    return popen.returncode

  def ExecActionWrapper(self, arch, rspfile, *dir):
    """Runs an action command line from a response file using the environment
    for |arch|. If |dir| is supplied, use that as the working directory."""
    env = self._GetEnv(arch)
    # The full command line lives in the response file.
    args = open(rspfile).read()
    dir = dir[0] if dir else None
    popen = subprocess.Popen(args, shell=True, env=env, cwd=dir)
    popen.wait()
    return popen.returncode
# Entry point when invoked as gyp-win-tool: forward CLI args (minus the
# program name) to main() and propagate its exit status.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
| mit |
Danfocus/Flexget | flexget/components/notify/notifiers/email.py | 4 | 7177 | from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from future.utils import text_to_native_str
import logging
import smtplib
import socket
import getpass
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
from smtplib import SMTPAuthenticationError, SMTPServerDisconnected, SMTPSenderRefused
from flexget import plugin
from flexget.config_schema import one_or_more
from flexget.event import event
from flexget.plugin import PluginWarning
plugin_name = 'email'
log = logging.getLogger(plugin_name)
class EmailNotifier(object):
    """
    Send an e-mail with the list of all succeeded (downloaded) entries.

    Configuration options

    =============== ===================================================================
    Option          Description
    =============== ===================================================================
    from            The email address from which the email will be sent (required)
    to              The email address of the recipient (required)
    smtp_host       The host of the smtp server
    smtp_port       The port of the smtp server
    smtp_username   The username to use to connect to the smtp server
    smtp_password   The password to use to connect to the smtp server
    smtp_tls        Should we use TLS to connect to the smtp server
    smtp_ssl        Should we use SSL to connect to the smtp server
    =============== ===================================================================

    Config basic example::

      notify:
        entries:
          via:
            - email:
                from: xxx@xxx.xxx
                to: xxx@xxx.xxx
                smtp_host: smtp.host.com

    Config example with smtp login::

      notify:
        entries:
          via:
            - email:
                from: xxx@xxx.xxx
                to: xxx@xxx.xxx
                smtp_host: smtp.host.com
                smtp_port: 25
                smtp_login: true
                smtp_username: my_smtp_login
                smtp_password: my_smtp_password
                smtp_tls: true

    GMAIL example::

      notify:
        entries:
          via:
            - email:
                from: from@gmail.com
                to: to@gmail.com
                smtp_host: smtp.gmail.com
                smtp_port: 587
                smtp_login: true
                smtp_username: gmailUser
                smtp_password: gmailPassword
                smtp_tls: true

    Default values for the config elements::

      notify:
        entries:
          via:
            - email:
                smtp_host: localhost
                smtp_port: 25
                smtp_login: False
                smtp_username:
                smtp_password:
                smtp_tls: False
                smtp_ssl: False
    """

    def __init__(self):
        # Connection state is cached on the instance so several notify()
        # calls to the same server can reuse one SMTP session.
        self.mail_server = None
        self.host = None
        self.port = None
        self.username = None
        self.password = None
        self.ssl = None
        self.tls = None

    def connect_to_smtp_server(self, config):
        """Open (or reopen) an SMTP session described by *config*.

        Stores connection parameters on the instance and leaves the live
        connection in ``self.mail_server``.

        :raises PluginWarning: on connection or authentication failure.
        """
        self.host = config['smtp_host']
        self.port = config['smtp_port']
        self.ssl = config['smtp_ssl']
        self.tls = config['smtp_tls']
        self.username = config.get('smtp_username')
        self.password = config.get('smtp_password')
        try:
            log.debug('connecting to smtp server %s:%s', self.host, self.port)
            self.mail_server = smtplib.SMTP_SSL if self.ssl else smtplib.SMTP
            self.mail_server = self.mail_server(self.host, self.port)
            if self.tls:
                # STARTTLS requires an EHLO before and after the upgrade.
                self.mail_server.ehlo()
                self.mail_server.starttls()
                self.mail_server.ehlo()
        except (socket.error, OSError) as e:
            raise PluginWarning(str(e))

        try:
            if self.username:
                # Forcing to use `str` type
                log.debug('logging in to smtp server using username: %s', self.username)
                self.mail_server.login(
                    text_to_native_str(self.username), text_to_native_str(self.password)
                )
        except (IOError, SMTPAuthenticationError) as e:
            raise PluginWarning(str(e))

    # JSON schema validating the plugin's config block.
    schema = {
        'type': 'object',
        'properties': {
            'to': one_or_more({'type': 'string', 'format': 'email'}),
            'from': {
                'type': 'string',
                'default': 'flexget_notifer@flexget.com',
                'format': 'email',
            },
            'autofrom': {'type': 'boolean', 'default': False},
            'smtp_host': {'type': 'string', 'default': 'localhost'},
            'smtp_port': {'type': 'integer', 'default': 25},
            'smtp_username': {'type': 'string'},
            'smtp_password': {'type': 'string'},
            'smtp_tls': {'type': 'boolean', 'default': False},
            'smtp_ssl': {'type': 'boolean', 'default': False},
            'html': {'type': 'boolean', 'default': False},
        },
        'required': ['to'],
        'dependencies': {
            'smtp_username': ['smtp_password'],
            'smtp_password': ['smtp_username'],
            'smtp_ssl': ['smtp_tls'],
        },
        'additionalProperties': False,
    }

    def notify(self, title, message, config):
        """
        Send an email notification

        :param str message: message body
        :param str title: message subject
        :param dict config: email plugin config
        """
        if not isinstance(config['to'], list):
            config['to'] = [config['to']]

        email = MIMEMultipart('alternative')
        email['To'] = ','.join(config['to'])
        # With autofrom, build the sender address from the local user and
        # fully-qualified hostname instead of the configured 'from'.
        email['From'] = (
            getpass.getuser() + '@' + socket.getfqdn() if config['autofrom'] else config['from']
        )
        email['Subject'] = title
        email['Date'] = formatdate(localtime=True)
        content_type = 'html' if config['html'] else 'plain'
        email.attach(MIMEText(message.encode('utf-8'), content_type, _charset='utf-8'))

        # Making sure mail server connection will remain open per host or username
        # (in case several mail servers are used in the same task)
        if not self.mail_server or not (
            self.host == config['smtp_host'] and self.username == config.get('smtp_username')
        ):
            self.connect_to_smtp_server(config)

        # Retry exactly once on a dropped/refused connection: the first
        # failure triggers a reconnect, a second failure is fatal.
        connection_error = None
        while True:
            try:
                self.mail_server.sendmail(email['From'], config['to'], email.as_string())
                break
            except (SMTPServerDisconnected, SMTPSenderRefused) as e:
                if not connection_error:
                    self.connect_to_smtp_server(config)
                    connection_error = e
                else:
                    raise PluginWarning('Could not connect to SMTP server: %s' % str(e))
@event('plugin.register')
def register_plugin():
    """Register the email notifier with FlexGet's plugin system."""
    plugin.register(EmailNotifier, plugin_name, api_ver=2, interfaces=['notifiers'])
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.