| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, nullable ⌀) |
|---|---|---|---|---|
Innovahn/cybex
|
refs/heads/master
|
addons/procurement/__openerp__.py
|
36
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Procurements',
'version' : '1.0',
'author' : 'OpenERP SA',
'website': 'https://www.odoo.com/page/manufacturing',
'category' : 'Hidden/Dependency',
'depends' : ['base', 'product'],
'description': """
This is the module for computing Procurements.
==============================================
This procurement module only depends on the product module and is not useful
by itself. Procurements represent needs that must be solved by a procurement
rule. When a procurement is created, it is confirmed. When a rule is found,
it is put in the running state. The mechanism then checks whether what the
rule required has been executed, and if so moves the procurement to the done
state. A procurement can also go into exception, for example when no rule can
be found; in that state it can be cancelled.
The mechanism is extended by several modules. The procurement rule of stock
creates a move, and the procurement is fulfilled when the move is done. The
procurement rule of sale_service creates a task, and those of purchase and
mrp create a purchase order or a manufacturing order respectively.
The scheduler checks whether it can assign a rule to confirmed procurements
and whether it can move running procurements to done.
Procurements in exception should be checked manually and can be re-run.
""",
'data': [
'security/ir.model.access.csv',
'security/procurement_security.xml',
'procurement_data.xml',
'wizard/schedulers_all_view.xml',
'procurement_view.xml',
'company_view.xml',
],
'demo': [],
'test': ['test/procurement.yml'],
'installable': True,
'auto_install': True,
'images': ['images/compute_schedulers.jpeg','images/config_companies_sched.jpeg', 'images/minimum_stock_rules.jpeg'],
}
|
timoguic/sp_hub
|
refs/heads/master
|
drf_sp_hub/spkeyword/models.py
|
1
|
from django.db import models
from django.contrib.postgres.fields import JSONField
from django.urls import reverse
class SPCategory(models.Model):
name = models.CharField(max_length=200, null=False, blank=False, db_index=True, unique=True)
class Meta:
verbose_name = "SP Category"
verbose_name_plural = "SP Categories"
def __str__(self):
return self.name
class SPKeyword(models.Model):
name = models.CharField(max_length=200, null=False, blank=False, db_index=True)
language = models.CharField(max_length=3, null=False, blank=False, default='fr')
is_translation = models.ForeignKey(
'self',
null=True, blank=True,
related_name='translations',
on_delete=models.SET_NULL,
limit_choices_to={'language': 'fr'},
)
data = JSONField(null=True, blank=True)
aligned = models.BooleanField(null=False, blank=False, default=False)
is_editor = models.BooleanField(null=False, blank=False, default=False)
    category = models.ForeignKey(
        SPCategory, unique=False, related_name='keywords', db_index=True,
        null=True, blank=True, default=None, on_delete=models.SET_DEFAULT,
    )
class Meta:
verbose_name = "SP Keyword"
def get_absolute_url(self):
return reverse('spkeyword:display', kwargs={'pk': self.pk})
def __str__(self):
if self.category:
return self.category.name + ': ' + self.name
else:
return self.name
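# A minimal illustrative sketch (not part of the app): __str__ prefixes the
# category name when a category is set.
#
#   cat = SPCategory(name='Science')
#   kw = SPKeyword(name='physics', category=cat)
#   str(kw)                         # -> 'Science: physics'
#   str(SPKeyword(name='physics'))  # -> 'physics'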
|
lumig242/Hue-Integration-with-CDAP
|
refs/heads/pull3
|
desktop/core/ext-py/Django-1.6.10/django/contrib/gis/tests/geoadmin/urls.py
|
383
|
from django.conf.urls import patterns, include
from django.contrib import admin
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
)
|
dmckinney5/SlackOff
|
refs/heads/master
|
slackoff/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/shutil.py
|
395
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Utility functions for copying and archiving files and directory trees.
XXX The functions here don't copy the resource fork or other metadata on Mac.
"""
import os
import sys
import stat
from os.path import abspath
import fnmatch
import collections
import errno
from . import tarfile
try:
import bz2
_BZ2_SUPPORTED = True
except ImportError:
_BZ2_SUPPORTED = False
try:
from pwd import getpwnam
except ImportError:
getpwnam = None
try:
from grp import getgrnam
except ImportError:
getgrnam = None
__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
"copytree", "move", "rmtree", "Error", "SpecialFileError",
"ExecError", "make_archive", "get_archive_formats",
"register_archive_format", "unregister_archive_format",
"get_unpack_formats", "register_unpack_format",
"unregister_unpack_format", "unpack_archive", "ignore_patterns"]
class Error(EnvironmentError):
pass
class SpecialFileError(EnvironmentError):
"""Raised when trying to do a kind of operation (e.g. copying) which is
not supported on a special file (e.g. a named pipe)"""
class ExecError(EnvironmentError):
"""Raised when a command could not be executed"""
class ReadError(EnvironmentError):
"""Raised when an archive cannot be read"""
class RegistryError(Exception):
"""Raised when a registry operation with the archiving
and unpacking registries fails"""
try:
WindowsError
except NameError:
WindowsError = None
def copyfileobj(fsrc, fdst, length=16*1024):
"""copy data from file-like object fsrc to file-like object fdst"""
while 1:
buf = fsrc.read(length)
if not buf:
break
fdst.write(buf)
def _samefile(src, dst):
# Macintosh, Unix.
if hasattr(os.path, 'samefile'):
try:
return os.path.samefile(src, dst)
except OSError:
return False
# All other platforms: check for same pathname.
return (os.path.normcase(os.path.abspath(src)) ==
os.path.normcase(os.path.abspath(dst)))
def copyfile(src, dst):
"""Copy data from src to dst"""
if _samefile(src, dst):
raise Error("`%s` and `%s` are the same file" % (src, dst))
for fn in [src, dst]:
try:
st = os.stat(fn)
except OSError:
# File most likely does not exist
pass
else:
# XXX What about other special files? (sockets, devices...)
if stat.S_ISFIFO(st.st_mode):
raise SpecialFileError("`%s` is a named pipe" % fn)
with open(src, 'rb') as fsrc:
with open(dst, 'wb') as fdst:
copyfileobj(fsrc, fdst)
def copymode(src, dst):
"""Copy mode bits from src to dst"""
if hasattr(os, 'chmod'):
st = os.stat(src)
mode = stat.S_IMODE(st.st_mode)
os.chmod(dst, mode)
def copystat(src, dst):
"""Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
st = os.stat(src)
mode = stat.S_IMODE(st.st_mode)
if hasattr(os, 'utime'):
os.utime(dst, (st.st_atime, st.st_mtime))
if hasattr(os, 'chmod'):
os.chmod(dst, mode)
if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
try:
os.chflags(dst, st.st_flags)
except OSError as why:
if (not hasattr(errno, 'EOPNOTSUPP') or
why.errno != errno.EOPNOTSUPP):
raise
def copy(src, dst):
"""Copy data and mode bits ("cp src dst").
The destination may be a directory.
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
copyfile(src, dst)
copymode(src, dst)
def copy2(src, dst):
"""Copy data and all stat info ("cp -p src dst").
The destination may be a directory.
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
copyfile(src, dst)
copystat(src, dst)
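# A minimal usage sketch (hypothetical paths): copy() preserves the data and
# mode bits only, while copy2() also preserves the stat info (atime/mtime).
#
#   copy('notes.txt', '/tmp/')    # -> /tmp/notes.txt, fresh timestamps
#   copy2('notes.txt', '/tmp/')   # -> /tmp/notes.txt, original timestamps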
def ignore_patterns(*patterns):
"""Function that can be used as copytree() ignore parameter.
Patterns is a sequence of glob-style patterns
that are used to exclude files"""
def _ignore_patterns(path, names):
ignored_names = []
for pattern in patterns:
ignored_names.extend(fnmatch.filter(names, pattern))
return set(ignored_names)
return _ignore_patterns
def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
ignore_dangling_symlinks=False):
"""Recursively copy a directory tree.
The destination directory must not already exist.
If exception(s) occur, an Error is raised with a list of reasons.
If the optional symlinks flag is true, symbolic links in the
source tree result in symbolic links in the destination tree; if
it is false, the contents of the files pointed to by symbolic
links are copied. If the file pointed by the symlink doesn't
exist, an exception will be added in the list of errors raised in
an Error exception at the end of the copy process.
You can set the optional ignore_dangling_symlinks flag to true if you
want to silence this exception. Notice that this has no effect on
platforms that don't support os.symlink.
The optional ignore argument is a callable. If given, it
is called with the `src` parameter, which is the directory
being visited by copytree(), and `names` which is the list of
`src` contents, as returned by os.listdir():
callable(src, names) -> ignored_names
Since copytree() is called recursively, the callable will be
called once for each directory that is copied. It returns a
list of names relative to the `src` directory that should
not be copied.
The optional copy_function argument is a callable that will be used
to copy each file. It will be called with the source path and the
destination path as arguments. By default, copy2() is used, but any
function that supports the same signature (like copy()) can be used.
"""
names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
os.makedirs(dst)
errors = []
for name in names:
if name in ignored_names:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if os.path.islink(srcname):
linkto = os.readlink(srcname)
if symlinks:
os.symlink(linkto, dstname)
else:
# ignore dangling symlink if the flag is on
if not os.path.exists(linkto) and ignore_dangling_symlinks:
continue
                    # otherwise let the copy occur. copy2 will raise an error
copy_function(srcname, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks, ignore, copy_function)
else:
# Will raise a SpecialFileError for unsupported file types
copy_function(srcname, dstname)
# catch the Error from the recursive copytree so that we can
# continue with other files
except Error as err:
errors.extend(err.args[0])
except EnvironmentError as why:
errors.append((srcname, dstname, str(why)))
try:
copystat(src, dst)
except OSError as why:
if WindowsError is not None and isinstance(why, WindowsError):
# Copying file access times may fail on Windows
pass
else:
            errors.append((src, dst, str(why)))
if errors:
raise Error(errors)
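# A minimal usage sketch (hypothetical paths): the ignore callable receives
# (src, names) once per directory and returns the names to skip.
#
#   copytree('project', '/tmp/project_backup',
#            ignore=ignore_patterns('*.pyc', 'tmp*'))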
def rmtree(path, ignore_errors=False, onerror=None):
"""Recursively delete a directory tree.
If ignore_errors is set, errors are ignored; otherwise, if onerror
is set, it is called to handle the error with arguments (func,
path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
path is the argument to that function that caused it to fail; and
exc_info is a tuple returned by sys.exc_info(). If ignore_errors
is false and onerror is None, an exception is raised.
"""
if ignore_errors:
def onerror(*args):
pass
elif onerror is None:
def onerror(*args):
raise
try:
if os.path.islink(path):
# symlinks to directories are forbidden, see bug #1669
raise OSError("Cannot call rmtree on a symbolic link")
except OSError:
onerror(os.path.islink, path, sys.exc_info())
# can't continue even if onerror hook returns
return
names = []
try:
names = os.listdir(path)
except os.error:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
mode = os.lstat(fullname).st_mode
except os.error:
mode = 0
if stat.S_ISDIR(mode):
rmtree(fullname, ignore_errors, onerror)
else:
try:
os.remove(fullname)
except os.error:
onerror(os.remove, fullname, sys.exc_info())
try:
os.rmdir(path)
except os.error:
onerror(os.rmdir, path, sys.exc_info())
def _basename(path):
# A basename() variant which first strips the trailing slash, if present.
# Thus we always get the last component of the path, even for directories.
return os.path.basename(path.rstrip(os.path.sep))
def move(src, dst):
"""Recursively move a file or directory to another location. This is
similar to the Unix "mv" command.
If the destination is a directory or a symlink to a directory, the source
is moved inside the directory. The destination path must not already
exist.
If the destination already exists but is not a directory, it may be
overwritten depending on os.rename() semantics.
If the destination is on our current filesystem, then rename() is used.
Otherwise, src is copied to the destination and then removed.
A lot more could be done here... A look at a mv.c shows a lot of
the issues this implementation glosses over.
"""
real_dst = dst
if os.path.isdir(dst):
if _samefile(src, dst):
# We might be on a case insensitive filesystem,
# perform the rename anyway.
os.rename(src, dst)
return
real_dst = os.path.join(dst, _basename(src))
if os.path.exists(real_dst):
raise Error("Destination path '%s' already exists" % real_dst)
try:
os.rename(src, real_dst)
except OSError:
if os.path.isdir(src):
if _destinsrc(src, dst):
raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
copytree(src, real_dst, symlinks=True)
rmtree(src)
else:
copy2(src, real_dst)
os.unlink(src)
def _destinsrc(src, dst):
src = abspath(src)
dst = abspath(dst)
if not src.endswith(os.path.sep):
src += os.path.sep
if not dst.endswith(os.path.sep):
dst += os.path.sep
return dst.startswith(src)
def _get_gid(name):
"""Returns a gid, given a group name."""
if getgrnam is None or name is None:
return None
try:
result = getgrnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _get_uid(name):
"""Returns an uid, given a user name."""
if getpwnam is None or name is None:
return None
try:
result = getpwnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
owner=None, group=None, logger=None):
"""Create a (possibly compressed) tar file from all the files under
'base_dir'.
'compress' must be "gzip" (the default), "bzip2", or None.
'owner' and 'group' can be used to define an owner and a group for the
archive that is being built. If not provided, the current owner and group
will be used.
The output tar file will be named 'base_name' + ".tar", possibly plus
the appropriate compression extension (".gz", or ".bz2").
Returns the output filename.
"""
tar_compression = {'gzip': 'gz', None: ''}
compress_ext = {'gzip': '.gz'}
if _BZ2_SUPPORTED:
tar_compression['bzip2'] = 'bz2'
compress_ext['bzip2'] = '.bz2'
# flags for compression program, each element of list will be an argument
if compress is not None and compress not in compress_ext:
raise ValueError("bad value for 'compress', or compression format not "
"supported : {0}".format(compress))
archive_name = base_name + '.tar' + compress_ext.get(compress, '')
archive_dir = os.path.dirname(archive_name)
if not os.path.exists(archive_dir):
if logger is not None:
logger.info("creating %s", archive_dir)
if not dry_run:
os.makedirs(archive_dir)
# creating the tarball
if logger is not None:
logger.info('Creating tar archive')
uid = _get_uid(owner)
gid = _get_gid(group)
def _set_uid_gid(tarinfo):
if gid is not None:
tarinfo.gid = gid
tarinfo.gname = group
if uid is not None:
tarinfo.uid = uid
tarinfo.uname = owner
return tarinfo
if not dry_run:
tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
try:
tar.add(base_dir, filter=_set_uid_gid)
finally:
tar.close()
return archive_name
def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
# XXX see if we want to keep an external call here
if verbose:
zipoptions = "-r"
else:
zipoptions = "-rq"
from distutils.errors import DistutilsExecError
from distutils.spawn import spawn
try:
spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
except DistutilsExecError:
# XXX really should distinguish between "couldn't find
# external 'zip' command" and "zip failed".
raise ExecError("unable to create zip file '%s': "
"could neither import the 'zipfile' module nor "
"find a standalone zip utility") % zip_filename
def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
"""Create a zip file from all the files under 'base_dir'.
The output zip file will be named 'base_name' + ".zip". Uses either the
"zipfile" Python module (if available) or the InfoZIP "zip" utility
(if installed and found on the default search path). If neither tool is
available, raises ExecError. Returns the name of the output zip
file.
"""
zip_filename = base_name + ".zip"
archive_dir = os.path.dirname(base_name)
if not os.path.exists(archive_dir):
if logger is not None:
logger.info("creating %s", archive_dir)
if not dry_run:
os.makedirs(archive_dir)
# If zipfile module is not available, try spawning an external 'zip'
# command.
try:
import zipfile
except ImportError:
zipfile = None
if zipfile is None:
_call_external_zip(base_dir, zip_filename, verbose, dry_run)
else:
if logger is not None:
logger.info("creating '%s' and adding '%s' to it",
zip_filename, base_dir)
if not dry_run:
zip = zipfile.ZipFile(zip_filename, "w",
compression=zipfile.ZIP_DEFLATED)
for dirpath, dirnames, filenames in os.walk(base_dir):
for name in filenames:
path = os.path.normpath(os.path.join(dirpath, name))
if os.path.isfile(path):
zip.write(path, path)
if logger is not None:
logger.info("adding '%s'", path)
zip.close()
return zip_filename
_ARCHIVE_FORMATS = {
    'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip': (_make_zipfile, [], "ZIP file"),
    }
# bzip2 support is optional, so 'bztar' is only registered when available
if _BZ2_SUPPORTED:
    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
                                 "bzip2'ed tar-file")
def get_archive_formats():
"""Returns a list of supported formats for archiving and unarchiving.
Each element of the returned sequence is a tuple (name, description)
"""
formats = [(name, registry[2]) for name, registry in
_ARCHIVE_FORMATS.items()]
formats.sort()
return formats
def register_archive_format(name, function, extra_args=None, description=''):
"""Registers an archive format.
name is the name of the format. function is the callable that will be
used to create archives. If provided, extra_args is a sequence of
(name, value) tuples that will be passed as arguments to the callable.
description can be provided to describe the format, and will be returned
by the get_archive_formats() function.
"""
if extra_args is None:
extra_args = []
if not isinstance(function, collections.Callable):
raise TypeError('The %s object is not callable' % function)
if not isinstance(extra_args, (tuple, list)):
raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) != 2:
            raise TypeError('extra_args elements must be (arg_name, value) tuples')
_ARCHIVE_FORMATS[name] = (function, extra_args, description)
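# A minimal registration sketch; `_make_copy_dir` below is a hypothetical
# archiver that satisfies the required callable(base_name, base_dir, **kwargs)
# signature and returns the output name.
#
#   def _make_copy_dir(base_name, base_dir, **kwargs):
#       copytree(base_dir, base_name)
#       return base_name
#
#   register_archive_format('copydir', _make_copy_dir,
#                           description='plain directory copy')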
def unregister_archive_format(name):
del _ARCHIVE_FORMATS[name]
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
dry_run=0, owner=None, group=None, logger=None):
"""Create an archive file (eg. zip or tar).
'base_name' is the name of the file to create, minus any format-specific
extension; 'format' is the archive format: one of "zip", "tar", "bztar"
or "gztar".
'root_dir' is a directory that will be the root directory of the
archive; ie. we typically chdir into 'root_dir' before creating the
archive. 'base_dir' is the directory where we start archiving from;
ie. 'base_dir' will be the common prefix of all files and
directories in the archive. 'root_dir' and 'base_dir' both default
to the current directory. Returns the name of the archive file.
'owner' and 'group' are used when creating a tar archive. By default,
uses the current owner and group.
"""
save_cwd = os.getcwd()
if root_dir is not None:
if logger is not None:
logger.debug("changing into '%s'", root_dir)
base_name = os.path.abspath(base_name)
if not dry_run:
os.chdir(root_dir)
if base_dir is None:
base_dir = os.curdir
kwargs = {'dry_run': dry_run, 'logger': logger}
try:
format_info = _ARCHIVE_FORMATS[format]
except KeyError:
raise ValueError("unknown archive format '%s'" % format)
func = format_info[0]
for arg, val in format_info[1]:
kwargs[arg] = val
if format != 'zip':
kwargs['owner'] = owner
kwargs['group'] = group
try:
filename = func(base_name, base_dir, **kwargs)
finally:
if root_dir is not None:
if logger is not None:
logger.debug("changing back to '%s'", save_cwd)
os.chdir(save_cwd)
return filename
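# A minimal usage sketch (hypothetical paths): this creates
# /tmp/backup.tar.gz containing the 'data' subdirectory of /home/user/project.
#
#   make_archive('/tmp/backup', 'gztar',
#                root_dir='/home/user/project', base_dir='data')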
def get_unpack_formats():
"""Returns a list of supported formats for unpacking.
Each element of the returned sequence is a tuple
(name, extensions, description)
"""
formats = [(name, info[0], info[3]) for name, info in
_UNPACK_FORMATS.items()]
formats.sort()
return formats
def _check_unpack_options(extensions, function, extra_args):
"""Checks what gets registered as an unpacker."""
# first make sure no other unpacker is registered for this extension
existing_extensions = {}
for name, info in _UNPACK_FORMATS.items():
for ext in info[0]:
existing_extensions[ext] = name
for extension in extensions:
if extension in existing_extensions:
msg = '%s is already registered for "%s"'
raise RegistryError(msg % (extension,
existing_extensions[extension]))
if not isinstance(function, collections.Callable):
raise TypeError('The registered function must be a callable')
def register_unpack_format(name, extensions, function, extra_args=None,
description=''):
"""Registers an unpack format.
`name` is the name of the format. `extensions` is a list of extensions
corresponding to the format.
`function` is the callable that will be
used to unpack archives. The callable will receive archives to unpack.
If it's unable to handle an archive, it needs to raise a ReadError
exception.
If provided, `extra_args` is a sequence of
(name, value) tuples that will be passed as arguments to the callable.
description can be provided to describe the format, and will be returned
by the get_unpack_formats() function.
"""
if extra_args is None:
extra_args = []
_check_unpack_options(extensions, function, extra_args)
_UNPACK_FORMATS[name] = extensions, function, extra_args, description
def unregister_unpack_format(name):
"""Removes the pack format from the registry."""
del _UNPACK_FORMATS[name]
def _ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
def _unpack_zipfile(filename, extract_dir):
"""Unpack zip `filename` to `extract_dir`
"""
try:
import zipfile
except ImportError:
raise ReadError('zlib not supported, cannot unpack this archive.')
if not zipfile.is_zipfile(filename):
raise ReadError("%s is not a zip file" % filename)
zip = zipfile.ZipFile(filename)
try:
for info in zip.infolist():
name = info.filename
# don't extract absolute paths or ones with .. in them
if name.startswith('/') or '..' in name:
continue
target = os.path.join(extract_dir, *name.split('/'))
if not target:
continue
_ensure_directory(target)
if not name.endswith('/'):
# file
data = zip.read(info.filename)
f = open(target, 'wb')
try:
f.write(data)
finally:
f.close()
del data
finally:
zip.close()
def _unpack_tarfile(filename, extract_dir):
"""Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
"""
try:
tarobj = tarfile.open(filename)
except tarfile.TarError:
raise ReadError(
"%s is not a compressed or uncompressed tar file" % filename)
try:
tarobj.extractall(extract_dir)
finally:
tarobj.close()
_UNPACK_FORMATS = {
'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
'zip': (['.zip'], _unpack_zipfile, [], "ZIP file")
}
if _BZ2_SUPPORTED:
    _UNPACK_FORMATS['bztar'] = (['.tar.bz2', '.tbz2'], _unpack_tarfile, [],
                                "bzip2'ed tar-file")
def _find_unpack_format(filename):
for name, info in _UNPACK_FORMATS.items():
for extension in info[0]:
if filename.endswith(extension):
return name
return None
def unpack_archive(filename, extract_dir=None, format=None):
"""Unpack an archive.
`filename` is the name of the archive.
`extract_dir` is the name of the target directory, where the archive
is unpacked. If not provided, the current working directory is used.
`format` is the archive format: one of "zip", "tar", or "gztar". Or any
other registered format. If not provided, unpack_archive will use the
filename extension and see if an unpacker was registered for that
extension.
In case none is found, a ValueError is raised.
"""
if extract_dir is None:
extract_dir = os.getcwd()
if format is not None:
try:
format_info = _UNPACK_FORMATS[format]
except KeyError:
raise ValueError("Unknown unpack format '{0}'".format(format))
func = format_info[1]
func(filename, extract_dir, **dict(format_info[2]))
else:
# we need to look at the registered unpackers supported extensions
format = _find_unpack_format(filename)
if format is None:
raise ReadError("Unknown archive format '{0}'".format(filename))
func = _UNPACK_FORMATS[format][1]
kwargs = dict(_UNPACK_FORMATS[format][2])
func(filename, extract_dir, **kwargs)
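# A minimal usage sketch (hypothetical paths): with format=None the unpacker
# is chosen from the registered extensions, '.tar.gz' -> 'gztar' here.
#
#   unpack_archive('/tmp/backup.tar.gz', extract_dir='/tmp/restore')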
|
Featuretools/featuretools
|
refs/heads/master
|
featuretools/selection/selection.py
|
1
|
def remove_low_information_features(feature_matrix, features=None):
"""Select features that have at least 2 unique values and that are not all null
Args:
feature_matrix (:class:`pd.DataFrame`): DataFrame whose columns are feature names and rows are instances
features (list[:class:`featuretools.FeatureBase`] or list[str], optional): List of features to select
Returns:
(feature_matrix, features)
"""
keep = [c for c in feature_matrix
if (feature_matrix[c].nunique(dropna=False) > 1 and
feature_matrix[c].dropna().shape[0] > 0)]
feature_matrix = feature_matrix[keep]
if features is not None:
features = [f for f in features
if f.get_name() in feature_matrix.columns]
return feature_matrix, features
return feature_matrix
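if __name__ == "__main__":
    # A minimal, self-contained sketch with illustrative data: the constant
    # and all-null columns are dropped, the informative one is kept.
    import pandas as pd
    fm = pd.DataFrame({
        "constant": [1, 1, 1],           # a single unique value -> dropped
        "all_null": [None, None, None],  # no non-null values -> dropped
        "informative": [1, 2, 3],        # kept
    })
    print(remove_low_information_features(fm).columns.tolist())  # ['informative']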
|
MakeHer/edx-platform
|
refs/heads/dashboard.2
|
lms/djangoapps/debug/views.py
|
119
|
"""Views for debugging and diagnostics"""
import pprint
import traceback
from django.http import Http404, HttpResponse, HttpResponseNotFound
from django.contrib.auth.decorators import login_required
from django.utils.html import escape
from django.views.decorators.csrf import ensure_csrf_cookie
from edxmako.shortcuts import render_to_response
from codejail.safe_exec import safe_exec
from mako.exceptions import TopLevelLookupException
@login_required
@ensure_csrf_cookie
def run_python(request):
"""A page to allow testing the Python sandbox on a production server."""
if not request.user.is_staff:
raise Http404
c = {}
c['code'] = ''
c['results'] = None
if request.method == 'POST':
py_code = c['code'] = request.POST.get('code')
g = {}
try:
safe_exec(py_code, g)
        except Exception:
c['results'] = traceback.format_exc()
else:
c['results'] = pprint.pformat(g)
return render_to_response("debug/run_python_form.html", c)
@login_required
def show_parameters(request):
"""A page that shows what parameters were on the URL and post."""
html = []
for name, value in sorted(request.GET.items()):
html.append(escape("GET {}: {!r}".format(name, value)))
for name, value in sorted(request.POST.items()):
html.append(escape("POST {}: {!r}".format(name, value)))
return HttpResponse("\n".join("<p>{}</p>".format(h) for h in html))
def show_reference_template(request, template):
"""
Shows the specified template as an HTML page. This is used only in debug mode to allow the UX team
to produce and work with static reference templates.
e.g. /template/ux/reference/container.html shows the template under ux/reference/container.html
Note: dynamic parameters can also be passed to the page.
e.g. /template/ux/reference/container.html?name=Foo
"""
try:
return render_to_response(template, request.GET.dict())
except TopLevelLookupException:
return HttpResponseNotFound("Couldn't find template {template}".format(template=template))
|
florianjacob/linkchecker
|
refs/heads/master
|
third_party/dnspython/dns/rdtypes/ANY/SOA.py
|
100
|
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.name
class SOA(dns.rdata.Rdata):
"""SOA record
@ivar mname: the SOA MNAME (master name) field
@type mname: dns.name.Name object
@ivar rname: the SOA RNAME (responsible name) field
@type rname: dns.name.Name object
@ivar serial: The zone's serial number
@type serial: int
@ivar refresh: The zone's refresh value (in seconds)
@type refresh: int
@ivar retry: The zone's retry value (in seconds)
@type retry: int
@ivar expire: The zone's expiration value (in seconds)
@type expire: int
@ivar minimum: The zone's negative caching time (in seconds, called
"minimum" for historical reasons)
@type minimum: int
@see: RFC 1035"""
__slots__ = ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire',
'minimum']
def __init__(self, rdclass, rdtype, mname, rname, serial, refresh, retry,
expire, minimum):
super(SOA, self).__init__(rdclass, rdtype)
self.mname = mname
self.rname = rname
self.serial = serial
self.refresh = refresh
self.retry = retry
self.expire = expire
self.minimum = minimum
def to_text(self, origin=None, relativize=True, **kw):
mname = self.mname.choose_relativity(origin, relativize)
rname = self.rname.choose_relativity(origin, relativize)
return '%s %s %d %d %d %d %d' % (
mname, rname, self.serial, self.refresh, self.retry,
self.expire, self.minimum )
def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
mname = tok.get_name()
rname = tok.get_name()
mname = mname.choose_relativity(origin, relativize)
rname = rname.choose_relativity(origin, relativize)
serial = tok.get_uint32()
refresh = tok.get_ttl()
retry = tok.get_ttl()
expire = tok.get_ttl()
minimum = tok.get_ttl()
tok.get_eol()
return cls(rdclass, rdtype, mname, rname, serial, refresh, retry,
expire, minimum )
from_text = classmethod(from_text)
def to_wire(self, file, compress = None, origin = None):
self.mname.to_wire(file, compress, origin)
self.rname.to_wire(file, compress, origin)
five_ints = struct.pack('!IIIII', self.serial, self.refresh,
self.retry, self.expire, self.minimum)
file.write(five_ints)
def to_digestable(self, origin = None):
return self.mname.to_digestable(origin) + \
self.rname.to_digestable(origin) + \
struct.pack('!IIIII', self.serial, self.refresh,
self.retry, self.expire, self.minimum)
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
(mname, cused) = dns.name.from_wire(wire[: current + rdlen], current)
current += cused
rdlen -= cused
(rname, cused) = dns.name.from_wire(wire[: current + rdlen], current)
current += cused
rdlen -= cused
if rdlen != 20:
raise dns.exception.FormError
five_ints = struct.unpack('!IIIII',
wire[current : current + rdlen])
        if origin is not None:
mname = mname.relativize(origin)
rname = rname.relativize(origin)
return cls(rdclass, rdtype, mname, rname,
five_ints[0], five_ints[1], five_ints[2], five_ints[3],
five_ints[4])
from_wire = classmethod(from_wire)
def choose_relativity(self, origin = None, relativize = True):
self.mname = self.mname.choose_relativity(origin, relativize)
self.rname = self.rname.choose_relativity(origin, relativize)
def _cmp(self, other):
v = cmp(self.mname, other.mname)
if v == 0:
v = cmp(self.rname, other.rname)
if v == 0:
self_ints = struct.pack('!IIIII', self.serial, self.refresh,
self.retry, self.expire, self.minimum)
other_ints = struct.pack('!IIIII', other.serial, other.refresh,
other.retry, other.expire,
other.minimum)
v = cmp(self_ints, other_ints)
return v
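# A minimal illustrative sketch (values are hypothetical; assumes the usual
# dns.rdataclass/dns.rdatatype constants from dnspython):
#
#   soa = SOA(dns.rdataclass.IN, dns.rdatatype.SOA,
#             dns.name.from_text('ns1.example.'),
#             dns.name.from_text('hostmaster.example.'),
#             2011060700, 3600, 900, 604800, 86400)
#   soa.to_text()
#   # -> 'ns1.example. hostmaster.example. 2011060700 3600 900 604800 86400'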
|
potash/scikit-learn
|
refs/heads/master
|
benchmarks/bench_rcv1_logreg_convergence.py
|
149
|
# Authors: Tom Dupre la Tour <tom.dupre-la-tour@m4x.org>
# Olivier Grisel <olivier.grisel@ensta.org>
#
# License: BSD 3 clause
import matplotlib.pyplot as plt
import numpy as np
import gc
import time
from sklearn.externals.joblib import Memory
from sklearn.linear_model import (LogisticRegression, SGDClassifier)
from sklearn.datasets import fetch_rcv1
from sklearn.linear_model.sag import get_auto_step_size
from sklearn.linear_model.sag_fast import get_max_squared_sum
try:
import lightning.classification as lightning_clf
except ImportError:
lightning_clf = None
m = Memory(cachedir='.', verbose=0)
# Compute the penalized logistic loss:
#   mean(log(1 + exp(-y * (X.dot(w) + intercept)))) + w.dot(w) / (2 * C * n_samples)
def get_loss(w, intercept, myX, myy, C):
n_samples = myX.shape[0]
w = w.ravel()
p = np.mean(np.log(1. + np.exp(-myy * (myX.dot(w) + intercept))))
print("%f + %f" % (p, w.dot(w) / 2. / C / n_samples))
p += w.dot(w) / 2. / C / n_samples
return p
# We use joblib to cache individual fits. Note that we do not pass the dataset
# as argument as the hashing would be too slow, so we assume that the dataset
# never changes.
@m.cache()
def bench_one(name, clf_type, clf_params, n_iter):
clf = clf_type(**clf_params)
    try:
        clf.set_params(max_iter=n_iter, random_state=42)
    except ValueError:
        # some estimators (e.g. SGDClassifier) use n_iter instead of max_iter
        clf.set_params(n_iter=n_iter, random_state=42)
st = time.time()
clf.fit(X, y)
end = time.time()
    try:
        C = 1.0 / clf.alpha / n_samples
    except AttributeError:
        C = clf.C
    try:
        intercept = clf.intercept_
    except AttributeError:
        intercept = 0.
train_loss = get_loss(clf.coef_, intercept, X, y, C)
train_score = clf.score(X, y)
test_score = clf.score(X_test, y_test)
duration = end - st
return train_loss, train_score, test_score, duration
def bench(clfs):
for (name, clf, iter_range, train_losses, train_scores,
test_scores, durations) in clfs:
print("training %s" % name)
clf_type = type(clf)
clf_params = clf.get_params()
for n_iter in iter_range:
gc.collect()
train_loss, train_score, test_score, duration = bench_one(
name, clf_type, clf_params, n_iter)
train_losses.append(train_loss)
train_scores.append(train_score)
test_scores.append(test_score)
durations.append(duration)
print("classifier: %s" % name)
print("train_loss: %.8f" % train_loss)
print("train_score: %.8f" % train_score)
print("test_score: %.8f" % test_score)
print("time for fit: %.8f seconds" % duration)
print("")
print("")
return clfs
def plot_train_losses(clfs):
plt.figure()
for (name, _, _, train_losses, _, _, durations) in clfs:
plt.plot(durations, train_losses, '-o', label=name)
plt.legend(loc=0)
plt.xlabel("seconds")
plt.ylabel("train loss")
def plot_train_scores(clfs):
plt.figure()
for (name, _, _, _, train_scores, _, durations) in clfs:
plt.plot(durations, train_scores, '-o', label=name)
plt.legend(loc=0)
plt.xlabel("seconds")
plt.ylabel("train score")
plt.ylim((0.92, 0.96))
def plot_test_scores(clfs):
plt.figure()
for (name, _, _, _, _, test_scores, durations) in clfs:
plt.plot(durations, test_scores, '-o', label=name)
plt.legend(loc=0)
plt.xlabel("seconds")
plt.ylabel("test score")
plt.ylim((0.92, 0.96))
def plot_dloss(clfs):
plt.figure()
pobj_final = []
for (name, _, _, train_losses, _, _, durations) in clfs:
pobj_final.append(train_losses[-1])
indices = np.argsort(pobj_final)
pobj_best = pobj_final[indices[0]]
for (name, _, _, train_losses, _, _, durations) in clfs:
log_pobj = np.log(abs(np.array(train_losses) - pobj_best)) / np.log(10)
plt.plot(durations, log_pobj, '-o', label=name)
plt.legend(loc=0)
plt.xlabel("seconds")
plt.ylabel("log(best - train_loss)")
rcv1 = fetch_rcv1()
X = rcv1.data
n_samples, n_features = X.shape
# consider the binary classification problem 'CCAT' vs the rest
ccat_idx = rcv1.target_names.tolist().index('CCAT')
y = rcv1.target.tocsc()[:, ccat_idx].toarray().ravel().astype(np.float64)
y[y == 0] = -1
# parameters
C = 1.
fit_intercept = True
tol = 1.0e-14
# max_iter range
sgd_iter_range = list(range(1, 121, 10))
newton_iter_range = list(range(1, 25, 3))
lbfgs_iter_range = list(range(1, 242, 12))
liblinear_iter_range = list(range(1, 37, 3))
liblinear_dual_iter_range = list(range(1, 85, 6))
sag_iter_range = list(range(1, 37, 3))
clfs = [
("LR-liblinear",
LogisticRegression(C=C, tol=tol,
solver="liblinear", fit_intercept=fit_intercept,
intercept_scaling=1),
liblinear_iter_range, [], [], [], []),
("LR-liblinear-dual",
LogisticRegression(C=C, tol=tol, dual=True,
solver="liblinear", fit_intercept=fit_intercept,
intercept_scaling=1),
liblinear_dual_iter_range, [], [], [], []),
("LR-SAG",
LogisticRegression(C=C, tol=tol,
solver="sag", fit_intercept=fit_intercept),
sag_iter_range, [], [], [], []),
("LR-newton-cg",
LogisticRegression(C=C, tol=tol, solver="newton-cg",
fit_intercept=fit_intercept),
newton_iter_range, [], [], [], []),
("LR-lbfgs",
LogisticRegression(C=C, tol=tol,
solver="lbfgs", fit_intercept=fit_intercept),
lbfgs_iter_range, [], [], [], []),
("SGD",
SGDClassifier(alpha=1.0 / C / n_samples, penalty='l2', loss='log',
fit_intercept=fit_intercept, verbose=0),
sgd_iter_range, [], [], [], [])]
if lightning_clf is not None and not fit_intercept:
alpha = 1. / C / n_samples
    # compute the same step_size as in LR-sag
max_squared_sum = get_max_squared_sum(X)
step_size = get_auto_step_size(max_squared_sum, alpha, "log",
fit_intercept)
clfs.append(
("Lightning-SVRG",
lightning_clf.SVRGClassifier(alpha=alpha, eta=step_size,
tol=tol, loss="log"),
sag_iter_range, [], [], [], []))
clfs.append(
("Lightning-SAG",
lightning_clf.SAGClassifier(alpha=alpha, eta=step_size,
tol=tol, loss="log"),
sag_iter_range, [], [], [], []))
# We keep only 200 features, to have a dense dataset, and to compare with
# lightning SAG, which seems incorrect in the sparse case.
X_csc = X.tocsc()
nnz_in_each_features = X_csc.indptr[1:] - X_csc.indptr[:-1]
X = X_csc[:, np.argsort(nnz_in_each_features)[-200:]]
X = X.toarray()
print("dataset: %.3f MB" % (X.nbytes / 1e6))
# Split training and testing. Switch train and test subset compared to
# LYRL2004 split, to have a larger training dataset.
n = 23149
X_test = X[:n, :]
y_test = y[:n]
X = X[n:, :]
y = y[n:]
clfs = bench(clfs)
plot_train_scores(clfs)
plot_test_scores(clfs)
plot_train_losses(clfs)
plot_dloss(clfs)
plt.show()
|
zzzombat/lucid-python-django
|
refs/heads/master
|
django/forms/util.py
|
311
|
from django.utils.html import conditional_escape
from django.utils.encoding import StrAndUnicode, force_unicode
from django.utils.safestring import mark_safe
# Import ValidationError so that it can be imported from this
# module to maintain backwards compatibility.
from django.core.exceptions import ValidationError
def flatatt(attrs):
"""
Convert a dictionary of attributes to a single string.
The returned string will contain a leading space followed by key="value",
XML-style pairs. It is assumed that the keys do not need to be XML-escaped.
If the passed dictionary is empty, then return an empty string.
"""
return u''.join([u' %s="%s"' % (k, conditional_escape(v)) for k, v in attrs.items()])
class ErrorDict(dict, StrAndUnicode):
"""
A collection of errors that knows how to display itself in various formats.
The dictionary keys are the field names, and the values are the errors.
"""
def __unicode__(self):
return self.as_ul()
def as_ul(self):
if not self: return u''
return mark_safe(u'<ul class="errorlist">%s</ul>'
% ''.join([u'<li>%s%s</li>' % (k, force_unicode(v))
for k, v in self.items()]))
def as_text(self):
return u'\n'.join([u'* %s\n%s' % (k, u'\n'.join([u' * %s' % force_unicode(i) for i in v])) for k, v in self.items()])
class ErrorList(list, StrAndUnicode):
"""
A collection of errors that knows how to display itself in various formats.
"""
def __unicode__(self):
return self.as_ul()
def as_ul(self):
if not self: return u''
return mark_safe(u'<ul class="errorlist">%s</ul>'
% ''.join([u'<li>%s</li>' % conditional_escape(force_unicode(e)) for e in self]))
def as_text(self):
if not self: return u''
return u'\n'.join([u'* %s' % force_unicode(e) for e in self])
def __repr__(self):
return repr([force_unicode(e) for e in self])
|
jimcunderwood/MissionPlanner
|
refs/heads/master
|
Lib/encodings/cp861.py
|
93
|
""" Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP861.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp861',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
0x008b: 0x00d0, # LATIN CAPITAL LETTER ETH
0x008c: 0x00f0, # LATIN SMALL LETTER ETH
0x008d: 0x00de, # LATIN CAPITAL LETTER THORN
0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x0095: 0x00fe, # LATIN SMALL LETTER THORN
0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x0097: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
0x0098: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
0x009c: 0x00a3, # POUND SIGN
0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
0x009e: 0x20a7, # PESETA SIGN
0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x00a4: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
0x00a5: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
0x00a6: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00a7: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
0x00a8: 0x00bf, # INVERTED QUESTION MARK
0x00a9: 0x2310, # REVERSED NOT SIGN
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x258c, # LEFT HALF BLOCK
0x00de: 0x2590, # RIGHT HALF BLOCK
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
0x00e3: 0x03c0, # GREEK SMALL LETTER PI
0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
0x00ec: 0x221e, # INFINITY
0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
0x00ef: 0x2229, # INTERSECTION
0x00f0: 0x2261, # IDENTICAL TO
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
0x00f4: 0x2320, # TOP HALF INTEGRAL
0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x2248, # ALMOST EQUAL TO
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x2219, # BULLET OPERATOR
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x221a, # SQUARE ROOT
0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
0x00fd: 0x00b2, # SUPERSCRIPT TWO
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
u'\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe0' # 0x0085 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe5' # 0x0086 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xea' # 0x0088 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xe8' # 0x008a -> LATIN SMALL LETTER E WITH GRAVE
u'\xd0' # 0x008b -> LATIN CAPITAL LETTER ETH
u'\xf0' # 0x008c -> LATIN SMALL LETTER ETH
u'\xde' # 0x008d -> LATIN CAPITAL LETTER THORN
u'\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x008f -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xe6' # 0x0091 -> LATIN SMALL LIGATURE AE
u'\xc6' # 0x0092 -> LATIN CAPITAL LIGATURE AE
u'\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xfe' # 0x0095 -> LATIN SMALL LETTER THORN
u'\xfb' # 0x0096 -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xdd' # 0x0097 -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xfd' # 0x0098 -> LATIN SMALL LETTER Y WITH ACUTE
u'\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xf8' # 0x009b -> LATIN SMALL LETTER O WITH STROKE
u'\xa3' # 0x009c -> POUND SIGN
u'\xd8' # 0x009d -> LATIN CAPITAL LETTER O WITH STROKE
u'\u20a7' # 0x009e -> PESETA SIGN
u'\u0192' # 0x009f -> LATIN SMALL LETTER F WITH HOOK
u'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
u'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
u'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
u'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
u'\xc1' # 0x00a4 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xcd' # 0x00a5 -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xd3' # 0x00a6 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xda' # 0x00a7 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xbf' # 0x00a8 -> INVERTED QUESTION MARK
u'\u2310' # 0x00a9 -> REVERSED NOT SIGN
u'\xac' # 0x00aa -> NOT SIGN
u'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF
u'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER
u'\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK
u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\u2561' # 0x00b5 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
u'\u2562' # 0x00b6 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
u'\u2556' # 0x00b7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
u'\u2555' # 0x00b8 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\u255c' # 0x00bd -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
u'\u255b' # 0x00be -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\u255e' # 0x00c6 -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
u'\u255f' # 0x00c7 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\u2567' # 0x00cf -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
u'\u2568' # 0x00d0 -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
u'\u2564' # 0x00d1 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
u'\u2565' # 0x00d2 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
u'\u2559' # 0x00d3 -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
u'\u2558' # 0x00d4 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
u'\u2552' # 0x00d5 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
u'\u2553' # 0x00d6 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
u'\u256b' # 0x00d7 -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
u'\u256a' # 0x00d8 -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\u258c' # 0x00dd -> LEFT HALF BLOCK
u'\u2590' # 0x00de -> RIGHT HALF BLOCK
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\u03b1' # 0x00e0 -> GREEK SMALL LETTER ALPHA
u'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
u'\u0393' # 0x00e2 -> GREEK CAPITAL LETTER GAMMA
u'\u03c0' # 0x00e3 -> GREEK SMALL LETTER PI
u'\u03a3' # 0x00e4 -> GREEK CAPITAL LETTER SIGMA
u'\u03c3' # 0x00e5 -> GREEK SMALL LETTER SIGMA
u'\xb5' # 0x00e6 -> MICRO SIGN
u'\u03c4' # 0x00e7 -> GREEK SMALL LETTER TAU
u'\u03a6' # 0x00e8 -> GREEK CAPITAL LETTER PHI
u'\u0398' # 0x00e9 -> GREEK CAPITAL LETTER THETA
u'\u03a9' # 0x00ea -> GREEK CAPITAL LETTER OMEGA
u'\u03b4' # 0x00eb -> GREEK SMALL LETTER DELTA
u'\u221e' # 0x00ec -> INFINITY
u'\u03c6' # 0x00ed -> GREEK SMALL LETTER PHI
u'\u03b5' # 0x00ee -> GREEK SMALL LETTER EPSILON
u'\u2229' # 0x00ef -> INTERSECTION
u'\u2261' # 0x00f0 -> IDENTICAL TO
u'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
u'\u2265' # 0x00f2 -> GREATER-THAN OR EQUAL TO
u'\u2264' # 0x00f3 -> LESS-THAN OR EQUAL TO
u'\u2320' # 0x00f4 -> TOP HALF INTEGRAL
u'\u2321' # 0x00f5 -> BOTTOM HALF INTEGRAL
u'\xf7' # 0x00f6 -> DIVISION SIGN
u'\u2248' # 0x00f7 -> ALMOST EQUAL TO
u'\xb0' # 0x00f8 -> DEGREE SIGN
u'\u2219' # 0x00f9 -> BULLET OPERATOR
u'\xb7' # 0x00fa -> MIDDLE DOT
u'\u221a' # 0x00fb -> SQUARE ROOT
u'\u207f' # 0x00fc -> SUPERSCRIPT LATIN SMALL LETTER N
u'\xb2' # 0x00fd -> SUPERSCRIPT TWO
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK
0x00a3: 0x009c, # POUND SIGN
0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00ac: 0x00aa, # NOT SIGN
0x00b0: 0x00f8, # DEGREE SIGN
0x00b1: 0x00f1, # PLUS-MINUS SIGN
0x00b2: 0x00fd, # SUPERSCRIPT TWO
0x00b5: 0x00e6, # MICRO SIGN
0x00b7: 0x00fa, # MIDDLE DOT
0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER
0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF
0x00bf: 0x00a8, # INVERTED QUESTION MARK
0x00c1: 0x00a4, # LATIN CAPITAL LETTER A WITH ACUTE
0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c5: 0x008f, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x00c6: 0x0092, # LATIN CAPITAL LIGATURE AE
0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
0x00cd: 0x00a5, # LATIN CAPITAL LETTER I WITH ACUTE
0x00d0: 0x008b, # LATIN CAPITAL LETTER ETH
0x00d3: 0x00a6, # LATIN CAPITAL LETTER O WITH ACUTE
0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x00d8: 0x009d, # LATIN CAPITAL LETTER O WITH STROKE
0x00da: 0x00a7, # LATIN CAPITAL LETTER U WITH ACUTE
0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00dd: 0x0097, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00de: 0x008d, # LATIN CAPITAL LETTER THORN
0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
0x00e0: 0x0085, # LATIN SMALL LETTER A WITH GRAVE
0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
0x00e5: 0x0086, # LATIN SMALL LETTER A WITH RING ABOVE
0x00e6: 0x0091, # LATIN SMALL LIGATURE AE
0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
0x00e8: 0x008a, # LATIN SMALL LETTER E WITH GRAVE
0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
0x00ea: 0x0088, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00eb: 0x0089, # LATIN SMALL LETTER E WITH DIAERESIS
0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
0x00f0: 0x008c, # LATIN SMALL LETTER ETH
0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
0x00f7: 0x00f6, # DIVISION SIGN
0x00f8: 0x009b, # LATIN SMALL LETTER O WITH STROKE
0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
0x00fb: 0x0096, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
0x00fd: 0x0098, # LATIN SMALL LETTER Y WITH ACUTE
0x00fe: 0x0095, # LATIN SMALL LETTER THORN
0x0192: 0x009f, # LATIN SMALL LETTER F WITH HOOK
0x0393: 0x00e2, # GREEK CAPITAL LETTER GAMMA
0x0398: 0x00e9, # GREEK CAPITAL LETTER THETA
0x03a3: 0x00e4, # GREEK CAPITAL LETTER SIGMA
0x03a6: 0x00e8, # GREEK CAPITAL LETTER PHI
0x03a9: 0x00ea, # GREEK CAPITAL LETTER OMEGA
0x03b1: 0x00e0, # GREEK SMALL LETTER ALPHA
0x03b4: 0x00eb, # GREEK SMALL LETTER DELTA
0x03b5: 0x00ee, # GREEK SMALL LETTER EPSILON
0x03c0: 0x00e3, # GREEK SMALL LETTER PI
0x03c3: 0x00e5, # GREEK SMALL LETTER SIGMA
0x03c4: 0x00e7, # GREEK SMALL LETTER TAU
0x03c6: 0x00ed, # GREEK SMALL LETTER PHI
0x207f: 0x00fc, # SUPERSCRIPT LATIN SMALL LETTER N
0x20a7: 0x009e, # PESETA SIGN
0x2219: 0x00f9, # BULLET OPERATOR
0x221a: 0x00fb, # SQUARE ROOT
0x221e: 0x00ec, # INFINITY
0x2229: 0x00ef, # INTERSECTION
0x2248: 0x00f7, # ALMOST EQUAL TO
0x2261: 0x00f0, # IDENTICAL TO
0x2264: 0x00f3, # LESS-THAN OR EQUAL TO
0x2265: 0x00f2, # GREATER-THAN OR EQUAL TO
0x2310: 0x00a9, # REVERSED NOT SIGN
0x2320: 0x00f4, # TOP HALF INTEGRAL
0x2321: 0x00f5, # BOTTOM HALF INTEGRAL
0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
0x2552: 0x00d5, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x2553: 0x00d6, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x2555: 0x00b8, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x2556: 0x00b7, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x2558: 0x00d4, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x2559: 0x00d3, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x255b: 0x00be, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x255c: 0x00bd, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
0x255e: 0x00c6, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x255f: 0x00c7, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x2561: 0x00b5, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x2562: 0x00b6, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x2564: 0x00d1, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x2565: 0x00d2, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x2567: 0x00cf, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x2568: 0x00d0, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x256a: 0x00d8, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x256b: 0x00d7, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x2580: 0x00df, # UPPER HALF BLOCK
0x2584: 0x00dc, # LOWER HALF BLOCK
0x2588: 0x00db, # FULL BLOCK
0x258c: 0x00dd, # LEFT HALF BLOCK
0x2590: 0x00de, # RIGHT HALF BLOCK
0x2591: 0x00b0, # LIGHT SHADE
0x2592: 0x00b1, # MEDIUM SHADE
0x2593: 0x00b2, # DARK SHADE
0x25a0: 0x00fe, # BLACK SQUARE
}
|
Schrolli91/BOSWatch
|
refs/heads/master
|
plugins/BosMon/BosMon.py
|
1
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
BOSWatch plugin to dispatch FMS, ZVEI and POCSAG messages to BosMon
@see: U{The BosMon homepage<http://www.bosmon.de>}
@author: Jens Herrmann
@requires: BosMon-Configuration has to be set in the config.ini
"""
import logging # Global logger
import httplib #for the HTTP request
import urllib #for the HTTP request with parameters
import base64 #for the HTTP request with User/Password
from includes import globalVars # Global variables
from includes.helper import configHandler
##
#
# onLoad (init) function of plugin
# will be called one time by the pluginLoader on start
#
def onLoad():
"""
While loading the plugins by pluginLoader.loadPlugins()
this onLoad() routine is called once to initialize the plugin
@requires: nothing
@return: nothing
"""
# nothing to do for this plugin
return
##
#
# do BosMon-Request
#
def bosMonRequest(httprequest, params, headers):
"""
This local function dispatches the BosMon request
@type httprequest: HTTPConnection
@param httprequest: An HTTPConnection-Object that represents an open connection to a BosMon-Instance
@type params: string of urlencoded data
@param params: Contains the parameters to transfer to BosMon.
@type headers: map
@param headers: The headers argument should be a mapping of extra HTTP headers to send with the request.
@return: nothing
@exception: Exception if the HTTP request failed
"""
try:
#
# BosMon/HTTP-Request
#
httprequest.request("POST", "/telegramin/"+globalVars.config.get("BosMon", "bosmon_channel")+"/input.xml", params, headers)
except:
logging.error("request to BosMon failed")
logging.debug("request to BosMon failed", exc_info=True)
raise
else:
#
# check HTTP-Response
#
httpresponse = httprequest.getresponse()
if str(httpresponse.status) == "200": # check the HTTP response and log a debug or warning message
logging.debug("BosMon response: %s - %s", str(httpresponse.status), str(httpresponse.reason))
else:
logging.warning("BosMon response: %s - %s", str(httpresponse.status), str(httpresponse.reason))
##
#
# Main function of BosMon-plugin
# will be called by the alarmHandler
#
def run(typ,freq,data):
"""
This function is the implementation of the BosMon-Plugin.
It sends the data to a BosMon instance via HTTP.
The configuration for the BosMon connection is set in the config.ini.
If a user is set, the HTTP request is authenticated.
@type typ: string (FMS|ZVEI|POC)
@param typ: Type of the dataset to send to BosMon
@type data: map of data (structure see readme.md in plugin folder)
@param data: Contains the parameters to dispatch to BosMon.
@type freq: string
@keyword freq: frequency; not used in this plugin
@requires: BosMon-Configuration has to be set in the config.ini
@return: nothing
"""
try:
if configHandler.checkConfig("BosMon"): #read and debug the config
try:
#
# Initialize headers and connect to the BosMon server
#
headers = {}
headers['Content-type'] = "application/x-www-form-urlencoded"
headers['Accept'] = "text/plain"
# if a user is set in the config.ini we will use HTTP authorization
if globalVars.config.get("BosMon", "bosmon_user"):
# generate a base64-encoded authorization token for the HTTP request
headers['Authorization'] = "Basic {0}".format(base64.b64encode("{0}:{1}".format(globalVars.config.get("BosMon", "bosmon_user"), globalVars.config.get("BosMon", "bosmon_password"))))
logging.debug("connect to BosMon")
# open connection to BosMon-Server
httprequest = httplib.HTTPConnection(globalVars.config.get("BosMon", "bosmon_server"), globalVars.config.get("BosMon", "bosmon_port"), timeout=5)
# debug level for shell output (0 = no debug, 1 = debug)
httprequest.set_debuglevel(0)
except:
logging.error("cannot connect to BosMon")
logging.debug("cannot connect to BosMon", exc_info=True)
# Without a connection, the plugin can't work
return
else:
#
# Format the given data structure into a BosMon-compatible string
#
if typ == "FMS":
logging.debug("Start FMS to BosMon")
try:
# BosMon's telegramin input expects assembly group, direction and tsi in one field
# structure (bit flags, values given in base 10):
# Bit 1: assembly group; Bit 2: direction; Bit 3+4: tactic short info
info = 0
# assembly group:
info = info + 1 # + b0001 (assumption: always 1; multimon-ng provides no output for it)
# direction:
if data["direction"] == "1":
info = info + 2 # + b0010
# tsi:
if "IV" in data["tsi"]:
info = info + 12 # + b1100
elif "III" in data["tsi"]:
info = info + 8 # + b1000
elif "II" in data["tsi"]:
info = info + 4 # + b0100
# "I" is nothing to do + b0000
params = urllib.urlencode({'type':'fms', 'address':data["fms"], 'status':data["status"], 'info':info, 'flags':'0'})
logging.debug(" - Params: %s", params)
# dispatch the BosMon-request
bosMonRequest(httprequest, params, headers)
except:
logging.error("FMS to BosMon failed")
logging.debug("FMS to BosMon failed", exc_info=True)
return
elif typ == "ZVEI":
logging.debug("Start ZVEI to BosMon")
try:
params = urllib.urlencode({'type':'zvei', 'address':data["zvei"], 'flags':'0'})
logging.debug(" - Params: %s", params)
# dispatch the BosMon-request
bosMonRequest(httprequest, params, headers)
except:
logging.error("ZVEI to BosMon failed")
logging.debug("ZVEI to BosMon failed", exc_info=True)
return
elif typ == "POC":
logging.debug("Start POC to BosMon")
try:
# BosMon's telegramin input expects "a"-"d" as the RIC sub-address/function
params = urllib.urlencode({'type':'pocsag', 'address':data["ric"], 'flags':'0', 'function':data["functionChar"], 'message':data["msg"]})
logging.debug(" - Params: %s", params)
# dispatch the BosMon-request
bosMonRequest(httprequest, params, headers)
except:
logging.error("POC to BosMon failed")
logging.debug("POC to BosMon failed", exc_info=True)
return
else:
logging.warning("Invalid Typ: %s", typ)
finally:
logging.debug("close BosMon-Connection")
try:
httprequest.close()
except:
pass
except:
# something very mysterious
logging.error("unknown error")
logging.debug("unknown error", exc_info=True)
|
sunils34/buffer-django-nonrel
|
refs/heads/master
|
tests/modeltests/signals/models.py
|
87
|
"""
Testing signals before/after saving and deleting.
"""
from django.db import models
class Person(models.Model):
first_name = models.CharField(max_length=20)
last_name = models.CharField(max_length=20)
def __unicode__(self):
return u"%s %s" % (self.first_name, self.last_name)
class Car(models.Model):
make = models.CharField(max_length=20)
model = models.CharField(max_length=20)
def __unicode__(self):
return u"%s %s" % (self.make, self.model)
|
xtiankisutsa/MARA_Framework
|
refs/heads/master
|
tools/qark/qark/lib/html5lib/serializer/__init__.py
|
1731
|
from __future__ import absolute_import, division, unicode_literals
from .. import treewalkers
from .htmlserializer import HTMLSerializer
def serialize(input, tree="etree", format="html", encoding=None,
**serializer_opts):
# XXX: Should we cache this?
walker = treewalkers.getTreeWalker(tree)
if format == "html":
s = HTMLSerializer(**serializer_opts)
else:
raise ValueError("type must be html")
return s.render(walker(input), encoding)
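# A minimal usage sketch (assuming a tree parsed by html5lib; the variable
# name `document` below is hypothetical):
#
#   import html5lib
#   document = html5lib.parse("<p>Hello</p>")
#   html = serialize(document, tree="etree")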
|
tylertian/Openstack
|
refs/heads/master
|
openstack F/keystone/keystone/contrib/stats/backends/__init__.py
|
12133432
| |
caseyrollins/osf.io
|
refs/heads/develop
|
addons/dropbox/migrations/__init__.py
|
12133432
| |
indictranstech/omnitech-frappe
|
refs/heads/master
|
frappe/templates/includes/list/__init__.py
|
12133432
| |
woylaski/notebook
|
refs/heads/master
|
graphic/kivy-master/kivy/tests/test_properties.py
|
12
|
'''
Test properties attached to a widget
'''
import unittest
from kivy.event import EventDispatcher
from functools import partial
class TestProperty(EventDispatcher):
pass
wid = TestProperty()
class PropertiesTestCase(unittest.TestCase):
def test_base(self):
from kivy.properties import Property
a = Property(-1)
a.link(wid, 'a')
a.link_deps(wid, 'a')
self.assertEqual(a.get(wid), -1)
a.set(wid, 0)
self.assertEqual(a.get(wid), 0)
a.set(wid, 1)
self.assertEqual(a.get(wid), 1)
def test_observer(self):
from kivy.properties import Property
a = Property(-1)
a.link(wid, 'a')
a.link_deps(wid, 'a')
self.assertEqual(a.get(wid), -1)
global observe_called
observe_called = 0
def observe(obj, value):
global observe_called
observe_called = 1
a.bind(wid, observe)
a.set(wid, 0)
self.assertEqual(a.get(wid), 0)
self.assertEqual(observe_called, 1)
observe_called = 0
a.set(wid, 0)
self.assertEqual(a.get(wid), 0)
self.assertEqual(observe_called, 0)
a.set(wid, 1)
self.assertEqual(a.get(wid), 1)
self.assertEqual(observe_called, 1)
def test_objectcheck(self):
from kivy.properties import ObjectProperty
a = ObjectProperty(False)
a.link(wid, 'a')
a.link_deps(wid, 'a')
self.assertEqual(a.get(wid), False)
a.set(wid, True)
self.assertEqual(a.get(wid), True)
def test_stringcheck(self):
from kivy.properties import StringProperty
a = StringProperty()
a.link(wid, 'a')
a.link_deps(wid, 'a')
self.assertEqual(a.get(wid), '')
a.set(wid, 'hello')
self.assertEqual(a.get(wid), 'hello')
try:
a.set(wid, 88) # number shouldn't be accepted
self.fail('string accept number, fail.')
except ValueError:
pass
def test_numericcheck(self):
from kivy.properties import NumericProperty
a = NumericProperty()
a.link(wid, 'a')
a.link_deps(wid, 'a')
self.assertEqual(a.get(wid), 0)
a.set(wid, 99)
self.assertEqual(a.get(wid), 99)
#try:
# a.set(wid, '') # string shouldn't be accepted
# self.fail('number accept string, fail.')
#except ValueError:
# pass
def test_listcheck(self):
from kivy.properties import ListProperty
a = ListProperty()
a.link(wid, 'a')
a.link_deps(wid, 'a')
self.assertEqual(a.get(wid), [])
a.set(wid, [1, 2, 3])
self.assertEqual(a.get(wid), [1, 2, 3])
def test_dictcheck(self):
from kivy.properties import DictProperty
a = DictProperty()
a.link(wid, 'a')
a.link_deps(wid, 'a')
self.assertEqual(a.get(wid), {})
a.set(wid, {'foo': 'bar'})
self.assertEqual(a.get(wid), {'foo': 'bar'})
def test_propertynone(self):
from kivy.properties import NumericProperty
a = NumericProperty(0, allownone=True)
a.link(wid, 'a')
a.link_deps(wid, 'a')
self.assertEqual(a.get(wid), 0)
try:
a.set(wid, None)
self.assertEqual(a.get(wid), None)
except ValueError:
pass
a.set(wid, 1)
self.assertEqual(a.get(wid), 1)
def test_alias(self):
from kivy.properties import NumericProperty, AliasProperty
wid.__class__.x = x = NumericProperty(0)
x.link(wid, 'x')
x.link_deps(wid, 'x')
wid.__class__.width = width = NumericProperty(100)
width.link(wid, 'width')
width.link_deps(wid, 'width')
def get_right(self):
return x.get(self) + width.get(self)
def set_right(self, value):
x.set(self, value - width.get(self))
right = AliasProperty(get_right, set_right, bind=('x', 'width'))
right.link(wid, 'right')
right.link_deps(wid, 'right')
self.assertEqual(right.get(wid), 100)
x.set(wid, 500)
self.assertEqual(right.get(wid), 600)
width.set(wid, 50)
self.assertEqual(right.get(wid), 550)
right.set(wid, 100)
self.assertEqual(width.get(wid), 50)
self.assertEqual(x.get(wid), 50)
# test observer
global observe_called
observe_called = 0
def observe(obj, value):
global observe_called
observe_called = 1
right.bind(wid, observe)
x.set(wid, 100)
self.assertEqual(observe_called, 1)
observe_called = 0
x.set(wid, 100)
self.assertEqual(observe_called, 0)
width.set(wid, 900)
self.assertEqual(observe_called, 1)
observe_called = 0
right.set(wid, 700)
self.assertEqual(observe_called, 1)
observe_called = 0
right.set(wid, 700)
self.assertEqual(observe_called, 0)
def test_reference(self):
from kivy.properties import NumericProperty, ReferenceListProperty
x = NumericProperty(0)
x.link(wid, 'x')
x.link_deps(wid, 'x')
y = NumericProperty(0)
y.link(wid, 'y')
y.link_deps(wid, 'y')
pos = ReferenceListProperty(x, y)
pos.link(wid, 'pos')
pos.link_deps(wid, 'pos')
self.assertEqual(x.get(wid), 0)
self.assertEqual(y.get(wid), 0)
self.assertEqual(pos.get(wid), [0, 0])
x.set(wid, 50)
self.assertEqual(pos.get(wid), [50, 0])
y.set(wid, 50)
self.assertEqual(pos.get(wid), [50, 50])
pos.set(wid, [0, 0])
self.assertEqual(pos.get(wid), [0, 0])
self.assertEqual(x.get(wid), 0)
self.assertEqual(y.get(wid), 0)
# test observer
global observe_called
observe_called = 0
def observe(obj, value):
global observe_called
observe_called = 1
pos.bind(wid, observe)
self.assertEqual(observe_called, 0)
x.set(wid, 99)
self.assertEqual(observe_called, 1)
def test_reference_child_update(self):
from kivy.properties import NumericProperty, ReferenceListProperty
x = NumericProperty(0)
x.link(wid, 'x')
x.link_deps(wid, 'x')
y = NumericProperty(0)
y.link(wid, 'y')
y.link_deps(wid, 'y')
pos = ReferenceListProperty(x, y)
pos.link(wid, 'pos')
pos.link_deps(wid, 'pos')
pos.get(wid)[0] = 10
self.assertEqual(pos.get(wid), [10, 0])
pos.get(wid)[:] = (20, 30)
self.assertEqual(pos.get(wid), [20, 30])
def test_dict(self):
from kivy.properties import DictProperty
x = DictProperty()
x.link(wid, 'x')
x.link_deps(wid, 'x')
# test observer
global observe_called
observe_called = 0
def observe(obj, value):
global observe_called
observe_called = 1
x.bind(wid, observe)
observe_called = 0
x.get(wid)['toto'] = 1
self.assertEqual(observe_called, 1)
observe_called = 0
x.get(wid)['toto'] = 2
self.assertEqual(observe_called, 1)
observe_called = 0
x.get(wid)['youupi'] = 2
self.assertEqual(observe_called, 1)
observe_called = 0
del x.get(wid)['toto']
self.assertEqual(observe_called, 1)
observe_called = 0
x.get(wid).update({'bleh': 5})
self.assertEqual(observe_called, 1)
def test_aliasproperty_with_cache(self):
from kivy.properties import NumericProperty, AliasProperty
global observe_called
observe_called = 0
class CustomAlias(EventDispatcher):
basevalue = NumericProperty(1)
def _get_prop(self):
global observe_called
observe_called += 1
return self.basevalue * 2
def _set_prop(self, value):
self.basevalue = value / 2
prop = AliasProperty(_get_prop, _set_prop,
bind=('basevalue', ), cache=True)
# initial checks
wid = CustomAlias()
self.assertEqual(observe_called, 0)
self.assertEqual(wid.basevalue, 1)
self.assertEqual(observe_called, 0)
# first call, goes in cache
self.assertEqual(wid.prop, 2)
self.assertEqual(observe_called, 1)
# second call, cache used
self.assertEqual(wid.prop, 2)
self.assertEqual(observe_called, 1)
# change the base value, should trigger an update for the cache
wid.basevalue = 4
self.assertEqual(observe_called, 2)
# now read the value again, should use the cache too
self.assertEqual(wid.prop, 8)
self.assertEqual(observe_called, 2)
# change the prop itself, should trigger an update for the cache
wid.prop = 4
self.assertEqual(observe_called, 3)
self.assertEqual(wid.basevalue, 2)
self.assertEqual(wid.prop, 4)
self.assertEqual(observe_called, 3)
def test_bounded_numeric_property(self):
from kivy.properties import BoundedNumericProperty
bnp = BoundedNumericProperty(0.0, min=0.0, max=3.5)
bnp.link(wid, 'bnp')
bnp.set(wid, 1)
bnp.set(wid, 0.0)
bnp.set(wid, 3.1)
bnp.set(wid, 3.5)
self.assertRaises(ValueError, partial(bnp.set, wid, 3.6))
self.assertRaises(ValueError, partial(bnp.set, wid, -3))
def test_bounded_numeric_property_error_value(self):
from kivy.properties import BoundedNumericProperty
bnp = BoundedNumericProperty(0, min=-5, max=5, errorvalue=1)
bnp.link(wid, 'bnp')
bnp.set(wid, 1)
self.assertEqual(bnp.get(wid), 1)
bnp.set(wid, 5)
self.assertEqual(bnp.get(wid), 5)
bnp.set(wid, 6)
self.assertEqual(bnp.get(wid), 1)
bnp.set(wid, -5)
self.assertEqual(bnp.get(wid), -5)
bnp.set(wid, -6)
self.assertEqual(bnp.get(wid), 1)
def test_bounded_numeric_property_error_handler(self):
from kivy.properties import BoundedNumericProperty
bnp = BoundedNumericProperty(
0, min=-5, max=5,
errorhandler=lambda x: 5 if x > 5 else -5)
bnp.link(wid, 'bnp')
bnp.set(wid, 1)
self.assertEqual(bnp.get(wid), 1)
bnp.set(wid, 5)
self.assertEqual(bnp.get(wid), 5)
bnp.set(wid, 10)
self.assertEqual(bnp.get(wid), 5)
bnp.set(wid, -5)
self.assertEqual(bnp.get(wid), -5)
bnp.set(wid, -10)
self.assertEqual(bnp.get(wid), -5)
def test_numeric_string_with_units_check(self):
from kivy.properties import NumericProperty
a = NumericProperty()
a.link(wid, 'a')
a.link_deps(wid, 'a')
self.assertEqual(a.get(wid), 0)
a.set(wid, '55dp')
self.assertEqual(a.get(wid), 55)
self.assertEqual(a.get_format(wid), 'dp')
a.set(wid, u'55dp')
self.assertEqual(a.get(wid), 55)
self.assertEqual(a.get_format(wid), 'dp')
a.set(wid, '99in')
self.assertEqual(a.get(wid), 9504.0)
self.assertEqual(a.get_format(wid), 'in')
a.set(wid, u'99in')
self.assertEqual(a.get(wid), 9504.0)
self.assertEqual(a.get_format(wid), 'in')
def test_property_rebind(self):
from kivy.uix.label import Label
from kivy.uix.togglebutton import ToggleButton
from kivy.lang import Builder
from kivy.properties import ObjectProperty, DictProperty, AliasProperty
from kivy.clock import Clock
class ObjWidget(Label):
button = ObjectProperty(None, rebind=True, allownone=True)
class ObjWidgetRebindFalse(Label):
button = ObjectProperty(None, rebind=False, allownone=True)
class DictWidget(Label):
button = DictProperty({'button': None}, rebind=True,
allownone=True)
class DictWidgetFalse(Label):
button = DictProperty({'button': None}, rebind=False)
class AliasWidget(Label):
_button = None
def setter(self, value):
self._button = value
return True
def getter(self):
return self._button
button = AliasProperty(getter, setter, rebind=True)
Builder.load_string('''
<ObjWidget>:
text: self.button.state if self.button is not None else 'Unset'
<ObjWidgetRebindFalse>:
text: self.button.state if self.button is not None else 'Unset'
<AliasWidget>:
text: self.button.state if self.button is not None else 'Unset'
<DictWidget>:
text: self.button.button.state if self.button.button is not None\
else 'Unset'
<DictWidgetFalse>:
text: self.button.button.state if self.button.button is not None\
else 'Unset'
''')
obj = ObjWidget()
obj_false = ObjWidgetRebindFalse()
dict_rebind = DictWidget()
dict_false = DictWidgetFalse()
alias_rebind = AliasWidget()
button = ToggleButton()
Clock.tick()
self.assertEqual(obj.text, 'Unset')
self.assertEqual(obj_false.text, 'Unset')
self.assertEqual(dict_rebind.text, 'Unset')
self.assertEqual(dict_false.text, 'Unset')
self.assertEqual(alias_rebind.text, 'Unset')
obj.button = button
obj_false.button = button
dict_rebind.button.button = button
dict_false.button.button = button
alias_rebind.button = button
Clock.tick()
self.assertEqual(obj.text, 'normal')
self.assertEqual(obj_false.text, 'normal')
self.assertEqual(dict_rebind.text, 'normal')
self.assertEqual(dict_false.text, 'Unset')
self.assertEqual(alias_rebind.text, 'normal')
button.state = 'down'
Clock.tick()
self.assertEqual(obj.text, 'down')
self.assertEqual(obj_false.text, 'normal')
self.assertEqual(dict_rebind.text, 'down')
self.assertEqual(dict_false.text, 'Unset')
self.assertEqual(alias_rebind.text, 'down')
button.state = 'normal'
Clock.tick()
self.assertEqual(obj.text, 'normal')
self.assertEqual(obj_false.text, 'normal')
self.assertEqual(dict_rebind.text, 'normal')
self.assertEqual(dict_false.text, 'Unset')
self.assertEqual(alias_rebind.text, 'normal')
obj.button = None
obj_false.button = None
dict_rebind.button.button = None
dict_false.button.button = None
alias_rebind.button = None
Clock.tick()
self.assertEqual(obj.text, 'Unset')
self.assertEqual(obj_false.text, 'Unset')
self.assertEqual(dict_rebind.text, 'Unset')
self.assertEqual(dict_false.text, 'Unset')
self.assertEqual(alias_rebind.text, 'Unset')
|
EKiefer/edge-starter
|
refs/heads/master
|
py34env/Lib/site-packages/django/contrib/auth/middleware.py
|
258
|
from django.contrib import auth
from django.contrib.auth import load_backend
from django.contrib.auth.backends import RemoteUserBackend
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import SimpleLazyObject
def get_user(request):
if not hasattr(request, '_cached_user'):
request._cached_user = auth.get_user(request)
return request._cached_user
class AuthenticationMiddleware(object):
def process_request(self, request):
assert hasattr(request, 'session'), (
"The Django authentication middleware requires session middleware "
"to be installed. Edit your MIDDLEWARE_CLASSES setting to insert "
"'django.contrib.sessions.middleware.SessionMiddleware' before "
"'django.contrib.auth.middleware.AuthenticationMiddleware'."
)
request.user = SimpleLazyObject(lambda: get_user(request))
class SessionAuthenticationMiddleware(object):
"""
Formerly, a middleware for invalidating a user's sessions that don't
correspond to the user's current session authentication hash. However, it
caused the "Vary: Cookie" header on all responses.
Now a backwards compatibility shim that enables session verification in
auth.get_user() if this middleware is in MIDDLEWARE_CLASSES.
"""
def process_request(self, request):
pass
class RemoteUserMiddleware(object):
"""
Middleware for utilizing Web-server-provided authentication.
If request.user is not authenticated, then this middleware attempts to
authenticate the username passed in the ``REMOTE_USER`` request header.
If authentication is successful, the user is automatically logged in to
persist the user in the session.
The header used is configurable and defaults to ``REMOTE_USER``. Subclass
this class and change the ``header`` attribute if you need to use a
different header.
"""
# Name of request header to grab username from. This will be the key as
# used in the request.META dictionary, i.e. the normalization of headers to
# all uppercase and the addition of "HTTP_" prefix apply.
header = "REMOTE_USER"
force_logout_if_no_header = True
def process_request(self, request):
# AuthenticationMiddleware is required so that request.user exists.
if not hasattr(request, 'user'):
raise ImproperlyConfigured(
"The Django remote user auth middleware requires the"
" authentication middleware to be installed. Edit your"
" MIDDLEWARE_CLASSES setting to insert"
" 'django.contrib.auth.middleware.AuthenticationMiddleware'"
" before the RemoteUserMiddleware class.")
try:
username = request.META[self.header]
except KeyError:
# If specified header doesn't exist then remove any existing
# authenticated remote-user, or return (leaving request.user set to
# AnonymousUser by the AuthenticationMiddleware).
if self.force_logout_if_no_header and request.user.is_authenticated():
self._remove_invalid_user(request)
return
# If the user is already authenticated and that user is the user we are
# getting passed in the headers, then the correct user is already
# persisted in the session and we don't need to continue.
if request.user.is_authenticated():
if request.user.get_username() == self.clean_username(username, request):
return
else:
# An authenticated user is associated with the request, but
# it does not match the authorized user in the header.
self._remove_invalid_user(request)
# We are seeing this user for the first time in this session, attempt
# to authenticate the user.
user = auth.authenticate(remote_user=username)
if user:
# User is valid. Set request.user and persist user in the session
# by logging the user in.
request.user = user
auth.login(request, user)
def clean_username(self, username, request):
"""
Allows the backend to clean the username, if the backend defines a
clean_username method.
"""
backend_str = request.session[auth.BACKEND_SESSION_KEY]
backend = auth.load_backend(backend_str)
try:
username = backend.clean_username(username)
except AttributeError: # Backend has no clean_username method.
pass
return username
def _remove_invalid_user(self, request):
"""
Removes the current authenticated user in the request which is invalid
but only if the user is authenticated via the RemoteUserBackend.
"""
try:
stored_backend = load_backend(request.session.get(auth.BACKEND_SESSION_KEY, ''))
except ImportError:
# backend failed to load
auth.logout(request)
else:
if isinstance(stored_backend, RemoteUserBackend):
auth.logout(request)
class PersistentRemoteUserMiddleware(RemoteUserMiddleware):
"""
Middleware for Web-server provided authentication on logon pages.
Like RemoteUserMiddleware but keeps the user authenticated even if
the header (``REMOTE_USER``) is not found in the request. Useful
for setups when the external authentication via ``REMOTE_USER``
is only expected to happen on some "logon" URL and the rest of
the application wants to use Django's authentication mechanism.
"""
force_logout_if_no_header = False
|
endolith/scikit-image
|
refs/heads/master
|
viewer_examples/plugins/canny.py
|
43
|
from skimage import data
from skimage.viewer import ImageViewer
from skimage.viewer.plugins.canny import CannyPlugin
image = data.camera()
viewer = ImageViewer(image)
viewer += CannyPlugin()
canny_edges = viewer.show()[0][0]
|
philanthropy-u/edx-platform
|
refs/heads/master
|
lms/djangoapps/course_blocks/usage_info.py
|
93
|
"""
Declares CourseUsageInfo class to be used by the transform method in
Transformers.
"""
from lms.djangoapps.courseware.access import _has_access_to_course
class CourseUsageInfo(object):
'''
A class object that encapsulates the course and user context to be
used as currency across block structure transformers, by passing
an instance of it in calls to BlockStructureTransformer.transform
methods.
'''
def __init__(self, course_key, user):
# Course identifier (opaque_keys.edx.keys.CourseKey)
self.course_key = course_key
# User object (django.contrib.auth.models.User)
self.user = user
# Cached value of whether the user has staff access (bool/None)
self._has_staff_access = None
@property
def has_staff_access(self):
'''
Returns whether the user has staff access to the course
associated with this CourseUsageInfo instance.
For performance reasons (minimizing multiple SQL calls), the
value is cached within this instance.
'''
if self._has_staff_access is None:
self._has_staff_access = _has_access_to_course(self.user, 'staff', self.course_key)
return self._has_staff_access
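# A sketch of how a transformer might consume this instance (the transformer
# class and its body are hypothetical; only the usage pattern comes from the
# class docstring above):
#
#   class HideStaffOnlyTransformer(BlockStructureTransformer):
#       def transform(self, usage_info, block_structure):
#           if not usage_info.has_staff_access:
#               pass  # e.g. remove staff-only blocks here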
|
flashycud/timestack
|
refs/heads/master
|
django/contrib/auth/admin.py
|
122
|
from django.db import transaction
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.forms import UserCreationForm, UserChangeForm, AdminPasswordChangeForm
from django.contrib.auth.models import User, Group
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.utils.html import escape
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext, ugettext_lazy as _
from django.views.decorators.csrf import csrf_protect
csrf_protect_m = method_decorator(csrf_protect)
class GroupAdmin(admin.ModelAdmin):
search_fields = ('name',)
ordering = ('name',)
filter_horizontal = ('permissions',)
class UserAdmin(admin.ModelAdmin):
add_form_template = 'admin/auth/user/add_form.html'
change_user_password_template = None
fieldsets = (
(None, {'fields': ('username', 'password')}),
(_('Personal info'), {'fields': ('first_name', 'last_name', 'email')}),
(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser', 'user_permissions')}),
(_('Important dates'), {'fields': ('last_login', 'date_joined')}),
(_('Groups'), {'fields': ('groups',)}),
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('username', 'password1', 'password2')}
),
)
form = UserChangeForm
add_form = UserCreationForm
change_password_form = AdminPasswordChangeForm
list_display = ('username', 'email', 'first_name', 'last_name', 'is_staff')
list_filter = ('is_staff', 'is_superuser', 'is_active')
search_fields = ('username', 'first_name', 'last_name', 'email')
ordering = ('username',)
filter_horizontal = ('user_permissions',)
def __call__(self, request, url):
# this should not be here, but must be due to the way __call__ routes
# in ModelAdmin.
if url is None:
return self.changelist_view(request)
if url.endswith('password'):
return self.user_change_password(request, url.split('/')[0])
return super(UserAdmin, self).__call__(request, url)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(UserAdmin, self).get_fieldsets(request, obj)
def get_form(self, request, obj=None, **kwargs):
"""
Use special form during user creation
"""
defaults = {}
if obj is None:
defaults.update({
'form': self.add_form,
'fields': admin.util.flatten_fieldsets(self.add_fieldsets),
})
defaults.update(kwargs)
return super(UserAdmin, self).get_form(request, obj, **defaults)
def get_urls(self):
from django.conf.urls.defaults import patterns
return patterns('',
(r'^(\d+)/password/$', self.admin_site.admin_view(self.user_change_password))
) + super(UserAdmin, self).get_urls()
@csrf_protect_m
@transaction.commit_on_success
def add_view(self, request, form_url='', extra_context=None):
# It's an error for a user to have add permission but NOT change
# permission for users. If we allowed such users to add users, they
# could create superusers, which would mean they would essentially have
# the permission to change users. To avoid the problem entirely, we
# disallow users from adding users if they don't have change
# permission.
if not self.has_change_permission(request):
if self.has_add_permission(request) and settings.DEBUG:
# Raise Http404 in debug mode so that the user gets a helpful
# error message.
raise Http404('Your user does not have the "Change user" permission. In order to add users, Django requires that your user account have both the "Add user" and "Change user" permissions set.')
raise PermissionDenied
if extra_context is None:
extra_context = {}
defaults = {
'auto_populated_fields': (),
'username_help_text': self.model._meta.get_field('username').help_text,
}
extra_context.update(defaults)
return super(UserAdmin, self).add_view(request, form_url, extra_context)
def user_change_password(self, request, id):
if not self.has_change_permission(request):
raise PermissionDenied
user = get_object_or_404(self.model, pk=id)
if request.method == 'POST':
form = self.change_password_form(user, request.POST)
if form.is_valid():
new_user = form.save()
msg = ugettext('Password changed successfully.')
messages.success(request, msg)
return HttpResponseRedirect('..')
else:
form = self.change_password_form(user)
fieldsets = [(None, {'fields': form.base_fields.keys()})]
adminForm = admin.helpers.AdminForm(form, fieldsets, {})
return render_to_response(self.change_user_password_template or 'admin/auth/user/change_password.html', {
'title': _('Change password: %s') % escape(user.username),
'adminForm': adminForm,
'form': form,
'is_popup': '_popup' in request.REQUEST,
'add': True,
'change': False,
'has_delete_permission': False,
'has_change_permission': True,
'has_absolute_url': False,
'opts': self.model._meta,
'original': user,
'save_as': False,
'show_save': True,
'root_path': self.admin_site.root_path,
}, context_instance=RequestContext(request))
def response_add(self, request, obj, post_url_continue='../%s/'):
"""
Determines the HttpResponse for the add_view stage. It mostly defers to
its superclass implementation but is customized because the User model
has a slightly different workflow.
"""
if '_addanother' not in request.POST:
# The 'Save' button should act like the 'Save and continue
# editing' button
request.POST['_continue'] = 1
return super(UserAdmin, self).response_add(request, obj, post_url_continue)
admin.site.register(Group, GroupAdmin)
admin.site.register(User, UserAdmin)
|
jinie/sublime-wakatime
|
refs/heads/master
|
packages/wakatime/packages/pygments_py2/pygments/lexers/agile.py
|
77
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.agile
~~~~~~~~~~~~~~~~~~~~~
Just export lexer classes previously contained in this module.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexers.lisp import SchemeLexer
from pygments.lexers.jvm import IokeLexer, ClojureLexer
from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \
PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer
from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer, FancyLexer
from pygments.lexers.perl import PerlLexer, Perl6Lexer
from pygments.lexers.d import CrocLexer, MiniDLexer
from pygments.lexers.iolang import IoLexer
from pygments.lexers.tcl import TclLexer
from pygments.lexers.factor import FactorLexer
from pygments.lexers.scripting import LuaLexer, MoonScriptLexer
__all__ = []
|
yencarnacion/jaikuengine
|
refs/heads/master
|
.google_appengine/lib/django-1.4/django/contrib/localflavor/is_/is_postalcodes.py
|
438
|
# -*- coding: utf-8 -*-
IS_POSTALCODES = (
('101', u'101 Reykjavík'),
('103', u'103 Reykjavík'),
('104', u'104 Reykjavík'),
('105', u'105 Reykjavík'),
('107', u'107 Reykjavík'),
('108', u'108 Reykjavík'),
('109', u'109 Reykjavík'),
('110', u'110 Reykjavík'),
('111', u'111 Reykjavík'),
('112', u'112 Reykjavík'),
('113', u'113 Reykjavík'),
('116', u'116 Kjalarnes'),
('121', u'121 Reykjavík'),
('123', u'123 Reykjavík'),
('124', u'124 Reykjavík'),
('125', u'125 Reykjavík'),
('127', u'127 Reykjavík'),
('128', u'128 Reykjavík'),
('129', u'129 Reykjavík'),
('130', u'130 Reykjavík'),
('132', u'132 Reykjavík'),
('150', u'150 Reykjavík'),
('155', u'155 Reykjavík'),
('170', u'170 Seltjarnarnes'),
('172', u'172 Seltjarnarnes'),
('190', u'190 Vogar'),
('200', u'200 Kópavogur'),
('201', u'201 Kópavogur'),
('202', u'202 Kópavogur'),
('203', u'203 Kópavogur'),
('210', u'210 Garðabær'),
('212', u'212 Garðabær'),
('220', u'220 Hafnarfjörður'),
('221', u'221 Hafnarfjörður'),
('222', u'222 Hafnarfjörður'),
('225', u'225 Álftanes'),
('230', u'230 Reykjanesbær'),
('232', u'232 Reykjanesbær'),
('233', u'233 Reykjanesbær'),
('235', u'235 Keflavíkurflugvöllur'),
('240', u'240 Grindavík'),
('245', u'245 Sandgerði'),
('250', u'250 Garður'),
('260', u'260 Reykjanesbær'),
('270', u'270 Mosfellsbær'),
('300', u'300 Akranes'),
('301', u'301 Akranes'),
('302', u'302 Akranes'),
('310', u'310 Borgarnes'),
('311', u'311 Borgarnes'),
('320', u'320 Reykholt í Borgarfirði'),
('340', u'340 Stykkishólmur'),
('345', u'345 Flatey á Breiðafirði'),
('350', u'350 Grundarfjörður'),
('355', u'355 Ólafsvík'),
('356', u'356 Snæfellsbær'),
('360', u'360 Hellissandur'),
('370', u'370 Búðardalur'),
('371', u'371 Búðardalur'),
('380', u'380 Reykhólahreppur'),
('400', u'400 Ísafjörður'),
('401', u'401 Ísafjörður'),
('410', u'410 Hnífsdalur'),
('415', u'415 Bolungarvík'),
('420', u'420 Súðavík'),
('425', u'425 Flateyri'),
('430', u'430 Suðureyri'),
('450', u'450 Patreksfjörður'),
('451', u'451 Patreksfjörður'),
('460', u'460 Tálknafjörður'),
('465', u'465 Bíldudalur'),
('470', u'470 Þingeyri'),
('471', u'471 Þingeyri'),
('500', u'500 Staður'),
('510', u'510 Hólmavík'),
('512', u'512 Hólmavík'),
('520', u'520 Drangsnes'),
('522', u'522 Kjörvogur'),
('523', u'523 Bær'),
('524', u'524 Norðurfjörður'),
('530', u'530 Hvammstangi'),
('531', u'531 Hvammstangi'),
('540', u'540 Blönduós'),
('541', u'541 Blönduós'),
('545', u'545 Skagaströnd'),
('550', u'550 Sauðárkrókur'),
('551', u'551 Sauðárkrókur'),
('560', u'560 Varmahlíð'),
('565', u'565 Hofsós'),
('566', u'566 Hofsós'),
('570', u'570 Fljót'),
('580', u'580 Siglufjörður'),
('600', u'600 Akureyri'),
('601', u'601 Akureyri'),
('602', u'602 Akureyri'),
('603', u'603 Akureyri'),
('610', u'610 Grenivík'),
('611', u'611 Grímsey'),
('620', u'620 Dalvík'),
('621', u'621 Dalvík'),
('625', u'625 Ólafsfjörður'),
('630', u'630 Hrísey'),
('640', u'640 Húsavík'),
('641', u'641 Húsavík'),
('645', u'645 Fosshóll'),
('650', u'650 Laugar'),
('660', u'660 Mývatn'),
('670', u'670 Kópasker'),
('671', u'671 Kópasker'),
('675', u'675 Raufarhöfn'),
('680', u'680 Þórshöfn'),
('681', u'681 Þórshöfn'),
('685', u'685 Bakkafjörður'),
('690', u'690 Vopnafjörður'),
('700', u'700 Egilsstaðir'),
('701', u'701 Egilsstaðir'),
('710', u'710 Seyðisfjörður'),
('715', u'715 Mjóifjörður'),
('720', u'720 Borgarfjörður eystri'),
('730', u'730 Reyðarfjörður'),
('735', u'735 Eskifjörður'),
('740', u'740 Neskaupstaður'),
('750', u'750 Fáskrúðsfjörður'),
('755', u'755 Stöðvarfjörður'),
('760', u'760 Breiðdalsvík'),
('765', u'765 Djúpivogur'),
('780', u'780 Höfn í Hornafirði'),
('781', u'781 Höfn í Hornafirði'),
('785', u'785 Öræfi'),
('800', u'800 Selfoss'),
('801', u'801 Selfoss'),
('802', u'802 Selfoss'),
('810', u'810 Hveragerði'),
('815', u'815 Þorlákshöfn'),
('820', u'820 Eyrarbakki'),
('825', u'825 Stokkseyri'),
('840', u'840 Laugarvatn'),
('845', u'845 Flúðir'),
('850', u'850 Hella'),
('851', u'851 Hella'),
('860', u'860 Hvolsvöllur'),
('861', u'861 Hvolsvöllur'),
('870', u'870 Vík'),
('871', u'871 Vík'),
('880', u'880 Kirkjubæjarklaustur'),
('900', u'900 Vestmannaeyjar'),
('902', u'902 Vestmannaeyjar')
)
|
iiSeymour/pandashells
|
refs/heads/master
|
pandashells/bin/p_lomb_scargle.py
|
3
|
#! /usr/bin/env python
# standard library imports
import argparse
import textwrap
import sys # noqa
from pandashells.lib import arg_lib, module_checker_lib
module_checker_lib.check_for_modules(['pandas', 'gatspy'])
from pandashells.lib import io_lib, lomb_scargle_lib
def main():
msg = textwrap.dedent(
"""
Computes a spectrogram using the lomb-scargle algorithm provided by
the gatspy module. The input time series need not have evenly spaced
time-stamps. The FFT-based algorithm has complexity O[N*log(N)].
-----------------------------------------------------------------------
Examples:
* Plot the spectrum of a simple sine wave
p.linspace 0 10 100 \\
| p.df 'df["value"] = 7 * np.sin(2*np.pi*df.time / 1.5)'\\
--names time\\
| p.lomb_scargle -t time -y value --interp_exp 3\\
| p.plot -x period -y amp --xlim 0 3
* Show the annual and 59-day peaks in the sealevel spectrum
p.example_data -d sealevel\\
| p.df 'df["day"] = 365.25 * df.year'\\
'df["day"] = df.day - df.day.iloc[0]'\\
| p.lomb_scargle -t day -y sealevel_mm --interp_exp 3\\
| p.df 'df[df.period < 720]'\\
| p.plot -x period -y amp --xlim 1 400\\
--title 'Sea-surface height spectrum'\\
--xlabel 'period (days)'
-----------------------------------------------------------------------
"""
)
# read command line arguments
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter, description=msg)
arg_lib.add_args(parser, 'io_in', 'io_out')
parser.add_argument('-t', '--time_col', help='Time Column',
nargs=1, required=True, type=str)
parser.add_argument('-y', '--observation_col', help='Observation column',
nargs=1, dest='val_col', required=True, type=str)
parser.add_argument('--interp_exp', help='Interpolate by this power of 2',
nargs=1, type=int, default=[1])
parser.add_argument(
'--freq_order', action='store_true', dest='freq_order', default=False,
help='Order output by frequency instead of period')
# parse arguments
args = parser.parse_args()
# get the input dataframe
df = io_lib.df_from_input(args)
df = lomb_scargle_lib.lomb_scargle(
df, args.time_col[0], args.val_col[0], args.interp_exp[0],
args.freq_order)
# write dataframe to output
io_lib.df_to_output(args, df)
if __name__ == '__main__': # pragma: no cover
main()
|
elkingtonmcb/h2o-2
|
refs/heads/master
|
py/testdir_single_jvm/test_many_cols.py
|
9
|
import unittest, random, sys, time
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_browse as h2b, h2o_import as h2i
# the shared exec expression creator and executor
import h2o_exec as h2e
def write_syn_dataset(csvPathname, rowCount, colCount, SEED):
r1 = random.Random(SEED)
dsf = open(csvPathname, "w+")
for i in range(rowCount):
rowData = []
for j in range(colCount):
# just all zeroes
r = 0
rowData.append(r)
rowDataCsv = ",".join(map(str,rowData))
dsf.write(rowDataCsv + "\n")
dsf.close()
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global SEED
SEED = h2o.setup_random_seed()
h2o.init(1,java_heap_GB=14)
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_many_cols(self):
SYNDATASETS_DIR = h2o.make_syn_dir()
tryList = [
(100, 10000, 'cI', 5),
(100, 5000, 'cA', 5),
(100, 6000, 'cB', 5),
(100, 7000, 'cC', 5),
(100, 8000, 'cD', 5),
(100, 8200, 'cE', 5),
(100, 8500, 'cF', 5),
(100, 9000, 'cG', 5),
(100, 11000, 'cH', 5),
]
### h2b.browseTheCloud()
lenNodes = len(h2o.nodes)
cnum = 0
for (rowCount, colCount, hex_key, timeoutSecs) in tryList:
cnum += 1
csvFilename = 'syn_' + str(SEED) + "_" + str(rowCount) + 'x' + str(colCount) + '.csv'
csvPathname = SYNDATASETS_DIR + '/' + csvFilename
print "Creating random", csvPathname
write_syn_dataset(csvPathname, rowCount, colCount, SEED)
parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=hex_key,
timeoutSecs=120, doSummary=False)
print "Parse result['destination_key']:", parseResult['destination_key']
# We should be able to see the parse result?
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'])
print "\n" + csvFilename
if not h2o.browse_disable:
h2b.browseJsonHistoryAsUrlLastMatch("Inspect")
time.sleep(5)
# try new offset/view
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'], offset=100, view=100)
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'], offset=99, view=89)
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'], offset=-1, view=53)
if __name__ == '__main__':
h2o.unit_main()
|
PanDAWMS/panda-bigmon-core
|
refs/heads/master
|
core/admin/urls.py
|
2
|
from django.conf.urls import include, url
from django.conf import settings
from django.conf.urls.static import static
from django.views.generic import TemplateView
from core.admin import views as adviews
urlpatterns = [
url(r'^$', adviews.adMain, name='adMain'),
url(r'^reqplot/$', adviews.listReqPlot, name='reqPlot'),
]
|
NewEconomyMovement/blockexplorer
|
refs/heads/master
|
toolbox/hash_converter.py
|
1
|
'''
Distributed under the MIT License, see accompanying file LICENSE.txt
'''
import base64
import hashlib
import sys
import binascii
sys.path.insert(0, 'python-sha3')
from python_sha3 import *
def convert_to_address(signer, isTestNet=True):
pubkey = binascii.unhexlify(signer)
s = sha3_256() #hashlib.new('sha3_256')
s.update(pubkey)
sha3_pubkey = s.digest()
h = hashlib.new('ripemd160')
h.update(sha3_pubkey)
ripe = h.digest()
if isTestNet:
version = "\x98" + ripe
else:
version = "\x68" + ripe
#s2 = hashlib.new('sha3_256')
s2 = sha3_256() #hashlib.new('sha3_256')
s2.update(version)
checksum = s2.digest()[0:4]
address = base64.b32encode(version + checksum)
return address
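# Illustrative call; the 64-character hex public key below is a zero-filled
# placeholder, not a real key:
#
#   print convert_to_address('00' * 32, isTestNet=True)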
|
xhqu1981/pymatgen
|
refs/heads/master
|
pymatgen/analysis/elasticity/tests/test_tensors.py
|
1
|
from __future__ import absolute_import
import unittest
import math
import os
import numpy as np
from monty.serialization import loadfn
from pymatgen.analysis.elasticity.tensors import *
from pymatgen.core.operations import SymmOp
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
from pymatgen.util.testing import PymatgenTest
from pymatgen import Structure
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
'test_files')
class TensorTest(PymatgenTest):
def setUp(self):
self.vec = Tensor([1., 0., 0.])
self.rand_rank2 = Tensor(np.random.randn(3,3))
self.rand_rank3 = Tensor(np.random.randn(3,3,3))
self.rand_rank4 = Tensor(np.random.randn(3,3,3,3))
a = 3.14 * 42.5 / 180
self.non_symm = SquareTensor([[0.1, 0.2, 0.3],
[0.4, 0.5, 0.6],
[0.2, 0.5, 0.5]])
self.rotation = SquareTensor([[math.cos(a), 0, math.sin(a)],
[0, 1, 0],
[-math.sin(a), 0, math.cos(a)]])
self.low_val = Tensor([[1e-6, 1 + 1e-5, 1e-6],
[1 + 1e-6, 1e-6, 1e-6],
[1e-7, 1e-7, 1 + 1e-5]])
self.symm_rank2 = Tensor([[1, 2, 3],
[2, 4, 5],
[3, 5, 6]])
self.symm_rank3 = Tensor([[[1, 2, 3],
[2, 4, 5],
[3, 5, 6]],
[[2, 4, 5],
[4, 7, 8],
[5, 8, 9]],
[[3, 5, 6],
[5, 8, 9],
[6, 9, 10]]])
self.symm_rank4 = Tensor([[[[1.2, 0.4, -0.92],
[0.4, 0.05, 0.11],
[-0.92, 0.11, -0.02]],
[[0.4, 0.05, 0.11],
[0.05, -0.47, 0.09],
[0.11, 0.09, -0.]],
[[-0.92, 0.11, -0.02],
[0.11, 0.09, 0.],
[-0.02, 0., -0.3]]],
[[[0.4, 0.05, 0.11],
[0.05, -0.47, 0.09],
[0.11, 0.09, 0.]],
[[0.05, -0.47, 0.09],
[-0.47, 0.17, 0.62],
[0.09, 0.62, 0.3]],
[[0.11, 0.09, 0.],
[0.09, 0.62, 0.3],
[0., 0.3, -0.18]]],
[[[-0.92, 0.11, -0.02],
[0.11, 0.09, 0.],
[-0.02, 0, -0.3]],
[[0.11, 0.09, 0.],
[0.09, 0.62, 0.3],
[0., 0.3, -0.18]],
[[-0.02, 0., -0.3],
[0., 0.3, -0.18],
[-0.3, -0.18, -0.51]]]])
# Structural symmetries tested using BaNiO3 piezo/elastic tensors
self.fit_r3 = Tensor([[[0., 0., 0.03839],
[0., 0., 0.],
[0.03839, 0., 0.]],
[[0., 0., 0.],
[0., 0., 0.03839],
[0., 0.03839, 0.]],
[[6.89822, 0., 0.],
[0., 6.89822, 0.],
[0., 0., 27.4628]]])
self.fit_r4 = Tensor([[[[157.9, 0., 0.],
[0., 63.1, 0.],
[0., 0., 29.4]],
[[0., 47.4, 0.],
[47.4, 0., 0.],
[0., 0., 0.]],
[[0., 0., 4.3],
[0., 0., 0.],
[4.3, 0., 0.]]],
[[[0., 47.4, 0.],
[47.4, 0., 0.],
[0., 0., 0.]],
[[63.1, 0., 0.],
[0., 157.9, 0.],
[0., 0., 29.4]],
[[0., 0., 0.],
[0., 0., 4.3],
[0., 4.3, 0.]]],
[[[0., 0., 4.3],
[0., 0., 0.],
[4.3, 0., 0.]],
[[0., 0., 0.],
[0., 0., 4.3],
[0., 4.3, 0.]],
[[29.4, 0., 0.],
[0., 29.4, 0.],
[0., 0., 207.6]]]])
self.unfit4 = Tensor([[[[161.26, 0., 0.],
[0., 62.76, 0.],
[0., 0., 30.18]],
[[0., 47.08, 0.],
[47.08, 0., 0.],
[0., 0., 0.]],
[[0., 0., 4.23],
[0., 0., 0.],
[4.23, 0., 0.]]],
[[[0., 47.08, 0.],
[47.08, 0., 0.],
[0., 0., 0.]],
[[62.76, 0., 0.],
[0., 155.28, -0.06],
[0., -0.06, 28.53]],
[[0., 0., 0.],
[0., -0.06, 4.44],
[0., 4.44, 0.]]],
[[[0., 0., 4.23],
[0., 0., 0.],
[4.23, 0., 0.]],
[[0., 0., 0.],
[0., -0.06, 4.44],
[0., 4.44, 0.]],
[[30.18, 0., 0.],
[0., 28.53, 0.],
[0., 0., 207.57]]]])
self.structure = self.get_structure('BaNiO3')
ieee_file_path = os.path.join(test_dir, "ieee_conversion_data.json")
self.ieee_data = loadfn(ieee_file_path)
def test_new(self):
bad_2 = np.zeros((4, 4))
bad_3 = np.zeros((4, 4, 4))
self.assertRaises(ValueError, Tensor, bad_2)
self.assertRaises(ValueError, Tensor, bad_3)
self.assertEqual(self.rand_rank2.rank, 2)
self.assertEqual(self.rand_rank3.rank, 3)
self.assertEqual(self.rand_rank4.rank, 4)
def test_zeroed(self):
self.assertArrayEqual(self.low_val.zeroed(),
Tensor([[0, 1 + 1e-5, 0],
[1 + 1e-6, 0, 0],
[0, 0, 1 + 1e-5]]))
self.assertArrayEqual(self.low_val.zeroed(tol=1e-6),
Tensor([[1e-6, 1 + 1e-5, 1e-6],
[1 + 1e-6, 1e-6, 1e-6],
[0, 0, 1 + 1e-5]]))
self.assertArrayEqual(Tensor([[1e-6, -30, 1],
[1e-7, 1, 0],
[1e-8, 0, 1]]).zeroed(),
Tensor([[0, -30, 1],
[0, 1, 0],
[0, 0, 1]]))
def test_transform(self):
# Rank 3
tensor = Tensor(np.arange(0, 27).reshape(3, 3, 3))
symm_op = SymmOp.from_axis_angle_and_translation([0, 0, 1], 30,
False, [0, 0, 1])
new_tensor = tensor.transform(symm_op)
self.assertArrayAlmostEqual(new_tensor,
[[[-0.871, -2.884, -1.928],
[-2.152, -6.665, -4.196],
[-1.026, -2.830, -1.572]],
[[0.044, 1.531, 1.804],
[4.263, 21.008, 17.928],
[5.170, 23.026, 18.722]],
[[1.679, 7.268, 5.821],
[9.268, 38.321, 29.919],
[8.285, 33.651, 26.000]]], 3)
def test_rotate(self):
self.assertArrayEqual(self.vec.rotate([[0, -1, 0],
[1, 0, 0],
[0, 0, 1]]),
[0, 1, 0])
self.assertArrayAlmostEqual(self.non_symm.rotate(self.rotation),
SquareTensor([[0.531, 0.485, 0.271],
[0.700, 0.5, 0.172],
[0.171, 0.233, 0.068]]),
decimal=3)
self.assertRaises(ValueError, self.non_symm.rotate,
self.symm_rank2)
def test_symmetrized(self):
self.assertTrue(self.rand_rank2.symmetrized.is_symmetric())
self.assertTrue(self.rand_rank3.symmetrized.is_symmetric())
self.assertTrue(self.rand_rank4.symmetrized.is_symmetric())
def test_is_symmetric(self):
self.assertTrue(self.symm_rank2.is_symmetric())
self.assertTrue(self.symm_rank3.is_symmetric())
self.assertTrue(self.symm_rank4.is_symmetric())
        tol_test = self.symm_rank4
        tol_test[0, 1, 2, 2] += 1e-6
        self.assertFalse(tol_test.is_symmetric(tol=1e-8))
        self.assertFalse(self.low_val.is_symmetric(tol=1e-8))
def test_fit_to_structure(self):
new_fit = self.unfit4.fit_to_structure(self.structure)
self.assertArrayAlmostEqual(new_fit, self.fit_r4, 1)
def test_is_fit_to_structure(self):
self.assertFalse(self.unfit4.is_fit_to_structure(self.structure))
self.assertTrue(self.fit_r3.is_fit_to_structure(self.structure))
self.assertTrue(self.fit_r4.is_fit_to_structure(self.structure))
def test_convert_to_ieee(self):
for entry in self.ieee_data:
xtal = entry['xtal']
struct = entry['structure']
orig = Tensor(entry['original_tensor'])
ieee = Tensor(entry['ieee_tensor'])
diff = np.max(abs(ieee - orig.convert_to_ieee(struct)))
err_msg = "{} IEEE conversion failed with max diff {}. "\
"Numpy version: {}".format(xtal, diff, np.__version__)
self.assertArrayAlmostEqual(ieee, orig.convert_to_ieee(struct),
err_msg=err_msg, decimal=3)
def test_from_voigt(self):
with self.assertRaises(ValueError):
Tensor.from_voigt([[59.33, 28.08, 28.08, 0],
[28.08, 59.31, 28.07, 0],
[28.08, 28.07, 59.32, 0, 0],
[0, 0, 0, 26.35, 0],
[0, 0, 0, 0, 26.35]])
# Rank 4
Tensor.from_voigt([[59.33, 28.08, 28.08, 0, 0, 0],
[28.08, 59.31, 28.07, 0, 0, 0],
[28.08, 28.07, 59.32, 0, 0, 0],
[0, 0, 0, 26.35, 0, 0],
[0, 0, 0, 0, 26.35, 0],
[0, 0, 0, 0, 0, 26.35]])
# Rank 3
Tensor.from_voigt(np.zeros((3, 6)))
# Rank 2
Tensor.from_voigt(np.zeros(6))
# Addresses occasional cast issues for integers
Tensor.from_voigt(np.arange(6))
def test_symmetry_reduce(self):
tbs = [Tensor.from_voigt(row) for row in np.eye(6)*0.01]
reduced = symmetry_reduce(tbs, self.get_structure("Sn"))
self.assertEqual(len(reduced), 2)
self.assertArrayEqual([len(i) for i in reduced.values()], [2, 2])
reconstructed = []
for k, v in reduced.items():
reconstructed.extend([k.voigt] + [k.transform(op).voigt for op in v])
reconstructed = sorted(reconstructed, key = lambda x: np.argmax(x))
self.assertArrayAlmostEqual([tb for tb in reconstructed], np.eye(6)*0.01)
class TensorCollectionTest(PymatgenTest):
def setUp(self):
self.seq_tc = [t for t in np.arange(4*3**3).reshape((4, 3, 3, 3))]
self.seq_tc = TensorCollection(self.seq_tc)
self.rand_tc = TensorCollection([t for t in np.random.random((4, 3, 3))])
self.diff_rank = TensorCollection([np.ones([3]*i) for i in range(2, 5)])
self.struct = self.get_structure("Si")
ieee_file_path = os.path.join(test_dir, "ieee_conversion_data.json")
self.ieee_data = loadfn(ieee_file_path)
def list_based_function_check(self, attribute, coll, *args, **kwargs):
"""
This function allows for more efficient testing of list-based
functions in a "collection"-style class like TensorCollection
It ensures that the test function
"""
tc_orig = TensorCollection(coll)
tc_mod = getattr(tc_orig, attribute)
if callable(tc_mod):
tc_mod = tc_mod(*args, **kwargs)
for t_orig, t_mod in zip(tc_orig, tc_mod):
this_mod = getattr(t_orig, attribute)
if callable(this_mod):
this_mod = this_mod(*args, **kwargs)
if isinstance(this_mod, np.ndarray):
self.assertArrayAlmostEqual(this_mod, t_mod)
def test_list_based_functions(self):
# zeroed
tc = TensorCollection([1e-4*Tensor(np.eye(3))]*4)
for t in tc.zeroed():
self.assertArrayEqual(t, np.zeros((3, 3)))
for t in tc.zeroed(1e-5):
self.assertArrayEqual(t, 1e-4*np.eye(3))
self.list_based_function_check("zeroed", tc)
self.list_based_function_check("zeroed", tc, tol=1e-5)
# transform
symm_op = SymmOp.from_axis_angle_and_translation([0, 0, 1], 30,
False, [0, 0, 1])
self.list_based_function_check("transform", self.seq_tc, symm_op=symm_op)
# symmetrized
self.list_based_function_check("symmetrized", self.seq_tc)
# rotation
a = 3.14 * 42.5 / 180
rotation = SquareTensor([[math.cos(a), 0, math.sin(a)], [0, 1, 0],
[-math.sin(a), 0, math.cos(a)]])
self.list_based_function_check("rotate", self.diff_rank, matrix=rotation)
# is_symmetric
self.assertFalse(self.seq_tc.is_symmetric())
self.assertTrue(self.diff_rank.is_symmetric())
# fit_to_structure
self.list_based_function_check("fit_to_structure", self.diff_rank, self.struct)
self.list_based_function_check("fit_to_structure", self.seq_tc, self.struct)
# voigt
self.list_based_function_check("voigt", self.diff_rank)
# is_voigt_symmetric
self.assertTrue(self.diff_rank.is_voigt_symmetric())
self.assertFalse(self.seq_tc.is_voigt_symmetric())
# Convert to ieee
for entry in self.ieee_data[:2]:
xtal = entry['xtal']
tc = TensorCollection([entry['original_tensor']]*3)
struct = entry['structure']
self.list_based_function_check("convert_to_ieee", tc, struct)
# from_voigt
tc_input = [t for t in np.random.random((3, 6, 6))]
tc = TensorCollection.from_voigt(tc_input)
for t_input, t in zip(tc_input, tc):
self.assertArrayAlmostEqual(Tensor.from_voigt(t_input), t)
class SquareTensorTest(PymatgenTest):
def setUp(self):
self.rand_sqtensor = SquareTensor(np.random.randn(3, 3))
self.symm_sqtensor = SquareTensor([[0.1, 0.3, 0.4],
[0.3, 0.5, 0.2],
[0.4, 0.2, 0.6]])
self.non_invertible = SquareTensor([[0.1, 0, 0],
[0.2, 0, 0],
[0, 0, 0]])
self.non_symm = SquareTensor([[0.1, 0.2, 0.3],
[0.4, 0.5, 0.6],
[0.2, 0.5, 0.5]])
self.low_val = SquareTensor([[1e-6, 1 + 1e-5, 1e-6],
[1 + 1e-6, 1e-6, 1e-6],
[1e-7, 1e-7, 1 + 1e-5]])
self.low_val_2 = SquareTensor([[1e-6, -1 - 1e-6, 1e-6],
[1 + 1e-7, 1e-6, 1e-6],
[1e-7, 1e-7, 1 + 1e-6]])
a = 3.14 * 42.5 / 180
self.rotation = SquareTensor([[math.cos(a), 0, math.sin(a)],
[0, 1, 0],
[-math.sin(a), 0, math.cos(a)]])
def test_new(self):
non_sq_matrix = [[0.1, 0.2, 0.1],
[0.1, 0.2, 0.3],
[0.1, 0.2, 0.3],
[0.1, 0.1, 0.1]]
bad_matrix = [[0.1, 0.2],
[0.2, 0.3, 0.4],
[0.2, 0.3, 0.5]]
too_high_rank = np.zeros((3,3,3))
self.assertRaises(ValueError, SquareTensor, non_sq_matrix)
self.assertRaises(ValueError, SquareTensor, bad_matrix)
self.assertRaises(ValueError, SquareTensor, too_high_rank)
def test_properties(self):
# transpose
self.assertArrayEqual(self.non_symm.trans,
SquareTensor([[0.1, 0.4, 0.2],
[0.2, 0.5, 0.5],
[0.3, 0.6, 0.5]]))
self.assertArrayEqual(self.rand_sqtensor.trans,
np.transpose(self.rand_sqtensor))
self.assertArrayEqual(self.symm_sqtensor,
self.symm_sqtensor.trans)
# inverse
self.assertArrayEqual(self.non_symm.inv,
np.linalg.inv(self.non_symm))
with self.assertRaises(ValueError):
self.non_invertible.inv
# determinant
self.assertEqual(self.rand_sqtensor.det,
np.linalg.det(self.rand_sqtensor))
self.assertEqual(self.non_invertible.det,
0.0)
self.assertEqual(self.non_symm.det, 0.009)
# symmetrized
self.assertArrayEqual(self.rand_sqtensor.symmetrized,
0.5 * (self.rand_sqtensor + self.rand_sqtensor.trans))
self.assertArrayEqual(self.symm_sqtensor,
self.symm_sqtensor.symmetrized)
self.assertArrayAlmostEqual(self.non_symm.symmetrized,
SquareTensor([[0.1, 0.3, 0.25],
[0.3, 0.5, 0.55],
[0.25, 0.55, 0.5]]))
# invariants
i1 = np.trace(self.rand_sqtensor)
i2 = self.rand_sqtensor[0, 0] * self.rand_sqtensor[1, 1] + \
self.rand_sqtensor[1, 1] * self.rand_sqtensor[2, 2] + \
self.rand_sqtensor[2, 2] * self.rand_sqtensor[0, 0] - \
self.rand_sqtensor[0, 1] * self.rand_sqtensor[1, 0] - \
self.rand_sqtensor[0, 2] * self.rand_sqtensor[2, 0] - \
self.rand_sqtensor[2, 1] * self.rand_sqtensor[1, 2]
i3 = np.linalg.det(self.rand_sqtensor)
self.assertArrayAlmostEqual([i1, i2, i3],
self.rand_sqtensor.principal_invariants)
def test_is_rotation(self):
self.assertTrue(self.rotation.is_rotation())
self.assertFalse(self.symm_sqtensor.is_rotation())
self.assertTrue(self.low_val_2.is_rotation())
self.assertFalse(self.low_val_2.is_rotation(tol=1e-8))
def test_get_scaled(self):
self.assertArrayEqual(self.non_symm.get_scaled(10.),
SquareTensor([[1, 2, 3], [4, 5, 6], [2, 5, 5]]))
def test_polar_decomposition(self):
u, p = self.rand_sqtensor.polar_decomposition()
self.assertArrayAlmostEqual(np.dot(u, p), self.rand_sqtensor)
self.assertArrayAlmostEqual(np.eye(3),
np.dot(u, np.conjugate(np.transpose(u))))
if __name__ == '__main__':
unittest.main()
|
chouseknecht/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/aci/aci_tenant_span_src_group_to_dst_group.py
|
8
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_tenant_span_src_group_to_dst_group
short_description: Bind SPAN source groups to destination groups (span:SpanLbl)
description:
- Bind SPAN source groups to associated destination groups on Cisco ACI fabrics.
version_added: '2.4'
options:
description:
description:
      - The description for the SPAN source group to destination group binding.
type: str
aliases: [ descr ]
dst_group:
description:
- The Span destination group to associate with the source group.
type: str
src_group:
description:
- The name of the Span source group.
type: str
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
tenant:
description:
- The name of the Tenant.
type: str
aliases: [ tenant_name ]
extends_documentation_fragment: aci
notes:
- The C(tenant), C(src_group), and C(dst_group) must exist before using this module in your playbook.
The M(aci_tenant), M(aci_tenant_span_src_group), and M(aci_tenant_span_dst_group) modules can be used for this.
seealso:
- module: aci_tenant
- module: aci_tenant_span_src_group
- module: aci_tenant_span_dst_group
- name: APIC Management Information Model reference
description: More information about the internal APIC class B(span:SrcGrp).
link: https://developer.cisco.com/docs/apic-mim-ref/
author:
- Jacob McGill (@jmcgill298)
'''
EXAMPLES = r'''
- aci_tenant_span_src_group_to_dst_group:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
src_group: "{{ src_group }}"
dst_group: "{{ dst_group }}"
description: "{{ description }}"
delegate_to: localhost
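# An additional illustrative sketch: query all bindings under a tenant
# (state=query does not require src_group or dst_group per the argument spec below)
- aci_tenant_span_src_group_to_dst_group:
    host: apic
    username: admin
    password: SomeSecretPassword
    tenant: production
    state: query
  delegate_to: localhost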
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
tenant=dict(type='str', aliases=['tenant_name']), # Not required for querying all objects
dst_group=dict(type='str'), # Not required for querying all objects
src_group=dict(type='str'), # Not required for querying all objects
description=dict(type='str', aliases=['descr']),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['dst_group', 'src_group', 'tenant']],
['state', 'present', ['dst_group', 'src_group', 'tenant']],
],
)
description = module.params['description']
dst_group = module.params['dst_group']
src_group = module.params['src_group']
state = module.params['state']
tenant = module.params['tenant']
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class='fvTenant',
aci_rn='tn-{0}'.format(tenant),
module_object=tenant,
target_filter={'name': tenant},
),
subclass_1=dict(
aci_class='spanSrcGrp',
aci_rn='srcgrp-{0}'.format(src_group),
module_object=src_group,
target_filter={'name': src_group},
),
subclass_2=dict(
aci_class='spanSpanLbl',
aci_rn='spanlbl-{0}'.format(dst_group),
module_object=dst_group,
target_filter={'name': dst_group},
),
)
aci.get_existing()
if state == 'present':
aci.payload(
aci_class='spanSpanLbl',
class_config=dict(
descr=description,
name=dst_group,
),
)
aci.get_diff(aci_class='spanSpanLbl')
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json()
if __name__ == "__main__":
main()
|
maxspad/MGrader
|
refs/heads/master
|
autograder/modules/GSM.py
|
1
|
'''
Created on Mar 22, 2013
@author: max
'''
import questions.Result as Result
import autograder.TerminalHelper as t
import Database as dbm
import Students
import Repos
import Execenv as env
import Assignments
import XMLHelper
import os
import autograder.Questions as Questions
import importlib
import Grades
def initialize():
pass
def process_cmd(cmdstr, args):
import argparse
args.remove(cmdstr)
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('-v', '--verbose', action='store_true', help="Turns on more verbose output.")
parsed = None
if cmdstr == 'grade':
parser.add_argument('assigname', help='The name of the assignment to grade')
parser.add_argument('--single', metavar='UNAME', help='Grades only a single student')
parser.add_argument('--ctimeout', '-c', metavar="Seconds", type=int, default="5", help="The amount of time each student has for compilation")
parser.add_argument('--rtimeout', '-r', metavar="Seconds", type=int, default="5", help="The amount of time each student has to run.")
parsed = parser.parse_args(args)
t.verbose = parsed.verbose
if parsed.single != None and parsed.single != '':
gradesingle(parsed.assigname, parsed.single, parsed.ctimeout, parsed.rtimeout)
else:
grade(parsed.assigname, parsed.ctimeout, parsed.rtimeout)
def grade(assigname, ctimeout, rtimeout):
'''
Starter function for grading an assignment.
Initializes and starts the GSM.
    @param assigname: The assignment name to grade.
    @param ctimeout: The compilation timeout, passed from the command line.
    @param rtimeout: The run timeout, passed from the command line.
'''
t.v('Multigrade.')
db = dbm.DAL(connect=True)
sql = 'SELECT {0} FROM {1}'.format(Students.f_uname, Students.table_name)
rows = db.query(sql, None)
unames = [row[0] for row in rows]
db.disconnect()
gsm = GSM(assigname, unames, ctimeout, rtimeout)
gsm.start()
def gradesingle(assigname, uname, ctimeout, rtimeout):
'''
Equivalent of grade, but for a single student.
@see: grade function
'''
t.v('Singlegrade.')
db = dbm.DAL(connect=True)
if Students.studentexists(uname, db):
gsm = GSM(assigname, [uname], ctimeout, rtimeout)
gsm.start()
else:
t.e('Student %s does not exist. Have you added him/her?' % uname, 1)
class GSM(object):
'''
The Grading Session Manager class.
Responsible for loading an XML file, launching its validation,
using the validated information to move files in and out of the
execution environment, passing these files and other information
to the Question objects, retrieving and parsing the question objects'
results, and adding these results to the database. It also (tentatively,
as this is untested due to inability to test on a large class size) handles
the partner grading system.
'''
def __init__(self, assigname, unamelist, ctimeout, rtimeout):
'''
Constructor.
@param assigname: The assignment name being graded.
@param unamelist: The list of unames to grade.
        @param ctimeout: @see: grade function above
        @param rtimeout: @see: grade function above
'''
self.assigname = assigname
self.unames = unamelist
self.cto = ctimeout
self.rto = rtimeout
def start(self):
'''
Starts the grading session.
Checks if the assignment the user asks to grade actually exists.
Loads the homeworkspec.xml from the database.
Validates it.
Uses it for grading.
'''
t.p('Grading assignment %s' % self.assigname)
t.v('With compile timeout = %d' % self.cto)
t.v('With run timeout = %d' % self.rto)
t.p("")
db = dbm.DAL(connect=True)
if Assignments.assigexists(self.assigname, db):
sql = 'SELECT {0} FROM {1} WHERE {2}=?'.format(Assignments.f_xml, Assignments.table_name, Assignments.f_assigname)
result = db.query(sql, (self.assigname,), fetchone=True)
resulttuple = XMLHelper.loadXMLandvalidate(result[0])
if resulttuple[1] == False:
# Bad hwspec.xml!
t.e('HwSpec validation failed - ' + resulttuple[2], 1)
else:
t.p('Validation...Success!')
self.specobj = resulttuple[0]
self.__grade()
else:
t.e('Assignment %s does not exist!' % self.assigname, 1)
def __grade(self):
'''
Where the magic happens.
If the assignment exists and its XML has validated, we get here.
Checks the grade status of each student (grade, nograde, nograde+autofail),
and it also assembles partner configurations.
Then it pulls together inputs and outputs, and moves them into the execution environments.
It passes almost all of its information to the question objects, and then processes their results,
adding their results to the database via the Grades module.
It does this for each student, for each question, for each test case.
@todo: Break it up to be more readable.
'''
# Get correct gradestatus for each student
students = self.__makeStudentsDict()
t.p("")
self.grader = Grades.Grader(self.assigname)
self.grader.startSession()
for uname in students:
if not students[uname].autofail:
# Student hasn't been flagged for incorrect partners. Continue.
if students[uname].gradestatus == 'grade':
t.v('Starting grade for %s' % students[uname])
# For each question in the assignment
for question in self.specobj.questions:
questiontotal = 0
questionpos = question.worth
for testcase in question.testcases:
studentfilespaths = self.__copyStudentFiles(question, uname) # copy student files at the question-level
if studentfilespaths == False:
gr = Result.GradeResult(question, False,
'Missing key files.',
ptspos=int(testcase.worth))
questiontotal += self.__processResult(gr,
uname, testcase,
students[uname].partner)
continue
inputs = self.__copyInputs(question, uname) # copy inputs at the question-level
outputs = self.__createOutputs(question, uname) # create outputs at the question level
inputs += self.__copyInputs(testcase, uname) # at test-case level
outputs += self.__createOutputs(testcase, uname) # at test-case level
self.__copyInsProg(testcase, uname) # makefile is question-specific and therefore copied separately
qmod = self.__getQuestionModule(question.type)
resultobj = qmod.grade(uname, self.assigname, testcase.name, inputs, outputs, testcase.instructorprog, self.rto, self.cto, question.diffcmd, testcase.runcmd, makefile=testcase.makefile, target=testcase.target)
questiontotal += self.__processResult(resultobj, uname, testcase, students[uname].partner)
# adds result object to question-level totalpoints
# reports to terminal and also records tc grades
# Stores question level total to grades
self.__reportQuestionTotal(uname, question.name,
questiontotal, questionpos,
question.descrip,
students[uname].partner)
# Finishes
env.clearexecenv(uname)
elif students[uname].gradestatus == 'nograde':
pass # do nothing, duplicate partner grades later
else:
self.__failEntireAssignment(students[uname])
self.grader.finishSession()
def __makeStudentsDict(self):
'''
Compiles a dictionary of Students objects, with the format
{uname:<StudentObj>,...}. The objects contain partner info
        among other things. This function sets each Student object's
        autofail and gradestatus attributes. autofail is set to True if
        the student shows a partner discrepancy, a sign they are trying
        to cheat the system or have communication issues with their partner.
        gradestatus is set to either 'grade' or 'nograde', depending on
        whether that student should be graded, given the student's
        partner setup. autofails are automatically nograde.
@return: The above-described dictionary.
'''
t.p('Calculating partner configurations...')
# Take care of singlegrade option
if len(self.unames) < 2:
primarystudentslist = Students.getstudents(self.assigname, single=True,uname=self.unames[0])
else:
# Get list of student unames from Database
primarystudentslist = Students.getstudents(self.assigname)
studentsdict = dict()
for studentobj in primarystudentslist:
studentsdict[studentobj.uname] = studentobj
for key in studentsdict:
keypartner = studentsdict[key].partner # the partner listed by key uname
if keypartner is not None and keypartner != False:
try:
supposedlykey = studentsdict[keypartner].partner # supposed to list key uname back
if supposedlykey != str(key):
# discrepancy! fail key uname for cheating
studentsdict[key].gradestatus = 'nograde'
studentsdict[key].autofail = True
continue
except KeyError:
# discrepancy! key uname lists a partner that isn't really there
studentsdict[key].gradestatus = 'nograde'
studentsdict[key].autofail = True
continue
elif keypartner == False:
                # Discrepancy! Student listed by uname doesn't exist.
studentsdict[key].gradestatus = 'nograde'
studentsdict[key].autofail = True
t.p('Success!')
return studentsdict
def __copyStudentFiles(self, container, uname):
'''
Copies student program files (.cpp, .py, etc.) to the execenv.
@param container: The container XML object.
@param uname: The uname being graded.
@type container: A XMLQuestion object.
'''
studentfiles = container.studentfiles
files = studentfiles.files
filepathlist = self.__buildFilePathList(files, uname)
if filepathlist == False:
# Student is missing key homeworkfiles
return False
for path in filepathlist:
env.putfile_st(path, uname)
return filepathlist
def __copyInputs(self, container, uname):
'''
Copies inputs to the execenv.
@type container: XMLQuestion or XMLTestcase
'''
inputs = container.inputs
stdin = None
if inputs is None:
return (stdin, [])
else:
files = inputs.files[:]
for filetype in files:
if type(filetype) == XMLHelper.XMLStdin:
files.remove(filetype)
stdin = filetype
filepathlist = self.__buildFilePathList(files, uname)
for path in filepathlist:
env.putfile_both(path, uname)
return (stdin, filepathlist)
def __createOutputs(self, container, uname):
'''
Create the necessary outputs in the execenv.
@type container: Either XMLQuestion or XMLTestcase
'''
outputs = container.outputs
if outputs is None:
return (None, None, [])
else:
files = outputs.files
stdouts = [ fileobj.loc for fileobj in files if type(fileobj) == XMLHelper.XMLStdout ]
stderrs = [ fileobj.loc for fileobj in files if type(fileobj) == XMLHelper.XMLStderr ]
regularfiles = [ fileobj.loc for fileobj in files if type(fileobj) == XMLHelper.XMLOutFile ]
combined = stdouts + stderrs
for std in combined:
pathIns = env.getpath(uname) + '/instructor/' + std
pathStu = env.getpath(uname) + '/student/' + std
createdfile = open(pathIns, 'w')
createdfile.close()
createdfile = open(pathStu, 'w')
createdfile.close()
try:
return (stdouts[0], stderrs[0], regularfiles)
except IndexError:
return (None, None, regularfiles)
def __copyInsProg(self, testcase, uname):
'''
Copies the instructor program to the execenv.
'''
instructorprogPath = testcase.instructorprog
env.putfile_ins(instructorprogPath, uname)
def __buildFilePathList(self, filecontainer, uname):
'''
Builds a list of paths to files. First adds the files based on
all matches to an XMLFilePatternMatching. Then removes any matching
a XMLNoInclude.
'''
# First add all matches
toReturn = []
filenames = []
for fileobj in filecontainer:
if type(fileobj) == XMLHelper.XMLFilePatternMatching:
matchedfilenames = fileobj.match(directory=Repos.getrepoassigpath(uname, self.assigname))
if len(matchedfilenames) == 0:
# Student is missing key files!
# Fail him!
return False
filenames = filenames + matchedfilenames
elif type(fileobj) == XMLHelper.XMLFile or type(fileobj) == XMLHelper.XMLStdin:
filenames.append(fileobj.loc)
# Then remove all nomatches
for fileobj in filecontainer:
if type(fileobj) == XMLHelper.XMLNoinclude:
matchedfilenames = fileobj.matchlist(filenames)
for filename in matchedfilenames:
try:
filenames.remove(filename)
except ValueError:
pass
# Then get the full path of each.
for filename in filenames:
path = Repos.getfile(filename, uname, self.assigname)
if path == False:
# Possible that it is an input, not a student-file. Check if it exists manually
if os.path.exists(filename):
toReturn.append(filename)
else:
t.e('Student missing crucial input files', 1)
                    # We don't need to worry about nonexistent inputs because they were already validated
# TODO: Fail the student
else:
toReturn.append(path)
return toReturn
def __getQuestionModule(self, qtypestr):
'''
Uses importlib to dynamically acquire the requested
question module based on the 'type' attribute of the
XMLQuestion abstraction.
'''
        qlist = Questions.questions
for qstr in qlist:
if qtypestr == qstr:
return importlib.import_module(qstr, 'autograder.modules.questions')
t.e('Question type given in XML is invalid.', 1)
def __processResult(self, resultobj, uname, testcase, partner):
'''
Processes a result object and adds its grades to the database
via the Grades module.
'''
        outstr = '{0} {1} TC {2} - {3} - Received {4} of {5} pts.'
if resultobj.passed == True:
print outstr.format(uname, 'passed', testcase.name, resultobj.reason,
testcase.worth, testcase.worth)
self.grader.addGrade(uname, testcase.name, Grades.gtype_TC,
int(testcase.worth), int(testcase.worth),
resultobj.reason, resultobj.msg,
testcase.descrip, partner)
return testcase.worth
else:
print outstr.format(uname, 'failed', testcase.name, resultobj.reason,
0, testcase.worth)
self.grader.addGrade(uname, testcase.name, Grades.gtype_TC,
int(testcase.worth), 0, resultobj.reason,
resultobj.msg, testcase.descrip, partner)
return 0
def __reportQuestionTotal(self, uname, qname, qtotal, qpos, descrip, partner):
'''
Takes a question total and adds it to the database using the Grades module.
'''
outstr = '{0} - Question {1} - Received {2} of {3} pts.'
formatted = outstr.format(uname, qname, qtotal, qpos)
self.grader.addGrade(uname, qname, Grades.gtype_Q, qpos, qtotal, '', '',
descrip, partner)
print formatted
print '-'*len(formatted)
print ''
def __failEntireAssignment(self, studentobj):
'''
Fails an entire assignment for a Student with his/her
autofail attribute set to True. Mostly a convenience function.
'''
outstr = '{0} - FAILED ENTIRE ASSIGNMENT - Partner Discrepancy!'
formatted = outstr.format(studentobj.uname)
print formatted
for question in self.specobj.questions:
for testcase in question.testcases:
gr = Result.GradeResult(question, False,
Result.reasons[Result.PARTNER],
ptspos=int(testcase.worth))
self.__processResult(gr, studentobj.uname, testcase, '')
self.__reportQuestionTotal(studentobj.uname, question.name, 0,
question.worth, question.descrip, '')
def finish(self):
pass
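# A minimal usage sketch (assignment name and unames below are hypothetical):
#
#   gsm = GSM('hw1', ['auser', 'buser'], ctimeout=5, rtimeout=5)
#   gsm.start()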
|
mathspace/django
|
refs/heads/master
|
django/__init__.py
|
31
|
from __future__ import unicode_literals
from django.utils.version import get_version
VERSION = (1, 11, 0, 'alpha', 0)
__version__ = get_version(VERSION)
def setup(set_prefix=True):
"""
Configure the settings (this happens as a side effect of accessing the
first setting), configure logging and populate the app registry.
Set the thread-local urlresolvers script prefix if `set_prefix` is True.
"""
from django.apps import apps
from django.conf import settings
from django.urls import set_script_prefix
from django.utils.encoding import force_text
from django.utils.log import configure_logging
configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
if set_prefix:
set_script_prefix(
'/' if settings.FORCE_SCRIPT_NAME is None else force_text(settings.FORCE_SCRIPT_NAME)
)
apps.populate(settings.INSTALLED_APPS)
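# A minimal sketch of standalone usage, assuming a hypothetical settings
# module name; kept as a comment so importing django stays side-effect free:
#
#   import os
#   import django
#
#   os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')
#   django.setup()  # configures settings and logging, populates the app registry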
|
bitesofcode/projexui
|
refs/heads/master
|
projexui/widgets/xscintillaedit/xlanguage.py
|
2
|
#!/usr/bin/python
""" Defines a language class that will be used for the XScintillaEdit """
# define authorship information
__authors__ = ['Eric Hulser']
__author__ = ','.join(__authors__)
__credits__ = []
__copyright__ = 'Copyright (c) 2011, Projex Software'
__license__ = 'LGPL'
# maintenance information
__maintainer__ = 'Projex Software'
__email__ = 'team@projexsoftware.com'
#------------------------------------------------------------------------------
import glob
import logging
import os
import re
import sys
from ConfigParser import ConfigParser
import projex
logger = logging.getLogger(__name__)
from projex.text import nativestring
from projexui.qt import Qsci, wrapVariant
#------------------------------------------------------------------------------
class XMethodDescriptor(object):
def __init__( self, dtype, expr ):
self.dtype = dtype
self.exprText = expr
try:
self.expr = re.compile(expr, re.DOTALL|re.MULTILINE)
except:
logger.exception('Invalid regex: %s' % expr)
self.expr = None
def search( self, text, startpos = -1 ):
if ( not self.expr ):
return None
if ( startpos != -1 ):
return self.expr.search(text,startpos)
else:
return self.expr.search(text)
#------------------------------------------------------------------------------
class XLanguage(object):
_plugins = {}
def __init__(self):
self._name = ''
self._fileTypes = []
# lexer class information
self._lexerType = -1
self._lexerTypeName = ''
self._lexerModule = ''
self._lexerColorTypes = {}
self._lexerProperties = {}
self._custom = False
self._sourcefile = ''
self._tabWidth = 0
self._colorStyles = {}
# comment information
self._lineComment = ''
# method descriptors
self._descriptors = []
def addDescriptor( self, type, expr ):
self._descriptors.append( XMethodDescriptor(type,expr) )
def createLexer( self, parent = None, colorSet = None ):
# create an instance of the lexer
cls = self.lexerType()
if ( not cls ):
return None
output = cls(parent)
if ( output and parent ):
try:
parent.setLexer(output)
except AttributeError:
pass
# set lexer property options
for key, value in self.lexerProperties().items():
setter = getattr(output, 'set' + key[0].upper() + key[1:], None)
if setter:
setter(value)
else:
output.setProperty(key, wrapVariant(value))
output.setFont(parent.font())
if ( colorSet ):
self.setColorSet(output, colorSet)
return output
def descriptors( self ):
return self._descriptors
def isCustom( self ):
return self._custom
def name( self ):
return self._name
def lexerColorTypes( self ):
return self._lexerColorTypes
def lineComment( self ):
return self._lineComment
def loadLexerType( self ):
# use a custom lexer module
if ( self._lexerModule ):
# retrieve the lexer module
module = sys.modules.get(self._lexerModule)
# try to import the module
if ( not module ):
try:
__import__(self._lexerModule)
module = sys.modules.get(self._lexerModule)
except:
err = 'Could not import %s module' % self._lexerModule
logger.exception(err)
self._lexerType = None
return None
# otherwise, its in the Qsci module
else:
module = Qsci
# retrieve the lexer class
self._lexerType = module.__dict__.get(self._lexerTypeName)
if ( not self._lexerType ):
err = 'Lexer Error: No %s class in %s' % (self._lexerTypeName,
module.__name__)
logger.warning(err)
def fileTypes( self ):
return self._fileTypes
def lexerProperties(self):
return self._lexerProperties
def lexerType( self ):
if ( self._lexerType == -1 ):
self.loadLexerType()
return self._lexerType
def lexerTypeName( self ):
return self._lexerTypeName
def lexerModule( self ):
return self._lexerModule
def save( self, filename = '' ):
if ( not filename ):
filename = self.filename()
if ( not filename ):
return False
parser = ConfigParser()
parser.add_section('GLOBALS')
parser.set( 'GLOBALS', 'name', self.name() )
parser.set( 'GLOBALS', 'filetypes', ';'.join(self.fileTypes()) )
parser.set( 'GLOBALS', 'linecomment', self.lineComment() )
parser.add_section('LEXER')
parser.set( 'LEXER', 'class', self.lexerTypeName() )
parser.set( 'LEXER', 'module', self.lexerModule() )
if self.lexerProperties():
parser.add_section('LEXER_PROPERTIES')
for i, (key, value) in enumerate(self.lexerProperties().items()):
store = '{0}:{1}'.format(key, value)
store_name = 'prop{0}'.format(i)
parser.set('LEXER_PROPERTIES', store_name, store)
parser.add_section('DESCRIPTORS')
for i, desc in enumerate( self._descriptors ):
parser.set('DESCRIPTORS','%s%i' % (desc.dtype,i),desc.exprText)
parser.add_section('COLOR_TYPES')
for key, value in self.lexerColorTypes().items():
parser.set('COLOR_TYPES',key,','.join([nativestring(val) for val in value]))
# save the language
f = open(filename,'w')
parser.write(f)
f.close()
self._sourcefile = filename
return True
def setColorSet( self, lexer, colorSet ):
for colorKey, colorStyles in self._colorStyles.items():
color = colorSet.color(colorKey)
for colorStyle in colorStyles:
lexer.setColor(color, colorStyle)
lexer.setPaper(colorSet.color('Background'))
lexer.setDefaultColor(colorSet.color('Text'))
def setCustom( self, state ):
self._custom = state
def setFileTypes( self, fileTypes ):
self._fileTypes = fileTypes
def setLexerTypeName( self, className ):
self._lexerTypeName = className
def setLexerModule( self, module ):
self._lexerModule = module
def setLineComment( self, lineComment ):
self._lineComment = lineComment
def setLexerColorTypes( self, lexerColorTypes ):
self._lexerColorTypes = lexerColorTypes
def setLexerProperty(self, key, value):
self._lexerProperties[nativestring(key)] = value
def setLexerProperties(self, props):
self._lexerProperties = props.copy()
def setName( self, name ):
self._name = name
def setTabWidth(self, width):
self._tabWidth = width
def sourcefile( self ):
return self._sourcefile
def tabWidth(self):
return self._tabWidth
@staticmethod
def byFileType( fileType ):
"""
        Looks up the language plugin by the given file type.
:param fileType | <str>
:return <XLanguage> || None
"""
XLanguage.load()
for lang in XLanguage._plugins.values():
if ( fileType in lang.fileTypes() ):
return lang
return None
@staticmethod
def byLexer( lexer ):
"""
        Looks up the language plugin by the lexer class of the given lexer.
:param lexer | <QsciLexer>
:return <XLanguage> || None
"""
XLanguage.load()
lexerType = type(lexer)
for lang in XLanguage._plugins.values():
if ( lang.lexerType() == lexerType ):
return lang
return None
@staticmethod
def byName( name ):
"""
Looks up the language plugin by the name of the language.
:param name | <str>
:return <XLanguage> || None
"""
XLanguage.load()
return XLanguage._plugins.get(nativestring(name))
@staticmethod
def fromConfig( filename ):
parser = ConfigParser()
if not parser.read(filename):
return False
plugin = XLanguage()
plugin._name = parser.get('GLOBALS','name')
plugin._fileTypes = parser.get('GLOBALS','filetypes').split(';')
try:
plugin._tabWidth = int(parser.get('GLOBALS', 'tabwidth'))
except:
pass
try:
colorKeys = parser.options('COLOR_TYPES')
except:
colorKeys = []
colorStyles = {}
for colorKey in colorKeys:
values = parser.get('COLOR_TYPES', colorKey)
if not values:
continue
colorStyles[colorKey.capitalize()] = map(int, values.split(','))
plugin._colorStyles = colorStyles
# try to load the line comment information
try:
plugin._lineComment = parser.get('GLOBALS','linecomment')
except:
pass
# try to load the lexer information
try:
plugin._lexerTypeName = parser.get('LEXER','class')
plugin._lexerModule = parser.get('LEXER','module')
except:
pass
# try to load the lexer properties
try:
options = parser.options('LEXER_PROPERTIES')
except:
options = []
props = {}
for option in options:
try:
key, value = parser.get('LEXER_PROPERTIES', option).split(':')
except:
continue
try:
value = eval(value)
except:
pass
props[key] = value
plugin._lexerProperties = props
# load the different descriptor options
try:
options = parser.options('DESCRIPTORS')
except:
options = []
for option in options:
expr = parser.get('DESCRIPTORS',option)
option = re.match('([^\d]*)\d*',option).groups()[0]
plugin._descriptors.append(XMethodDescriptor(option,expr))
# load the different color map options
try:
options = parser.options('COLOR_TYPES')
except:
options = []
for option in options:
vals = []
for val in parser.get('COLOR_TYPES',option).split(','):
if not val:
continue
try:
vals.append(int(val))
except:
pass
plugin._lexerColorTypes[option] = vals
plugin._sourcefile = filename
return plugin
@staticmethod
def loadPlugins(path, custom = False):
path = projex.environ().expandvars(path)
if ( not os.path.exists(path) ):
return False
files = glob.glob(os.path.join(path, '*.ini'))
for file in files:
plugin = XLanguage.fromConfig(file)
if ( plugin ):
plugin.setCustom(custom)
XLanguage._plugins[plugin.name()] = plugin
else:
logger.warning('Could not import %s' % file)
@staticmethod
def load():
if XLanguage._plugins:
return
# load the installed plugins
XLanguage.loadPlugins(os.path.dirname(__file__) + '/lang')
# load additional languages
for key in os.environ.keys():
if key.startswith('PROJEX_XLANG_PATH_'):
XLanguage.loadPlugins(os.environ[key])
@staticmethod
def refresh():
XLanguage._plugins.clear()
XLanguage.load()
@staticmethod
def pluginLanguages():
XLanguage.load()
return sorted(XLanguage._plugins.keys())
@staticmethod
def pluginFileTypes():
XLanguage.load()
keys = sorted(XLanguage._plugins.keys())
output = []
output.append( 'All Files (*.*)' )
output.append( 'Text Files (*.txt)' )
for key in keys:
ptypes = '*'+';*'.join(XLanguage._plugins[key].fileTypes())
output.append( '%s Files (%s)' % (key, ptypes) )
return ';;'.join(output)
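# A minimal usage sketch; the exact file-type string depends on the shipped
# .ini definitions, and 'edit' would be an XScintillaEdit instance:
#
#   lang = XLanguage.byFileType('.py')
#   if lang is not None:
#       lexer = lang.createLexer(edit)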
|
windskyer/nova
|
refs/heads/master
|
nova/objectstore/s3server.py
|
1
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2010 OpenStack Foundation
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of an S3-like storage server based on local files.
Useful to test features that will eventually run on S3, or if you want to
run something locally that was once running on S3.
We don't support all the features of S3, but it does work with the
standard S3 client for the most basic semantics. To use the standard
S3 client with this module::
c = S3.AWSAuthConnection("", "", server="localhost", port=8888,
is_secure=False)
c.create_bucket("mybucket")
c.put("mybucket", "mykey", "a value")
print c.get("mybucket", "mykey").body
"""
import bisect
import datetime
import os
import os.path
import urllib
from oslo_config import cfg
from oslo_log import log as logging
from oslo_log import versionutils
from oslo_utils import fileutils
import routes
import six
import webob
from nova.i18n import _LW
from nova import paths
from nova import utils
from nova import wsgi
LOG = logging.getLogger(__name__)
s3_opts = [
cfg.StrOpt('buckets_path',
default=paths.state_path_def('buckets'),
help='Path to S3 buckets'),
cfg.StrOpt('s3_listen',
default="0.0.0.0",
help='IP address for S3 API to listen'),
cfg.IntOpt('s3_listen_port',
default=3333,
min=1,
max=65535,
help='Port for S3 API to listen'),
]
CONF = cfg.CONF
CONF.register_opts(s3_opts)
def get_wsgi_server():
return wsgi.Server("S3 Objectstore",
S3Application(CONF.buckets_path),
port=CONF.s3_listen_port,
host=CONF.s3_listen)
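# A minimal sketch of running the objectstore standalone, assuming the
# start()/wait() interface of nova.wsgi.Server:
#
#   server = get_wsgi_server()
#   server.start()
#   server.wait()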
class S3Application(wsgi.Router):
"""Implementation of an S3-like storage server based on local files.
If bucket depth is given, we break files up into multiple directories
to prevent hitting file system limits for number of files in each
directories. 1 means one level of directories, 2 means 2, etc.
"""
def __init__(self, root_directory, bucket_depth=0, mapper=None):
versionutils.report_deprecated_feature(
LOG,
_LW('The in tree EC2 API is deprecated as of Kilo release and may '
'be removed in a future release. The openstack ec2-api '
'project http://git.openstack.org/cgit/openstack/ec2-api/ '
'is the target replacement for this functionality.')
)
if mapper is None:
mapper = routes.Mapper()
mapper.connect('/',
controller=lambda *a, **kw: RootHandler(self)(*a, **kw))
mapper.connect('/{bucket}/{object_name}',
controller=lambda *a, **kw: ObjectHandler(self)(*a, **kw))
mapper.connect('/{bucket_name}/',
controller=lambda *a, **kw: BucketHandler(self)(*a, **kw))
self.directory = os.path.abspath(root_directory)
fileutils.ensure_tree(self.directory)
self.bucket_depth = bucket_depth
super(S3Application, self).__init__(mapper)
class BaseRequestHandler(object):
"""Base class emulating Tornado's web framework pattern in WSGI.
This is a direct port of Tornado's implementation, so some key decisions
about how the code interacts have already been chosen.
The two most common ways of designing web frameworks can be
classified as async object-oriented and sync functional.
Tornado's is on the OO side because a response is built up in and using
the shared state of an object and one of the object's methods will
eventually trigger the "finishing" of the response asynchronously.
    Most WSGI stuff is on the functional side: we pass a request object to
every call down a chain and the eventual return value will be a response.
Part of the function of the routing code in S3Application as well as the
code in BaseRequestHandler's __call__ method is to merge those two styles
together enough that the Tornado code can work without extensive
modifications.
To do that it needs to give the Tornado-style code clean objects that it
can modify the state of for each request that is processed, so we use a
very simple factory lambda to create new state for each request, that's
the stuff in the router, and when we let the Tornado code modify that
object to handle the request, then we return the response it generated.
This wouldn't work the same if Tornado was being more async'y and doing
other callbacks throughout the process, but since Tornado is being
relatively simple here we can be satisfied that the response will be
complete by the end of the get/post method.
"""
def __init__(self, application):
self.application = application
@webob.dec.wsgify
def __call__(self, request):
method = request.method.lower()
f = getattr(self, method, self.invalid)
self.request = request
self.response = webob.Response()
params = request.environ['wsgiorg.routing_args'][1]
del params['controller']
f(**params)
return self.response
def get_argument(self, arg, default):
return self.request.params.get(arg, default)
def set_header(self, header, value):
self.response.headers[header] = value
def set_status(self, status_code):
self.response.status = status_code
def set_404(self):
self.render_xml({"Error": {
"Code": "NoSuchKey",
"Message": "The resource you requested does not exist"
}})
self.set_status(404)
def finish(self, body=''):
self.response.body = utils.utf8(body)
def invalid(self, **kwargs):
pass
def render_xml(self, value):
assert isinstance(value, dict) and len(value) == 1
self.set_header("Content-Type", "application/xml; charset=UTF-8")
name = list(value.keys())[0]
parts = []
parts.append('<' + utils.utf8(name) +
' xmlns="http://doc.s3.amazonaws.com/2006-03-01">')
self._render_parts(list(value.values())[0], parts)
parts.append('</' + utils.utf8(name) + '>')
self.finish('<?xml version="1.0" encoding="UTF-8"?>\n' +
''.join(parts))
def _render_parts(self, value, parts=None):
if not parts:
parts = []
if isinstance(value, six.string_types):
parts.append(utils.xhtml_escape(value))
elif type(value) in six.integer_types:
parts.append(str(value))
elif isinstance(value, bool):
parts.append(str(value))
elif isinstance(value, datetime.datetime):
parts.append(value.strftime("%Y-%m-%dT%H:%M:%S.000Z"))
elif isinstance(value, dict):
for name, subvalue in six.iteritems(value):
if not isinstance(subvalue, list):
subvalue = [subvalue]
for subsubvalue in subvalue:
parts.append('<' + utils.utf8(name) + '>')
self._render_parts(subsubvalue, parts)
parts.append('</' + utils.utf8(name) + '>')
else:
raise Exception("Unknown S3 value type %r", value)
def _object_path(self, bucket, object_name):
if self.application.bucket_depth < 1:
return os.path.abspath(os.path.join(
self.application.directory, bucket, object_name))
hash = utils.get_hash_str(object_name)
path = os.path.abspath(os.path.join(
self.application.directory, bucket))
for i in range(self.application.bucket_depth):
path = os.path.join(path, hash[:2 * (i + 1)])
return os.path.join(path, object_name)
class RootHandler(BaseRequestHandler):
def get(self):
names = os.listdir(self.application.directory)
buckets = []
for name in names:
path = os.path.join(self.application.directory, name)
info = os.stat(path)
buckets.append({
"Name": name,
"CreationDate": datetime.datetime.utcfromtimestamp(
info.st_ctime),
})
self.render_xml({"ListAllMyBucketsResult": {
"Buckets": {"Bucket": buckets},
}})
class BucketHandler(BaseRequestHandler):
def get(self, bucket_name):
prefix = self.get_argument("prefix", u"")
marker = self.get_argument("marker", u"")
max_keys = int(self.get_argument("max-keys", 50000))
path = os.path.abspath(os.path.join(self.application.directory,
bucket_name))
terse = int(self.get_argument("terse", 0))
if (not path.startswith(self.application.directory) or
not os.path.isdir(path)):
self.set_404()
return
object_names = []
for root, dirs, files in os.walk(path):
for file_name in files:
object_names.append(os.path.join(root, file_name))
skip = len(path) + 1
for i in range(self.application.bucket_depth):
skip += 2 * (i + 1) + 1
object_names = [n[skip:] for n in object_names]
object_names.sort()
contents = []
start_pos = 0
if marker:
start_pos = bisect.bisect_right(object_names, marker, start_pos)
if prefix:
start_pos = bisect.bisect_left(object_names, prefix, start_pos)
truncated = False
for object_name in object_names[start_pos:]:
if not object_name.startswith(prefix):
break
if len(contents) >= max_keys:
truncated = True
break
object_path = self._object_path(bucket_name, object_name)
c = {"Key": object_name}
if not terse:
info = os.stat(object_path)
c.update({
"LastModified": datetime.datetime.utcfromtimestamp(
info.st_mtime),
"Size": info.st_size,
})
contents.append(c)
marker = object_name
self.render_xml({"ListBucketResult": {
"Name": bucket_name,
"Prefix": prefix,
"Marker": marker,
"MaxKeys": max_keys,
"IsTruncated": truncated,
"Contents": contents,
}})
def put(self, bucket_name):
path = os.path.abspath(os.path.join(
self.application.directory, bucket_name))
if (not path.startswith(self.application.directory) or
os.path.exists(path)):
self.set_status(403)
return
fileutils.ensure_tree(path)
self.finish()
def delete(self, bucket_name):
path = os.path.abspath(os.path.join(
self.application.directory, bucket_name))
if (not path.startswith(self.application.directory) or
not os.path.isdir(path)):
self.set_404()
return
if len(os.listdir(path)) > 0:
self.set_status(403)
return
os.rmdir(path)
self.set_status(204)
self.finish()
def head(self, bucket_name):
path = os.path.abspath(os.path.join(self.application.directory,
bucket_name))
if (not path.startswith(self.application.directory) or
not os.path.isdir(path)):
self.set_404()
return
self.set_status(200)
self.finish()
class ObjectHandler(BaseRequestHandler):
def get(self, bucket, object_name):
object_name = urllib.unquote(object_name)
path = self._object_path(bucket, object_name)
if (not path.startswith(self.application.directory) or
not os.path.isfile(path)):
self.set_404()
return
info = os.stat(path)
self.set_header("Content-Type", "application/unknown")
self.set_header("Last-Modified", datetime.datetime.utcfromtimestamp(
info.st_mtime))
with open(path, "r") as object_file:
self.finish(object_file.read())
def put(self, bucket, object_name):
object_name = urllib.unquote(object_name)
bucket_dir = os.path.abspath(os.path.join(
self.application.directory, bucket))
if (not bucket_dir.startswith(self.application.directory) or
not os.path.isdir(bucket_dir)):
self.set_404()
return
path = self._object_path(bucket, object_name)
if not path.startswith(bucket_dir) or os.path.isdir(path):
self.set_status(403)
return
directory = os.path.dirname(path)
fileutils.ensure_tree(directory)
with open(path, "w") as object_file:
object_file.write(self.request.body)
self.set_header('ETag',
'"%s"' % utils.get_hash_str(self.request.body))
self.finish()
def delete(self, bucket, object_name):
object_name = urllib.unquote(object_name)
path = self._object_path(bucket, object_name)
if (not path.startswith(self.application.directory) or
not os.path.isfile(path)):
self.set_404()
return
os.unlink(path)
self.set_status(204)
self.finish()
|
appneta/boto
|
refs/heads/develop
|
tests/integration/ec2/elb/test_cert_verification.py
|
114
|
# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Check that all of the certs on all service endpoints validate.
"""
from tests.integration import ServiceCertVerificationTest
from tests.compat import unittest
import boto.ec2.elb
class ELBCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
elb = True
regions = boto.ec2.elb.regions()
def sample_service_call(self, conn):
conn.get_all_load_balancers()
|
ngonzalvez/sentry
|
refs/heads/master
|
tests/sentry/nodestore/riak/__init__.py
|
12133432
| |
xbonderos/gide_aion
|
refs/heads/master
|
aion/pageviews/__init__.py
|
12133432
| |
nju520/django
|
refs/heads/master
|
tests/model_inheritance/same_model_name/__init__.py
|
12133432
| |
dol-sen/portage
|
refs/heads/master
|
pym/portage/tests/resolver/soname/test_unsatisfiable.py
|
10
|
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground, ResolverPlaygroundTestCase)
class SonameUnsatisfiableTestCase(TestCase):
def testSonameUnsatisfiable(self):
binpkgs = {
"app-misc/A-1" : {
"EAPI": "5",
"PROVIDES": "x86_32: libA.so.1",
},
"app-misc/B-1" : {
"DEPEND": "app-misc/A",
"RDEPEND": "app-misc/A",
"REQUIRES": "x86_32: libA.so.2",
},
"app-misc/B-0" : {
"DEPEND": "app-misc/A",
"RDEPEND": "app-misc/A",
"REQUIRES": "x86_32: libA.so.1",
},
}
installed = {
"app-misc/A-1" : {
"EAPI": "5",
"PROVIDES": "x86_32: libA.so.1",
},
"app-misc/B-0" : {
"DEPEND": "app-misc/A",
"RDEPEND": "app-misc/A",
"REQUIRES": "x86_32: libA.so.1",
},
}
world = ["app-misc/B"]
test_cases = (
# Skip update due to unsatisfied soname dependency.
ResolverPlaygroundTestCase(
["@world"],
options = {
"--deep": True,
"--ignore-soname-deps": "n",
"--update": True,
"--usepkgonly": True,
},
success = True,
mergelist = [],
),
)
playground = ResolverPlayground(binpkgs=binpkgs, debug=False,
installed=installed, world=world)
try:
for test_case in test_cases:
playground.run_TestCase(test_case)
self.assertEqual(
test_case.test_success, True, test_case.fail_msg)
finally:
# Disable debug so that cleanup works.
playground.debug = False
playground.cleanup()
|
bswartz/cinder
|
refs/heads/master
|
cinder/tests/unit/monkey_patch_example/example_a.py
|
127
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Example Module A for testing utils.monkey_patch()."""
def example_function_a():
return 'Example function'
class ExampleClassA(object):
def example_method(self):
return 'Example method'
def example_method_add(self, arg1, arg2):
return arg1 + arg2
|
ATIX-AG/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/avi/avi_alertscriptconfig.py
|
41
|
#!/usr/bin/python
#
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
#
# Copyright: (c) 2017 Gaurav Rastogi, <grastogi@avinetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_alertscriptconfig
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of AlertScriptConfig Avi RESTful Object
description:
- This module is used to configure AlertScriptConfig object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
action_script:
description:
- User defined alert action script.
- Please refer to kb.avinetworks.com for more information.
name:
description:
- A user-friendly name of the script.
required: true
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Create Alert Script to perform AWS server autoscaling
avi_alertscriptconfig:
username: '{{ username }}'
controller: '{{ controller }}'
password: '{{ password }}'
action_script: "echo Hello"
name: AWS-Launch-Script
tenant_ref: Demo
"""
RETURN = '''
obj:
description: AlertScriptConfig (api/alertscriptconfig) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
avi_api_update_method=dict(default='put',
choices=['put', 'patch']),
avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
action_script=dict(type='str',),
name=dict(type='str', required=True),
tenant_ref=dict(type='str',),
url=dict(type='str',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'alertscriptconfig',
set([]))
if __name__ == '__main__':
main()
|
benjamindeleener/odoo
|
refs/heads/master
|
addons/report/__openerp__.py
|
27
|
{
'name': 'Report',
'category': 'Base',
'summary': 'Report',
'version': '1.0',
'description': """
Report
""",
'depends': ['base', 'web'],
'data': [
'views/layouts.xml',
'views/views.xml',
'data/report_paperformat.xml',
'security/ir.model.access.csv',
'views/report.xml',
],
'installable': True,
'auto_install': True,
}
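# Illustrative note, not part of the original manifest: with 'auto_install'
# set to True this module is installed automatically as soon as all of its
# 'depends' modules ('base' and 'web') are installed.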
|
fgesora/odoo
|
refs/heads/8.0
|
openerp/addons/test_inherits/models.py
|
295
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api, osv
# We just create a new model
class Unit(models.Model):
_name = 'test.unit'
_columns = {
'name': osv.fields.char('Name', required=True),
'state': osv.fields.selection([('a', 'A'), ('b', 'B')],
string='State'),
}
surname = fields.Char(compute='_compute_surname')
@api.one
@api.depends('name')
def _compute_surname(self):
self.surname = self.name or ''
# We want to _inherits from the parent model and we add some fields
# in the child object
class Box(models.Model):
_name = 'test.box'
_inherits = {'test.unit': 'unit_id'}
unit_id = fields.Many2one('test.unit', 'Unit', required=True,
ondelete='cascade')
field_in_box = fields.Char('Field1')
# We add a third level of _inherits
class Pallet(models.Model):
_name = 'test.pallet'
_inherits = {'test.box': 'box_id'}
box_id = fields.Many2one('test.box', 'Box', required=True,
ondelete='cascade')
field_in_pallet = fields.Char('Field2')
|
shoheietzel/proj5-maps
|
refs/heads/master
|
env/lib/python3.6/site-packages/pip/_vendor/webencodings/labels.py
|
512
|
"""
webencodings.labels
~~~~~~~~~~~~~~~~~~~
Map encoding labels to their name.
:copyright: Copyright 2012 by Simon Sapin
:license: BSD, see LICENSE for details.
"""
# XXX Do not edit!
# This file is automatically generated by mklabels.py
LABELS = {
'unicode-1-1-utf-8': 'utf-8',
'utf-8': 'utf-8',
'utf8': 'utf-8',
'866': 'ibm866',
'cp866': 'ibm866',
'csibm866': 'ibm866',
'ibm866': 'ibm866',
'csisolatin2': 'iso-8859-2',
'iso-8859-2': 'iso-8859-2',
'iso-ir-101': 'iso-8859-2',
'iso8859-2': 'iso-8859-2',
'iso88592': 'iso-8859-2',
'iso_8859-2': 'iso-8859-2',
'iso_8859-2:1987': 'iso-8859-2',
'l2': 'iso-8859-2',
'latin2': 'iso-8859-2',
'csisolatin3': 'iso-8859-3',
'iso-8859-3': 'iso-8859-3',
'iso-ir-109': 'iso-8859-3',
'iso8859-3': 'iso-8859-3',
'iso88593': 'iso-8859-3',
'iso_8859-3': 'iso-8859-3',
'iso_8859-3:1988': 'iso-8859-3',
'l3': 'iso-8859-3',
'latin3': 'iso-8859-3',
'csisolatin4': 'iso-8859-4',
'iso-8859-4': 'iso-8859-4',
'iso-ir-110': 'iso-8859-4',
'iso8859-4': 'iso-8859-4',
'iso88594': 'iso-8859-4',
'iso_8859-4': 'iso-8859-4',
'iso_8859-4:1988': 'iso-8859-4',
'l4': 'iso-8859-4',
'latin4': 'iso-8859-4',
'csisolatincyrillic': 'iso-8859-5',
'cyrillic': 'iso-8859-5',
'iso-8859-5': 'iso-8859-5',
'iso-ir-144': 'iso-8859-5',
'iso8859-5': 'iso-8859-5',
'iso88595': 'iso-8859-5',
'iso_8859-5': 'iso-8859-5',
'iso_8859-5:1988': 'iso-8859-5',
'arabic': 'iso-8859-6',
'asmo-708': 'iso-8859-6',
'csiso88596e': 'iso-8859-6',
'csiso88596i': 'iso-8859-6',
'csisolatinarabic': 'iso-8859-6',
'ecma-114': 'iso-8859-6',
'iso-8859-6': 'iso-8859-6',
'iso-8859-6-e': 'iso-8859-6',
'iso-8859-6-i': 'iso-8859-6',
'iso-ir-127': 'iso-8859-6',
'iso8859-6': 'iso-8859-6',
'iso88596': 'iso-8859-6',
'iso_8859-6': 'iso-8859-6',
'iso_8859-6:1987': 'iso-8859-6',
'csisolatingreek': 'iso-8859-7',
'ecma-118': 'iso-8859-7',
'elot_928': 'iso-8859-7',
'greek': 'iso-8859-7',
'greek8': 'iso-8859-7',
'iso-8859-7': 'iso-8859-7',
'iso-ir-126': 'iso-8859-7',
'iso8859-7': 'iso-8859-7',
'iso88597': 'iso-8859-7',
'iso_8859-7': 'iso-8859-7',
'iso_8859-7:1987': 'iso-8859-7',
'sun_eu_greek': 'iso-8859-7',
'csiso88598e': 'iso-8859-8',
'csisolatinhebrew': 'iso-8859-8',
'hebrew': 'iso-8859-8',
'iso-8859-8': 'iso-8859-8',
'iso-8859-8-e': 'iso-8859-8',
'iso-ir-138': 'iso-8859-8',
'iso8859-8': 'iso-8859-8',
'iso88598': 'iso-8859-8',
'iso_8859-8': 'iso-8859-8',
'iso_8859-8:1988': 'iso-8859-8',
'visual': 'iso-8859-8',
'csiso88598i': 'iso-8859-8-i',
'iso-8859-8-i': 'iso-8859-8-i',
'logical': 'iso-8859-8-i',
'csisolatin6': 'iso-8859-10',
'iso-8859-10': 'iso-8859-10',
'iso-ir-157': 'iso-8859-10',
'iso8859-10': 'iso-8859-10',
'iso885910': 'iso-8859-10',
'l6': 'iso-8859-10',
'latin6': 'iso-8859-10',
'iso-8859-13': 'iso-8859-13',
'iso8859-13': 'iso-8859-13',
'iso885913': 'iso-8859-13',
'iso-8859-14': 'iso-8859-14',
'iso8859-14': 'iso-8859-14',
'iso885914': 'iso-8859-14',
'csisolatin9': 'iso-8859-15',
'iso-8859-15': 'iso-8859-15',
'iso8859-15': 'iso-8859-15',
'iso885915': 'iso-8859-15',
'iso_8859-15': 'iso-8859-15',
'l9': 'iso-8859-15',
'iso-8859-16': 'iso-8859-16',
'cskoi8r': 'koi8-r',
'koi': 'koi8-r',
'koi8': 'koi8-r',
'koi8-r': 'koi8-r',
'koi8_r': 'koi8-r',
'koi8-u': 'koi8-u',
'csmacintosh': 'macintosh',
'mac': 'macintosh',
'macintosh': 'macintosh',
'x-mac-roman': 'macintosh',
'dos-874': 'windows-874',
'iso-8859-11': 'windows-874',
'iso8859-11': 'windows-874',
'iso885911': 'windows-874',
'tis-620': 'windows-874',
'windows-874': 'windows-874',
'cp1250': 'windows-1250',
'windows-1250': 'windows-1250',
'x-cp1250': 'windows-1250',
'cp1251': 'windows-1251',
'windows-1251': 'windows-1251',
'x-cp1251': 'windows-1251',
'ansi_x3.4-1968': 'windows-1252',
'ascii': 'windows-1252',
'cp1252': 'windows-1252',
'cp819': 'windows-1252',
'csisolatin1': 'windows-1252',
'ibm819': 'windows-1252',
'iso-8859-1': 'windows-1252',
'iso-ir-100': 'windows-1252',
'iso8859-1': 'windows-1252',
'iso88591': 'windows-1252',
'iso_8859-1': 'windows-1252',
'iso_8859-1:1987': 'windows-1252',
'l1': 'windows-1252',
'latin1': 'windows-1252',
'us-ascii': 'windows-1252',
'windows-1252': 'windows-1252',
'x-cp1252': 'windows-1252',
'cp1253': 'windows-1253',
'windows-1253': 'windows-1253',
'x-cp1253': 'windows-1253',
'cp1254': 'windows-1254',
'csisolatin5': 'windows-1254',
'iso-8859-9': 'windows-1254',
'iso-ir-148': 'windows-1254',
'iso8859-9': 'windows-1254',
'iso88599': 'windows-1254',
'iso_8859-9': 'windows-1254',
'iso_8859-9:1989': 'windows-1254',
'l5': 'windows-1254',
'latin5': 'windows-1254',
'windows-1254': 'windows-1254',
'x-cp1254': 'windows-1254',
'cp1255': 'windows-1255',
'windows-1255': 'windows-1255',
'x-cp1255': 'windows-1255',
'cp1256': 'windows-1256',
'windows-1256': 'windows-1256',
'x-cp1256': 'windows-1256',
'cp1257': 'windows-1257',
'windows-1257': 'windows-1257',
'x-cp1257': 'windows-1257',
'cp1258': 'windows-1258',
'windows-1258': 'windows-1258',
'x-cp1258': 'windows-1258',
'x-mac-cyrillic': 'x-mac-cyrillic',
'x-mac-ukrainian': 'x-mac-cyrillic',
'chinese': 'gbk',
'csgb2312': 'gbk',
'csiso58gb231280': 'gbk',
'gb2312': 'gbk',
'gb_2312': 'gbk',
'gb_2312-80': 'gbk',
'gbk': 'gbk',
'iso-ir-58': 'gbk',
'x-gbk': 'gbk',
'gb18030': 'gb18030',
'hz-gb-2312': 'hz-gb-2312',
'big5': 'big5',
'big5-hkscs': 'big5',
'cn-big5': 'big5',
'csbig5': 'big5',
'x-x-big5': 'big5',
'cseucpkdfmtjapanese': 'euc-jp',
'euc-jp': 'euc-jp',
'x-euc-jp': 'euc-jp',
'csiso2022jp': 'iso-2022-jp',
'iso-2022-jp': 'iso-2022-jp',
'csshiftjis': 'shift_jis',
'ms_kanji': 'shift_jis',
'shift-jis': 'shift_jis',
'shift_jis': 'shift_jis',
'sjis': 'shift_jis',
'windows-31j': 'shift_jis',
'x-sjis': 'shift_jis',
'cseuckr': 'euc-kr',
'csksc56011987': 'euc-kr',
'euc-kr': 'euc-kr',
'iso-ir-149': 'euc-kr',
'korean': 'euc-kr',
'ks_c_5601-1987': 'euc-kr',
'ks_c_5601-1989': 'euc-kr',
'ksc5601': 'euc-kr',
'ksc_5601': 'euc-kr',
'windows-949': 'euc-kr',
'csiso2022kr': 'iso-2022-kr',
'iso-2022-kr': 'iso-2022-kr',
'utf-16be': 'utf-16be',
'utf-16': 'utf-16le',
'utf-16le': 'utf-16le',
'x-user-defined': 'x-user-defined',
}
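if __name__ == '__main__':
    # Illustrative usage sketch, not part of the generated file: callers are
    # expected to normalize a label (strip whitespace, ASCII-lowercase) before
    # looking it up, which is what the webencodings package itself does.
    for label in ('UTF8', ' Latin1 ', 'no-such-label'):
        print(label, '->', LABELS.get(label.strip().lower()))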
|
yglazko/socorro
|
refs/heads/master
|
socorro/external/postgresql/correlations.py
|
10
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import logging
from socorro.external.postgresql.base import PostgreSQLBase
from socorro.lib import datetimeutil, external_common
from socorro.external import BadArgumentError
class Correlations(PostgreSQLBase):
def get(self, **kwargs):
filters = [
("report_date", None, "datetime"),
("report_type", None, "str"),
("product", None, "str"),
("version", None, "str"),
("signature", None, "str"),
("platform", None, "str"),
("min_crashes", 10, "int"),
("min_baseline_diff", 0.05, "float"),
]
params = external_common.parse_arguments(filters, kwargs)
hits = []
if params['report_type'] == 'interesting-addons':
hits = self.interesting_addons(params)
elif params['report_type'] == 'interesting-modules':
hits = self.interesting_modules(params)
elif params['report_type'] == 'interesting-addons-with-version':
hits = self.interesting_addons_with_version(params)
elif params['report_type'] == 'interesting-modules-with-version':
hits = self.interesting_modules_with_version(params)
elif params['report_type'] == 'core-counts':
hits = self.core_counts(params)
else:
raise BadArgumentError(
'report_type',
                received=params['report_type']
)
return {
'hits': hits,
'total': len(hits)
}
def interesting_addons(self, params):
sql = """
/* socorro.external.postgresql.correlations.Correlations.get(addons)*/
WITH total_for_sig AS (
SELECT
sum(total)
FROM correlations_addon
JOIN product_versions USING (product_version_id)
JOIN signatures USING (signature_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
AND signature = %(signature)s
),
total_for_os AS (
SELECT
sum(total)
FROM correlations_addon
JOIN product_versions USING (product_version_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
),
crashes_for_sig AS (
SELECT
sum(total) AS crashes_for_sig,
reason_id,
addon_id
FROM correlations_addon
JOIN product_versions USING (product_version_id)
JOIN signatures USING (signature_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
AND signature = %(signature)s
GROUP BY addon_id, reason_id
),
crashes_for_os AS (
SELECT
sum(total) AS crashes_for_os,
addon_id,
reason_id
FROM correlations_addon
JOIN product_versions USING (product_version_id)
WHERE report_date = %(report_date)s
AND os_name = %(platform)s
AND product_name = %(product)s
AND version_string = %(version)s
GROUP BY addon_id, reason_id
)
SELECT
(SELECT sum
FROM total_for_sig) AS total_for_sig,
(SELECT sum
FROM total_for_os) AS total_for_os,
crashes_for_sig,
crashes_for_os,
(crashes_for_sig::float / (SELECT sum FROM total_for_sig)::float) * 100
AS in_sig_ratio,
(crashes_for_os::float / (SELECT sum FROM total_for_os)::float) * 100
AS in_os_ratio,
addon_id,
reason
FROM crashes_for_sig
JOIN crashes_for_os USING (addon_id, reason_id)
JOIN reasons USING (reason_id)
WHERE crashes_for_sig >= %(min_crashes)s
AND ((crashes_for_sig::float / (SELECT sum FROM total_for_sig)::float)
- (crashes_for_os::float / (SELECT sum FROM total_for_os)::float)
>= %(min_baseline_diff)s)
;
"""
        error_message = ('Failed to retrieve correlations addon data '
                         'from PostgreSQL')
sql_results = self.query(sql, params, error_message=error_message)
fields = (
"total_for_sig",
"total_for_os",
"crashes_for_sig",
"crashes_for_os",
"in_sig_ratio",
"in_os_ratio",
"addon_id",
"reason",
)
return [dict(zip(fields, row)) for row in sql_results]
def interesting_modules(self, params):
sql = """
/* socorro.external.postgresql.correlations.Correlations.get(modules)*/
WITH total_for_sig AS (
SELECT
sum(total)
FROM correlations_module
JOIN product_versions USING (product_version_id)
JOIN signatures USING (signature_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
AND signature = %(signature)s
),
total_for_os AS (
SELECT
sum(total)
FROM correlations_module
JOIN product_versions USING (product_version_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
),
crashes_for_sig AS (
SELECT
sum(total) AS crashes_for_sig,
reason_id,
module_id
FROM correlations_module
JOIN product_versions USING (product_version_id)
JOIN signatures USING (signature_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
AND signature = %(signature)s
GROUP BY module_id, reason_id
),
crashes_for_os AS (
SELECT
sum(total) AS crashes_for_os,
module_id,
reason_id
FROM correlations_module
JOIN product_versions USING (product_version_id)
WHERE report_date = %(report_date)s
AND os_name = %(platform)s
AND product_name = %(product)s
AND version_string = %(version)s
GROUP BY module_id, reason_id
)
SELECT
(SELECT sum
FROM total_for_sig) AS total_for_sig,
(SELECT sum
FROM total_for_os) AS total_for_os,
crashes_for_sig,
crashes_for_os,
(crashes_for_sig::float / (SELECT sum FROM total_for_sig)::float) * 100
AS in_sig_ratio,
(crashes_for_os::float / (SELECT sum FROM total_for_os)::float) * 100
AS in_os_ratio,
modules.name AS module_name,
reason
FROM crashes_for_sig
JOIN crashes_for_os USING (module_id, reason_id)
JOIN reasons USING (reason_id)
JOIN modules USING (module_id)
WHERE crashes_for_sig >= %(min_crashes)s
AND ((crashes_for_sig::float / (SELECT sum FROM total_for_sig)::float)
- (crashes_for_os::float / (SELECT sum FROM total_for_os)::float)
>= %(min_baseline_diff)s)
;
"""
        error_message = ('Failed to retrieve correlations module data '
                         'from PostgreSQL')
sql_results = self.query(sql, params, error_message=error_message)
fields = (
"total_for_sig",
"total_for_os",
"crashes_for_sig",
"crashes_for_os",
"in_sig_ratio",
"in_os_ratio",
"module_name",
"reason",
)
return [dict(zip(fields, row)) for row in sql_results]
def interesting_addons_with_version(self, params):
sql = """
/* socorro.external.postgresql.correlations.Correlations.get(addons-version)*/
WITH total_for_sig AS (
SELECT
sum(total)
FROM correlations_addon
JOIN product_versions USING (product_version_id)
JOIN signatures USING (signature_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
AND signature = %(signature)s
),
total_for_os AS (
SELECT
sum(total)
FROM correlations_addon
JOIN product_versions USING (product_version_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
),
crashes_for_sig AS (
SELECT
sum(total) AS crashes_for_sig,
reason,
addon_id,
addon_version
FROM correlations_addon
JOIN product_versions USING (product_version_id)
JOIN signatures USING (signature_id)
JOIN reasons USING (reason_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
AND signature = %(signature)s
GROUP BY reason, addon_id, addon_version
),
crashes_for_os AS (
SELECT
sum(total) AS crashes_for_os,
reason,
addon_id,
addon_version
FROM correlations_addon
JOIN product_versions USING (product_version_id)
JOIN reasons USING (reason_id)
WHERE report_date = %(report_date)s
AND os_name = %(platform)s
AND product_name = %(product)s
AND version_string = %(version)s
GROUP BY reason, addon_id, addon_version
)
SELECT
(SELECT sum
FROM total_for_sig) AS total_for_sig,
(SELECT sum
FROM total_for_os) AS total_for_os,
crashes_for_sig,
crashes_for_os,
(crashes_for_sig::float / (SELECT sum FROM total_for_sig)::float) * 100
AS in_sig_ratio,
(crashes_for_os::float / (SELECT sum FROM total_for_os)::float) * 100
AS in_os_ratio,
crashes_for_sig.addon_id,
crashes_for_sig.addon_version,
crashes_for_sig.reason
FROM crashes_for_sig
JOIN crashes_for_os USING (reason, addon_id, addon_version)
WHERE crashes_for_sig >= %(min_crashes)s
AND ((crashes_for_sig::float / (SELECT sum FROM total_for_sig)::float)
- (crashes_for_os::float / (SELECT sum FROM total_for_os)::float)
>= %(min_baseline_diff)s)
;
"""
        error_message = ('Failed to retrieve correlations addon data '
                         'from PostgreSQL')
sql_results = self.query(sql, params, error_message=error_message)
fields = (
"total_for_sig",
"total_for_os",
"crashes_for_sig",
"crashes_for_os",
"in_sig_ratio",
"in_os_ratio",
"addon_id",
"addon_version",
"reason",
)
return [dict(zip(fields, row)) for row in sql_results]
def interesting_modules_with_version(self, params):
sql = """
/* socorro.external.postgresql.correlations.Correlations.get(modules-version)*/
WITH total_for_sig AS (
SELECT
sum(total)
FROM correlations_module
JOIN product_versions USING (product_version_id)
JOIN signatures USING (signature_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
AND signature = %(signature)s
),
total_for_os AS (
SELECT
sum(total)
FROM correlations_module
JOIN product_versions USING (product_version_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
),
crashes_for_sig AS (
SELECT
sum(total) AS crashes_for_sig,
reason_id,
module_id
FROM correlations_module
JOIN product_versions USING (product_version_id)
JOIN signatures USING (signature_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
AND signature = %(signature)s
GROUP BY reason_id, module_id
),
crashes_for_os AS (
SELECT
sum(total) AS crashes_for_os,
reason_id,
module_id
FROM correlations_module
JOIN product_versions USING (product_version_id)
WHERE report_date = %(report_date)s
AND os_name = %(platform)s
AND product_name = %(product)s
AND version_string = %(version)s
GROUP BY reason_id, module_id
)
SELECT
(SELECT sum
FROM total_for_sig) AS total_for_sig,
(SELECT sum
FROM total_for_os) AS total_for_os,
crashes_for_sig,
crashes_for_os,
(crashes_for_sig::float / (SELECT sum FROM total_for_sig)::float) * 100
AS in_sig_ratio,
(crashes_for_os::float / (SELECT sum FROM total_for_os)::float) * 100
AS in_os_ratio,
name AS module_name,
version AS module_version,
reason
FROM crashes_for_sig
JOIN crashes_for_os USING (reason_id, module_id)
JOIN modules USING (module_id)
JOIN reasons USING (reason_id)
WHERE crashes_for_sig >= %(min_crashes)s
AND ((crashes_for_sig::float / (SELECT sum FROM total_for_sig)::float)
- (crashes_for_os::float / (SELECT sum FROM total_for_os)::float)
>= %(min_baseline_diff)s)
;
"""
        error_message = ('Failed to retrieve correlations module data '
                         'from PostgreSQL')
sql_results = self.query(sql, params, error_message=error_message)
fields = (
"total_for_sig",
"total_for_os",
"crashes_for_sig",
"crashes_for_os",
"in_sig_ratio",
"in_os_ratio",
"module_name",
"module_version",
"reason",
)
return [dict(zip(fields, row)) for row in sql_results]
def core_counts(self, params):
sql = """
/* socorro.external.postgresql.correlations.Correlations.get(cores)*/
WITH total_for_sig AS (
SELECT
sum(total)
FROM correlations_core
JOIN product_versions USING (product_version_id)
JOIN signatures USING (signature_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
AND signature = %(signature)s
),
total_for_os AS (
SELECT
sum(total)
FROM correlations_core
JOIN product_versions USING (product_version_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
),
crashes_for_sig AS (
SELECT
sum(total) AS crashes_for_sig,
reason,
cpu_arch,
cpu_count
FROM correlations_core
JOIN product_versions USING (product_version_id)
JOIN signatures USING (signature_id)
JOIN reasons USING (reason_id)
WHERE report_date = %(report_date)s
AND product_name = %(product)s
AND os_name = %(platform)s
AND version_string = %(version)s
AND signature = %(signature)s
GROUP BY cpu_arch, cpu_count, reason
),
crashes_for_os AS (
SELECT
sum(total) AS crashes_for_os,
cpu_arch,
cpu_count
FROM correlations_core
JOIN product_versions USING (product_version_id)
WHERE report_date = %(report_date)s
AND os_name = %(platform)s
AND product_name = %(product)s
AND version_string = %(version)s
GROUP BY cpu_arch, cpu_count
)
SELECT
(SELECT sum
FROM total_for_sig) AS total_for_sig,
(SELECT sum
FROM total_for_os) AS total_for_os,
crashes_for_sig,
crashes_for_os,
(crashes_for_sig::float / (SELECT sum FROM total_for_sig)::float) * 100
AS in_sig_ratio,
(crashes_for_os::float / (SELECT sum FROM total_for_os)::float) * 100
AS in_os_ratio,
cpu_arch,
cpu_count,
reason
FROM crashes_for_sig
JOIN crashes_for_os USING (cpu_arch, cpu_count)
WHERE crashes_for_sig >= %(min_crashes)s
;
"""
        error_message = ('Failed to retrieve correlations core data '
                         'from PostgreSQL')
sql_results = self.query(sql, params, error_message=error_message)
fields = (
"total_for_sig",
"total_for_os",
"crashes_for_sig",
"crashes_for_os",
"in_sig_ratio",
"in_os_ratio",
"cpu_arch",
"cpu_count",
"reason",
)
return [dict(zip(fields, row)) for row in sql_results]
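if __name__ == '__main__':
    # Illustrative sketch, not part of the original module (and only runnable
    # where the socorro imports above resolve): each report type turns the
    # positional SQL result rows into dicts by zipping a field tuple with
    # every row, exactly as the methods above do.
    fields = ('total_for_sig', 'total_for_os', 'crashes_for_sig')
    sql_results = [(120, 4500, 30), (8, 900, 8)]
    print([dict(zip(fields, row)) for row in sql_results])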
|
SurfasJones/cookie
|
refs/heads/master
|
cookie/users/admin.py
|
46
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from django.contrib.auth.admin import UserAdmin as AuthUserAdmin
from .models import User
class UserAdmin(AuthUserAdmin):
create_form_class = UserCreationForm
update_form_class = UserChangeForm
admin.site.register(User, UserAdmin)
|
Storm7874/Utils
|
refs/heads/master
|
Colorama/ansitowin32.py
|
450
|
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import re
import sys
import os
from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
from .winterm import WinTerm, WinColor, WinStyle
from .win32 import windll, winapi_test
winterm = None
if windll is not None:
winterm = WinTerm()
def is_stream_closed(stream):
return not hasattr(stream, 'closed') or stream.closed
def is_a_tty(stream):
return hasattr(stream, 'isatty') and stream.isatty()
class StreamWrapper(object):
'''
Wraps a stream (such as stdout), acting as a transparent proxy for all
attribute access apart from method 'write()', which is delegated to our
Converter instance.
'''
def __init__(self, wrapped, converter):
# double-underscore everything to prevent clashes with names of
# attributes on the wrapped stream object.
self.__wrapped = wrapped
self.__convertor = converter
def __getattr__(self, name):
return getattr(self.__wrapped, name)
def write(self, text):
self.__convertor.write(text)
class AnsiToWin32(object):
'''
Implements a 'write()' method which, on Windows, will strip ANSI character
sequences from the text, and if outputting to a tty, will convert them into
win32 function calls.
'''
ANSI_CSI_RE = re.compile('\001?\033\[((?:\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer
ANSI_OSC_RE = re.compile('\001?\033\]((?:.|;)*?)(\x07)\002?') # Operating System Command
def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
# The wrapped stream (normally sys.stdout or sys.stderr)
self.wrapped = wrapped
# should we reset colors to defaults after every .write()
self.autoreset = autoreset
# create the proxy wrapping our output stream
self.stream = StreamWrapper(wrapped, self)
on_windows = os.name == 'nt'
# We test if the WinAPI works, because even if we are on Windows
# we may be using a terminal that doesn't support the WinAPI
# (e.g. Cygwin Terminal). In this case it's up to the terminal
# to support the ANSI codes.
conversion_supported = on_windows and winapi_test()
# should we strip ANSI sequences from our output?
if strip is None:
strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped))
self.strip = strip
        # should we convert ANSI sequences into win32 calls?
if convert is None:
convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped)
self.convert = convert
# dict of ansi codes to win32 functions and parameters
self.win32_calls = self.get_win32_calls()
# are we wrapping stderr?
self.on_stderr = self.wrapped is sys.stderr
def should_wrap(self):
'''
True if this class is actually needed. If false, then the output
stream will not be affected, nor will win32 calls be issued, so
wrapping stdout is not actually required. This will generally be
False on non-Windows platforms, unless optional functionality like
autoreset has been requested using kwargs to init()
'''
return self.convert or self.strip or self.autoreset
def get_win32_calls(self):
if self.convert and winterm:
return {
AnsiStyle.RESET_ALL: (winterm.reset_all, ),
AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
AnsiFore.RED: (winterm.fore, WinColor.RED),
AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
AnsiFore.RESET: (winterm.fore, ),
AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
AnsiBack.RED: (winterm.back, WinColor.RED),
AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
AnsiBack.WHITE: (winterm.back, WinColor.GREY),
AnsiBack.RESET: (winterm.back, ),
AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
}
return dict()
def write(self, text):
if self.strip or self.convert:
self.write_and_convert(text)
else:
self.wrapped.write(text)
self.wrapped.flush()
if self.autoreset:
self.reset_all()
def reset_all(self):
if self.convert:
self.call_win32('m', (0,))
elif not self.strip and not is_stream_closed(self.wrapped):
self.wrapped.write(Style.RESET_ALL)
def write_and_convert(self, text):
'''
Write the given text to our wrapped stream, stripping any ANSI
sequences from the text, and optionally converting them into win32
calls.
'''
cursor = 0
text = self.convert_osc(text)
for match in self.ANSI_CSI_RE.finditer(text):
start, end = match.span()
self.write_plain_text(text, cursor, start)
self.convert_ansi(*match.groups())
cursor = end
self.write_plain_text(text, cursor, len(text))
def write_plain_text(self, text, start, end):
if start < end:
self.wrapped.write(text[start:end])
self.wrapped.flush()
def convert_ansi(self, paramstring, command):
if self.convert:
params = self.extract_params(command, paramstring)
self.call_win32(command, params)
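    # Worked example (illustrative comment, not in the original file): for the
    # cursor-position command 'H' with paramstring '5', extract_params below
    # returns (5, 1) after default padding; for 'm' with an empty paramstring
    # it returns (0,), i.e. a full SGR reset.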
def extract_params(self, command, paramstring):
if command in 'Hf':
params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
while len(params) < 2:
# defaults:
params = params + (1,)
else:
params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
if len(params) == 0:
# defaults:
if command in 'JKm':
params = (0,)
elif command in 'ABCD':
params = (1,)
return params
def call_win32(self, command, params):
if command == 'm':
for param in params:
if param in self.win32_calls:
func_args = self.win32_calls[param]
func = func_args[0]
args = func_args[1:]
kwargs = dict(on_stderr=self.on_stderr)
func(*args, **kwargs)
elif command in 'J':
winterm.erase_screen(params[0], on_stderr=self.on_stderr)
elif command in 'K':
winterm.erase_line(params[0], on_stderr=self.on_stderr)
elif command in 'Hf': # cursor position - absolute
winterm.set_cursor_position(params, on_stderr=self.on_stderr)
elif command in 'ABCD': # cursor position - relative
n = params[0]
# A - up, B - down, C - forward, D - back
x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
def convert_osc(self, text):
for match in self.ANSI_OSC_RE.finditer(text):
start, end = match.span()
text = text[:start] + text[end:]
paramstring, command = match.groups()
if command in '\x07': # \x07 = BEL
params = paramstring.split(";")
# 0 - change title and icon (we will only change title)
# 1 - change icon (we don't support this)
# 2 - change title
if params[0] in '02':
winterm.set_title(params[1])
return text
|
google-code-export/pyglet
|
refs/heads/master
|
contrib/spryte/rect.py
|
29
|
class Rect(object):
'''Define a rectangular area.
    Many convenience handles and other properties are also defined - all of
    which may be assigned to, which will result in altering the position
    and sometimes the dimensions of the Rect.
The Rect area includes the bottom and left borders but not the top and
right borders.
'''
def __init__(self, x, y, width, height):
'''Create a Rect with the bottom-left corner at (x, y) and
dimensions (width, height).
'''
self._x, self._y = x, y
self._width, self._height = width, height
# the following four properties will most likely be overridden in a
# subclass
def set_x(self, value): self._x = value
x = property(lambda self: self._x, set_x)
def set_y(self, value): self._y = value
y = property(lambda self: self._y, set_y)
def set_width(self, value): self._width = value
width = property(lambda self: self._width, set_width)
def set_height(self, value): self._height = value
height = property(lambda self: self._height, set_height)
def set_pos(self, value): self._x, self._y = value
pos = property(lambda self: (self._x, self._y), set_pos)
def set_size(self, value): self._width, self._height = value
size = property(lambda self: (self._width, self._height), set_size)
def contains(self, x, y):
'''Return boolean whether the point defined by x, y is inside the
rect area.
'''
if x < self._x or x > self._x + self._width: return False
if y < self._y or y > self._y + self._height: return False
return True
def intersects(self, other):
'''Return boolean whether the "other" rect (an object with .x, .y,
.width and .height attributes) overlaps this Rect in any way.
'''
if self._x + self._width < other.x: return False
if other.x + other.width < self._x: return False
if self._y + self._height < other.y: return False
if other.y + other.height < self._y: return False
return True
# r/w, in pixels, y extent
def get_top(self): return self.y + self.height
def set_top(self, y): self.y = y - self.height
top = property(get_top, set_top)
# r/w, in pixels, y extent
def get_bottom(self): return self.y
def set_bottom(self, y): self.y = y
bottom = property(get_bottom, set_bottom)
# r/w, in pixels, x extent
def get_left(self): return self.x
def set_left(self, x): self.x = x
left = property(get_left, set_left)
# r/w, in pixels, x extent
def get_right(self): return self.x + self.width
def set_right(self, x): self.x = x - self.width
right = property(get_right, set_right)
# r/w, in pixels, (x, y)
def get_center(self):
return (self.x + self.width/2, self.y + self.height/2)
def set_center(self, center):
x, y = center
self.pos = (x - self.width/2, y - self.height/2)
center = property(get_center, set_center)
# r/w, in pixels, (x, y)
def get_midtop(self):
return (self.x + self.width/2, self.y + self.height)
def set_midtop(self, midtop):
x, y = midtop
self.pos = (x - self.width/2, y - self.height)
midtop = property(get_midtop, set_midtop)
# r/w, in pixels, (x, y)
def get_midbottom(self):
return (self.x + self.width/2, self.y)
def set_midbottom(self, midbottom):
x, y = midbottom
self.pos = (x - self.width/2, y)
midbottom = property(get_midbottom, set_midbottom)
# r/w, in pixels, (x, y)
def get_midleft(self):
return (self.x, self.y + self.height/2)
def set_midleft(self, midleft):
x, y = midleft
self.pos = (x, y - self.height/2)
midleft = property(get_midleft, set_midleft)
# r/w, in pixels, (x, y)
def get_midright(self):
return (self.x + self.width, self.y + self.height/2)
def set_midright(self, midright):
x, y = midright
self.pos = (x - self.width, y - self.height/2)
midright = property(get_midright, set_midright)
# r/w, in pixels, (x, y)
def get_topleft(self):
return (self.x, self.y + self.height)
def set_topleft(self, pos):
x, y = pos
self.pos = (x, y - self.height)
topleft = property(get_topleft, set_topleft)
# r/w, in pixels, (x, y)
def get_topright(self):
return (self.x + self.width, self.y + self.height)
def set_topright(self, pos):
x, y = pos
self.pos = (x - self.width, y - self.height)
topright = property(get_topright, set_topright)
# r/w, in pixels, (x, y)
def get_bottomright(self):
return (self.x + self.width, self.y)
def set_bottomright(self, pos):
x, y = pos
self.pos = (x - self.width, y)
bottomright = property(get_bottomright, set_bottomright)
# r/w, in pixels, (x, y)
def get_bottomleft(self):
return (self.x, self.y)
def set_bottomleft(self, pos):
self.x, self.y = pos
bottomleft = property(get_bottomleft, set_bottomleft)
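if __name__ == '__main__':
    # Illustrative usage, not part of the original module: assigning to a
    # convenience handle repositions the Rect without changing its size.
    r = Rect(0, 0, 10, 20)
    r.center = (50, 50)
    assert r.pos == (45, 40) and r.size == (10, 20)
    assert r.contains(50, 50)
    assert r.intersects(Rect(54, 59, 10, 10))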
|
abhattad4/Digi-Menu
|
refs/heads/master
|
build/lib.linux-x86_64-2.7/django/contrib/messages/storage/session.py
|
478
|
import json
from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import (
MessageDecoder, MessageEncoder,
)
from django.utils import six
class SessionStorage(BaseStorage):
"""
Stores messages in the session (that is, django.contrib.sessions).
"""
session_key = '_messages'
def __init__(self, request, *args, **kwargs):
assert hasattr(request, 'session'), "The session-based temporary "\
"message storage requires session middleware to be installed, "\
"and come before the message middleware in the "\
"MIDDLEWARE_CLASSES list."
super(SessionStorage, self).__init__(request, *args, **kwargs)
def _get(self, *args, **kwargs):
"""
Retrieves a list of messages from the request's session. This storage
always stores everything it is given, so return True for the
all_retrieved flag.
"""
return self.deserialize_messages(self.request.session.get(self.session_key)), True
def _store(self, messages, response, *args, **kwargs):
"""
Stores a list of messages to the request's session.
"""
if messages:
self.request.session[self.session_key] = self.serialize_messages(messages)
else:
self.request.session.pop(self.session_key, None)
return []
def serialize_messages(self, messages):
encoder = MessageEncoder(separators=(',', ':'))
return encoder.encode(messages)
def deserialize_messages(self, data):
if data and isinstance(data, six.string_types):
return json.loads(data, cls=MessageDecoder)
return data
|
2013Commons/HUE-SHARK
|
refs/heads/master
|
desktop/core/ext-py/Django-1.2.3/tests/regressiontests/middleware_exceptions/tests.py
|
51
|
import sys
from django.test import TestCase
from django.core.signals import got_request_exception
class TestException(Exception):
pass
class TestMiddleware(object):
def process_request(self, request):
raise TestException('Test Exception')
class MiddlewareExceptionTest(TestCase):
def setUp(self):
self.exceptions = []
got_request_exception.connect(self._on_request_exception)
self.client.handler.load_middleware()
def tearDown(self):
got_request_exception.disconnect(self._on_request_exception)
self.exceptions = []
def _on_request_exception(self, sender, request, **kwargs):
self.exceptions.append(sys.exc_info())
def test_process_request(self):
self.client.handler._request_middleware.insert(0, TestMiddleware().process_request)
try:
response = self.client.get('/')
except TestException, e:
            # Test client intentionally re-raises any exceptions being raised
# during request handling. Hence actual testing that exception was
# properly handled is done by relying on got_request_exception
# signal being sent.
pass
except Exception, e:
self.fail("Unexpected exception: %s" % e)
self.assertEquals(len(self.exceptions), 1)
exception, value, tb = self.exceptions[0]
self.assertEquals(value.args, ('Test Exception', ))
|
chjw8016/GreenOdoo7-haibao
|
refs/heads/master
|
openerp/netsvc.py
|
21
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2012 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#.apidoc title: Common Services: netsvc
#.apidoc module-mods: member-order: bysource
import errno
import logging
import logging.handlers
import os
import platform
import release
import socket
import sys
import threading
import time
import types
from pprint import pformat
try:
import psutil
except ImportError:
psutil = None
# TODO modules that import netsvc only for things from loglevels must be changed to use loglevels.
from loglevels import *
import tools
import openerp
_logger = logging.getLogger(__name__)
def close_socket(sock):
""" Closes a socket instance cleanly
:param sock: the network socket to close
:type sock: socket.socket
"""
try:
sock.shutdown(socket.SHUT_RDWR)
except socket.error, e:
# On OSX, socket shutdowns both sides if any side closes it
# causing an error 57 'Socket is not connected' on shutdown
# of the other side (or something), see
# http://bugs.python.org/issue4397
# note: stdlib fixed test, not behavior
if e.errno != errno.ENOTCONN or platform.system() not in ['Darwin', 'Windows']:
raise
sock.close()
def abort_response(dummy_1, description, dummy_2, details):
# TODO Replace except_{osv,orm} with these directly.
raise openerp.osv.osv.except_osv(description, details)
class Service(object):
""" Base class for Local services
Functionality here is trusted, no authentication.
Workflow engine and reports subclass this.
"""
_services = {}
def __init__(self, name):
Service._services[name] = self
self.__name = name
@classmethod
def exists(cls, name):
return name in cls._services
@classmethod
def remove(cls, name):
if cls.exists(name):
cls._services.pop(name)
def LocalService(name):
# Special case for addons support, will be removed in a few days when addons
# are updated to directly use openerp.osv.osv.service.
if name == 'object_proxy':
return openerp.osv.osv.service
return Service._services[name]
class ExportService(object):
""" Proxy for exported services.
Note that this class has no direct proxy, capable of calling
eservice.method(). Rather, the proxy should call
dispatch(method, params)
"""
_services = {}
def __init__(self, name):
ExportService._services[name] = self
self.__name = name
_logger.debug("Registered an exported service: %s" % name)
@classmethod
def getService(cls,name):
return cls._services[name]
# Dispatch a RPC call w.r.t. the method name. The dispatching
# w.r.t. the service (this class) is done by OpenERPDispatcher.
def dispatch(self, method, params):
raise Exception("stub dispatch at %s" % self.__name)
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, _NOTHING, DEFAULT = range(10)
#The background is set with 40 plus the number of the color, and the foreground with 30
#These are the sequences needed to get colored output
RESET_SEQ = "\033[0m"
COLOR_SEQ = "\033[1;%dm"
BOLD_SEQ = "\033[1m"
COLOR_PATTERN = "%s%s%%s%s" % (COLOR_SEQ, COLOR_SEQ, RESET_SEQ)
LEVEL_COLOR_MAPPING = {
logging.DEBUG: (BLUE, DEFAULT),
logging.INFO: (GREEN, DEFAULT),
logging.TEST: (WHITE, BLUE),
logging.WARNING: (YELLOW, DEFAULT),
logging.ERROR: (RED, DEFAULT),
logging.CRITICAL: (WHITE, RED),
}
class DBFormatter(logging.Formatter):
def format(self, record):
record.pid = os.getpid()
record.dbname = getattr(threading.currentThread(), 'dbname', '?')
return logging.Formatter.format(self, record)
class ColoredFormatter(DBFormatter):
def format(self, record):
fg_color, bg_color = LEVEL_COLOR_MAPPING[record.levelno]
record.levelname = COLOR_PATTERN % (30 + fg_color, 40 + bg_color, record.levelname)
return DBFormatter.format(self, record)
def init_logger():
from tools.translate import resetlocale
resetlocale()
# create a format for log messages and dates
format = '%(asctime)s %(pid)s %(levelname)s %(dbname)s %(name)s: %(message)s'
if tools.config['syslog']:
# SysLog Handler
if os.name == 'nt':
handler = logging.handlers.NTEventLogHandler("%s %s" % (release.description, release.version))
else:
handler = logging.handlers.SysLogHandler('/dev/log')
format = '%s %s' % (release.description, release.version) \
+ ':%(dbname)s:%(levelname)s:%(name)s:%(message)s'
elif tools.config['logfile']:
# LogFile Handler
logf = tools.config['logfile']
try:
dirname = os.path.dirname(logf)
if dirname and not os.path.isdir(dirname):
os.makedirs(dirname)
if tools.config['logrotate'] is not False:
handler = logging.handlers.TimedRotatingFileHandler(logf,'D',1,30)
elif os.name == 'posix':
handler = logging.handlers.WatchedFileHandler(logf)
else:
handler = logging.handlers.FileHandler(logf)
except Exception:
sys.stderr.write("ERROR: couldn't create the logfile directory. Logging to the standard output.\n")
handler = logging.StreamHandler(sys.stdout)
else:
# Normal Handler on standard output
handler = logging.StreamHandler(sys.stdout)
# Check that handler.stream has a fileno() method: when running OpenERP
# behind Apache with mod_wsgi, handler.stream will have type mod_wsgi.Log,
# which has no fileno() method. (mod_wsgi.Log is what is being bound to
# sys.stderr when the logging.StreamHandler is being constructed above.)
if isinstance(handler, logging.StreamHandler) \
and hasattr(handler.stream, 'fileno') \
and os.isatty(handler.stream.fileno()):
formatter = ColoredFormatter(format)
else:
formatter = DBFormatter(format)
handler.setFormatter(formatter)
# Configure handlers
default_config = [
'openerp.netsvc.rpc.request:INFO',
'openerp.netsvc.rpc.response:INFO',
'openerp.addons.web.http:INFO',
'openerp.sql_db:INFO',
':INFO',
]
if tools.config['log_level'] == 'info':
pseudo_config = []
elif tools.config['log_level'] == 'debug_rpc':
pseudo_config = ['openerp:DEBUG','openerp.netsvc.rpc.request:DEBUG']
elif tools.config['log_level'] == 'debug_rpc_answer':
pseudo_config = ['openerp:DEBUG','openerp.netsvc.rpc.request:DEBUG', 'openerp.netsvc.rpc.response:DEBUG']
elif tools.config['log_level'] == 'debug':
pseudo_config = ['openerp:DEBUG']
elif tools.config['log_level'] == 'test':
pseudo_config = ['openerp:TEST']
elif tools.config['log_level'] == 'warn':
pseudo_config = ['openerp:WARNING']
elif tools.config['log_level'] == 'error':
pseudo_config = ['openerp:ERROR']
elif tools.config['log_level'] == 'critical':
pseudo_config = ['openerp:CRITICAL']
elif tools.config['log_level'] == 'debug_sql':
pseudo_config = ['openerp.sql_db:DEBUG']
else:
pseudo_config = []
logconfig = tools.config['log_handler']
for logconfig_item in default_config + pseudo_config + logconfig:
loggername, level = logconfig_item.split(':')
level = getattr(logging, level, logging.INFO)
logger = logging.getLogger(loggername)
logger.handlers = []
logger.setLevel(level)
logger.addHandler(handler)
if loggername != '':
logger.propagate = False
for logconfig_item in default_config + pseudo_config + logconfig:
_logger.debug('logger level set: "%s"', logconfig_item)
# An alternative logging scheme for automated runs of the
# server intended to test it.
def init_alternative_logger():
class H(logging.Handler):
def emit(self, record):
if record.levelno > 20:
print record.levelno, record.pathname, record.msg
handler = H()
# Add the handler to the 'openerp' logger.
logger = logging.getLogger('openerp')
logger.addHandler(handler)
logger.setLevel(logging.ERROR)
def replace_request_password(args):
# password is always 3rd argument in a request, we replace it in RPC logs
# so it's easier to forward logs for diagnostics/debugging purposes...
if len(args) > 2:
args = list(args)
args[2] = '*'
return tuple(args)
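# Illustrative example, not in the original file:
# replace_request_password(('db', 1, 'secret')) -> ('db', 1, '*')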
def log(logger, level, prefix, msg, depth=None):
indent=''
indent_after=' '*len(prefix)
for line in (prefix+pformat(msg, depth=depth)).split('\n'):
logger.log(level, indent+line)
indent=indent_after
def dispatch_rpc(service_name, method, params):
""" Handle a RPC call.
This is pure Python code, the actual marshalling (from/to XML-RPC or
    NET-RPC) is done in an upper layer.
"""
try:
rpc_request = logging.getLogger(__name__ + '.rpc.request')
rpc_response = logging.getLogger(__name__ + '.rpc.response')
rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
rpc_response_flag = rpc_response.isEnabledFor(logging.DEBUG)
if rpc_request_flag or rpc_response_flag:
start_time = time.time()
start_rss, start_vms = 0, 0
if psutil:
start_rss, start_vms = psutil.Process(os.getpid()).get_memory_info()
            if rpc_request_flag and rpc_response_flag:
log(rpc_request,logging.DEBUG,'%s.%s'%(service_name,method), replace_request_password(params))
result = ExportService.getService(service_name).dispatch(method, params)
if rpc_request_flag or rpc_response_flag:
end_time = time.time()
end_rss, end_vms = 0, 0
if psutil:
end_rss, end_vms = psutil.Process(os.getpid()).get_memory_info()
logline = '%s.%s time:%.3fs mem: %sk -> %sk (diff: %sk)' % (service_name, method, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
if rpc_response_flag:
log(rpc_response,logging.DEBUG, logline, result)
else:
log(rpc_request,logging.DEBUG, logline, replace_request_password(params), depth=1)
return result
except openerp.exceptions.AccessError:
raise
except openerp.exceptions.AccessDenied:
raise
except openerp.exceptions.Warning:
raise
except openerp.exceptions.DeferredException, e:
_logger.exception(tools.exception_to_unicode(e))
post_mortem(e.traceback)
raise
except Exception, e:
_logger.exception(tools.exception_to_unicode(e))
post_mortem(sys.exc_info())
raise
def post_mortem(info):
if tools.config['debug_mode'] and isinstance(info[2], types.TracebackType):
import pdb
pdb.post_mortem(info[2])
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Y3K/django
|
refs/heads/master
|
tests/migrations/models.py
|
386
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps.registry import Apps
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class CustomModelBase(models.base.ModelBase):
pass
class ModelWithCustomBase(six.with_metaclass(CustomModelBase, models.Model)):
pass
@python_2_unicode_compatible
class UnicodeModel(models.Model):
title = models.CharField('ÚÑÍ¢ÓÐÉ', max_length=20, default='“Ðjáñgó”')
class Meta:
# Disable auto loading of this model as we load it on our own
apps = Apps()
verbose_name = 'úñí©óðé µóðéø'
verbose_name_plural = 'úñí©óðé µóðéøß'
def __str__(self):
return self.title
class Unserializable(object):
"""
An object that migration doesn't know how to serialize.
"""
pass
class UnserializableModel(models.Model):
title = models.CharField(max_length=20, default=Unserializable())
class Meta:
# Disable auto loading of this model as we load it on our own
apps = Apps()
class UnmigratedModel(models.Model):
"""
A model that is in a migration-less app (which this app is
if its migrations directory has not been repointed)
"""
pass
class EmptyManager(models.Manager):
use_in_migrations = True
class FoodQuerySet(models.query.QuerySet):
pass
class BaseFoodManager(models.Manager):
def __init__(self, a, b, c=1, d=2):
super(BaseFoodManager, self).__init__()
self.args = (a, b, c, d)
class FoodManager(BaseFoodManager.from_queryset(FoodQuerySet)):
use_in_migrations = True
class NoMigrationFoodManager(BaseFoodManager.from_queryset(FoodQuerySet)):
pass
|
felixma/nova
|
refs/heads/master
|
nova/tests/unit/api/openstack/compute/test_virtual_interfaces.py
|
7
|
# Copyright (C) 2011 Midokura KK
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from nova.api.openstack import api_version_request
from nova.api.openstack.compute.legacy_v2.contrib import virtual_interfaces \
as vi20
from nova.api.openstack.compute import virtual_interfaces as vi21
from nova import compute
from nova.compute import api as compute_api
from nova import context
from nova import exception
from nova import network
from nova.objects import virtual_interface as vif_obj
from nova import test
from nova.tests.unit.api.openstack import fakes
FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
def compute_api_get(self, context, instance_id, expected_attrs=None,
want_objects=False):
return dict(uuid=FAKE_UUID, id=instance_id, instance_type_id=1, host='bob')
def _generate_fake_vifs(context):
vif = vif_obj.VirtualInterface(context=context)
vif.address = '00-00-00-00-00-00'
vif.network_id = 123
vif.net_uuid = '22222222-2222-2222-2222-22222222222222222'
vif.uuid = '00000000-0000-0000-0000-00000000000000000'
fake_vifs = [vif]
vif = vif_obj.VirtualInterface(context=context)
vif.address = '11-11-11-11-11-11'
vif.network_id = 456
vif.net_uuid = '33333333-3333-3333-3333-33333333333333333'
vif.uuid = '11111111-1111-1111-1111-11111111111111111'
fake_vifs.append(vif)
return fake_vifs
def get_vifs_by_instance(self, context, instance_id):
return _generate_fake_vifs(context)
class FakeRequest(object):
def __init__(self, context):
self.environ = {'nova.context': context}
class ServerVirtualInterfaceTestV21(test.NoDBTestCase):
wsgi_api_version = None
expected_response = {
'virtual_interfaces': [
{'id': '00000000-0000-0000-0000-00000000000000000',
'mac_address': '00-00-00-00-00-00'},
{'id': '11111111-1111-1111-1111-11111111111111111',
'mac_address': '11-11-11-11-11-11'}]}
def setUp(self):
super(ServerVirtualInterfaceTestV21, self).setUp()
self.stubs.Set(compute.api.API, "get",
compute_api_get)
self.stubs.Set(network.api.API, "get_vifs_by_instance",
get_vifs_by_instance)
self._set_controller()
def _set_controller(self):
self.controller = vi21.ServerVirtualInterfaceController()
def test_get_virtual_interfaces_list(self):
req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
res_dict = self.controller.index(req, 'fake_uuid')
self.assertEqual(res_dict, self.expected_response)
def test_vif_instance_not_found(self):
self.mox.StubOutWithMock(compute_api.API, 'get')
fake_context = context.RequestContext('fake', 'fake')
fake_req = FakeRequest(fake_context)
fake_req.api_version_request = api_version_request.APIVersionRequest(
self.wsgi_api_version)
compute_api.API.get(fake_context, 'fake_uuid',
expected_attrs=None,
want_objects=True).AndRaise(
exception.InstanceNotFound(instance_id='instance-0000'))
self.mox.ReplayAll()
self.assertRaises(
webob.exc.HTTPNotFound,
self.controller.index,
fake_req, 'fake_uuid')
class ServerVirtualInterfaceTestV20(ServerVirtualInterfaceTestV21):
def _set_controller(self):
self.controller = vi20.ServerVirtualInterfaceController()
class ServerVirtualInterfaceTestV212(ServerVirtualInterfaceTestV21):
wsgi_api_version = '2.12'
expected_response = {
'virtual_interfaces': [
{'id': '00000000-0000-0000-0000-00000000000000000',
'mac_address': '00-00-00-00-00-00',
'net_id': '22222222-2222-2222-2222-22222222222222222'},
{'id': '11111111-1111-1111-1111-11111111111111111',
'mac_address': '11-11-11-11-11-11',
'net_id': '33333333-3333-3333-3333-33333333333333333'}]}
class ServerVirtualInterfaceEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(ServerVirtualInterfaceEnforcementV21, self).setUp()
self.controller = vi21.ServerVirtualInterfaceController()
self.req = fakes.HTTPRequest.blank('')
def test_index_virtual_interfaces_policy_failed(self):
rule_name = "os_compute_api:os-virtual-interfaces"
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.index, self.req, fakes.FAKE_UUID)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
|
dainan13/py-aluminium
|
refs/heads/master
|
src/easychecker.py
|
2
|
#!/usr/bin/env python2.6
# coding: utf-8
import sys
import operator
import re
import types
from simpleparse import generator
from mx.TextTools import TextTools
from pprint import pprint
def checkerattr( *args ):
def setcheckerattr( func ):
setattr( func, 'type', args )
return func
return setcheckerattr
def autologchecker( func ):
def autologchecker ( self, stack, log, x, *args ):
if func( self, x, *args ) == False :
tostrx = str(x)
tostrx = tostrx if len(tostrx) <= 10 else ( tostrx[:7]+'...' )
self._log( stack, log, "'%s' is not correct format as <%s> " %
( tostrx, func.__name__[8:], ) )
return False
return True
autologchecker._iswrapper = True
autologchecker._realcmd = func
autologchecker.__name__ = func.__name__
autologchecker.__doc__ = func.__doc__
return autologchecker
class CheckerError(Exception):
pass
class Checker(object):
    checkergrammar = r'''
expr := funchkr / numchkr / strchkr
exprsub := funpair / ( strhead?, strpair ) /
funchkr / numchkr / ( strhead?, strchkr )
strpair := strchkr, [ \t\r\n]*, ':', [ \t\r\n]*, ( funchkr / strchkr )
funpair := funchkr, [ \t\r\n]*, ':', [ \t\r\n]*, ( funchkr / strchkr )
funchkr := funname,
( '(', [ \t\r\n]*, ( exprsub, [ \t\r\n]*, ',', [ \t\r\n]* )*,
( exprsub )?, [ \t\r\n]*, ')' )?
funname := [a-zA-Z@]+
numchkr := ( [><+-]?, [0-9]+ ) / [+-]
strchkr := ( ".", -[,:()]+) /
( '"', ( '\\\\' / '\\"' / -["] )*, '"' ) /
( "'", ( '\\\\' / "\\'" / -['] )* , "'" ) /
( '`', ( '\\\\' / '\\`' / -[`] )* , '`' )
strhead := '#' / '!'
'''
    parser = generator.buildParser(checkergrammar).parserbyname('expr')
def __init__( self, checkercode ):
self.name = checkercode
self.checker = self._build( checkercode )
return
    def __call__( self, obj, stack=None, log=None ):
        # use None defaults to avoid sharing mutable lists across calls;
        # built checkers expect the argument order ( stack, log, obj )
        stack = [] if stack is None else stack
        log = [] if log is None else log
        rst = self.checker( stack, log, obj )
        if rst == False :
            # disabled by xp
            # print '< checker :', self.name, '>'
            # print '\r\n'.join(log)
            pass
        return rst
def _build( self, code ):
success, tree, nchar = TextTools.tag( code, self.parser )
if success == 0 or nchar != len(code):
            raise CheckerError, 'Syntax Error at %d near \' ... %s ... \'.' \
                                % ( nchar, code[nchar-5:nchar+5] )
# disabled by xp
# pprint(tree)
return self._build_expr( code, tree )
def _build_expr( self, code, tree ):
builder = getattr( self, '_build_'+ tree[-1][0] )
ckr = builder( code, tree[-1] )
if type(ckr) == type(()) :
xckr = ckr[0]
else :
xckr = ckr
for head in tree[:-1] :
strhead = code[head[1]:head[2]]
if strhead == '#':
xckr.type += ['abs',]
return ckr
_build_exprsub = _build_expr
def _build_funchkr( self, code, tree ):
checkername = tree[3][0]
if checkername[0] != 'funname' :
raise CheckerError, 'can not reach here [%s]' % \
sys._getframe().f_code.co_name
checkername = code[checkername[1]:checkername[2]]
try :
checker = getattr( self, 'checker_'+checkername )
except :
raise CheckerError, 'checker <%s> not found.' % checkername
childs = tree[3][1:]
childs = [ self._build_exprsub( code, c[3] ) for c in childs ]
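        # Classify each subchecker (added comment): tuples produced by
        # _build_strpair/_build_funpair are key:value pairs, checkers whose
        # type includes 'tag' apply to the container itself, and everything
        # else is a positional item checker.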
_types = [ ['pair',] if type(c) == type(()) else getattr( c, 'type', [] )
for c in childs ]
_types = [ 'tag' if 'tag' in t else 'pair' if 'pair' in t else None
for t in _types ]
tags = [ c for t, c in zip( _types, childs ) if t == 'tag' ]
pairs = [ c for t, c in zip( _types, childs ) if t == 'pair' ]
items = [ c for t, c in zip( _types, childs ) if t == None ]
return self._makechecker_funchkr( checkername,
checker, tags, pairs, items )
@staticmethod
def _log( stack, logger, info ):
#print
#logger += [ (stack[:], info), ]
logger.append( (stack[:], info) )
#print logger
return
def _makechecker_funchkr( self, name, checker, tags, pairs, items ):
ckrtype = getattr( checker, 'type', [] )
if 'object' in ckrtype :
if items != [] :
raise CheckerError, '%s can not contain item subchecker.' % name
abses = set([ idx for idx, pair in enumerate(pairs)
if 'abs' in pair[0].type ])
def _checker( stack, logger, obj ):
if checker( stack, logger, obj ) == False :
return False
# tag checker
t_rst = [ tck( stack, logger, obj ) for tck in tags ]
t_rst = all(t_rst)
# item checker
i_rst = [ self._buildin_checker_pairs(
stack, logger, sub, pairs )
for sub in obj.items() ]
i_rst, n_rst = zip( *i_rst ) or [[],[]]
i_rst = all(i_rst)
# abs checker
a_rst = abses - set(n_rst)
if len(a_rst) != 0 :
self._log( stack, logger,
( a_rst, 'some arguments required' ) )
a_rst = False
else :
a_rst = True
return t_rst and i_rst and a_rst
elif 'array' in ckrtype :
if pairs != [] :
raise CheckerError, '%s can not contain pair subchecker.' % name
abses = set([ idx for idx, item in enumerate(items)
if 'abs' in item.type ])
def _checker( stack, logger, obj ):
if checker( stack, logger, obj ) == False :
return False
# tag checker
t_rst = [ tck( stack, logger, obj ) for tck in tags ]
t_rst = all(t_rst)
# item checker
i_rst = [ self._buildin_checker_items(
stack, logger, sub, items )
for sub in obj ]
i_rst, n_rst = zip( *i_rst ) or [[],[]]
i_rst = all(i_rst)
# abs checker
a_rst = abses - set(n_rst)
if len(a_rst) != 0 :
self._log( stack, logger,
( a_rst, 'some arguments required' ) )
a_rst = False
else :
a_rst = True
return t_rst and i_rst and a_rst
else :
if items != [] or pairs != [] :
raise CheckerError, '%s can not contain subchecker.' % name
def _checker( stack, logger, obj ):
if checker( stack, logger, obj ) == False :
return False
rst = [ tck( stack, logger, obj ) for tck in tags ]
return all(rst)
_checker.__name__ = checker.__name__
_checker.type = list(ckrtype)[:]
return _checker
def _makechecker_numchkr( self, name, oper, y ):
def _checker ( stack, logger, obj ):
return self._buildin_checker_numchkr( stack, logger, obj, y, oper )
_checker.__name__ = name
        originaltype = getattr( self._buildin_checker_numchkr, 'type', [] )
        _checker.type = list(originaltype)[:]
return _checker
opertable = {
'+': operator.ge,
'-': operator.le,
'>': operator.gt,
'<': operator.lt,
'=': operator.eq,
}
def _build_numchkr( self, code, tree ):
if tree[0] != 'numchkr' :
raise CheckerError, 'can not reach here [%s]' % \
sys._getframe().f_code.co_name
checkername = code[tree[1]:tree[2]]
if checkername[0] in self.opertable :
oper = self.opertable[checkername[0]]
y = checkername[1:]
y = 0 if y == '' else int(y)
else :
oper = operator.eq
y = int(checkername)
return self._makechecker_numchkr( checkername, oper, y )
def _makechecker_strchkr( self, name, y ):
def _checker ( stack, logger, obj ):
return self._buildin_checker_strchkr( stack, logger, obj, y )
_checker.__name__ = name
        originaltype = getattr( self._buildin_checker_strchkr, 'type', [] )
        _checker.type = list(originaltype)[:]
return _checker
def _build_strchkr( self, code, tree ):
if tree[0] != 'strchkr' :
raise CheckerError, 'can not reach here [%s]' % \
sys._getframe().f_code.co_name
checkername = code[tree[1]:tree[2]]
if checkername.startswith('.'):
y = checkername[1:]
elif checkername.startswith('`'):
y = eval( '"""' + checkername[1:-1] + '"""' )
else :
y = eval( checkername )
return self._makechecker_strchkr( checkername, y )
def _build_strpair( self, code, tree ):
pair = [ getattr( self, '_build_'+ subtree[0] )( code, subtree )
for subtree in tree[3] ]
pair = tuple(pair)
return pair
_build_funpair = _build_strpair
@checkerattr( 'buildin' )
def _buildin_checker_items( self, stack, logger, subobj, subcheckers ):
if subcheckers == [] :
return ( True, None )
icls = [ ( i, c, list() ) for i, c in enumerate( subcheckers ) ]
for iii, ckr, log in icls :
if ckr( stack, log, subobj ) == True :
return ( True, iii )
        # +[0,] guards max() against an empty log, as in the pairs variant
        logs = [ ( max([ len(stk) for stk, inf in l ]+[0,]), l )
                 for i, c, l in icls ]
maxdeep = max( zip(*logs)[0] )
logger += [ log for deep, log in logs if deep == maxdeep ]
return ( False, None )
@checkerattr( 'buildin' )
def _buildin_checker_pairs( self, stack, logger, subobj, subcheckers ):
if subcheckers == [] :
return ( True, None )
icls = [ ( i, c, list() ) for i, c in enumerate( subcheckers ) ]
for iii, ckr, log in icls :
kckr, vckr = ckr
if kckr( stack, log, subobj[0] ) == True :
if vckr( stack+[str(subobj[0]),], logger, subobj[1] ) == True :
return ( True, iii )
else :
return ( False, iii )
logs = [ ( max([ len(stk) for stk, inf in l ]+[0,]), l )
for i, c, l in icls ]
maxdeep = max( zip(*logs)[0] )
#logger += sum([ log for deep, log in logs if deep == maxdeep ], [] )
        self._log( stack, logger, 'Unmatched key "%s"' %( str( subobj[0] ) ) )
return ( False, None )
inopertable = {
operator.lt : '<',
operator.le : '<=',
operator.eq : '==',
operator.ne : '!=',
operator.ge : '>=',
operator.gt : '>',
}
@checkerattr( 'buildin', 'tag' )
def _buildin_checker_numchkr( self, stack, log, x, y, op ):
        originalx = x
        if type(x) not in ( types.IntType, types.LongType, types.FloatType ):
            x = len(x)
            originalx = ( str(originalx) \
                          if type(originalx) != types.UnicodeType \
                          else originalx
                        ) + "'s length"
        if not op( x, y ) :
            self._log( stack, log,
                       '%s not %s %s' % ( str(originalx),
                                          self.inopertable[op], str(y) )
                     )
return False
return True
@checkerattr( 'buildin' )
def _buildin_checker_strchkr( self, stack, log, x, y ):
if type(x) not in ( type(''), type(u'') ):
self._log( stack, log, '%s is not a string' % ( x, ) )
return False
if x != y :
self._log( stack, log, '"%s" != "%s" ' % ( x, y ) )
return False
return True
@checkerattr( 'buildin' )
@autologchecker
def checker_any( self, x ):
return True
@checkerattr( 'buildin', 'object' )
@autologchecker
def checker_object( self, x ):
return type(x) == type({})
@checkerattr( 'buildin', 'array' )
@autologchecker
def checker_array( self, x ):
return type(x) in ( type([]), type(()) )
@checkerattr( 'buildin' )
@autologchecker
def checker_string( self, x ):
return type(x) in ( type(''), type(u'') )
@checkerattr( 'buildin' )
@autologchecker
def checker_bool( self, x ):
return x in ( True, False )
@checkerattr( 'buildin' )
@autologchecker
def checker_number( self, x ):
return type(x) == type(0)
    @autologchecker
    def checker_hex( self, x ):
        # anchored; the original unanchored prefix match accepted any string
        return re.match(r'^[a-fA-F0-9]*$',x) != None
@autologchecker
def checker_ascii( self, x ):
return type(x) in ( type(''), type(u'') ) \
and re.match(r'^[a-zA-Z0-9_-]*$',x) != None
@autologchecker
def checker_alnum( self, x ):
return type(x) in ( type(''), type(u'') ) and x.isalnum()
@autologchecker
def checker_null( self, x ):
return x == None
if __name__=='__main__':
testdata = {
'object( -5, #.a:string, .b:string )':
[ {}, {'a':'hahaha'}],
'object( -5, .a:number )':
[ {}, {'a':'hahaha'}, {'a':3}],
'object( -5, #.a:string, .a:number )':
[ {}, {'a':'hahaha'}, {'a':3}],
'object( #.a:string )':
[ {}, {'b':'hahaha'} ],
'array( string(3) )':
[ ['abd','abc'], ],
'null':
[ None, ]
}
for checker, datas in testdata.items() :
print '--', checker
ckr = Checker(checker)
for data in datas :
a = []
print ' ', data, '>', ckr(data, log=a), '>>>', a
#print
print
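# --- Illustrative usage (added; not part of the original module) ---
# A Checker is compiled once from the mini-language above and then applied
# to plain Python data; a caller-supplied log collects failure locations:
#
#   ckr = Checker('object( #.name:string, .tags:array(string) )')
#   errors = []
#   ckr({'name': 'box', 'tags': ['a', 'b']}, log=errors)   # -> True
#   ckr({'tags': 'oops'}, log=errors)                      # -> False, logged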
|
Just-D/chromium-1
|
refs/heads/master
|
tools/telemetry/telemetry/internal/backends/mandoline/mandoline_browser_backend.py
|
6
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import re
from telemetry.core import exceptions
from telemetry.core import util
from telemetry.internal.backends import browser_backend
from telemetry.internal.backends.chrome import tab_list_backend
from telemetry.internal.backends.chrome_inspector import devtools_client_backend
from telemetry.internal import forwarders
from telemetry.util import wpr_modes
class MandolineBrowserBackend(browser_backend.BrowserBackend):
"""An abstract class for mandoline browser backends. Provides basic
functionality once a remote-debugger port has been established."""
# It is OK to have abstract methods. pylint: disable=W0223
def __init__(self, platform_backend, browser_options):
super(MandolineBrowserBackend, self).__init__(
platform_backend=platform_backend,
supports_extensions=False,
browser_options=browser_options,
tab_list_backend=tab_list_backend.TabListBackend)
self._port = None
self._devtools_client = None
if browser_options.netsim:
self.wpr_port_pairs = forwarders.PortPairs(
http=forwarders.PortPair(80, 80),
https=forwarders.PortPair(443, 443),
dns=forwarders.PortPair(53, 53))
else:
self.wpr_port_pairs = forwarders.PortPairs(
http=forwarders.PortPair(0, 0),
https=forwarders.PortPair(0, 0),
dns=None)
# Some of the browser options are not supported by mandoline yet.
self._CheckUnsupportedBrowserOptions(browser_options)
@property
def devtools_client(self):
return self._devtools_client
def GetBrowserStartupArgs(self):
args = []
args.extend(self.browser_options.extra_browser_args)
args.extend(self.GetReplayBrowserStartupArgs())
return args
def _UseHostResolverRules(self):
"""Returns True to add --host-resolver-rules to send requests to replay."""
if self._platform_backend.forwarder_factory.does_forwarder_override_dns:
# Avoid --host-resolver-rules when the forwarder will map DNS requests
# from the target platform to replay (on the host platform).
# This allows the browser to exercise DNS requests.
return False
if self.browser_options.netsim and self.platform_backend.is_host_platform:
# Avoid --host-resolver-rules when replay will configure the platform to
# resolve hosts to replay.
# This allows the browser to exercise DNS requests.
return False
return True
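    # Illustrative note (added; not in the upstream file): when this returns
    # True, GetReplayBrowserStartupArgs() below builds, e.g. for a host_ip
    # of 127.0.0.1:
    #   --host-resolver-rules=MAP * 127.0.0.1,EXCLUDE localhost,
    # which points every hostname at the local replay server.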
def GetReplayBrowserStartupArgs(self):
if self.browser_options.wpr_mode == wpr_modes.WPR_OFF:
return []
replay_args = []
if self.should_ignore_certificate_errors:
# Ignore certificate errors if the platform backend has not created
# and installed a root certificate.
replay_args.append('--ignore-certificate-errors')
if self._UseHostResolverRules():
# Force hostnames to resolve to the replay's host_ip.
replay_args.append('--host-resolver-rules=MAP * %s,EXCLUDE localhost,'
#'EXCLUDE *.google.com' %
% self._platform_backend.forwarder_factory.host_ip)
# Force the browser to send HTTP/HTTPS requests to fixed ports if they
# are not the standard HTTP/HTTPS ports.
http_port = self.platform_backend.wpr_http_device_port
https_port = self.platform_backend.wpr_https_device_port
if http_port != 80:
replay_args.append('--testing-fixed-http-port=%s' % http_port)
if https_port != 443:
replay_args.append('--testing-fixed-https-port=%s' % https_port)
return replay_args
def HasBrowserFinishedLaunching(self):
assert self._port, 'No DevTools port info available.'
return devtools_client_backend.IsDevToolsAgentAvailable(self._port)
def _InitDevtoolsClientBackend(self, remote_devtools_port=None):
""" Initiates the devtool client backend which allows browser connection
through browser' devtool.
Args:
remote_devtools_port: The remote devtools port, if any. Otherwise assumed
to be the same as self._port.
"""
assert not self._devtools_client, (
'Devtool client backend cannot be init twice')
self._devtools_client = devtools_client_backend.DevToolsClientBackend(
self._port, remote_devtools_port or self._port, self)
def _WaitForBrowserToComeUp(self):
""" Waits for browser to come up. """
try:
timeout = self.browser_options.browser_startup_timeout
util.WaitFor(self.HasBrowserFinishedLaunching, timeout=timeout)
except (exceptions.TimeoutException, exceptions.ProcessGoneException) as e:
if not self.IsBrowserRunning():
raise exceptions.BrowserGoneException(self.browser, e)
raise exceptions.BrowserConnectionGoneException(self.browser, e)
@property
def browser_directory(self):
raise NotImplementedError()
@property
def profile_directory(self):
raise NotImplementedError()
@property
def supports_tab_control(self):
return False
@property
def supports_tracing(self):
return False
@property
def supports_system_info(self):
return False
@property
def supports_cpu_metrics(self):
return False
@property
def supports_memory_metrics(self):
return False
@property
def supports_power_metrics(self):
return False
def GetProcessName(self, cmd_line):
"""Returns a user-friendly name for the process of the given |cmd_line|."""
if not cmd_line:
return 'unknown'
m = re.search(r'\s--child-process(\s.*)?$', cmd_line)
if not m:
return 'browser'
return 'child-process'
def Close(self):
if self._devtools_client:
self._devtools_client.Close()
self._devtools_client = None
def _CheckUnsupportedBrowserOptions(self, browser_options):
def _GetMessage(name):
return ('BrowserOptions.%s is ignored. Value: %r'
% (name, getattr(browser_options, name)))
def _RaiseForUnsupportedOption(name):
raise Exception(_GetMessage(name))
def _WarnForUnsupportedOption(name):
logging.warning(_GetMessage(name))
if browser_options.dont_override_profile:
_RaiseForUnsupportedOption('dont_override_profile')
if browser_options.profile_dir:
_RaiseForUnsupportedOption('profile_dir')
if browser_options.profile_type and browser_options.profile_type != 'clean':
_RaiseForUnsupportedOption('profile_type')
if browser_options.extra_wpr_args:
_RaiseForUnsupportedOption('extra_wpr_args')
if not browser_options.disable_background_networking:
_RaiseForUnsupportedOption('disable_background_networking')
if browser_options.no_proxy_server:
_RaiseForUnsupportedOption('no_proxy_server')
if browser_options.use_devtools_active_port:
_RaiseForUnsupportedOption('use_devtools_active_port')
if browser_options.browser_user_agent_type:
_WarnForUnsupportedOption('browser_user_agent_type')
|
marcetin/2bacco
|
refs/heads/master
|
contrib/seeds/makeseeds.py
|
753
|
#!/usr/bin/env python
#
# Generate pnSeed[] from Pieter's DNS seeder
#
NSEEDS=600
import re
import sys
from subprocess import check_output
def main():
lines = sys.stdin.readlines()
ips = []
pattern = re.compile(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3}):9333")
for line in lines:
m = pattern.match(line)
if m is None:
continue
ip = 0
for i in range(0,4):
ip = ip + (int(m.group(i+1)) << (8*(i)))
if ip == 0:
continue
ips.append(ip)
for row in range(0, min(NSEEDS,len(ips)), 8):
print " " + ", ".join([ "0x%08x"%i for i in ips[row:row+8] ]) + ","
if __name__ == '__main__':
main()
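# --- Illustrative example (added; not part of the original script) ---
# Feeding a seeder line such as "1.2.3.4:9333" through main() packs the
# four octets little-endian into one integer:
#   >>> ip = sum(int(o) << (8 * i) for i, o in enumerate("1.2.3.4".split(".")))
#   >>> "0x%08x" % ip
#   '0x04030201'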
|
plivo/plivo-python
|
refs/heads/master
|
tests/resources/test_messages.py
|
1
|
# -*- coding: utf-8 -*-
from plivo import exceptions
from tests.base import PlivoResourceTestCase
from tests.decorators import with_response
class MessageTest(PlivoResourceTestCase):
def test_send_message(self):
expected_response = {'message_uuid': 'adsdafkjadshf123123'}
self.client.set_expected_response(
status_code=202, data_to_return=expected_response)
test_message = self.client.messages.create(
src='1234', dst='12345', text='Abcd')
self.assertEqual(
self.client.current_request.url,
'https://api.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/Message/')
self.assertEqual(self.client.current_request.method, 'POST')
self.assertEqual(test_message.message_uuid,
expected_response['message_uuid'])
def test_send_message_same_src_dst(self):
self.assertRaises(
exceptions.ValidationError,
self.client.messages.create,
src='1234',
dst='1234',
text='Abcd')
def test_send_message_with_powerpack(self):
expected_response = {'message_uuid': 'adsdafkjadshf123123'}
self.client.set_expected_response(
status_code=202, data_to_return=expected_response)
test_message = self.client.messages.create(
powerpack_uuid='1234', dst='12345', text='Abcd')
self.assertEqual(
self.client.current_request.url,
'https://api.plivo.com/v1/Account/MAXXXXXXXXXXXXXXXXXX/Message/')
self.assertEqual(self.client.current_request.method, 'POST')
self.assertEqual(test_message.message_uuid,
expected_response['message_uuid'])
def test_send_message_with_both_src_powerpack(self):
self.assertRaises(
exceptions.ValidationError,
self.client.messages.create,
powerpack_uuid='1234',
src='1234',
dst='1234',
text='Abcd')
def test_send_message_with_no_src_powerpack(self):
self.assertRaises(
exceptions.ValidationError,
self.client.messages.create,
dst='1234',
text='Abcd')
@with_response(200)
def test_get(self):
message_uuid = 'message_uuid'
message = self.client.messages.get(message_uuid)
self.assertResponseMatches(message)
self.assertUrlEqual(self.client.current_request.url,
self.get_url('Message', message_uuid))
self.assertEqual(self.client.current_request.method, 'GET')
@with_response(200)
def test_list_media(self):
message_uuid = 'message_uuid'
message = self.client.messages.get(message_uuid).listMedia()
self.assertResponseMatches(message)
self.assertUrlEqual(self.client.current_request.url,
self.get_url('Message', message_uuid, 'Media'))
self.assertEqual(self.client.current_request.method, 'GET')
@with_response(200, method_name='get')
def test_response_has_user_agent(self):
message_uuid = 'message_uuid'
self.client.messages.get(message_uuid)
self.assertIn('plivo-python',
self.client.current_request.headers['User-Agent'])
@with_response(200)
def test_list(self):
messages = self.client.messages.list()
# Test if ListResponseObject's __iter__ is working correctly
self.assertEqual(len(list(messages)), 20)
self.assertUrlEqual(self.client.current_request.url,
self.get_url('Message'))
self.assertEqual(self.client.current_request.method, 'GET')
|
jlspyaozhongkai/Uter
|
refs/heads/master
|
third_party_build/Python-2.7.9/lib/python2.7/distutils/command/install_scripts.py
|
241
|
"""distutils.command.install_scripts
Implements the Distutils 'install_scripts' command, for installing
Python scripts."""
# contributed by Bastian Kleineidam
__revision__ = "$Id$"
import os
from distutils.core import Command
from distutils import log
from stat import ST_MODE
class install_scripts (Command):
description = "install scripts (Python or otherwise)"
user_options = [
('install-dir=', 'd', "directory to install scripts to"),
('build-dir=','b', "build directory (where to install from)"),
('force', 'f', "force installation (overwrite existing files)"),
('skip-build', None, "skip the build steps"),
]
boolean_options = ['force', 'skip-build']
def initialize_options (self):
self.install_dir = None
self.force = 0
self.build_dir = None
self.skip_build = None
def finalize_options (self):
self.set_undefined_options('build', ('build_scripts', 'build_dir'))
self.set_undefined_options('install',
('install_scripts', 'install_dir'),
('force', 'force'),
('skip_build', 'skip_build'),
)
def run (self):
if not self.skip_build:
self.run_command('build_scripts')
self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
if os.name == 'posix':
# Set the executable bits (owner, group, and world) on
# all the scripts we just installed.
for file in self.get_outputs():
if self.dry_run:
log.info("changing mode of %s", file)
else:
mode = ((os.stat(file)[ST_MODE]) | 0555) & 07777
log.info("changing mode of %s to %o", file, mode)
os.chmod(file, mode)
def get_inputs (self):
return self.distribution.scripts or []
def get_outputs(self):
return self.outfiles or []
# class install_scripts
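# --- Illustrative note (added; not part of the stdlib module) ---
# The chmod in run() ORs r-x bits for owner/group/world into the existing
# mode and masks to the permission bits, e.g. (Python 2 octals):
#   >>> oct((0640 | 0555) & 07777)
#   '0755'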
|
fortmeier/yge-game-engine
|
refs/heads/master
|
test/gtest-1.7.0/test/gtest_output_test.py
|
1733
|
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the text output of Google C++ Testing Framework.
SYNOPSIS
gtest_output_test.py --build_dir=BUILD/DIR --gengolden
# where BUILD/DIR contains the built gtest_output_test_ file.
gtest_output_test.py --gengolden
gtest_output_test.py
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import sys
import gtest_test_utils
# The flag for generating the golden file
GENGOLDEN_FLAG = '--gengolden'
CATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS'
IS_WINDOWS = os.name == 'nt'
# TODO(vladl@google.com): remove the _lin suffix.
GOLDEN_NAME = 'gtest_output_test_golden_lin.txt'
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_output_test_')
# At least one command we exercise must not have the
# --gtest_internal_skip_environment_and_ad_hoc_tests flag.
COMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests'])
COMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes'])
COMMAND_WITH_TIME = ({}, [PROGRAM_PATH,
'--gtest_print_time',
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=FatalFailureTest.*:LoggingTest.*'])
COMMAND_WITH_DISABLED = (
{}, [PROGRAM_PATH,
'--gtest_also_run_disabled_tests',
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=*DISABLED_*'])
COMMAND_WITH_SHARDING = (
{'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
[PROGRAM_PATH,
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=PassingTest.*'])
GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME)
def ToUnixLineEnding(s):
"""Changes all Windows/Mac line endings in s to UNIX line endings."""
return s.replace('\r\n', '\n').replace('\r', '\n')
def RemoveLocations(test_output):
"""Removes all file location info from a Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with all file location info (in the form of
    'DIRECTORY/FILE_NAME:LINE_NUMBER: ' or
'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by
'FILE_NAME:#: '.
"""
return re.sub(r'.*[/\\](.+)(\:\d+|\(\d+\))\: ', r'\1:#: ', test_output)
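# Illustrative example (added comment; not in upstream gtest):
#   RemoveLocations('src/foo.cc:42: Failure')      -> 'foo.cc:#: Failure'
#   RemoveLocations(r'src\foo.cc(42): Failure')    -> 'foo.cc:#: Failure'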
def RemoveStackTraceDetails(output):
"""Removes all stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n',
'Stack trace: (omitted)\n\n', output)
def RemoveStackTraces(output):
"""Removes all traces of stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n', '', output)
def RemoveTime(output):
"""Removes all time information from a Google Test program's output."""
return re.sub(r'\(\d+ ms', '(? ms', output)
def RemoveTypeInfoDetails(test_output):
"""Removes compiler-specific type info from Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with type information normalized to canonical form.
"""
# some compilers output the name of type 'unsigned int' as 'unsigned'
return re.sub(r'unsigned int', 'unsigned', test_output)
def NormalizeToCurrentPlatform(test_output):
"""Normalizes platform specific output details for easier comparison."""
if IS_WINDOWS:
# Removes the color information that is not present on Windows.
test_output = re.sub('\x1b\\[(0;3\d)?m', '', test_output)
# Changes failure message headers into the Windows format.
test_output = re.sub(r': Failure\n', r': error: ', test_output)
# Changes file(line_number) to file:line_number.
test_output = re.sub(r'((\w|\.)+)\((\d+)\):', r'\1:\3:', test_output)
return test_output
def RemoveTestCounts(output):
"""Removes test counts from a Google Test program's output."""
output = re.sub(r'\d+ tests?, listed below',
'? tests, listed below', output)
output = re.sub(r'\d+ FAILED TESTS',
'? FAILED TESTS', output)
output = re.sub(r'\d+ tests? from \d+ test cases?',
'? tests from ? test cases', output)
output = re.sub(r'\d+ tests? from ([a-zA-Z_])',
r'? tests from \1', output)
return re.sub(r'\d+ tests?\.', '? tests.', output)
def RemoveMatchingTests(test_output, pattern):
"""Removes output of specified tests from a Google Test program's output.
This function strips not only the beginning and the end of a test but also
all output in between.
Args:
test_output: A string containing the test output.
pattern: A regex string that matches names of test cases or
tests to remove.
Returns:
Contents of test_output with tests whose names match pattern removed.
"""
test_output = re.sub(
r'.*\[ RUN \] .*%s(.|\n)*?\[( FAILED | OK )\] .*%s.*\n' % (
pattern, pattern),
'',
test_output)
return re.sub(r'.*%s.*\n' % pattern, '', test_output)
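# Illustrative example (added comment; not in upstream gtest): with the
# pattern 'DeathTest', an entire block
#   [ RUN      ] MyDeathTest.Foo
#   ...
#   [  FAILED  ] MyDeathTest.Foo
# is removed wholesale, as is any remaining stray line mentioning DeathTest.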
def NormalizeOutput(output):
"""Normalizes output (the output of gtest_output_test_.exe)."""
output = ToUnixLineEnding(output)
output = RemoveLocations(output)
output = RemoveStackTraceDetails(output)
output = RemoveTime(output)
return output
def GetShellCommandOutput(env_cmd):
"""Runs a command in a sub-process, and returns its output in a string.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
Returns:
A string with the command's combined standard and diagnostic output.
"""
# Spawns cmd in a sub-process, and gets its standard I/O file objects.
# Set and save the environment properly.
environ = os.environ.copy()
environ.update(env_cmd[0])
p = gtest_test_utils.Subprocess(env_cmd[1], env=environ)
return p.output
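# Illustrative example (added comment): an env_cmd tuple pairs extra
# environment variables with the argv list, as in COMMAND_WITH_SHARDING:
#   ({'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
#    [PROGRAM_PATH, '--gtest_filter=PassingTest.*'])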
def GetCommandOutput(env_cmd):
"""Runs a command and returns its output with all file location
info stripped off.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
"""
# Disables exception pop-ups on Windows.
environ, cmdline = env_cmd
environ = dict(environ) # Ensures we are modifying a copy.
environ[CATCH_EXCEPTIONS_ENV_VAR_NAME] = '1'
return NormalizeOutput(GetShellCommandOutput((environ, cmdline)))
def GetOutputOfAllCommands():
"""Returns concatenated output from several representative commands."""
return (GetCommandOutput(COMMAND_WITH_COLOR) +
GetCommandOutput(COMMAND_WITH_TIME) +
GetCommandOutput(COMMAND_WITH_DISABLED) +
GetCommandOutput(COMMAND_WITH_SHARDING))
test_list = GetShellCommandOutput(COMMAND_LIST_TESTS)
SUPPORTS_DEATH_TESTS = 'DeathTest' in test_list
SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list
SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list
SUPPORTS_STACK_TRACES = False
CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and
SUPPORTS_TYPED_TESTS and
SUPPORTS_THREADS)
class GTestOutputTest(gtest_test_utils.TestCase):
def RemoveUnsupportedTests(self, test_output):
if not SUPPORTS_DEATH_TESTS:
test_output = RemoveMatchingTests(test_output, 'DeathTest')
if not SUPPORTS_TYPED_TESTS:
test_output = RemoveMatchingTests(test_output, 'TypedTest')
test_output = RemoveMatchingTests(test_output, 'TypedDeathTest')
test_output = RemoveMatchingTests(test_output, 'TypeParamDeathTest')
if not SUPPORTS_THREADS:
test_output = RemoveMatchingTests(test_output,
'ExpectFailureWithThreadsTest')
test_output = RemoveMatchingTests(test_output,
'ScopedFakeTestPartResultReporterTest')
test_output = RemoveMatchingTests(test_output,
'WorksConcurrently')
if not SUPPORTS_STACK_TRACES:
test_output = RemoveStackTraces(test_output)
return test_output
def testOutput(self):
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'rb')
# A mis-configured source control system can cause \r appear in EOL
# sequences when we read the golden file irrespective of an operating
# system used. Therefore, we need to strip those \r's from newlines
# unconditionally.
golden = ToUnixLineEnding(golden_file.read())
golden_file.close()
# We want the test to pass regardless of certain features being
# supported or not.
# We still have to remove type name specifics in all cases.
normalized_actual = RemoveTypeInfoDetails(output)
normalized_golden = RemoveTypeInfoDetails(golden)
if CAN_GENERATE_GOLDEN_FILE:
self.assertEqual(normalized_golden, normalized_actual)
else:
normalized_actual = NormalizeToCurrentPlatform(
RemoveTestCounts(normalized_actual))
normalized_golden = NormalizeToCurrentPlatform(
RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden)))
# This code is very handy when debugging golden file differences:
if os.getenv('DEBUG_GTEST_OUTPUT_TEST'):
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_actual.txt'), 'wb').write(
normalized_actual)
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_golden.txt'), 'wb').write(
normalized_golden)
self.assertEqual(normalized_golden, normalized_actual)
if __name__ == '__main__':
if sys.argv[1:] == [GENGOLDEN_FLAG]:
if CAN_GENERATE_GOLDEN_FILE:
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'wb')
golden_file.write(output)
golden_file.close()
else:
message = (
"""Unable to write a golden file when compiled in an environment
that does not support all the required features (death tests, typed tests,
and multiple threads). Please generate the golden file using a binary built
with those features enabled.""")
sys.stderr.write(message)
sys.exit(1)
else:
gtest_test_utils.Main()
|
emil-mi/exercises-in-programming-style
|
refs/heads/master
|
18-aspects/tf-18.py
|
17
|
#!/usr/bin/env python
import sys, re, operator, string, time
#
# The functions
#
def extract_words(path_to_file):
with open(path_to_file) as f:
str_data = f.read()
pattern = re.compile('[\W_]+')
word_list = pattern.sub(' ', str_data).lower().split()
with open('../stop_words.txt') as f:
stop_words = f.read().split(',')
stop_words.extend(list(string.ascii_lowercase))
return [w for w in word_list if not w in stop_words]
def frequencies(word_list):
word_freqs = {}
for w in word_list:
if w in word_freqs:
word_freqs[w] += 1
else:
word_freqs[w] = 1
return word_freqs
def sort(word_freq):
return sorted(word_freq.iteritems(), key=operator.itemgetter(1), reverse=True)
# The side functionality
def profile(f):
def profilewrapper(*arg, **kw):
start_time = time.time()
ret_value = f(*arg, **kw)
elapsed = time.time() - start_time
print "%s(...) took %s secs" % (f.__name__, elapsed)
return ret_value
return profilewrapper
# join points
tracked_functions = [extract_words, frequencies, sort]
# weaver
for func in tracked_functions:
globals()[func.func_name]=profile(func)
word_freqs = sort(frequencies(extract_words(sys.argv[1])))
for (w, c) in word_freqs[0:25]:
print w, ' - ', c
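# --- Illustrative sketch (added; not part of the original program) ---
# The weaver works because Python resolves module-level names at call
# time, so rebinding a name in globals() intercepts every later call:
#
#   def trace(f):
#       def wrapper(*args, **kw):
#           print "calling", f.__name__
#           return f(*args, **kw)
#       return wrapper
#
#   def greet():
#       return "hi"
#
#   globals()['greet'] = trace(greet)
#   greet()   # prints "calling greet", then returns "hi"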
|
Jeff-Tian/mybnb
|
refs/heads/master
|
Python27/Lib/test/test_cmd_line.py
|
4
|
# Tests invocation of the interpreter with various command line arguments
# All tests are executed with environment variables ignored
# See test_cmd_line_script.py for testing of script execution
import test.test_support
import sys
import unittest
from test.script_helper import (
assert_python_ok, assert_python_failure, spawn_python, kill_python,
python_exit_code
)
class CmdLineTest(unittest.TestCase):
def start_python(self, *args):
p = spawn_python(*args)
return kill_python(p)
def exit_code(self, *args):
return python_exit_code(*args)
def test_directories(self):
self.assertNotEqual(self.exit_code('.'), 0)
self.assertNotEqual(self.exit_code('< .'), 0)
def verify_valid_flag(self, cmd_line):
data = self.start_python(cmd_line)
self.assertTrue(data == '' or data.endswith('\n'))
self.assertNotIn('Traceback', data)
def test_optimize(self):
self.verify_valid_flag('-O')
self.verify_valid_flag('-OO')
def test_q(self):
self.verify_valid_flag('-Qold')
self.verify_valid_flag('-Qnew')
self.verify_valid_flag('-Qwarn')
self.verify_valid_flag('-Qwarnall')
def test_site_flag(self):
self.verify_valid_flag('-S')
def test_usage(self):
self.assertIn('usage', self.start_python('-h'))
def test_version(self):
version = 'Python %d.%d' % sys.version_info[:2]
self.assertTrue(self.start_python('-V').startswith(version))
def test_run_module(self):
# Test expected operation of the '-m' switch
# Switch needs an argument
self.assertNotEqual(self.exit_code('-m'), 0)
# Check we get an error for a nonexistent module
self.assertNotEqual(
self.exit_code('-m', 'fnord43520xyz'),
0)
# Check the runpy module also gives an error for
# a nonexistent module
self.assertNotEqual(
self.exit_code('-m', 'runpy', 'fnord43520xyz'),
0)
# All good if module is located and run successfully
self.assertEqual(
self.exit_code('-m', 'timeit', '-n', '1'),
0)
def test_run_module_bug1764407(self):
# -m and -i need to play well together
# Runs the timeit module and checks the __main__
# namespace has been populated appropriately
p = spawn_python('-i', '-m', 'timeit', '-n', '1')
p.stdin.write('Timer\n')
p.stdin.write('exit()\n')
data = kill_python(p)
self.assertTrue(data.startswith('1 loop'))
self.assertIn('__main__.Timer', data)
def test_run_code(self):
# Test expected operation of the '-c' switch
# Switch needs an argument
self.assertNotEqual(self.exit_code('-c'), 0)
# Check we get an error for an uncaught exception
self.assertNotEqual(
self.exit_code('-c', 'raise Exception'),
0)
# All good if execution is successful
self.assertEqual(
self.exit_code('-c', 'pass'),
0)
def test_hash_randomization(self):
# Verify that -R enables hash randomization:
self.verify_valid_flag('-R')
hashes = []
for i in range(2):
code = 'print(hash("spam"))'
data = self.start_python('-R', '-c', code)
hashes.append(data)
self.assertNotEqual(hashes[0], hashes[1])
# Verify that sys.flags contains hash_randomization
code = 'import sys; print sys.flags'
data = self.start_python('-R', '-c', code)
self.assertTrue('hash_randomization=1' in data)
def test_del___main__(self):
# Issue #15001: PyRun_SimpleFileExFlags() did crash because it kept a
# borrowed reference to the dict of __main__ module and later modify
# the dict whereas the module was destroyed
filename = test.test_support.TESTFN
self.addCleanup(test.test_support.unlink, filename)
with open(filename, "w") as script:
print >>script, "import sys"
print >>script, "del sys.modules['__main__']"
assert_python_ok(filename)
def test_unknown_options(self):
rc, out, err = assert_python_failure('-E', '-z')
self.assertIn(b'Unknown option: -z', err)
self.assertEqual(err.splitlines().count(b'Unknown option: -z'), 1)
self.assertEqual(b'', out)
# Add "without='-E'" to prevent _assert_python to append -E
# to env_vars and change the output of stderr
rc, out, err = assert_python_failure('-z', without='-E')
self.assertIn(b'Unknown option: -z', err)
self.assertEqual(err.splitlines().count(b'Unknown option: -z'), 1)
self.assertEqual(b'', out)
rc, out, err = assert_python_failure('-a', '-z', without='-E')
self.assertIn(b'Unknown option: -a', err)
# only the first unknown option is reported
self.assertNotIn(b'Unknown option: -z', err)
self.assertEqual(err.splitlines().count(b'Unknown option: -a'), 1)
self.assertEqual(b'', out)
def test_main():
test.test_support.run_unittest(CmdLineTest)
test.test_support.reap_children()
if __name__ == "__main__":
test_main()
|
foodszhang/kbengine
|
refs/heads/master
|
kbe/res/scripts/common/Lib/tkinter/test/test_ttk/test_widgets.py
|
59
|
import unittest
import tkinter
from tkinter import ttk
from test.support import requires
import sys
from tkinter.test.test_ttk.test_functions import MockTclObj
from tkinter.test.support import (AbstractTkTest, tcl_version, get_tk_patchlevel,
simulate_mouse_click)
from tkinter.test.widget_tests import (add_standard_options, noconv,
AbstractWidgetTest, StandardOptionsTests, IntegerSizeTests, PixelSizeTests,
setUpModule)
requires('gui')
class StandardTtkOptionsTests(StandardOptionsTests):
def test_class(self):
widget = self.create()
self.assertEqual(widget['class'], '')
errmsg='attempt to change read-only option'
if get_tk_patchlevel() < (8, 6, 0): # actually this was changed in 8.6b3
errmsg='Attempt to change read-only option'
self.checkInvalidParam(widget, 'class', 'Foo', errmsg=errmsg)
widget2 = self.create(class_='Foo')
self.assertEqual(widget2['class'], 'Foo')
def test_padding(self):
widget = self.create()
self.checkParam(widget, 'padding', 0, expected=('0',))
self.checkParam(widget, 'padding', 5, expected=('5',))
self.checkParam(widget, 'padding', (5, 6), expected=('5', '6'))
self.checkParam(widget, 'padding', (5, 6, 7),
expected=('5', '6', '7'))
self.checkParam(widget, 'padding', (5, 6, 7, 8),
expected=('5', '6', '7', '8'))
self.checkParam(widget, 'padding', ('5p', '6p', '7p', '8p'))
self.checkParam(widget, 'padding', (), expected='')
def test_style(self):
widget = self.create()
self.assertEqual(widget['style'], '')
errmsg = 'Layout Foo not found'
if hasattr(self, 'default_orient'):
errmsg = ('Layout %s.Foo not found' %
getattr(self, 'default_orient').title())
self.checkInvalidParam(widget, 'style', 'Foo',
errmsg=errmsg)
widget2 = self.create(class_='Foo')
self.assertEqual(widget2['class'], 'Foo')
# XXX
pass
class WidgetTest(AbstractTkTest, unittest.TestCase):
"""Tests methods available in every ttk widget."""
def setUp(self):
super().setUp()
self.widget = ttk.Button(self.root, width=0, text="Text")
self.widget.pack()
self.widget.wait_visibility()
def test_identify(self):
self.widget.update_idletasks()
self.assertEqual(self.widget.identify(
int(self.widget.winfo_width() / 2),
int(self.widget.winfo_height() / 2)
), "label")
self.assertEqual(self.widget.identify(-1, -1), "")
self.assertRaises(tkinter.TclError, self.widget.identify, None, 5)
self.assertRaises(tkinter.TclError, self.widget.identify, 5, None)
self.assertRaises(tkinter.TclError, self.widget.identify, 5, '')
def test_widget_state(self):
# XXX not sure about the portability of all these tests
self.assertEqual(self.widget.state(), ())
self.assertEqual(self.widget.instate(['!disabled']), True)
# changing from !disabled to disabled
self.assertEqual(self.widget.state(['disabled']), ('!disabled', ))
# no state change
self.assertEqual(self.widget.state(['disabled']), ())
# change back to !disable but also active
self.assertEqual(self.widget.state(['!disabled', 'active']),
('!active', 'disabled'))
# no state changes, again
self.assertEqual(self.widget.state(['!disabled', 'active']), ())
self.assertEqual(self.widget.state(['active', '!disabled']), ())
def test_cb(arg1, **kw):
return arg1, kw
self.assertEqual(self.widget.instate(['!disabled'],
test_cb, "hi", **{"msg": "there"}),
('hi', {'msg': 'there'}))
# attempt to set invalid statespec
currstate = self.widget.state()
self.assertRaises(tkinter.TclError, self.widget.instate,
['badstate'])
self.assertRaises(tkinter.TclError, self.widget.instate,
['disabled', 'badstate'])
# verify that widget didn't change its state
self.assertEqual(currstate, self.widget.state())
# ensuring that passing None as state doesn't modify current state
self.widget.state(['active', '!disabled'])
self.assertEqual(self.widget.state(), ('active', ))
class AbstractToplevelTest(AbstractWidgetTest, PixelSizeTests):
_conv_pixels = noconv
@add_standard_options(StandardTtkOptionsTests)
class FrameTest(AbstractToplevelTest, unittest.TestCase):
OPTIONS = (
'borderwidth', 'class', 'cursor', 'height',
'padding', 'relief', 'style', 'takefocus',
'width',
)
def create(self, **kwargs):
return ttk.Frame(self.root, **kwargs)
@add_standard_options(StandardTtkOptionsTests)
class LabelFrameTest(AbstractToplevelTest, unittest.TestCase):
OPTIONS = (
'borderwidth', 'class', 'cursor', 'height',
'labelanchor', 'labelwidget',
'padding', 'relief', 'style', 'takefocus',
'text', 'underline', 'width',
)
def create(self, **kwargs):
return ttk.LabelFrame(self.root, **kwargs)
def test_labelanchor(self):
widget = self.create()
self.checkEnumParam(widget, 'labelanchor',
'e', 'en', 'es', 'n', 'ne', 'nw', 's', 'se', 'sw', 'w', 'wn', 'ws',
errmsg='Bad label anchor specification {}')
self.checkInvalidParam(widget, 'labelanchor', 'center')
def test_labelwidget(self):
widget = self.create()
label = ttk.Label(self.root, text='Mupp', name='foo')
self.checkParam(widget, 'labelwidget', label, expected='.foo')
label.destroy()
class AbstractLabelTest(AbstractWidgetTest):
def checkImageParam(self, widget, name):
image = tkinter.PhotoImage(master=self.root, name='image1')
image2 = tkinter.PhotoImage(master=self.root, name='image2')
self.checkParam(widget, name, image, expected=('image1',))
self.checkParam(widget, name, 'image1', expected=('image1',))
self.checkParam(widget, name, (image,), expected=('image1',))
self.checkParam(widget, name, (image, 'active', image2),
expected=('image1', 'active', 'image2'))
self.checkParam(widget, name, 'image1 active image2',
expected=('image1', 'active', 'image2'))
self.checkInvalidParam(widget, name, 'spam',
errmsg='image "spam" doesn\'t exist')
def test_compound(self):
widget = self.create()
self.checkEnumParam(widget, 'compound',
'none', 'text', 'image', 'center',
'top', 'bottom', 'left', 'right')
def test_state(self):
widget = self.create()
self.checkParams(widget, 'state', 'active', 'disabled', 'normal')
def test_width(self):
widget = self.create()
self.checkParams(widget, 'width', 402, -402, 0)
@add_standard_options(StandardTtkOptionsTests)
class LabelTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'anchor', 'background',
'class', 'compound', 'cursor', 'font', 'foreground',
'image', 'justify', 'padding', 'relief', 'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'width', 'wraplength',
)
_conv_pixels = noconv
def create(self, **kwargs):
return ttk.Label(self.root, **kwargs)
def test_font(self):
widget = self.create()
self.checkParam(widget, 'font',
'-Adobe-Helvetica-Medium-R-Normal--*-120-*-*-*-*-*-*')
@add_standard_options(StandardTtkOptionsTests)
class ButtonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'compound', 'cursor', 'default',
'image', 'state', 'style', 'takefocus', 'text', 'textvariable',
'underline', 'width',
)
def create(self, **kwargs):
return ttk.Button(self.root, **kwargs)
def test_default(self):
widget = self.create()
self.checkEnumParam(widget, 'default', 'normal', 'active', 'disabled')
def test_invoke(self):
success = []
btn = ttk.Button(self.root, command=lambda: success.append(1))
btn.invoke()
self.assertTrue(success)
@add_standard_options(StandardTtkOptionsTests)
class CheckbuttonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'compound', 'cursor',
'image',
'offvalue', 'onvalue',
'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'variable', 'width',
)
def create(self, **kwargs):
return ttk.Checkbutton(self.root, **kwargs)
def test_offvalue(self):
widget = self.create()
self.checkParams(widget, 'offvalue', 1, 2.3, '', 'any string')
def test_onvalue(self):
widget = self.create()
self.checkParams(widget, 'onvalue', 1, 2.3, '', 'any string')
def test_invoke(self):
success = []
def cb_test():
success.append(1)
return "cb test called"
cbtn = ttk.Checkbutton(self.root, command=cb_test)
# the variable automatically created by ttk.Checkbutton is actually
# undefined till we invoke the Checkbutton
self.assertEqual(cbtn.state(), ('alternate', ))
self.assertRaises(tkinter.TclError, cbtn.tk.globalgetvar,
cbtn['variable'])
res = cbtn.invoke()
self.assertEqual(res, "cb test called")
self.assertEqual(cbtn['onvalue'],
cbtn.tk.globalgetvar(cbtn['variable']))
self.assertTrue(success)
cbtn['command'] = ''
res = cbtn.invoke()
self.assertFalse(str(res))
self.assertLessEqual(len(success), 1)
self.assertEqual(cbtn['offvalue'],
cbtn.tk.globalgetvar(cbtn['variable']))
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class ComboboxTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'exportselection', 'height',
'justify', 'postcommand', 'state', 'style',
'takefocus', 'textvariable', 'values', 'width',
)
def setUp(self):
super().setUp()
self.combo = self.create()
def create(self, **kwargs):
return ttk.Combobox(self.root, **kwargs)
def test_height(self):
widget = self.create()
self.checkParams(widget, 'height', 100, 101.2, 102.6, -100, 0, '1i')
def test_state(self):
widget = self.create()
self.checkParams(widget, 'state', 'active', 'disabled', 'normal')
def _show_drop_down_listbox(self):
width = self.combo.winfo_width()
self.combo.event_generate('<ButtonPress-1>', x=width - 5, y=5)
self.combo.event_generate('<ButtonRelease-1>', x=width - 5, y=5)
self.combo.update_idletasks()
def test_virtual_event(self):
success = []
self.combo['values'] = [1]
self.combo.bind('<<ComboboxSelected>>',
lambda evt: success.append(True))
self.combo.pack()
self.combo.wait_visibility()
height = self.combo.winfo_height()
self._show_drop_down_listbox()
self.combo.update()
self.combo.event_generate('<Return>')
self.combo.update()
self.assertTrue(success)
def test_postcommand(self):
success = []
self.combo['postcommand'] = lambda: success.append(True)
self.combo.pack()
self.combo.wait_visibility()
self._show_drop_down_listbox()
self.assertTrue(success)
# testing postcommand removal
self.combo['postcommand'] = ''
self._show_drop_down_listbox()
self.assertEqual(len(success), 1)
def test_values(self):
def check_get_current(getval, currval):
self.assertEqual(self.combo.get(), getval)
self.assertEqual(self.combo.current(), currval)
self.assertEqual(self.combo['values'],
() if tcl_version < (8, 5) else '')
check_get_current('', -1)
self.checkParam(self.combo, 'values', 'mon tue wed thur',
expected=('mon', 'tue', 'wed', 'thur'))
self.checkParam(self.combo, 'values', ('mon', 'tue', 'wed', 'thur'))
self.checkParam(self.combo, 'values', (42, 3.14, '', 'any string'))
self.checkParam(self.combo, 'values', '', expected=())
self.combo['values'] = ['a', 1, 'c']
self.combo.set('c')
check_get_current('c', 2)
self.combo.current(0)
check_get_current('a', 0)
self.combo.set('d')
check_get_current('d', -1)
# testing values with empty string
self.combo.set('')
self.combo['values'] = (1, 2, '', 3)
check_get_current('', 2)
# testing values with empty string set through configure
self.combo.configure(values=[1, '', 2])
self.assertEqual(self.combo['values'],
('1', '', '2') if self.wantobjects else
'1 {} 2')
# testing values with spaces
self.combo['values'] = ['a b', 'a\tb', 'a\nb']
self.assertEqual(self.combo['values'],
('a b', 'a\tb', 'a\nb') if self.wantobjects else
'{a b} {a\tb} {a\nb}')
# testing values with special characters
self.combo['values'] = [r'a\tb', '"a"', '} {']
self.assertEqual(self.combo['values'],
(r'a\tb', '"a"', '} {') if self.wantobjects else
r'a\\tb {"a"} \}\ \{')
# out of range
self.assertRaises(tkinter.TclError, self.combo.current,
len(self.combo['values']))
# it expects an integer (or something that can be converted to int)
self.assertRaises(tkinter.TclError, self.combo.current, '')
# testing creating combobox with empty string in values
combo2 = ttk.Combobox(self.root, values=[1, 2, ''])
self.assertEqual(combo2['values'],
('1', '2', '') if self.wantobjects else '1 2 {}')
combo2.destroy()
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class EntryTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'background', 'class', 'cursor',
'exportselection', 'font',
'invalidcommand', 'justify',
'show', 'state', 'style', 'takefocus', 'textvariable',
'validate', 'validatecommand', 'width', 'xscrollcommand',
)
def setUp(self):
super().setUp()
self.entry = self.create()
def create(self, **kwargs):
return ttk.Entry(self.root, **kwargs)
def test_invalidcommand(self):
widget = self.create()
self.checkCommandParam(widget, 'invalidcommand')
def test_show(self):
widget = self.create()
self.checkParam(widget, 'show', '*')
self.checkParam(widget, 'show', '')
self.checkParam(widget, 'show', ' ')
def test_state(self):
widget = self.create()
self.checkParams(widget, 'state',
'disabled', 'normal', 'readonly')
def test_validate(self):
widget = self.create()
self.checkEnumParam(widget, 'validate',
'all', 'key', 'focus', 'focusin', 'focusout', 'none')
def test_validatecommand(self):
widget = self.create()
self.checkCommandParam(widget, 'validatecommand')
def test_bbox(self):
self.assertIsBoundingBox(self.entry.bbox(0))
self.assertRaises(tkinter.TclError, self.entry.bbox, 'noindex')
self.assertRaises(tkinter.TclError, self.entry.bbox, None)
def test_identify(self):
self.entry.pack()
self.entry.wait_visibility()
self.entry.update_idletasks()
self.assertEqual(self.entry.identify(5, 5), "textarea")
self.assertEqual(self.entry.identify(-1, -1), "")
self.assertRaises(tkinter.TclError, self.entry.identify, None, 5)
self.assertRaises(tkinter.TclError, self.entry.identify, 5, None)
self.assertRaises(tkinter.TclError, self.entry.identify, 5, '')
def test_validation_options(self):
success = []
test_invalid = lambda: success.append(True)
self.entry['validate'] = 'none'
self.entry['validatecommand'] = lambda: False
self.entry['invalidcommand'] = test_invalid
self.entry.validate()
self.assertTrue(success)
self.entry['invalidcommand'] = ''
self.entry.validate()
self.assertEqual(len(success), 1)
self.entry['invalidcommand'] = test_invalid
self.entry['validatecommand'] = lambda: True
self.entry.validate()
self.assertEqual(len(success), 1)
self.entry['validatecommand'] = ''
self.entry.validate()
self.assertEqual(len(success), 1)
self.entry['validatecommand'] = True
self.assertRaises(tkinter.TclError, self.entry.validate)
def test_validation(self):
validation = []
def validate(to_insert):
if not 'a' <= to_insert.lower() <= 'z':
validation.append(False)
return False
validation.append(True)
return True
self.entry['validate'] = 'key'
self.entry['validatecommand'] = self.entry.register(validate), '%S'
self.entry.insert('end', 1)
self.entry.insert('end', 'a')
self.assertEqual(validation, [False, True])
self.assertEqual(self.entry.get(), 'a')
def test_revalidation(self):
def validate(content):
for letter in content:
if not 'a' <= letter.lower() <= 'z':
return False
return True
self.entry['validatecommand'] = self.entry.register(validate), '%P'
self.entry.insert('end', 'avocado')
self.assertEqual(self.entry.validate(), True)
self.assertEqual(self.entry.state(), ())
self.entry.delete(0, 'end')
self.assertEqual(self.entry.get(), '')
self.entry.insert('end', 'a1b')
self.assertEqual(self.entry.validate(), False)
self.assertEqual(self.entry.state(), ('invalid', ))
self.entry.delete(1)
self.assertEqual(self.entry.validate(), True)
self.assertEqual(self.entry.state(), ())
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class PanedWindowTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'height',
'orient', 'style', 'takefocus', 'width',
)
def setUp(self):
super().setUp()
self.paned = self.create()
def create(self, **kwargs):
return ttk.PanedWindow(self.root, **kwargs)
def test_orient(self):
widget = self.create()
self.assertEqual(str(widget['orient']), 'vertical')
errmsg='attempt to change read-only option'
if get_tk_patchlevel() < (8, 6, 0): # actually this was changed in 8.6b3
errmsg='Attempt to change read-only option'
self.checkInvalidParam(widget, 'orient', 'horizontal',
errmsg=errmsg)
widget2 = self.create(orient='horizontal')
self.assertEqual(str(widget2['orient']), 'horizontal')
def test_add(self):
# attempt to add a child that is not a direct child of the paned window
label = ttk.Label(self.paned)
child = ttk.Label(label)
self.assertRaises(tkinter.TclError, self.paned.add, child)
label.destroy()
child.destroy()
# another attempt
label = ttk.Label(self.root)
child = ttk.Label(label)
self.assertRaises(tkinter.TclError, self.paned.add, child)
child.destroy()
label.destroy()
good_child = ttk.Label(self.root)
self.paned.add(good_child)
# re-adding a child is not accepted
self.assertRaises(tkinter.TclError, self.paned.add, good_child)
other_child = ttk.Label(self.paned)
self.paned.add(other_child)
self.assertEqual(self.paned.pane(0), self.paned.pane(1))
self.assertRaises(tkinter.TclError, self.paned.pane, 2)
good_child.destroy()
other_child.destroy()
self.assertRaises(tkinter.TclError, self.paned.pane, 0)
def test_forget(self):
self.assertRaises(tkinter.TclError, self.paned.forget, None)
self.assertRaises(tkinter.TclError, self.paned.forget, 0)
self.paned.add(ttk.Label(self.root))
self.paned.forget(0)
self.assertRaises(tkinter.TclError, self.paned.forget, 0)
def test_insert(self):
self.assertRaises(tkinter.TclError, self.paned.insert, None, 0)
self.assertRaises(tkinter.TclError, self.paned.insert, 0, None)
self.assertRaises(tkinter.TclError, self.paned.insert, 0, 0)
child = ttk.Label(self.root)
child2 = ttk.Label(self.root)
child3 = ttk.Label(self.root)
self.assertRaises(tkinter.TclError, self.paned.insert, 0, child)
self.paned.insert('end', child2)
self.paned.insert(0, child)
self.assertEqual(self.paned.panes(), (str(child), str(child2)))
self.paned.insert(0, child2)
self.assertEqual(self.paned.panes(), (str(child2), str(child)))
self.paned.insert('end', child3)
self.assertEqual(self.paned.panes(),
(str(child2), str(child), str(child3)))
# reinserting a child should move it to its current position
panes = self.paned.panes()
self.paned.insert('end', child3)
self.assertEqual(panes, self.paned.panes())
# moving child3 to child2 position should result in child2 ending up
# in previous child position and child ending up in previous child3
# position
self.paned.insert(child2, child3)
self.assertEqual(self.paned.panes(),
(str(child3), str(child2), str(child)))
def test_pane(self):
self.assertRaises(tkinter.TclError, self.paned.pane, 0)
child = ttk.Label(self.root)
self.paned.add(child)
self.assertIsInstance(self.paned.pane(0), dict)
self.assertEqual(self.paned.pane(0, weight=None),
0 if self.wantobjects else '0')
# newer form for querying a single option
self.assertEqual(self.paned.pane(0, 'weight'),
0 if self.wantobjects else '0')
self.assertEqual(self.paned.pane(0), self.paned.pane(str(child)))
self.assertRaises(tkinter.TclError, self.paned.pane, 0,
badoption='somevalue')
def test_sashpos(self):
self.assertRaises(tkinter.TclError, self.paned.sashpos, None)
self.assertRaises(tkinter.TclError, self.paned.sashpos, '')
self.assertRaises(tkinter.TclError, self.paned.sashpos, 0)
child = ttk.Label(self.paned, text='a')
self.paned.add(child, weight=1)
self.assertRaises(tkinter.TclError, self.paned.sashpos, 0)
child2 = ttk.Label(self.paned, text='b')
self.paned.add(child2)
self.assertRaises(tkinter.TclError, self.paned.sashpos, 1)
self.paned.pack(expand=True, fill='both')
self.paned.wait_visibility()
curr_pos = self.paned.sashpos(0)
self.paned.sashpos(0, 1000)
self.assertNotEqual(curr_pos, self.paned.sashpos(0))
self.assertIsInstance(self.paned.sashpos(0), int)
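# Illustrative sketch (assumption, not from the original suite): sashpos()
# only works once the paned window is mapped, which is why the test above
# packs the widget and waits for visibility before touching the sash.
def _demo_move_sash(root):
    paned = ttk.PanedWindow(root, orient='horizontal')
    paned.add(ttk.Label(paned, text='left'), weight=1)
    paned.add(ttk.Label(paned, text='right'), weight=1)
    paned.pack(expand=True, fill='both')
    paned.wait_visibility()
    paned.sashpos(0, 100)      # request sash 0 at x=100
    return paned.sashpos(0)    # actual position after constraint handling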
@add_standard_options(StandardTtkOptionsTests)
class RadiobuttonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'compound', 'cursor',
'image',
'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'value', 'variable', 'width',
)
def create(self, **kwargs):
return ttk.Radiobutton(self.root, **kwargs)
def test_value(self):
widget = self.create()
self.checkParams(widget, 'value', 1, 2.3, '', 'any string')
def test_invoke(self):
success = []
def cb_test():
success.append(1)
return "cb test called"
myvar = tkinter.IntVar(self.root)
cbtn = ttk.Radiobutton(self.root, command=cb_test,
variable=myvar, value=0)
cbtn2 = ttk.Radiobutton(self.root, command=cb_test,
variable=myvar, value=1)
if self.wantobjects:
conv = lambda x: x
else:
conv = int
res = cbtn.invoke()
self.assertEqual(res, "cb test called")
self.assertEqual(conv(cbtn['value']), myvar.get())
self.assertEqual(myvar.get(),
conv(cbtn.tk.globalgetvar(cbtn['variable'])))
self.assertTrue(success)
cbtn2['command'] = ''
res = cbtn2.invoke()
self.assertEqual(str(res), '')
self.assertLessEqual(len(success), 1)
self.assertEqual(conv(cbtn2['value']), myvar.get())
self.assertEqual(myvar.get(),
conv(cbtn.tk.globalgetvar(cbtn['variable'])))
self.assertEqual(str(cbtn['variable']), str(cbtn2['variable']))
class MenubuttonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'compound', 'cursor', 'direction',
'image', 'menu', 'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'width',
)
def create(self, **kwargs):
return ttk.Menubutton(self.root, **kwargs)
def test_direction(self):
widget = self.create()
self.checkEnumParam(widget, 'direction',
'above', 'below', 'left', 'right', 'flush')
def test_menu(self):
widget = self.create()
menu = tkinter.Menu(widget, name='menu')
self.checkParam(widget, 'menu', menu, conv=str)
menu.destroy()
@add_standard_options(StandardTtkOptionsTests)
class ScaleTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'cursor', 'from', 'length',
'orient', 'style', 'takefocus', 'to', 'value', 'variable',
)
_conv_pixels = noconv
default_orient = 'horizontal'
def setUp(self):
super().setUp()
self.scale = self.create()
self.scale.pack()
self.scale.update()
def create(self, **kwargs):
return ttk.Scale(self.root, **kwargs)
def test_from(self):
widget = self.create()
self.checkFloatParam(widget, 'from', 100, 14.9, 15.1, conv=False)
def test_length(self):
widget = self.create()
self.checkPixelsParam(widget, 'length', 130, 131.2, 135.6, '5i')
def test_to(self):
widget = self.create()
self.checkFloatParam(widget, 'to', 300, 14.9, 15.1, -10, conv=False)
def test_value(self):
widget = self.create()
self.checkFloatParam(widget, 'value', 300, 14.9, 15.1, -10, conv=False)
def test_custom_event(self):
failure = [1, 1, 1] # will need to be empty
funcid = self.scale.bind('<<RangeChanged>>', lambda evt: failure.pop())
self.scale['from'] = 10
self.scale['from_'] = 10
self.scale['to'] = 3
self.assertFalse(failure)
failure = [1, 1, 1]
self.scale.configure(from_=2, to=5)
self.scale.configure(from_=0, to=-2)
self.scale.configure(to=10)
self.assertFalse(failure)
def test_get(self):
if self.wantobjects:
conv = lambda x: x
else:
conv = float
scale_width = self.scale.winfo_width()
self.assertEqual(self.scale.get(scale_width, 0), self.scale['to'])
self.assertEqual(conv(self.scale.get(0, 0)), conv(self.scale['from']))
self.assertEqual(self.scale.get(), self.scale['value'])
self.scale['value'] = 30
self.assertEqual(self.scale.get(), self.scale['value'])
self.assertRaises(tkinter.TclError, self.scale.get, '', 0)
self.assertRaises(tkinter.TclError, self.scale.get, 0, '')
def test_set(self):
if self.wantobjects:
conv = lambda x: x
else:
conv = float
# set restricts the max/min values according to the current range
max = conv(self.scale['to'])
new_max = max + 10
self.scale.set(new_max)
self.assertEqual(conv(self.scale.get()), max)
min = conv(self.scale['from'])
self.scale.set(min - 1)
self.assertEqual(conv(self.scale.get()), min)
# changing the variable directly doesn't impose this limitation, though
var = tkinter.DoubleVar(self.root)
self.scale['variable'] = var
var.set(max + 5)
self.assertEqual(conv(self.scale.get()), var.get())
self.assertEqual(conv(self.scale.get()), max + 5)
del var
# the same happens with the value option
self.scale['value'] = max + 10
self.assertEqual(conv(self.scale.get()), max + 10)
self.assertEqual(conv(self.scale.get()), conv(self.scale['value']))
# nevertheless, note that the max/min values we can get specifying
# x, y coords are the ones according to the current range
self.assertEqual(conv(self.scale.get(0, 0)), min)
self.assertEqual(conv(self.scale.get(self.scale.winfo_width(), 0)), max)
self.assertRaises(tkinter.TclError, self.scale.set, None)
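# Illustrative sketch (assumption): set() clamps to the widget's from/to
# range, while writing through the linked variable bypasses that clamp --
# the asymmetry the test above verifies.
def _demo_scale_clamping(root):
    var = tkinter.DoubleVar(root)
    scale = ttk.Scale(root, from_=0, to=10, variable=var)
    scale.set(50)   # clamped: get() now returns 10.0
    var.set(50)     # not clamped: get() now returns 50.0
    return scale.get()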
@add_standard_options(StandardTtkOptionsTests)
class ProgressbarTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'orient', 'length',
'mode', 'maximum', 'phase',
'style', 'takefocus', 'value', 'variable',
)
_conv_pixels = noconv
default_orient = 'horizontal'
def create(self, **kwargs):
return ttk.Progressbar(self.root, **kwargs)
def test_length(self):
widget = self.create()
self.checkPixelsParam(widget, 'length', 100.1, 56.7, '2i')
def test_maximum(self):
widget = self.create()
self.checkFloatParam(widget, 'maximum', 150.2, 77.7, 0, -10, conv=False)
def test_mode(self):
widget = self.create()
self.checkEnumParam(widget, 'mode', 'determinate', 'indeterminate')
def test_phase(self):
# XXX
pass
def test_value(self):
widget = self.create()
self.checkFloatParam(widget, 'value', 150.2, 77.7, 0, -10,
conv=False)
@unittest.skipIf(sys.platform == 'darwin',
'ttk.Scrollbar is special on MacOSX')
@add_standard_options(StandardTtkOptionsTests)
class ScrollbarTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'cursor', 'orient', 'style', 'takefocus',
)
default_orient = 'vertical'
def create(self, **kwargs):
return ttk.Scrollbar(self.root, **kwargs)
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class NotebookTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'height', 'padding', 'style', 'takefocus',
)
def setUp(self):
super().setUp()
self.nb = self.create(padding=0)
self.child1 = ttk.Label(self.root)
self.child2 = ttk.Label(self.root)
self.nb.add(self.child1, text='a')
self.nb.add(self.child2, text='b')
def create(self, **kwargs):
return ttk.Notebook(self.root, **kwargs)
def test_tab_identifiers(self):
self.nb.forget(0)
self.nb.hide(self.child2)
self.assertRaises(tkinter.TclError, self.nb.tab, self.child1)
self.assertEqual(self.nb.index('end'), 1)
self.nb.add(self.child2)
self.assertEqual(self.nb.index('end'), 1)
self.nb.select(self.child2)
self.assertTrue(self.nb.tab('current'))
self.nb.add(self.child1, text='a')
self.nb.pack()
self.nb.wait_visibility()
if sys.platform == 'darwin':
tb_idx = "@20,5"
else:
tb_idx = "@5,5"
self.assertEqual(self.nb.tab(tb_idx), self.nb.tab('current'))
for i in range(5, 100, 5):
try:
if self.nb.tab('@%d, 5' % i, text=None) == 'a':
break
except tkinter.TclError:
pass
else:
self.fail("Tab with text 'a' not found")
def test_add_and_hidden(self):
self.assertRaises(tkinter.TclError, self.nb.hide, -1)
self.assertRaises(tkinter.TclError, self.nb.hide, 'hi')
self.assertRaises(tkinter.TclError, self.nb.hide, None)
self.assertRaises(tkinter.TclError, self.nb.add, None)
self.assertRaises(tkinter.TclError, self.nb.add, ttk.Label(self.root),
unknown='option')
tabs = self.nb.tabs()
self.nb.hide(self.child1)
self.nb.add(self.child1)
self.assertEqual(self.nb.tabs(), tabs)
child = ttk.Label(self.root)
self.nb.add(child, text='c')
tabs = self.nb.tabs()
curr = self.nb.index('current')
# verify that the tab gets readded at its previous position
child2_index = self.nb.index(self.child2)
self.nb.hide(self.child2)
self.nb.add(self.child2)
self.assertEqual(self.nb.tabs(), tabs)
self.assertEqual(self.nb.index(self.child2), child2_index)
self.assertEqual(str(self.child2), self.nb.tabs()[child2_index])
# but the tab next to it (not hidden) is the one selected now
self.assertEqual(self.nb.index('current'), curr + 1)
def test_forget(self):
self.assertRaises(tkinter.TclError, self.nb.forget, -1)
self.assertRaises(tkinter.TclError, self.nb.forget, 'hi')
self.assertRaises(tkinter.TclError, self.nb.forget, None)
tabs = self.nb.tabs()
child1_index = self.nb.index(self.child1)
self.nb.forget(self.child1)
self.assertNotIn(str(self.child1), self.nb.tabs())
self.assertEqual(len(tabs) - 1, len(self.nb.tabs()))
self.nb.add(self.child1)
self.assertEqual(self.nb.index(self.child1), 1)
self.assertNotEqual(child1_index, self.nb.index(self.child1))
def test_index(self):
self.assertRaises(tkinter.TclError, self.nb.index, -1)
self.assertRaises(tkinter.TclError, self.nb.index, None)
self.assertIsInstance(self.nb.index('end'), int)
self.assertEqual(self.nb.index(self.child1), 0)
self.assertEqual(self.nb.index(self.child2), 1)
self.assertEqual(self.nb.index('end'), 2)
def test_insert(self):
# moving tabs
tabs = self.nb.tabs()
self.nb.insert(1, tabs[0])
self.assertEqual(self.nb.tabs(), (tabs[1], tabs[0]))
self.nb.insert(self.child1, self.child2)
self.assertEqual(self.nb.tabs(), tabs)
self.nb.insert('end', self.child1)
self.assertEqual(self.nb.tabs(), (tabs[1], tabs[0]))
self.nb.insert('end', 0)
self.assertEqual(self.nb.tabs(), tabs)
# bad moves
self.assertRaises(tkinter.TclError, self.nb.insert, 2, tabs[0])
self.assertRaises(tkinter.TclError, self.nb.insert, -1, tabs[0])
# new tab
child3 = ttk.Label(self.root)
self.nb.insert(1, child3)
self.assertEqual(self.nb.tabs(), (tabs[0], str(child3), tabs[1]))
self.nb.forget(child3)
self.assertEqual(self.nb.tabs(), tabs)
self.nb.insert(self.child1, child3)
self.assertEqual(self.nb.tabs(), (str(child3), ) + tabs)
self.nb.forget(child3)
self.assertRaises(tkinter.TclError, self.nb.insert, 2, child3)
self.assertRaises(tkinter.TclError, self.nb.insert, -1, child3)
# bad inserts
self.assertRaises(tkinter.TclError, self.nb.insert, 'end', None)
self.assertRaises(tkinter.TclError, self.nb.insert, None, 0)
self.assertRaises(tkinter.TclError, self.nb.insert, None, None)
def test_select(self):
self.nb.pack()
self.nb.wait_visibility()
success = []
tab_changed = []
self.child1.bind('<Unmap>', lambda evt: success.append(True))
self.nb.bind('<<NotebookTabChanged>>',
lambda evt: tab_changed.append(True))
self.assertEqual(self.nb.select(), str(self.child1))
self.nb.select(self.child2)
self.assertTrue(success)
self.assertEqual(self.nb.select(), str(self.child2))
self.nb.update()
self.assertTrue(tab_changed)
def test_tab(self):
self.assertRaises(tkinter.TclError, self.nb.tab, -1)
self.assertRaises(tkinter.TclError, self.nb.tab, 'notab')
self.assertRaises(tkinter.TclError, self.nb.tab, None)
self.assertIsInstance(self.nb.tab(self.child1), dict)
self.assertEqual(self.nb.tab(self.child1, text=None), 'a')
# newer form for querying a single option
self.assertEqual(self.nb.tab(self.child1, 'text'), 'a')
self.nb.tab(self.child1, text='abc')
self.assertEqual(self.nb.tab(self.child1, text=None), 'abc')
self.assertEqual(self.nb.tab(self.child1, 'text'), 'abc')
def test_tabs(self):
self.assertEqual(len(self.nb.tabs()), 2)
self.nb.forget(self.child1)
self.nb.forget(self.child2)
self.assertEqual(self.nb.tabs(), ())
def test_traversal(self):
self.nb.pack()
self.nb.wait_visibility()
self.nb.select(0)
simulate_mouse_click(self.nb, 5, 5)
self.nb.focus_force()
self.nb.event_generate('<Control-Tab>')
self.assertEqual(self.nb.select(), str(self.child2))
self.nb.focus_force()
self.nb.event_generate('<Shift-Control-Tab>')
self.assertEqual(self.nb.select(), str(self.child1))
self.nb.focus_force()
self.nb.event_generate('<Shift-Control-Tab>')
self.assertEqual(self.nb.select(), str(self.child2))
self.nb.tab(self.child1, text='a', underline=0)
self.nb.enable_traversal()
self.nb.focus_force()
simulate_mouse_click(self.nb, 5, 5)
if sys.platform == 'darwin':
self.nb.event_generate('<Option-a>')
else:
self.nb.event_generate('<Alt-a>')
self.assertEqual(self.nb.select(), str(self.child1))
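# Illustrative sketch (assumption): enable_traversal() installs the
# Control-Tab / Shift-Control-Tab bindings and honours tab mnemonics
# declared via the 'underline' tab option, as exercised above.
def _demo_traversal_setup(root):
    nb = ttk.Notebook(root)
    nb.add(ttk.Label(root), text='alpha', underline=0)  # Alt-a selects it
    nb.add(ttk.Label(root), text='beta', underline=0)   # Alt-b selects it
    nb.enable_traversal()
    return nb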
@add_standard_options(StandardTtkOptionsTests)
class TreeviewTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'columns', 'cursor', 'displaycolumns',
'height', 'padding', 'selectmode', 'show',
'style', 'takefocus', 'xscrollcommand', 'yscrollcommand',
)
def setUp(self):
super().setUp()
self.tv = self.create(padding=0)
def create(self, **kwargs):
return ttk.Treeview(self.root, **kwargs)
def test_columns(self):
widget = self.create()
self.checkParam(widget, 'columns', 'a b c',
expected=('a', 'b', 'c'))
self.checkParam(widget, 'columns', ('a', 'b', 'c'))
self.checkParam(widget, 'columns', ())
def test_displaycolumns(self):
widget = self.create()
widget['columns'] = ('a', 'b', 'c')
self.checkParam(widget, 'displaycolumns', 'b a c',
expected=('b', 'a', 'c'))
self.checkParam(widget, 'displaycolumns', ('b', 'a', 'c'))
self.checkParam(widget, 'displaycolumns', '#all',
expected=('#all',))
self.checkParam(widget, 'displaycolumns', (2, 1, 0))
self.checkInvalidParam(widget, 'displaycolumns', ('a', 'b', 'd'),
errmsg='Invalid column index d')
self.checkInvalidParam(widget, 'displaycolumns', (1, 2, 3),
errmsg='Column index 3 out of bounds')
self.checkInvalidParam(widget, 'displaycolumns', (1, -2),
errmsg='Column index -2 out of bounds')
def test_height(self):
widget = self.create()
self.checkPixelsParam(widget, 'height', 100, -100, 0, '3c', conv=False)
self.checkPixelsParam(widget, 'height', 101.2, 102.6, conv=noconv)
def test_selectmode(self):
widget = self.create()
self.checkEnumParam(widget, 'selectmode',
'none', 'browse', 'extended')
def test_show(self):
widget = self.create()
self.checkParam(widget, 'show', 'tree headings',
expected=('tree', 'headings'))
self.checkParam(widget, 'show', ('tree', 'headings'))
self.checkParam(widget, 'show', ('headings', 'tree'))
self.checkParam(widget, 'show', 'tree', expected=('tree',))
self.checkParam(widget, 'show', 'headings', expected=('headings',))
def test_bbox(self):
self.tv.pack()
self.assertEqual(self.tv.bbox(''), '')
self.tv.wait_visibility()
self.tv.update()
item_id = self.tv.insert('', 'end')
children = self.tv.get_children()
self.assertTrue(children)
bbox = self.tv.bbox(children[0])
self.assertIsBoundingBox(bbox)
# compare width in bboxes
self.tv['columns'] = ['test']
self.tv.column('test', width=50)
bbox_column0 = self.tv.bbox(children[0], 0)
root_width = self.tv.column('#0', width=None)
if not self.wantobjects:
root_width = int(root_width)
self.assertEqual(bbox_column0[0], bbox[0] + root_width)
# verify that bbox of a closed item is the empty string
child1 = self.tv.insert(item_id, 'end')
self.assertEqual(self.tv.bbox(child1), '')
def test_children(self):
# no children yet, should get an empty tuple
self.assertEqual(self.tv.get_children(), ())
item_id = self.tv.insert('', 'end')
self.assertIsInstance(self.tv.get_children(), tuple)
self.assertEqual(self.tv.get_children()[0], item_id)
# add item_id and child3 as children of child2
child2 = self.tv.insert('', 'end')
child3 = self.tv.insert('', 'end')
self.tv.set_children(child2, item_id, child3)
self.assertEqual(self.tv.get_children(child2), (item_id, child3))
# child3 has child2 as parent, thus trying to set child2 as a child
# of child3 should result in an error
self.assertRaises(tkinter.TclError,
self.tv.set_children, child3, child2)
# remove child2 children
self.tv.set_children(child2)
self.assertEqual(self.tv.get_children(child2), ())
# remove root's children
self.tv.set_children('')
self.assertEqual(self.tv.get_children(), ())
def test_column(self):
# return a dict with all options/values
self.assertIsInstance(self.tv.column('#0'), dict)
# return a single value of the given option
if self.wantobjects:
self.assertIsInstance(self.tv.column('#0', width=None), int)
# set a new value for an option
self.tv.column('#0', width=10)
# testing new way to get option value
self.assertEqual(self.tv.column('#0', 'width'),
10 if self.wantobjects else '10')
self.assertEqual(self.tv.column('#0', width=None),
10 if self.wantobjects else '10')
# check read-only option
self.assertRaises(tkinter.TclError, self.tv.column, '#0', id='X')
self.assertRaises(tkinter.TclError, self.tv.column, 'invalid')
invalid_kws = [
{'unknown_option': 'some value'}, {'stretch': 'wrong'},
{'anchor': 'wrong'}, {'width': 'wrong'}, {'minwidth': 'wrong'}
]
for kw in invalid_kws:
self.assertRaises(tkinter.TclError, self.tv.column, '#0',
**kw)
def test_delete(self):
self.assertRaises(tkinter.TclError, self.tv.delete, '#0')
item_id = self.tv.insert('', 'end')
item2 = self.tv.insert(item_id, 'end')
self.assertEqual(self.tv.get_children(), (item_id, ))
self.assertEqual(self.tv.get_children(item_id), (item2, ))
self.tv.delete(item_id)
self.assertFalse(self.tv.get_children())
# reattach should fail
self.assertRaises(tkinter.TclError,
self.tv.reattach, item_id, '', 'end')
# test multiple item delete
item1 = self.tv.insert('', 'end')
item2 = self.tv.insert('', 'end')
self.assertEqual(self.tv.get_children(), (item1, item2))
self.tv.delete(item1, item2)
self.assertFalse(self.tv.get_children())
def test_detach_reattach(self):
item_id = self.tv.insert('', 'end')
item2 = self.tv.insert(item_id, 'end')
# calling detach without items is valid, although it does nothing
prev = self.tv.get_children()
self.tv.detach() # this should do nothing
self.assertEqual(prev, self.tv.get_children())
self.assertEqual(self.tv.get_children(), (item_id, ))
self.assertEqual(self.tv.get_children(item_id), (item2, ))
# detach item with children
self.tv.detach(item_id)
self.assertFalse(self.tv.get_children())
# reattach item with children
self.tv.reattach(item_id, '', 'end')
self.assertEqual(self.tv.get_children(), (item_id, ))
self.assertEqual(self.tv.get_children(item_id), (item2, ))
# move a child to the root
self.tv.move(item2, '', 'end')
self.assertEqual(self.tv.get_children(), (item_id, item2))
self.assertEqual(self.tv.get_children(item_id), ())
# bad values
self.assertRaises(tkinter.TclError,
self.tv.reattach, 'nonexistent', '', 'end')
self.assertRaises(tkinter.TclError,
self.tv.detach, 'nonexistent')
self.assertRaises(tkinter.TclError,
self.tv.reattach, item2, 'otherparent', 'end')
self.assertRaises(tkinter.TclError,
self.tv.reattach, item2, '', 'invalid')
# multiple detach
self.tv.detach(item_id, item2)
self.assertEqual(self.tv.get_children(), ())
self.assertEqual(self.tv.get_children(item_id), ())
def test_exists(self):
self.assertEqual(self.tv.exists('something'), False)
self.assertEqual(self.tv.exists(''), True)
self.assertEqual(self.tv.exists({}), False)
# the following will make a tk.call equivalent to
# tk.call(treeview, "exists") which should result in an error
# in the tcl interpreter since tk requires an item.
self.assertRaises(tkinter.TclError, self.tv.exists, None)
def test_focus(self):
# nothing is focused right now
self.assertEqual(self.tv.focus(), '')
item1 = self.tv.insert('', 'end')
self.tv.focus(item1)
self.assertEqual(self.tv.focus(), item1)
self.tv.delete(item1)
self.assertEqual(self.tv.focus(), '')
# try focusing a nonexistent item
self.assertRaises(tkinter.TclError, self.tv.focus, 'hi')
def test_heading(self):
# check a dict is returned
self.assertIsInstance(self.tv.heading('#0'), dict)
# check a value is returned
self.tv.heading('#0', text='hi')
self.assertEqual(self.tv.heading('#0', 'text'), 'hi')
self.assertEqual(self.tv.heading('#0', text=None), 'hi')
# invalid option
self.assertRaises(tkinter.TclError, self.tv.heading, '#0',
background=None)
# invalid value
self.assertRaises(tkinter.TclError, self.tv.heading, '#0',
anchor=1)
def test_heading_callback(self):
def simulate_heading_click(x, y):
simulate_mouse_click(self.tv, x, y)
self.tv.update()
success = [] # no success for now
self.tv.pack()
self.tv.wait_visibility()
self.tv.heading('#0', command=lambda: success.append(True))
self.tv.column('#0', width=100)
self.tv.update()
# assuming that the coords (5, 5) fall into heading #0
simulate_heading_click(5, 5)
if not success:
self.fail("The command associated to the treeview heading wasn't "
"invoked.")
success = []
commands = self.tv.master._tclCommands
self.tv.heading('#0', command=str(self.tv.heading('#0', command=None)))
self.assertEqual(commands, self.tv.master._tclCommands)
simulate_heading_click(5, 5)
if not success:
self.fail("The command associated to the treeview heading wasn't "
"invoked.")
# XXX The following raises an error in a tcl interpreter, but not in
# Python
#self.tv.heading('#0', command='I dont exist')
#simulate_heading_click(5, 5)
def test_index(self):
# item 'what' doesn't exist
self.assertRaises(tkinter.TclError, self.tv.index, 'what')
self.assertEqual(self.tv.index(''), 0)
item1 = self.tv.insert('', 'end')
item2 = self.tv.insert('', 'end')
c1 = self.tv.insert(item1, 'end')
c2 = self.tv.insert(item1, 'end')
self.assertEqual(self.tv.index(item1), 0)
self.assertEqual(self.tv.index(c1), 0)
self.assertEqual(self.tv.index(c2), 1)
self.assertEqual(self.tv.index(item2), 1)
self.tv.move(item2, '', 0)
self.assertEqual(self.tv.index(item2), 0)
self.assertEqual(self.tv.index(item1), 1)
# check that index still works even after its parent and siblings
# have been detached
self.tv.detach(item1)
self.assertEqual(self.tv.index(c2), 1)
self.tv.detach(c1)
self.assertEqual(self.tv.index(c2), 0)
# but it fails after item has been deleted
self.tv.delete(item1)
self.assertRaises(tkinter.TclError, self.tv.index, c2)
def test_insert_item(self):
# parent 'none' doesn't exist
self.assertRaises(tkinter.TclError, self.tv.insert, 'none', 'end')
# open values
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
open='')
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
open='please')
self.assertFalse(self.tv.delete(self.tv.insert('', 'end', open=True)))
self.assertFalse(self.tv.delete(self.tv.insert('', 'end', open=False)))
# invalid index
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'middle')
# trying to duplicate item id is invalid
itemid = self.tv.insert('', 'end', 'first-item')
self.assertEqual(itemid, 'first-item')
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
'first-item')
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
MockTclObj('first-item'))
# unicode values
value = '\xe1ba'
item = self.tv.insert('', 'end', values=(value, ))
self.assertEqual(self.tv.item(item, 'values'),
(value,) if self.wantobjects else value)
self.assertEqual(self.tv.item(item, values=None),
(value,) if self.wantobjects else value)
self.tv.item(item, values=self.root.splitlist(self.tv.item(item, values=None)))
self.assertEqual(self.tv.item(item, values=None),
(value,) if self.wantobjects else value)
self.assertIsInstance(self.tv.item(item), dict)
# erase item values
self.tv.item(item, values='')
self.assertFalse(self.tv.item(item, values=None))
# item tags
item = self.tv.insert('', 'end', tags=[1, 2, value])
self.assertEqual(self.tv.item(item, tags=None),
('1', '2', value) if self.wantobjects else
'1 2 %s' % value)
self.tv.item(item, tags=[])
self.assertFalse(self.tv.item(item, tags=None))
self.tv.item(item, tags=(1, 2))
self.assertEqual(self.tv.item(item, tags=None),
('1', '2') if self.wantobjects else '1 2')
# values with spaces
item = self.tv.insert('', 'end', values=('a b c',
'%s %s' % (value, value)))
self.assertEqual(self.tv.item(item, values=None),
('a b c', '%s %s' % (value, value)) if self.wantobjects else
'{a b c} {%s %s}' % (value, value))
# text
self.assertEqual(self.tv.item(
self.tv.insert('', 'end', text="Label here"), text=None),
"Label here")
self.assertEqual(self.tv.item(
self.tv.insert('', 'end', text=value), text=None),
value)
def test_set(self):
self.tv['columns'] = ['A', 'B']
item = self.tv.insert('', 'end', values=['a', 'b'])
self.assertEqual(self.tv.set(item), {'A': 'a', 'B': 'b'})
self.tv.set(item, 'B', 'a')
self.assertEqual(self.tv.item(item, values=None),
('a', 'a') if self.wantobjects else 'a a')
self.tv['columns'] = ['B']
self.assertEqual(self.tv.set(item), {'B': 'a'})
self.tv.set(item, 'B', 'b')
self.assertEqual(self.tv.set(item, column='B'), 'b')
self.assertEqual(self.tv.item(item, values=None),
('b', 'a') if self.wantobjects else 'b a')
self.tv.set(item, 'B', 123)
self.assertEqual(self.tv.set(item, 'B'),
123 if self.wantobjects else '123')
self.assertEqual(self.tv.item(item, values=None),
(123, 'a') if self.wantobjects else '123 a')
self.assertEqual(self.tv.set(item),
{'B': 123} if self.wantobjects else {'B': '123'})
# nonexistent column
self.assertRaises(tkinter.TclError, self.tv.set, item, 'A')
self.assertRaises(tkinter.TclError, self.tv.set, item, 'A', 'b')
# nonexistent item
self.assertRaises(tkinter.TclError, self.tv.set, 'notme')
def test_tag_bind(self):
events = []
item1 = self.tv.insert('', 'end', tags=['call'])
item2 = self.tv.insert('', 'end', tags=['call'])
self.tv.tag_bind('call', '<ButtonPress-1>',
lambda evt: events.append(1))
self.tv.tag_bind('call', '<ButtonRelease-1>',
lambda evt: events.append(2))
self.tv.pack()
self.tv.wait_visibility()
self.tv.update()
pos_y = set()
found = set()
for i in range(0, 100, 10):
if len(found) == 2: # item1 and item2 already found
break
item_id = self.tv.identify_row(i)
if item_id and item_id not in found:
pos_y.add(i)
found.add(item_id)
self.assertEqual(len(pos_y), 2) # item1 and item2 y pos
for y in pos_y:
simulate_mouse_click(self.tv, 0, y)
# by now there should be 4 things in the events list, since each
# item had a bind for two events that were simulated above
self.assertEqual(len(events), 4)
for evt in zip(events[::2], events[1::2]):
self.assertEqual(evt, (1, 2))
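# Illustrative sketch (assumption): tag_bind attaches the handler to every
# item carrying the tag, so a single binding covers item1 and item2 above.
def _demo_tag_bind(root):
    tv = ttk.Treeview(root)
    tv.insert('', 'end', text='first', tags=['clickable'])
    tv.insert('', 'end', text='second', tags=['clickable'])
    tv.tag_bind('clickable', '<ButtonPress-1>',
                lambda event: print('row pressed'))
    return tv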
def test_tag_configure(self):
# Just testing parameter passing for now
self.assertRaises(TypeError, self.tv.tag_configure)
self.assertRaises(tkinter.TclError, self.tv.tag_configure,
'test', sky='blue')
self.tv.tag_configure('test', foreground='blue')
self.assertEqual(str(self.tv.tag_configure('test', 'foreground')),
'blue')
self.assertEqual(str(self.tv.tag_configure('test', foreground=None)),
'blue')
self.assertIsInstance(self.tv.tag_configure('test'), dict)
@add_standard_options(StandardTtkOptionsTests)
class SeparatorTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'orient', 'style', 'takefocus',
# 'state'?
)
default_orient = 'horizontal'
def create(self, **kwargs):
return ttk.Separator(self.root, **kwargs)
@add_standard_options(StandardTtkOptionsTests)
class SizegripTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'style', 'takefocus',
# 'state'?
)
def create(self, **kwargs):
return ttk.Sizegrip(self.root, **kwargs)
tests_gui = (
ButtonTest, CheckbuttonTest, ComboboxTest, EntryTest,
FrameTest, LabelFrameTest, LabelTest, MenubuttonTest,
NotebookTest, PanedWindowTest, ProgressbarTest,
RadiobuttonTest, ScaleTest, ScrollbarTest, SeparatorTest,
SizegripTest, TreeviewTest, WidgetTest,
)
if __name__ == "__main__":
unittest.main()
|
Danielhiversen/home-assistant
|
refs/heads/master
|
homeassistant/components/binary_sensor/rachio.py
|
6
|
"""
Integration with the Rachio Iro sprinkler system controller.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.rachio/
"""
from abc import abstractmethod
import logging
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.components.rachio import (DOMAIN as DOMAIN_RACHIO,
KEY_DEVICE_ID,
KEY_STATUS,
KEY_SUBTYPE,
SIGNAL_RACHIO_CONTROLLER_UPDATE,
STATUS_OFFLINE,
STATUS_ONLINE,
SUBTYPE_OFFLINE,
SUBTYPE_ONLINE,)
from homeassistant.helpers.dispatcher import dispatcher_connect
DEPENDENCIES = ['rachio']
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Rachio binary sensors."""
devices = []
for controller in hass.data[DOMAIN_RACHIO].controllers:
devices.append(RachioControllerOnlineBinarySensor(hass, controller))
add_entities(devices)
_LOGGER.info("%d Rachio binary sensor(s) added", len(devices))
class RachioControllerBinarySensor(BinarySensorDevice):
"""Represent a binary sensor that reflects a Rachio state."""
def __init__(self, hass, controller, poll=True):
"""Set up a new Rachio controller binary sensor."""
self._controller = controller
if poll:
self._state = self._poll_update()
else:
self._state = None
dispatcher_connect(hass, SIGNAL_RACHIO_CONTROLLER_UPDATE,
self._handle_any_update)
@property
def should_poll(self) -> bool:
"""Declare that this entity pushes its state to HA."""
return False
@property
def is_on(self) -> bool:
"""Return whether the sensor has a 'true' value."""
return self._state
def _handle_any_update(self, *args, **kwargs) -> None:
"""Determine whether an update event applies to this device."""
if args[0][KEY_DEVICE_ID] != self._controller.controller_id:
# For another device
return
# For this device
self._handle_update()
@abstractmethod
def _poll_update(self, data=None) -> bool:
"""Request the state from the API."""
pass
@abstractmethod
def _handle_update(self, *args, **kwargs) -> None:
"""Handle an update to the state of this sensor."""
pass
class RachioControllerOnlineBinarySensor(RachioControllerBinarySensor):
"""Represent a binary sensor that reflects if the controller is online."""
def __init__(self, hass, controller):
"""Set up a new Rachio controller online binary sensor."""
super().__init__(hass, controller, poll=False)
self._state = self._poll_update(controller.init_data)
@property
def name(self) -> str:
"""Return the name of this sensor including the controller name."""
return "{} online".format(self._controller.name)
@property
def unique_id(self) -> str:
"""Return a unique id for this entity."""
return "{}-online".format(self._controller.controller_id)
@property
def device_class(self) -> str:
"""Return the class of this device, from component DEVICE_CLASSES."""
return 'connectivity'
@property
def icon(self) -> str:
"""Return the name of an icon for this sensor."""
return 'mdi:wifi-strength-4' if self.is_on\
else 'mdi:wifi-strength-off-outline'
def _poll_update(self, data=None) -> bool:
"""Request the state from the API."""
if data is None:
data = self._controller.rachio.device.get(
self._controller.controller_id)[1]
if data[KEY_STATUS] == STATUS_ONLINE:
return True
if data[KEY_STATUS] == STATUS_OFFLINE:
return False
_LOGGER.warning('"%s" reported in unknown state "%s"', self.name,
data[KEY_STATUS])
def _handle_update(self, *args, **kwargs) -> None:
"""Handle an update to the state of this sensor."""
if args[0][KEY_SUBTYPE] == SUBTYPE_ONLINE:
self._state = True
elif args[0][KEY_SUBTYPE] == SUBTYPE_OFFLINE:
self._state = False
self.schedule_update_ha_state()
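# Illustrative sketch (assumption, not part of this integration): the minimum
# a concrete subclass must provide -- the two abstract hooks. The subtype
# strings below are hypothetical placeholders, not real Rachio subtypes.
class _ExampleRachioBinarySensor(RachioControllerBinarySensor):
    """Demonstration subclass mirroring the online sensor's shape."""

    def _poll_update(self, data=None) -> bool:
        # A real sensor would query self._controller.rachio here.
        return False

    def _handle_update(self, *args, **kwargs) -> None:
        if args[0][KEY_SUBTYPE] == 'EXAMPLE_ON':     # hypothetical subtype
            self._state = True
        elif args[0][KEY_SUBTYPE] == 'EXAMPLE_OFF':  # hypothetical subtype
            self._state = False
        self.schedule_update_ha_state()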
|
onceuponatimeforever/oh-mainline
|
refs/heads/master
|
vendor/packages/twill/twill/other_packages/__init__.py
|
9480
|
#
|
edunham/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/XMLHttpRequest/resources/echo-method.py
|
228
|
def main(request, response):
response.send_body_for_head_request = True
headers = [("Content-type", "text/plain")]
content = request.method
return headers, content
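# Illustrative sketch (assumption, not part of web-platform-tests): wptserve
# Python handlers follow this (headers, body) return convention; a sibling
# handler echoing the request URL instead would look like:
def example_echo_url(request, response):
    headers = [("Content-type", "text/plain")]
    return headers, request.url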
|
MatthieuBizien/scikit-learn
|
refs/heads/master
|
sklearn/decomposition/tests/test_nmf.py
|
23
|
import numpy as np
from scipy import linalg
from sklearn.decomposition import (NMF, ProjectedGradientNMF,
non_negative_factorization)
from sklearn.decomposition import nmf # For testing internals
from scipy.sparse import csc_matrix
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_raise_message, assert_no_warnings
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import ignore_warnings
from sklearn.base import clone
random_state = np.random.mtrand.RandomState(0)
def test_initialize_nn_output():
# Test that initialization does not return negative values
data = np.abs(random_state.randn(10, 10))
for init in ('random', 'nndsvd', 'nndsvda', 'nndsvdar'):
W, H = nmf._initialize_nmf(data, 10, init=init, random_state=0)
assert_false((W < 0).any() or (H < 0).any())
@ignore_warnings
def test_parameter_checking():
A = np.ones((2, 2))
name = 'spam'
msg = "Invalid solver parameter: got 'spam' instead of one of"
assert_raise_message(ValueError, msg, NMF(solver=name).fit, A)
msg = "Invalid init parameter: got 'spam' instead of one of"
assert_raise_message(ValueError, msg, NMF(init=name).fit, A)
msg = "Invalid sparseness parameter: got 'spam' instead of one of"
assert_raise_message(ValueError, msg, NMF(sparseness=name).fit, A)
msg = "Negative values in data passed to"
assert_raise_message(ValueError, msg, NMF().fit, -A)
assert_raise_message(ValueError, msg, nmf._initialize_nmf, -A,
2, 'nndsvd')
clf = NMF(2, tol=0.1).fit(A)
assert_raise_message(ValueError, msg, clf.transform, -A)
def test_initialize_close():
# Test NNDSVD error
# Test that _initialize_nmf error is less than the standard deviation of
# the entries in the matrix.
A = np.abs(random_state.randn(10, 10))
W, H = nmf._initialize_nmf(A, 10, init='nndsvd')
error = linalg.norm(np.dot(W, H) - A)
sdev = linalg.norm(A - A.mean())
assert_true(error <= sdev)
def test_initialize_variants():
# Test NNDSVD variants correctness
# Test that the variants 'nndsvda' and 'nndsvdar' differ from basic
# 'nndsvd' only where the basic version has zeros.
data = np.abs(random_state.randn(10, 10))
W0, H0 = nmf._initialize_nmf(data, 10, init='nndsvd')
Wa, Ha = nmf._initialize_nmf(data, 10, init='nndsvda')
War, Har = nmf._initialize_nmf(data, 10, init='nndsvdar',
random_state=0)
for ref, evl in ((W0, Wa), (W0, War), (H0, Ha), (H0, Har)):
assert_true(np.allclose(evl[ref != 0], ref[ref != 0]))
@ignore_warnings
def test_nmf_fit_nn_output():
# Test that the decomposition does not contain negative values
A = np.c_[5 * np.ones(5) - np.arange(1, 6),
5 * np.ones(5) + np.arange(1, 6)]
for solver in ('pg', 'cd'):
for init in (None, 'nndsvd', 'nndsvda', 'nndsvdar'):
model = NMF(n_components=2, solver=solver, init=init,
random_state=0)
transf = model.fit_transform(A)
assert_false((model.components_ < 0).any() or
(transf < 0).any())
@ignore_warnings
def test_nmf_fit_close():
# Test that the fit is not too far away
for solver in ('pg', 'cd'):
pnmf = NMF(5, solver=solver, init='nndsvd', random_state=0)
X = np.abs(random_state.randn(6, 5))
assert_less(pnmf.fit(X).reconstruction_err_, 0.05)
def test_nls_nn_output():
# Test that NLS solver doesn't return negative values
A = np.arange(1, 5).reshape(1, -1)
Ap, _, _ = nmf._nls_subproblem(np.dot(A.T, -A), A.T, A, 0.001, 100)
assert_false((Ap < 0).any())
def test_nls_close():
# Test that the NLS results should be close
A = np.arange(1, 5).reshape(1, -1)
Ap, _, _ = nmf._nls_subproblem(np.dot(A.T, A), A.T, np.zeros_like(A),
0.001, 100)
assert_true((np.abs(Ap - A) < 0.01).all())
@ignore_warnings
def test_nmf_transform():
# Test that NMF.transform returns close values
A = np.abs(random_state.randn(6, 5))
for solver in ('pg', 'cd'):
m = NMF(solver=solver, n_components=4, init='nndsvd', random_state=0)
ft = m.fit_transform(A)
t = m.transform(A)
assert_array_almost_equal(ft, t, decimal=2)
def test_nmf_transform_custom_init():
# Smoke test that checks if NMF.transform works with custom initialization
A = np.abs(random_state.randn(6, 5))
n_components = 4
avg = np.sqrt(A.mean() / n_components)
H_init = np.abs(avg * random_state.randn(n_components, 5))
W_init = np.abs(avg * random_state.randn(6, n_components))
m = NMF(solver='cd', n_components=n_components, init='custom', random_state=0)
ft = m.fit_transform(A, W=W_init, H=H_init)
t = m.transform(A)
@ignore_warnings
def test_nmf_inverse_transform():
# Test that NMF.inverse_transform returns close values
random_state = np.random.RandomState(0)
A = np.abs(random_state.randn(6, 4))
for solver in ('pg', 'cd'):
m = NMF(solver=solver, n_components=4, init='random', random_state=0)
ft = m.fit_transform(A)
t = m.transform(A)
A_new = m.inverse_transform(t)
assert_array_almost_equal(A, A_new, decimal=2)
@ignore_warnings
def test_n_components_greater_n_features():
# Smoke test for the case of more components than features.
A = np.abs(random_state.randn(30, 10))
NMF(n_components=15, random_state=0, tol=1e-2).fit(A)
@ignore_warnings
def test_projgrad_nmf_sparseness():
# Test sparseness
# Test that sparsity constraints actually increase sparseness in the
# part where they are applied.
tol = 1e-2
A = np.abs(random_state.randn(10, 10))
m = ProjectedGradientNMF(n_components=5, random_state=0, tol=tol).fit(A)
data_sp = ProjectedGradientNMF(n_components=5, sparseness='data',
random_state=0,
tol=tol).fit(A).data_sparseness_
comp_sp = ProjectedGradientNMF(n_components=5, sparseness='components',
random_state=0,
tol=tol).fit(A).comp_sparseness_
assert_greater(data_sp, m.data_sparseness_)
assert_greater(comp_sp, m.comp_sparseness_)
@ignore_warnings
def test_sparse_input():
# Test that sparse matrices are accepted as input
from scipy.sparse import csc_matrix
A = np.abs(random_state.randn(10, 10))
A[:, 2 * np.arange(5)] = 0
A_sparse = csc_matrix(A)
for solver in ('pg', 'cd'):
est1 = NMF(solver=solver, n_components=5, init='random',
random_state=0, tol=1e-2)
est2 = clone(est1)
W1 = est1.fit_transform(A)
W2 = est2.fit_transform(A_sparse)
H1 = est1.components_
H2 = est2.components_
assert_array_almost_equal(W1, W2)
assert_array_almost_equal(H1, H2)
@ignore_warnings
def test_sparse_transform():
# Test that transform works on sparse data. Issue #2124
A = np.abs(random_state.randn(3, 2))
A[A > 1.0] = 0
A = csc_matrix(A)
for solver in ('pg', 'cd'):
model = NMF(solver=solver, random_state=0, tol=1e-4, n_components=2)
A_fit_tr = model.fit_transform(A)
A_tr = model.transform(A)
assert_array_almost_equal(A_fit_tr, A_tr, decimal=1)
@ignore_warnings
def test_non_negative_factorization_consistency():
# Test that the function is called in the same way, either directly
# or through the NMF class
A = np.abs(random_state.randn(10, 10))
A[:, 2 * np.arange(5)] = 0
for solver in ('pg', 'cd'):
W_nmf, H, _ = non_negative_factorization(
A, solver=solver, random_state=1, tol=1e-2)
W_nmf_2, _, _ = non_negative_factorization(
A, H=H, update_H=False, solver=solver, random_state=1, tol=1e-2)
model_class = NMF(solver=solver, random_state=1, tol=1e-2)
W_cls = model_class.fit_transform(A)
W_cls_2 = model_class.transform(A)
assert_array_almost_equal(W_nmf, W_cls, decimal=10)
assert_array_almost_equal(W_nmf_2, W_cls_2, decimal=10)
@ignore_warnings
def test_non_negative_factorization_checking():
A = np.ones((2, 2))
# Test parameters checking is public function
nnmf = non_negative_factorization
assert_no_warnings(nnmf, A, A, A, np.int64(1))
msg = "Number of components must be a positive integer; got (n_components=1.5)"
assert_raise_message(ValueError, msg, nnmf, A, A, A, 1.5)
msg = "Number of components must be a positive integer; got (n_components='2')"
assert_raise_message(ValueError, msg, nnmf, A, A, A, '2')
msg = "Negative values in data passed to NMF (input H)"
assert_raise_message(ValueError, msg, nnmf, A, A, -A, 2, 'custom')
msg = "Negative values in data passed to NMF (input W)"
assert_raise_message(ValueError, msg, nnmf, A, -A, A, 2, 'custom')
msg = "Array passed to NMF (input H) is full of zeros"
assert_raise_message(ValueError, msg, nnmf, A, A, 0 * A, 2, 'custom')
def test_safe_compute_error():
A = np.abs(random_state.randn(10, 10))
A[:, 2 * np.arange(5)] = 0
A_sparse = csc_matrix(A)
W, H = nmf._initialize_nmf(A, 5, init='random', random_state=0)
error = nmf._safe_compute_error(A, W, H)
error_sparse = nmf._safe_compute_error(A_sparse, W, H)
assert_almost_equal(error, error_sparse)
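# Illustrative sketch (assumption, not part of the test module): the basic
# round trip these tests exercise -- factor X ~= W.H and measure the
# reconstruction error directly.
def _demo_nmf_roundtrip():
    X = np.abs(np.random.RandomState(42).randn(20, 8))
    model = NMF(n_components=4, init='nndsvd', random_state=0)
    W = model.fit_transform(X)   # shape (20, 4)
    H = model.components_        # shape (4, 8)
    return linalg.norm(np.dot(W, H) - X)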
|
tiangolo/ansible
|
refs/heads/devel
|
v1/ansible/inventory/__init__.py
|
88
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
import fnmatch
import os
import sys
import re
import subprocess
import ansible.constants as C
from ansible.inventory.ini import InventoryParser
from ansible.inventory.script import InventoryScript
from ansible.inventory.dir import InventoryDirectory
from ansible.inventory.group import Group
from ansible.inventory.host import Host
from ansible import errors
from ansible import utils
class Inventory(object):
"""
Host inventory for ansible.
"""
__slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset',
'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list',
'_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir']
def __init__(self, host_list=C.DEFAULT_HOST_LIST, vault_password=None):
# the host file, or script path, or list of hosts
# if a list, inventory data will NOT be loaded
self.host_list = host_list
self._vault_password=vault_password
# caching to avoid repeated calculations, particularly with
# external inventory scripts.
self._vars_per_host = {}
self._vars_per_group = {}
self._hosts_cache = {}
self._groups_list = {}
self._pattern_cache = {}
# to be set by calling set_playbook_basedir by playbook code
self._playbook_basedir = None
# the inventory object holds a list of groups
self.groups = []
# a list of host names to restrict current inquiries to
self._restriction = None
self._also_restriction = None
self._subset = None
if isinstance(host_list, basestring):
if "," in host_list:
host_list = host_list.split(",")
host_list = [ h for h in host_list if h and h.strip() ]
if host_list is None:
self.parser = None
elif isinstance(host_list, list):
self.parser = None
all = Group('all')
self.groups = [ all ]
ipv6_re = re.compile(r'\[([a-f:A-F0-9]*[%[0-z]+]?)\](?::(\d+))?')
for x in host_list:
m = ipv6_re.match(x)
if m:
all.add_host(Host(m.groups()[0], m.groups()[1]))
else:
if ":" in x:
tokens = x.rsplit(":", 1)
# if there is ':' in the address, then this is an ipv6
if ':' in tokens[0]:
all.add_host(Host(x))
else:
all.add_host(Host(tokens[0], tokens[1]))
else:
all.add_host(Host(x))
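# Illustrative examples (sketch, not in the original source) of how the
# list form above is parsed:
#   "web1,db1:2222"        -> Host('web1'), Host('db1', '2222')
#   "[2001:db8::1]:2222"   -> Host('2001:db8::1', '2222')  # bracketed IPv6
#   "fe80::1"              -> Host('fe80::1')  # bare IPv6, whole string kept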
elif os.path.exists(host_list):
if os.path.isdir(host_list):
# Ensure basedir is inside the directory
self.host_list = os.path.join(self.host_list, "")
self.parser = InventoryDirectory(filename=host_list)
self.groups = self.parser.groups.values()
else:
# check to see if the specified file starts with a
# shebang (#!/), so if an error is raised by the parser
# class we can show a more apropos error
shebang_present = False
try:
inv_file = open(host_list)
first_line = inv_file.readlines()[0]
inv_file.close()
if first_line.startswith('#!'):
shebang_present = True
except:
pass
if utils.is_executable(host_list):
try:
self.parser = InventoryScript(filename=host_list)
self.groups = self.parser.groups.values()
except:
if not shebang_present:
raise errors.AnsibleError("The file %s is marked as executable, but failed to execute correctly. " % host_list + \
"If this is not supposed to be an executable script, correct this with `chmod -x %s`." % host_list)
else:
raise
else:
try:
self.parser = InventoryParser(filename=host_list)
self.groups = self.parser.groups.values()
except:
if shebang_present:
raise errors.AnsibleError("The file %s looks like it should be an executable inventory script, but is not marked executable. " % host_list + \
"Perhaps you want to correct this with `chmod +x %s`?" % host_list)
else:
raise
utils.plugins.vars_loader.add_directory(self.basedir(), with_subdir=True)
else:
raise errors.AnsibleError("Unable to find an inventory file, specify one with -i ?")
self._vars_plugins = [ x for x in utils.plugins.vars_loader.all(self) ]
# get group vars from group_vars/ files and vars plugins
for group in self.groups:
group.vars = utils.combine_vars(group.vars, self.get_group_variables(group.name, vault_password=self._vault_password))
# get host vars from host_vars/ files and vars plugins
for host in self.get_hosts():
host.vars = utils.combine_vars(host.vars, self.get_host_variables(host.name, vault_password=self._vault_password))
def _match(self, str, pattern_str):
try:
if pattern_str.startswith('~'):
return re.search(pattern_str[1:], str)
else:
return fnmatch.fnmatch(str, pattern_str)
except Exception, e:
raise errors.AnsibleError('invalid host pattern: %s' % pattern_str)
def _match_list(self, items, item_attr, pattern_str):
results = []
try:
if not pattern_str.startswith('~'):
pattern = re.compile(fnmatch.translate(pattern_str))
else:
pattern = re.compile(pattern_str[1:])
except Exception, e:
raise errors.AnsibleError('invalid host pattern: %s' % pattern_str)
for item in items:
if pattern.match(getattr(item, item_attr)):
results.append(item)
return results
def get_hosts(self, pattern="all"):
"""
find all host names matching a pattern string, taking into account any inventory restrictions or
applied subsets.
"""
# process patterns
if isinstance(pattern, list):
pattern = ';'.join(pattern)
patterns = pattern.replace(";",":").split(":")
hosts = self._get_hosts(patterns)
# exclude hosts not in a subset, if defined
if self._subset:
subset = self._get_hosts(self._subset)
hosts = [ h for h in hosts if h in subset ]
# exclude hosts mentioned in any restriction (ex: failed hosts)
if self._restriction is not None:
hosts = [ h for h in hosts if h.name in self._restriction ]
if self._also_restriction is not None:
hosts = [ h for h in hosts if h.name in self._also_restriction ]
return hosts
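# Illustrative pattern semantics (sketch, not in the original source):
#   inventory.get_hosts("webservers:dbservers")  # union of the two groups
#   inventory.get_hosts("webservers:&staging")   # intersection ('&' prefix)
#   inventory.get_hosts("webservers:!atlanta")   # exclusion ('!' prefix)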
def _get_hosts(self, patterns):
"""
finds hosts that match a list of patterns. Handles negative
matches as well as intersection matches.
"""
# Host specifiers should be sorted to ensure consistent behavior
pattern_regular = []
pattern_intersection = []
pattern_exclude = []
for p in patterns:
if p.startswith("!"):
pattern_exclude.append(p)
elif p.startswith("&"):
pattern_intersection.append(p)
elif p:
pattern_regular.append(p)
# if no regular pattern was given (hence only exclude and/or intersection
# patterns), default to 'all' so they still have something to filter
if pattern_regular == []:
pattern_regular = ['all']
# when applying the host selectors, run those without the "&" or "!"
# first, then the &s, then the !s.
patterns = pattern_regular + pattern_intersection + pattern_exclude
hosts = []
for p in patterns:
# avoid resolving a pattern that is a plain host
if p in self._hosts_cache:
hosts.append(self.get_host(p))
else:
that = self.__get_hosts(p)
if p.startswith("!"):
hosts = [ h for h in hosts if h not in that ]
elif p.startswith("&"):
hosts = [ h for h in hosts if h in that ]
else:
to_append = [ h for h in that if h.name not in [ y.name for y in hosts ] ]
hosts.extend(to_append)
return hosts
def __get_hosts(self, pattern):
"""
finds hosts that positively match a particular pattern. Does not
take into account negative matches.
"""
if pattern in self._pattern_cache:
return self._pattern_cache[pattern]
(name, enumeration_details) = self._enumeration_info(pattern)
hpat = self._hosts_in_unenumerated_pattern(name)
result = self._apply_ranges(pattern, hpat)
self._pattern_cache[pattern] = result
return result
def _enumeration_info(self, pattern):
"""
returns (pattern, limits) taking a regular pattern and finding out
which parts of it correspond to start/stop offsets. limits is
a tuple of (start, stop) or None
"""
# Do not parse regexes for enumeration info
if pattern.startswith('~'):
return (pattern, None)
# The regex used to match on the range, which can be [x] or [x-y].
pattern_re = re.compile(r"^(.*)\[([-]?[0-9]+)(?:(?:-)([0-9]+))?\](.*)$")
m = pattern_re.match(pattern)
if m:
(target, first, last, rest) = m.groups()
first = int(first)
if last:
if first < 0:
raise errors.AnsibleError("invalid range: negative indices cannot be used as the first item in a range")
last = int(last)
else:
last = first
return (target, (first, last))
else:
return (pattern, None)
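# Illustrative results (sketch, not in the original source):
#   _enumeration_info("web[0]")    -> ("web", (0, 0))
#   _enumeration_info("web[0-3]")  -> ("web", (0, 3))
#   _enumeration_info("web")       -> ("web", None)
#   _enumeration_info("~web\d+")   -> ("~web\d+", None)  # regexes skipped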
def _apply_ranges(self, pat, hosts):
"""
given a pattern like foo, that matches hosts, return all of hosts
given a pattern like foo[0:5], where foo matches hosts, return the first 6 hosts
"""
# If there are no hosts to select from, just return the
# empty set. This prevents trying to do selections on an empty set.
# issue#6258
if not hosts:
return hosts
(loose_pattern, limits) = self._enumeration_info(pat)
if not limits:
return hosts
(left, right) = limits
if left == '':
left = 0
if right == '':
right = 0
left=int(left)
right=int(right)
try:
if left != right:
return hosts[left:right]
else:
return [ hosts[left] ]
except IndexError:
raise errors.AnsibleError("no hosts matching the pattern '%s' were found" % pat)
def _create_implicit_localhost(self, pattern):
new_host = Host(pattern)
new_host.set_variable("ansible_python_interpreter", sys.executable)
new_host.set_variable("ansible_connection", "local")
ungrouped = self.get_group("ungrouped")
if ungrouped is None:
self.add_group(Group('ungrouped'))
ungrouped = self.get_group('ungrouped')
self.get_group('all').add_child_group(ungrouped)
ungrouped.add_host(new_host)
return new_host
def _hosts_in_unenumerated_pattern(self, pattern):
""" Get all host names matching the pattern """
results = []
hosts = []
hostnames = set()
# ignore any negative checks here, this is handled elsewhere
pattern = pattern.replace("!","").replace("&", "")
def __append_host_to_results(host):
if host not in results and host.name not in hostnames:
hostnames.add(host.name)
results.append(host)
groups = self.get_groups()
for group in groups:
if pattern == 'all':
for host in group.get_hosts():
__append_host_to_results(host)
else:
if self._match(group.name, pattern):
for host in group.get_hosts():
__append_host_to_results(host)
else:
matching_hosts = self._match_list(group.get_hosts(), 'name', pattern)
for host in matching_hosts:
__append_host_to_results(host)
if pattern in ["localhost", "127.0.0.1"] and len(results) == 0:
new_host = self._create_implicit_localhost(pattern)
results.append(new_host)
return results
def clear_pattern_cache(self):
''' called exclusively by the add_host plugin to allow patterns to be recalculated '''
self._pattern_cache = {}
def groups_for_host(self, host):
if host in self._hosts_cache:
return self._hosts_cache[host].get_groups()
else:
return []
def groups_list(self):
if not self._groups_list:
groups = {}
for g in self.groups:
groups[g.name] = [h.name for h in g.get_hosts()]
ancestors = g.get_ancestors()
for a in ancestors:
if a.name not in groups:
groups[a.name] = [h.name for h in a.get_hosts()]
self._groups_list = groups
return self._groups_list
def get_groups(self):
return self.groups
def get_host(self, hostname):
if hostname not in self._hosts_cache:
self._hosts_cache[hostname] = self._get_host(hostname)
return self._hosts_cache[hostname]
def _get_host(self, hostname):
if hostname in ['localhost','127.0.0.1']:
for host in self.get_group('all').get_hosts():
if host.name in ['localhost', '127.0.0.1']:
return host
return self._create_implicit_localhost(hostname)
else:
for group in self.groups:
for host in group.get_hosts():
if hostname == host.name:
return host
return None
def get_group(self, groupname):
for group in self.groups:
if group.name == groupname:
return group
return None
def get_group_variables(self, groupname, update_cached=False, vault_password=None):
if groupname not in self._vars_per_group or update_cached:
self._vars_per_group[groupname] = self._get_group_variables(groupname, vault_password=vault_password)
return self._vars_per_group[groupname]
def _get_group_variables(self, groupname, vault_password=None):
group = self.get_group(groupname)
if group is None:
raise errors.AnsibleError("group not found: %s" % groupname)
vars = {}
# plugin.get_group_vars retrieves just vars for specific group
vars_results = [ plugin.get_group_vars(group, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_group_vars')]
for updated in vars_results:
if updated is not None:
vars = utils.combine_vars(vars, updated)
# Read group_vars/ files
vars = utils.combine_vars(vars, self.get_group_vars(group))
return vars
def get_variables(self, hostname, update_cached=False, vault_password=None):
host = self.get_host(hostname)
if not host:
raise errors.AnsibleError("host not found: %s" % hostname)
return host.get_variables()
def get_host_variables(self, hostname, update_cached=False, vault_password=None):
if hostname not in self._vars_per_host or update_cached:
self._vars_per_host[hostname] = self._get_host_variables(hostname, vault_password=vault_password)
return self._vars_per_host[hostname]
def _get_host_variables(self, hostname, vault_password=None):
host = self.get_host(hostname)
if host is None:
raise errors.AnsibleError("host not found: %s" % hostname)
vars = {}
# plugin.run retrieves all vars (also from groups) for host
vars_results = [ plugin.run(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'run')]
for updated in vars_results:
if updated is not None:
vars = utils.combine_vars(vars, updated)
# plugin.get_host_vars retrieves just vars for specific host
vars_results = [ plugin.get_host_vars(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_host_vars')]
for updated in vars_results:
if updated is not None:
vars = utils.combine_vars(vars, updated)
# still need to check InventoryParser per host vars
# which actually means InventoryScript per host,
# which is not performant
if self.parser is not None:
vars = utils.combine_vars(vars, self.parser.get_host_variables(host))
# Read host_vars/ files
vars = utils.combine_vars(vars, self.get_host_vars(host))
return vars
def add_group(self, group):
if group.name not in self.groups_list():
self.groups.append(group)
self._groups_list = None # invalidate internal cache
else:
raise errors.AnsibleError("group already in inventory: %s" % group.name)
def list_hosts(self, pattern="all"):
""" return a list of hostnames for a pattern """
result = [ h.name for h in self.get_hosts(pattern) ]
if len(result) == 0 and pattern in ["localhost", "127.0.0.1"]:
result = [pattern]
return result
def list_groups(self):
return sorted([ g.name for g in self.groups ], key=lambda x: x)
# TODO: remove this function
def get_restriction(self):
return self._restriction
def restrict_to(self, restriction):
"""
Restrict list operations to the hosts given in restriction. This is used
to exclude failed hosts in main playbook code; don't use this for other
reasons.
"""
if not isinstance(restriction, list):
restriction = [ restriction ]
self._restriction = restriction
def also_restrict_to(self, restriction):
"""
        Works like restrict_to but applies an additional restriction. Playbooks use this
to implement serial behavior.
"""
if not isinstance(restriction, list):
restriction = [ restriction ]
self._also_restriction = restriction
def subset(self, subset_pattern):
"""
        Limits inventory results to a subset of inventory that matches a given
        pattern, such as selecting a given geographic or numeric slice from
        a previous 'hosts' selection that only selects roles, or vice versa.
        Corresponds to the --limit parameter of ansible-playbook.
"""
if subset_pattern is None:
self._subset = None
else:
subset_pattern = subset_pattern.replace(',',':')
subset_pattern = subset_pattern.replace(";",":").split(":")
results = []
# allow Unix style @filename data
for x in subset_pattern:
if x.startswith("@"):
fd = open(x[1:])
results.extend(fd.read().split("\n"))
fd.close()
else:
results.append(x)
self._subset = results
def lift_restriction(self):
""" Do not restrict list operations """
self._restriction = None
def lift_also_restriction(self):
""" Clears the also restriction """
self._also_restriction = None
def is_file(self):
""" did inventory come from a file? """
if not isinstance(self.host_list, basestring):
return False
return os.path.exists(self.host_list)
def basedir(self):
""" if inventory came from a file, what's the directory? """
if not self.is_file():
return None
dname = os.path.dirname(self.host_list)
if dname is None or dname == '' or dname == '.':
cwd = os.getcwd()
return os.path.abspath(cwd)
return os.path.abspath(dname)
def src(self):
""" if inventory came from a file, what's the directory and file name? """
if not self.is_file():
return None
return self.host_list
def playbook_basedir(self):
""" returns the directory of the current playbook """
return self._playbook_basedir
def set_playbook_basedir(self, dir):
"""
sets the base directory of the playbook so inventory can use it as a
basedir for host_ and group_vars, and other things.
"""
# Only update things if dir is a different playbook basedir
if dir != self._playbook_basedir:
self._playbook_basedir = dir
# get group vars from group_vars/ files
for group in self.groups:
group.vars = utils.combine_vars(group.vars, self.get_group_vars(group, new_pb_basedir=True))
# get host vars from host_vars/ files
for host in self.get_hosts():
host.vars = utils.combine_vars(host.vars, self.get_host_vars(host, new_pb_basedir=True))
# invalidate cache
self._vars_per_host = {}
self._vars_per_group = {}
def get_host_vars(self, host, new_pb_basedir=False):
""" Read host_vars/ files """
return self._get_hostgroup_vars(host=host, group=None, new_pb_basedir=new_pb_basedir)
def get_group_vars(self, group, new_pb_basedir=False):
""" Read group_vars/ files """
return self._get_hostgroup_vars(host=None, group=group, new_pb_basedir=new_pb_basedir)
def _get_hostgroup_vars(self, host=None, group=None, new_pb_basedir=False):
"""
Loads variables from group_vars/<groupname> and host_vars/<hostname> in directories parallel
to the inventory base directory or in the same directory as the playbook. Variables in the playbook
dir will win over the inventory dir if files are in both.
"""
results = {}
scan_pass = 0
_basedir = self.basedir()
# look in both the inventory base directory and the playbook base directory
# unless we do an update for a new playbook base dir
if not new_pb_basedir:
basedirs = [_basedir, self._playbook_basedir]
else:
basedirs = [self._playbook_basedir]
for basedir in basedirs:
# this can happen from particular API usages, particularly if not run
# from /usr/bin/ansible-playbook
if basedir is None:
continue
scan_pass = scan_pass + 1
            # it's not an error if the directory does not exist, keep moving
if not os.path.exists(basedir):
continue
# save work of second scan if the directories are the same
if _basedir == self._playbook_basedir and scan_pass != 1:
continue
if group and host is None:
# load vars in dir/group_vars/name_of_group
base_path = os.path.join(basedir, "group_vars/%s" % group.name)
results = utils.load_vars(base_path, results, vault_password=self._vault_password)
elif host and group is None:
# same for hostvars in dir/host_vars/name_of_host
base_path = os.path.join(basedir, "host_vars/%s" % host.name)
results = utils.load_vars(base_path, results, vault_password=self._vault_password)
        # all done; results is a dictionary of variables for this particular host or group.
return results
|
takis/odoo
|
refs/heads/8.0
|
addons/account_asset/__init__.py
|
446
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_asset
import account_asset_invoice
import wizard
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
indranil93/Aspect-Based-Sentiment-Analysis
|
refs/heads/master
|
catpol.py
|
1
|
import pickle
import a
import sys
from nltk.stem.wordnet import WordNetLemmatizer
lem=WordNetLemmatizer()
sentic=pickle.load(open('sentic_dump.p','rb')) # sentic dictionary
sentence=pickle.load(open('sentence_dump.p','rb')) #parser output dictionary
sentword=pickle.load(open('sentiword_dump.p','rb')) #sentiwordnet dictionary
aspect=pickle.load(open('aspect_dump_new.p', 'rb')) #aspect_term extractor dictionary
adav=["JJ", "JJR", "JJS", "RB", "RBR", "RBS"]
polarit=pickle.load(open('polarities.p','rb'))
cat=pickle.load(open('new_file.p','rb'))
nnegative = ['not', 'Not', "n't"]
noun = ["NN", "NNS", "NNP", "NNPS"]
positive =1
negative = -1
neutral=0
#print polarit
polcat_dict={}
def extractor(words = {}, sid=0): #sid = sentence id, and words = aspect terms
#print sid
print words
inner={}
#print words
    for j in words: # take the aspect terms one by one
lit={}
p='neutral'
# print j
print polarit[sid]
if lem.lemmatize(j) in polarit[sid].keys(): #j is the aspect term
            p= polarit[sid][lem.lemmatize(j)]
if j in polarit[sid].keys():
p= polarit[sid][j]
# print p
for l in words[j]:
print l
lit[l]=p
#print lit[l]
inner[j]=lit
#print inner[j]
    polcat_dict[sid]=inner
    #print polcat_dict[sid]
if __name__ == "__main__":
#words = {}
# words = {"word" : {"pos_tag" : "verb"},}
    for sid in cat.keys(): # in the aspect dictionary, the key is a sentence id and the value is the aspect terms of that sentence
#get words as dictionary #graph
#print sid
#subNoun = isNounSubject(sentence[sid])
        extractor(cat[sid], sid) # pass the sentence ids one by one
#print polcat_dict
pickle.dump(polcat_dict,open('polcat.p','wb'))
|
abramhindle/UnnaturalCodeFork
|
refs/heads/master
|
python/testdata/launchpad/lib/lp/services/authserver/tests/test_authserver.py
|
1
|
# Copyright 2009 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for the internal codehosting API."""
__metaclass__ = type
from zope.component import getUtility
from zope.publisher.xmlrpc import TestRequest
from lp.services.authserver.xmlrpc import AuthServerAPIView
from lp.testing import (
person_logged_in,
TestCaseWithFactory,
)
from lp.testing.layers import DatabaseFunctionalLayer
from lp.xmlrpc import faults
from lp.xmlrpc.interfaces import IPrivateApplication
class GetUserAndSSHKeysTests(TestCaseWithFactory):
"""Tests for the implementation of `IAuthServer.getUserAndSSHKeys`.
"""
layer = DatabaseFunctionalLayer
def setUp(self):
TestCaseWithFactory.setUp(self)
private_root = getUtility(IPrivateApplication)
self.authserver = AuthServerAPIView(
private_root.authserver, TestRequest())
def test_user_not_found(self):
# getUserAndSSHKeys returns the NoSuchPersonWithName fault if there is
# no Person of the given name.
self.assertEqual(
faults.NoSuchPersonWithName('no-one'),
self.authserver.getUserAndSSHKeys('no-one'))
def test_user_no_keys(self):
# getUserAndSSHKeys returns a dict with keys ['id', 'name', 'keys'].
# 'keys' refers to a list of SSH public keys in LP, which is empty for
# a freshly created user.
new_person = self.factory.makePerson()
self.assertEqual(
dict(id=new_person.id, name=new_person.name, keys=[]),
self.authserver.getUserAndSSHKeys(new_person.name))
def test_user_with_keys(self):
# For a user with registered SSH keys, getUserAndSSHKeys returns the
# name of the key type (RSA or DSA) and the text of the keys under
# 'keys' in the dict.
new_person = self.factory.makePerson()
with person_logged_in(new_person):
key = self.factory.makeSSHKey(person=new_person)
self.assertEqual(
dict(id=new_person.id, name=new_person.name,
keys=[(key.keytype.title, key.keytext)]),
self.authserver.getUserAndSSHKeys(new_person.name))
|
dukenmarga/civil-engineering-toolbox
|
refs/heads/master
|
controller/__init__.py
|
12133432
| |
seanwestfall/django
|
refs/heads/master
|
tests/managers_regress/__init__.py
|
12133432
| |
retomerz/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/conf/locale/sr/__init__.py
|
12133432
| |
codepantry/django
|
refs/heads/master
|
tests/gis_tests/geoapp/__init__.py
|
12133432
| |
gauravbose/digital-menu
|
refs/heads/master
|
tests/mail/__init__.py
|
12133432
| |
hackerbot/DjangoDev
|
refs/heads/master
|
django/conf/locale/mk/__init__.py
|
12133432
| |
rkokkelk/CouchPotatoServer
|
refs/heads/master
|
libs/suds/servicedefinition.py
|
201
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The I{service definition} provides a textual representation of a service.
"""
from logging import getLogger
from suds import *
import suds.metrics as metrics
from suds.sax import Namespace
log = getLogger(__name__)
class ServiceDefinition:
"""
A service definition provides an object used to generate a textual description
of a service.
@ivar wsdl: A wsdl.
@type wsdl: L{wsdl.Definitions}
@ivar service: The service object.
@type service: L{suds.wsdl.Service}
@ivar ports: A list of port-tuple: (port, [(method-name, pdef)])
@type ports: [port-tuple,..]
@ivar prefixes: A list of remapped prefixes.
@type prefixes: [(prefix,uri),..]
@ivar types: A list of type definitions
@type types: [I{Type},..]
"""
def __init__(self, wsdl, service):
"""
@param wsdl: A wsdl object
@type wsdl: L{Definitions}
@param service: A service B{name}.
@type service: str
"""
self.wsdl = wsdl
self.service = service
self.ports = []
self.params = []
self.types = []
self.prefixes = []
self.addports()
self.paramtypes()
self.publictypes()
self.getprefixes()
self.pushprefixes()
def pushprefixes(self):
"""
Add our prefixes to the wsdl so that when users invoke methods
        and reference the prefixes, they will resolve properly.
"""
for ns in self.prefixes:
self.wsdl.root.addPrefix(ns[0], ns[1])
def addports(self):
"""
        Look through the list of service ports and construct a list of tuples where
        each tuple is used to describe a port and its list of methods as:
        (port, [method]). Each method is a tuple: (name, [pdef,..]) where each pdef is
        a tuple: (param-name, type).
"""
timer = metrics.Timer()
timer.start()
for port in self.service.ports:
p = self.findport(port)
for op in port.binding.operations.values():
m = p[0].method(op.name)
binding = m.binding.input
method = (m.name, binding.param_defs(m))
p[1].append(method)
metrics.log.debug("method '%s' created: %s", m.name, timer)
p[1].sort()
timer.stop()
def findport(self, port):
"""
Find and return a port tuple for the specified port.
Created and added when not found.
@param port: A port.
@type port: I{service.Port}
@return: A port tuple.
@rtype: (port, [method])
"""
for p in self.ports:
            if p[0] == port: return p
p = (port, [])
self.ports.append(p)
return p
def getprefixes(self):
"""
        Add prefixes for each namespace referenced by parameter types.
"""
namespaces = []
for l in (self.params, self.types):
for t,r in l:
ns = r.namespace()
if ns[1] is None: continue
if ns[1] in namespaces: continue
if Namespace.xs(ns) or Namespace.xsd(ns):
continue
namespaces.append(ns[1])
if t == r: continue
ns = t.namespace()
if ns[1] is None: continue
if ns[1] in namespaces: continue
namespaces.append(ns[1])
i = 0
namespaces.sort()
for u in namespaces:
p = self.nextprefix()
ns = (p, u)
self.prefixes.append(ns)
def paramtypes(self):
""" get all parameter types """
for m in [p[1] for p in self.ports]:
for p in [p[1] for p in m]:
for pd in p:
if pd[1] in self.params: continue
item = (pd[1], pd[1].resolve())
self.params.append(item)
def publictypes(self):
""" get all public types """
for t in self.wsdl.schema.types.values():
if t in self.params: continue
if t in self.types: continue
item = (t, t)
self.types.append(item)
tc = lambda x,y: cmp(x[0].name, y[0].name)
self.types.sort(cmp=tc)
def nextprefix(self):
"""
Get the next available prefix. This means a prefix starting with 'ns' with
a number appended as (ns0, ns1, ..) that is not already defined on the
wsdl document.
"""
used = [ns[0] for ns in self.prefixes]
used += [ns[0] for ns in self.wsdl.root.nsprefixes.items()]
for n in range(0,1024):
p = 'ns%d'%n
if p not in used:
return p
raise Exception('prefixes exhausted')
def getprefix(self, u):
"""
Get the prefix for the specified namespace (uri)
@param u: A namespace uri.
@type u: str
        @return: The namespace.
@rtype: (prefix, uri).
"""
for ns in Namespace.all:
if u == ns[1]: return ns[0]
for ns in self.prefixes:
if u == ns[1]: return ns[0]
raise Exception('ns (%s) not mapped' % u)
def xlate(self, type):
"""
Get a (namespace) translated I{qualified} name for specified type.
@param type: A schema type.
@type type: I{suds.xsd.sxbasic.SchemaObject}
@return: A translated I{qualified} name.
@rtype: str
"""
resolved = type.resolve()
name = resolved.name
if type.unbounded():
name += '[]'
ns = resolved.namespace()
if ns[1] == self.wsdl.tns[1]:
return name
prefix = self.getprefix(ns[1])
return ':'.join((prefix, name))
def description(self):
"""
        Get a textual description of the service that this object represents.
@return: A textual description.
@rtype: str
"""
s = []
indent = (lambda n : '\n%*s'%(n*3,' '))
s.append('Service ( %s ) tns="%s"' % (self.service.name, self.wsdl.tns[1]))
s.append(indent(1))
s.append('Prefixes (%d)' % len(self.prefixes))
for p in self.prefixes:
s.append(indent(2))
s.append('%s = "%s"' % p)
s.append(indent(1))
s.append('Ports (%d):' % len(self.ports))
for p in self.ports:
s.append(indent(2))
s.append('(%s)' % p[0].name)
s.append(indent(3))
s.append('Methods (%d):' % len(p[1]))
for m in p[1]:
sig = []
s.append(indent(4))
sig.append(m[0])
sig.append('(')
for p in m[1]:
sig.append(self.xlate(p[1]))
sig.append(' ')
sig.append(p[0])
sig.append(', ')
sig.append(')')
try:
s.append(''.join(sig))
except:
pass
s.append(indent(3))
s.append('Types (%d):' % len(self.types))
for t in self.types:
s.append(indent(4))
s.append(self.xlate(t[0]))
s.append('\n\n')
return ''.join(s)
def __str__(self):
return unicode(self).encode('utf-8')
def __unicode__(self):
try:
return self.description()
except Exception, e:
log.exception(e)
return tostr(e)
|
tmimori/frappe
|
refs/heads/develop
|
frappe/patches/v4_1/enable_outgoing_email_settings.py
|
73
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("core", "doctype", "outgoing_email_settings")
if (frappe.db.get_value("Outgoing Email Settings", "Outgoing Email Settings", "mail_server") or "").strip():
frappe.db.set_value("Outgoing Email Settings", "Outgoing Email Settings", "enabled", 1)
|
upliftaero/MissionPlanner
|
refs/heads/master
|
Lib/encodings/utf_32.py
|
86
|
"""
Python 'utf-32' Codec
"""
import codecs, sys
### Codec APIs
encode = codecs.utf_32_encode
def decode(input, errors='strict'):
return codecs.utf_32_decode(input, errors, True)
class IncrementalEncoder(codecs.IncrementalEncoder):
def __init__(self, errors='strict'):
codecs.IncrementalEncoder.__init__(self, errors)
self.encoder = None
def encode(self, input, final=False):
if self.encoder is None:
result = codecs.utf_32_encode(input, self.errors)[0]
if sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
return result
return self.encoder(input, self.errors)[0]
def reset(self):
codecs.IncrementalEncoder.reset(self)
self.encoder = None
def getstate(self):
# state info we return to the caller:
# 0: stream is in natural order for this platform
# 2: endianness hasn't been determined yet
# (we're never writing in unnatural order)
return (2 if self.encoder is None else 0)
def setstate(self, state):
if state:
self.encoder = None
else:
if sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
def __init__(self, errors='strict'):
codecs.BufferedIncrementalDecoder.__init__(self, errors)
self.decoder = None
def _buffer_decode(self, input, errors, final):
if self.decoder is None:
(output, consumed, byteorder) = \
codecs.utf_32_ex_decode(input, errors, 0, final)
if byteorder == -1:
self.decoder = codecs.utf_32_le_decode
elif byteorder == 1:
self.decoder = codecs.utf_32_be_decode
elif consumed >= 4:
raise UnicodeError("UTF-32 stream does not start with BOM")
return (output, consumed)
return self.decoder(input, self.errors, final)
def reset(self):
codecs.BufferedIncrementalDecoder.reset(self)
self.decoder = None
def getstate(self):
        # additional state info from the base class must be None here,
# as it isn't passed along to the caller
state = codecs.BufferedIncrementalDecoder.getstate(self)[0]
# additional state info we pass to the caller:
# 0: stream is in natural order for this platform
# 1: stream is in unnatural order
# 2: endianness hasn't been determined yet
if self.decoder is None:
return (state, 2)
addstate = int((sys.byteorder == "big") !=
(self.decoder is codecs.utf_32_be_decode))
return (state, addstate)
def setstate(self, state):
# state[1] will be ignored by BufferedIncrementalDecoder.setstate()
codecs.BufferedIncrementalDecoder.setstate(self, state)
state = state[1]
if state == 0:
self.decoder = (codecs.utf_32_be_decode
if sys.byteorder == "big"
else codecs.utf_32_le_decode)
elif state == 1:
self.decoder = (codecs.utf_32_le_decode
if sys.byteorder == "big"
else codecs.utf_32_be_decode)
else:
self.decoder = None
class StreamWriter(codecs.StreamWriter):
def __init__(self, stream, errors='strict'):
self.encoder = None
codecs.StreamWriter.__init__(self, stream, errors)
def reset(self):
codecs.StreamWriter.reset(self)
self.encoder = None
def encode(self, input, errors='strict'):
if self.encoder is None:
result = codecs.utf_32_encode(input, errors)
if sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
return result
else:
return self.encoder(input, errors)
class StreamReader(codecs.StreamReader):
def reset(self):
codecs.StreamReader.reset(self)
try:
del self.decode
except AttributeError:
pass
def decode(self, input, errors='strict'):
(object, consumed, byteorder) = \
codecs.utf_32_ex_decode(input, errors, 0, False)
if byteorder == -1:
self.decode = codecs.utf_32_le_decode
elif byteorder == 1:
self.decode = codecs.utf_32_be_decode
elif consumed>=4:
raise UnicodeError,"UTF-32 stream does not start with BOM"
return (object, consumed)
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='utf-32',
encode=encode,
decode=decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
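# Illustrative usage (not part of the original module): the codecs machinery
# resolves this entry automatically, e.g.
#   u"text".encode("utf-32")
#   codecs.getincrementaldecoder("utf-32")()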
|
Glasgow2015/team-10
|
refs/heads/master
|
env/lib/python2.7/site-packages/django/forms/extras/__init__.py
|
197
|
from django.forms.extras.widgets import SelectDateWidget
__all__ = ['SelectDateWidget']
|
HoracioAlvarado/fwd
|
refs/heads/master
|
venv/Lib/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py
|
20
|
# postgresql/psycopg2.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: postgresql+psycopg2
:name: psycopg2
:dbapi: psycopg2
:connectstring: postgresql+psycopg2://user:password@host:port/dbname\
[?key=value&key=value...]
:url: http://pypi.python.org/pypi/psycopg2/
psycopg2 Connect Arguments
-----------------------------------
psycopg2-specific keyword arguments which are accepted by
:func:`.create_engine()` are:
* ``server_side_cursors``: Enable the usage of "server side cursors" for SQL
statements which support this feature. What this essentially means from a
psycopg2 point of view is that the cursor is created using a name, e.g.
``connection.cursor('some name')``, which has the effect that result rows
are not immediately pre-fetched and buffered after statement execution, but
are instead left on the server and only retrieved as needed. SQLAlchemy's
:class:`~sqlalchemy.engine.ResultProxy` uses special row-buffering
behavior when this feature is enabled, such that groups of 100 rows at a
time are fetched over the wire to reduce conversational overhead.
Note that the ``stream_results=True`` execution option is a more targeted
way of enabling this mode on a per-execution basis.
* ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode
per connection. True by default.
.. seealso::
:ref:`psycopg2_disable_native_unicode`
* ``isolation_level``: This option, available for all PostgreSQL dialects,
includes the ``AUTOCOMMIT`` isolation level when using the psycopg2
dialect.
.. seealso::
:ref:`psycopg2_isolation_level`
* ``client_encoding``: sets the client encoding in a libpq-agnostic way,
using psycopg2's ``set_client_encoding()`` method.
.. seealso::
:ref:`psycopg2_unicode`
Unix Domain Connections
------------------------
psycopg2 supports connecting via Unix domain connections. When the ``host``
portion of the URL is omitted, SQLAlchemy passes ``None`` to psycopg2,
which specifies Unix-domain communication rather than TCP/IP communication::
create_engine("postgresql+psycopg2://user:password@/dbname")
By default, the socket file used is the one for a Unix-domain socket
in ``/tmp``, or whatever socket directory was specified when PostgreSQL
was built. This value can be overridden by passing a pathname to psycopg2,
using ``host`` as an additional keyword argument::
create_engine("postgresql+psycopg2://user:password@/dbname?\
host=/var/lib/postgresql")
See also:
`PQconnectdbParams <http://www.postgresql.org/docs/9.1/static/\
libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_
.. _psycopg2_execution_options:
Per-Statement/Connection Execution Options
-------------------------------------------
The following DBAPI-specific options are respected when used with
:meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`,
:meth:`.Query.execution_options`, in addition to those not specific to DBAPIs:
* ``isolation_level`` - Set the transaction isolation level for the lifespan of a
:class:`.Connection` (can only be set on a connection, not a statement
or query). See :ref:`psycopg2_isolation_level`.
* ``stream_results`` - Enable or disable usage of psycopg2 server side cursors -
this feature makes use of "named" cursors in combination with special
result handling methods so that result rows are not fully buffered.
If ``None`` or not set, the ``server_side_cursors`` option of the
:class:`.Engine` is used.
* ``max_row_buffer`` - when using ``stream_results``, an integer value that
specifies the maximum number of rows to buffer at a time. This is
interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the
buffer will grow to ultimately store 1000 rows at a time.
.. versionadded:: 1.0.6
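For example, a sketch combining ``stream_results`` and ``max_row_buffer``
(the table name is illustrative)::

    with engine.connect() as conn:
        result = conn.execution_options(
            stream_results=True, max_row_buffer=100
        ).execute("SELECT * FROM big_table")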
.. _psycopg2_unicode:
Unicode with Psycopg2
----------------------
By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
extension, such that the DBAPI receives and returns all strings as Python
Unicode objects directly - SQLAlchemy passes these values through without
change. Psycopg2 here will encode/decode string values based on the
current "client encoding" setting; by default this is the value in
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
Typically, this can be changed to ``utf8``, as a more useful default::
# postgresql.conf file
# client_encoding = sql_ascii # actually, defaults to database
# encoding
client_encoding = utf8
A second way to affect the client encoding is to set it within Psycopg2
locally. SQLAlchemy will call psycopg2's
:meth:`psycopg2:connection.set_client_encoding` method
on all new connections based on the value passed to
:func:`.create_engine` using the ``client_encoding`` parameter::
# set_client_encoding() setting;
# works for *all* Postgresql versions
engine = create_engine("postgresql://user:pass@host/dbname",
client_encoding='utf8')
This overrides the encoding specified in the Postgresql client configuration.
When using the parameter in this way, the psycopg2 driver emits
``SET client_encoding TO 'utf8'`` on the connection explicitly, and works
in all Postgresql versions.
Note that the ``client_encoding`` setting as passed to :func:`.create_engine`
is **not the same** as the more recently added ``client_encoding`` parameter
now supported by libpq directly. This is enabled when ``client_encoding``
is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed
using the :paramref:`.create_engine.connect_args` parameter::
# libpq direct parameter setting;
# only works for Postgresql **9.1 and above**
engine = create_engine("postgresql://user:pass@host/dbname",
connect_args={'client_encoding': 'utf8'})
# using the query string is equivalent
engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8")
The above parameter was only added to libpq as of version 9.1 of Postgresql,
so using the previous method is better for cross-version support.
.. _psycopg2_disable_native_unicode:
Disabling Native Unicode
^^^^^^^^^^^^^^^^^^^^^^^^
SQLAlchemy can also be instructed to skip the usage of the psycopg2
``UNICODE`` extension and to instead utilize its own unicode encode/decode
services, which are normally reserved only for those DBAPIs that don't
fully support unicode directly. Passing ``use_native_unicode=False`` to
:func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``.
SQLAlchemy will instead encode data itself into Python bytestrings on the way
in and coerce from bytes on the way back,
using the value of the :func:`.create_engine` ``encoding`` parameter, which
defaults to ``utf-8``.
SQLAlchemy's own unicode encode/decode functionality is steadily becoming
obsolete as most DBAPIs now support unicode fully.
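A minimal sketch (the URL is illustrative)::

    engine = create_engine(
        "postgresql+psycopg2://user:pass@host/dbname",
        use_native_unicode=False)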
Bound Parameter Styles
----------------------
The default parameter style for the psycopg2 dialect is "pyformat", where
SQL is rendered using ``%(paramname)s`` style. This format has the limitation
that it does not accommodate the unusual case of parameter names that
actually contain percent or parenthesis symbols; as SQLAlchemy in many cases
generates bound parameter names based on the name of a column, the presence
of these characters in a column name can lead to problems.
There are two solutions to the issue of a :class:`.schema.Column` that contains
one of these characters in its name. One is to specify the
:paramref:`.schema.Column.key` for columns that have such names::
measurement = Table('measurement', metadata,
Column('Size (meters)', Integer, key='size_meters')
)
Above, an INSERT statement such as ``measurement.insert()`` will use
``size_meters`` as the parameter name, and a SQL expression such as
``measurement.c.size_meters > 10`` will derive the bound parameter name
from the ``size_meters`` key as well.
.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key`
as the source of naming when anonymous bound parameters are created
in SQL expressions; previously, this behavior only applied to
:meth:`.Table.insert` and :meth:`.Table.update` parameter names.
The other solution is to use a positional format; psycopg2 allows use of the
"format" paramstyle, which can be passed to
:paramref:`.create_engine.paramstyle`::
engine = create_engine(
'postgresql://scott:tiger@localhost:5432/test', paramstyle='format')
With the above engine, instead of a statement like::
INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s)
{'Size (meters)': 1}
we instead see::
INSERT INTO measurement ("Size (meters)") VALUES (%s)
(1, )
Where above, the dictionary style is converted into a tuple with positional
style.
Transactions
------------
The psycopg2 dialect fully supports SAVEPOINT and two-phase commit operations.
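For example, a SAVEPOINT sketch (``some_table`` is illustrative, not part of
the original documentation)::

    with engine.begin() as conn:
        savepoint = conn.begin_nested()   # emits SAVEPOINT
        try:
            conn.execute(some_table.insert(), {"data": "value"})
            savepoint.commit()            # emits RELEASE SAVEPOINT
        except Exception:
            savepoint.rollback()          # emits ROLLBACK TO SAVEPOINT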
.. _psycopg2_isolation_level:
Psycopg2 Transaction Isolation Level
-------------------------------------
As discussed in :ref:`postgresql_isolation_level`,
all Postgresql dialects support setting of transaction isolation level
both via the ``isolation_level`` parameter passed to :func:`.create_engine`,
as well as the ``isolation_level`` argument used by
:meth:`.Connection.execution_options`. When using the psycopg2 dialect, these
options make use of psycopg2's ``set_isolation_level()`` connection method,
rather than emitting a Postgresql directive; this is because psycopg2's
API-level setting is always emitted at the start of each transaction in any
case.
The psycopg2 dialect supports these constants for isolation level:
* ``READ COMMITTED``
* ``READ UNCOMMITTED``
* ``REPEATABLE READ``
* ``SERIALIZABLE``
* ``AUTOCOMMIT``
.. versionadded:: 0.8.2 support for AUTOCOMMIT isolation level when using
psycopg2.
.. seealso::
:ref:`postgresql_isolation_level`
:ref:`pg8000_isolation_level`
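For example (a sketch; the URL is illustrative)::

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        isolation_level="AUTOCOMMIT")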
NOTICE logging
---------------
The psycopg2 dialect will log Postgresql NOTICE messages via the
``sqlalchemy.dialects.postgresql`` logger::
import logging
logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)
.. _psycopg2_hstore:
HSTORE type
------------
The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of
the HSTORE type. The SQLAlchemy psycopg2 dialect will enable this extension
by default when psycopg2 version 2.4 or greater is used, and
it is detected that the target database has the HSTORE type set up for use.
In other words, when the dialect makes the first
connection, a sequence like the following is performed:
1. Request the available HSTORE oids using
``psycopg2.extras.HstoreAdapter.get_oids()``.
If this function returns a list of HSTORE identifiers, we then determine
that the ``HSTORE`` extension is present.
This function is **skipped** if the version of psycopg2 installed is
less than version 2.4.
2. If the ``use_native_hstore`` flag is at its default of ``True``, and
we've detected that ``HSTORE`` oids are available, the
``psycopg2.extensions.register_hstore()`` extension is invoked for all
connections.
The ``register_hstore()`` extension has the effect of **all Python
dictionaries being accepted as parameters regardless of the type of target
column in SQL**. The dictionaries are converted by this extension into a
textual HSTORE expression. If this behavior is not desired, disable the
use of the hstore extension by setting ``use_native_hstore`` to ``False`` as
follows::
engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test",
use_native_hstore=False)
The ``HSTORE`` type is **still supported** when the
``psycopg2.extensions.register_hstore()`` extension is not used. It merely
means that the coercion between Python dictionaries and the HSTORE
string format, on both the parameter side and the result side, will take
place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2``
which may be more performant.
"""
from __future__ import absolute_import
import re
import logging
from ... import util, exc
import decimal
from ... import processors
from ...engine import result as _result
from ...sql import expression
from ... import types as sqltypes
from .base import PGDialect, PGCompiler, \
PGIdentifierPreparer, PGExecutionContext, \
ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
_INT_TYPES, UUID
from .hstore import HSTORE
from .json import JSON, JSONB
try:
from uuid import UUID as _python_UUID
except ImportError:
_python_UUID = None
logger = logging.getLogger('sqlalchemy.dialects.postgresql')
class _PGNumeric(sqltypes.Numeric):
def bind_processor(self, dialect):
return None
def result_processor(self, dialect, coltype):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
return processors.to_decimal_processor_factory(
decimal.Decimal,
self._effective_decimal_return_scale)
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
else:
raise exc.InvalidRequestError(
"Unknown PG numeric type: %d" % coltype)
else:
if coltype in _FLOAT_TYPES:
# pg8000 returns float natively for 701
return None
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
return processors.to_float
else:
raise exc.InvalidRequestError(
"Unknown PG numeric type: %d" % coltype)
class _PGEnum(ENUM):
def result_processor(self, dialect, coltype):
if self.native_enum and util.py2k and self.convert_unicode is True:
# we can't easily use PG's extensions here because
# the OID is on the fly, and we need to give it a python
# function anyway - not really worth it.
self.convert_unicode = "force_nocheck"
return super(_PGEnum, self).result_processor(dialect, coltype)
class _PGHStore(HSTORE):
def bind_processor(self, dialect):
if dialect._has_native_hstore:
return None
else:
return super(_PGHStore, self).bind_processor(dialect)
def result_processor(self, dialect, coltype):
if dialect._has_native_hstore:
return None
else:
return super(_PGHStore, self).result_processor(dialect, coltype)
class _PGJSON(JSON):
def result_processor(self, dialect, coltype):
if dialect._has_native_json:
return None
else:
return super(_PGJSON, self).result_processor(dialect, coltype)
class _PGJSONB(JSONB):
def result_processor(self, dialect, coltype):
if dialect._has_native_jsonb:
return None
else:
return super(_PGJSONB, self).result_processor(dialect, coltype)
class _PGUUID(UUID):
def bind_processor(self, dialect):
if not self.as_uuid and dialect.use_native_uuid:
nonetype = type(None)
def process(value):
if value is not None:
value = _python_UUID(value)
return value
return process
def result_processor(self, dialect, coltype):
if not self.as_uuid and dialect.use_native_uuid:
def process(value):
if value is not None:
value = str(value)
return value
return process
# When we're handed literal SQL, ensure it's a SELECT query. Since
# 8.3, combining cursors and "FOR UPDATE" has been fine.
SERVER_SIDE_CURSOR_RE = re.compile(
r'\s*SELECT',
re.I | re.UNICODE)
_server_side_id = util.counter()
class PGExecutionContext_psycopg2(PGExecutionContext):
def create_cursor(self):
# TODO: coverage for server side cursors + select.for_update()
if self.dialect.server_side_cursors:
is_server_side = \
self.execution_options.get('stream_results', True) and (
(self.compiled and isinstance(self.compiled.statement,
expression.Selectable)
or
(
(not self.compiled or
isinstance(self.compiled.statement,
expression.TextClause))
and self.statement and SERVER_SIDE_CURSOR_RE.match(
self.statement))
)
)
else:
is_server_side = \
self.execution_options.get('stream_results', False)
self.__is_server_side = is_server_side
if is_server_side:
# use server-side cursors:
# http://lists.initd.org/pipermail/psycopg/2007-January/005251.html
ident = "c_%s_%s" % (hex(id(self))[2:],
hex(_server_side_id())[2:])
return self._dbapi_connection.cursor(ident)
else:
return self._dbapi_connection.cursor()
def get_result_proxy(self):
# TODO: ouch
if logger.isEnabledFor(logging.INFO):
self._log_notices(self.cursor)
if self.__is_server_side:
return _result.BufferedRowResultProxy(self)
else:
return _result.ResultProxy(self)
def _log_notices(self, cursor):
for notice in cursor.connection.notices:
# NOTICE messages have a
# newline character at the end
logger.info(notice.rstrip())
cursor.connection.notices[:] = []
class PGCompiler_psycopg2(PGCompiler):
def visit_mod_binary(self, binary, operator, **kw):
return self.process(binary.left, **kw) + " %% " + \
self.process(binary.right, **kw)
def post_process_text(self, text):
return text.replace('%', '%%')
class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer):
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
return value.replace('%', '%%')
class PGDialect_psycopg2(PGDialect):
driver = 'psycopg2'
if util.py2k:
supports_unicode_statements = False
default_paramstyle = 'pyformat'
# set to true based on psycopg2 version
supports_sane_multi_rowcount = False
execution_ctx_cls = PGExecutionContext_psycopg2
statement_compiler = PGCompiler_psycopg2
preparer = PGIdentifierPreparer_psycopg2
psycopg2_version = (0, 0)
FEATURE_VERSION_MAP = dict(
native_json=(2, 5),
native_jsonb=(2, 5, 4),
sane_multi_rowcount=(2, 0, 9),
array_oid=(2, 4, 3),
hstore_adapter=(2, 4)
)
_has_native_hstore = False
_has_native_json = False
_has_native_jsonb = False
engine_config_types = PGDialect.engine_config_types.union([
('use_native_unicode', util.asbool),
])
colspecs = util.update_copy(
PGDialect.colspecs,
{
sqltypes.Numeric: _PGNumeric,
ENUM: _PGEnum, # needs force_unicode
sqltypes.Enum: _PGEnum, # needs force_unicode
HSTORE: _PGHStore,
JSON: _PGJSON,
JSONB: _PGJSONB,
UUID: _PGUUID
}
)
def __init__(self, server_side_cursors=False, use_native_unicode=True,
client_encoding=None,
use_native_hstore=True, use_native_uuid=True,
**kwargs):
PGDialect.__init__(self, **kwargs)
self.server_side_cursors = server_side_cursors
self.use_native_unicode = use_native_unicode
self.use_native_hstore = use_native_hstore
self.use_native_uuid = use_native_uuid
self.supports_unicode_binds = use_native_unicode
self.client_encoding = client_encoding
if self.dbapi and hasattr(self.dbapi, '__version__'):
m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
self.dbapi.__version__)
if m:
self.psycopg2_version = tuple(
int(x)
for x in m.group(1, 2, 3)
if x is not None)
def initialize(self, connection):
super(PGDialect_psycopg2, self).initialize(connection)
self._has_native_hstore = self.use_native_hstore and \
self._hstore_oids(connection.connection) \
is not None
self._has_native_json = \
self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_json']
self._has_native_jsonb = \
self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_jsonb']
# http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9
self.supports_sane_multi_rowcount = \
self.psycopg2_version >= \
self.FEATURE_VERSION_MAP['sane_multi_rowcount']
@classmethod
def dbapi(cls):
import psycopg2
return psycopg2
@classmethod
def _psycopg2_extensions(cls):
from psycopg2 import extensions
return extensions
@classmethod
def _psycopg2_extras(cls):
from psycopg2 import extras
return extras
@util.memoized_property
def _isolation_lookup(self):
extensions = self._psycopg2_extensions()
return {
'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT,
'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED,
'READ UNCOMMITTED': extensions.ISOLATION_LEVEL_READ_UNCOMMITTED,
'REPEATABLE READ': extensions.ISOLATION_LEVEL_REPEATABLE_READ,
'SERIALIZABLE': extensions.ISOLATION_LEVEL_SERIALIZABLE
}
def set_isolation_level(self, connection, level):
try:
level = self._isolation_lookup[level.replace('_', ' ')]
except KeyError:
raise exc.ArgumentError(
"Invalid value '%s' for isolation_level. "
"Valid isolation levels for %s are %s" %
(level, self.name, ", ".join(self._isolation_lookup))
)
connection.set_isolation_level(level)
def on_connect(self):
extras = self._psycopg2_extras()
extensions = self._psycopg2_extensions()
fns = []
if self.client_encoding is not None:
def on_connect(conn):
conn.set_client_encoding(self.client_encoding)
fns.append(on_connect)
if self.isolation_level is not None:
def on_connect(conn):
self.set_isolation_level(conn, self.isolation_level)
fns.append(on_connect)
if self.dbapi and self.use_native_uuid:
def on_connect(conn):
extras.register_uuid(None, conn)
fns.append(on_connect)
if self.dbapi and self.use_native_unicode:
def on_connect(conn):
extensions.register_type(extensions.UNICODE, conn)
extensions.register_type(extensions.UNICODEARRAY, conn)
fns.append(on_connect)
if self.dbapi and self.use_native_hstore:
def on_connect(conn):
hstore_oids = self._hstore_oids(conn)
if hstore_oids is not None:
oid, array_oid = hstore_oids
kw = {'oid': oid}
if util.py2k:
kw['unicode'] = True
if self.psycopg2_version >= \
self.FEATURE_VERSION_MAP['array_oid']:
kw['array_oid'] = array_oid
extras.register_hstore(conn, **kw)
fns.append(on_connect)
if self.dbapi and self._json_deserializer:
def on_connect(conn):
if self._has_native_json:
extras.register_default_json(
conn, loads=self._json_deserializer)
if self._has_native_jsonb:
extras.register_default_jsonb(
conn, loads=self._json_deserializer)
fns.append(on_connect)
if fns:
def on_connect(conn):
for fn in fns:
fn(conn)
return on_connect
else:
return None
@util.memoized_instancemethod
def _hstore_oids(self, conn):
if self.psycopg2_version >= self.FEATURE_VERSION_MAP['hstore_adapter']:
extras = self._psycopg2_extras()
oids = extras.HstoreAdapter.get_oids(conn)
if oids is not None and oids[0]:
return oids[0:2]
return None
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
if 'port' in opts:
opts['port'] = int(opts['port'])
opts.update(url.query)
return ([], opts)
def is_disconnect(self, e, connection, cursor):
if isinstance(e, self.dbapi.Error):
# check the "closed" flag. this might not be
# present on old psycopg2 versions. Also,
# this flag doesn't actually help in a lot of disconnect
# situations, so don't rely on it.
if getattr(connection, 'closed', False):
return True
# checks based on strings. in the case that .closed
# didn't cut it, fall back onto these.
str_e = str(e).partition("\n")[0]
for msg in [
# these error messages from libpq: interfaces/libpq/fe-misc.c
# and interfaces/libpq/fe-secure.c.
'terminating connection',
'closed the connection',
'connection not open',
'could not receive data from server',
'could not send data to server',
                # psycopg2 client errors, psycopg2/connection.h,
# psycopg2/cursor.h
'connection already closed',
'cursor already closed',
# not sure where this path is originally from, it may
# be obsolete. It really says "losed", not "closed".
'losed the connection unexpectedly',
# these can occur in newer SSL
'connection has been closed unexpectedly',
'SSL SYSCALL error: Bad file descriptor',
'SSL SYSCALL error: EOF detected',
]:
idx = str_e.find(msg)
if idx >= 0 and '"' not in str_e[:idx]:
return True
return False
dialect = PGDialect_psycopg2
|
yoer/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/tests/admin_docs/urls.py
|
53
|
# coding: utf-8
from __future__ import absolute_import
from django.conf.urls import patterns
from . import views
urlpatterns = patterns('',
(r'^xview/func/$', views.xview_dec(views.xview)),
(r'^xview/class/$', views.xview_dec(views.XViewClass.as_view())),
)
|
marios-zindilis/musicbrainz-django-models
|
refs/heads/master
|
musicbrainz_django_models/tests/test_release_group_annotation.py
|
10
|
# Tests for this model are in musicbrainz_django_server/tests/test_model_annotation.py
|
thedrow/django
|
refs/heads/master
|
django/contrib/gis/db/backends/postgis/pgraster.py
|
491
|
import binascii
import struct
from django.forms import ValidationError
from .const import (
GDAL_TO_POSTGIS, GDAL_TO_STRUCT, POSTGIS_HEADER_STRUCTURE, POSTGIS_TO_GDAL,
STRUCT_SIZE,
)
def pack(structure, data):
"""
Pack data into hex string with little endian format.
"""
return binascii.hexlify(struct.pack('<' + structure, *data)).upper()
def unpack(structure, data):
"""
Unpack little endian hexlified binary string into a list.
"""
return struct.unpack('<' + structure, binascii.unhexlify(data))
def chunk(data, index):
"""
Split a string into two parts at the input index.
"""
return data[:index], data[index:]
def get_pgraster_srid(data):
"""
Extract the SRID from a PostGIS raster string.
"""
if data is None:
return
# The positional arguments here extract the hex-encoded srid from the
# header of the PostGIS raster string. This can be understood through
# the POSTGIS_HEADER_STRUCTURE constant definition in the const module.
return unpack('i', data[106:114])[0]
def from_pgraster(data):
"""
Convert a PostGIS HEX String into a dictionary.
"""
if data is None:
return
# Split raster header from data
header, data = chunk(data, 122)
header = unpack(POSTGIS_HEADER_STRUCTURE, header)
# Parse band data
bands = []
pixeltypes = []
while data:
# Get pixel type for this band
pixeltype, data = chunk(data, 2)
pixeltype = unpack('B', pixeltype)[0]
# Subtract nodata byte from band nodata value if it exists
has_nodata = pixeltype >= 64
if has_nodata:
pixeltype -= 64
# Convert datatype from PostGIS to GDAL & get pack type and size
pixeltype = POSTGIS_TO_GDAL[pixeltype]
pack_type = GDAL_TO_STRUCT[pixeltype]
pack_size = 2 * STRUCT_SIZE[pack_type]
# Parse band nodata value. The nodata value is part of the
# PGRaster string even if the nodata flag is True, so it always
# has to be chunked off the data string.
nodata, data = chunk(data, pack_size)
nodata = unpack(pack_type, nodata)[0]
# Chunk and unpack band data (pack size times nr of pixels)
band, data = chunk(data, pack_size * header[10] * header[11])
band_result = {'data': binascii.unhexlify(band)}
# If the nodata flag is True, set the nodata value.
if has_nodata:
band_result['nodata_value'] = nodata
# Append band data to band list
bands.append(band_result)
# Store pixeltype of this band in pixeltypes array
pixeltypes.append(pixeltype)
# Check that all bands have the same pixeltype.
# This is required by GDAL. PostGIS rasters could have different pixeltypes
# for bands of the same raster.
if len(set(pixeltypes)) != 1:
raise ValidationError("Band pixeltypes are not all equal.")
return {
'srid': int(header[9]),
'width': header[10], 'height': header[11],
'datatype': pixeltypes[0],
'origin': (header[5], header[6]),
'scale': (header[3], header[4]),
'skew': (header[7], header[8]),
'bands': bands,
}
def to_pgraster(rast):
"""
Convert a GDALRaster into PostGIS Raster format.
"""
# Return if the raster is null
if rast is None or rast == '':
return
# Prepare the raster header data as a tuple. The first two numbers are
# the endianness and the PostGIS Raster Version, both are fixed by
# PostGIS at the moment.
rasterheader = (
1, 0, len(rast.bands), rast.scale.x, rast.scale.y,
rast.origin.x, rast.origin.y, rast.skew.x, rast.skew.y,
rast.srs.srid, rast.width, rast.height,
)
# Hexlify raster header
result = pack(POSTGIS_HEADER_STRUCTURE, rasterheader)
for band in rast.bands:
# The PostGIS raster band header has exactly two elements, a 8BUI byte
# and the nodata value.
#
# The 8BUI stores both the PostGIS pixel data type and a nodata flag.
# It is composed as the datatype integer plus 64 as a flag for existing
# nodata values:
# 8BUI_VALUE = PG_PIXEL_TYPE (0-11) + FLAG (0 or 64)
#
# For example, if the byte value is 71, then the datatype is
# 71-64 = 7 (32BSI) and the nodata value is True.
structure = 'B' + GDAL_TO_STRUCT[band.datatype()]
# Get band pixel type in PostGIS notation
pixeltype = GDAL_TO_POSTGIS[band.datatype()]
# Set the nodata flag
if band.nodata_value is not None:
pixeltype += 64
# Pack band header
bandheader = pack(structure, (pixeltype, band.nodata_value or 0))
# Hexlify band data
band_data_hex = binascii.hexlify(band.data(as_memoryview=True)).upper()
# Add packed header and band data to result
result += bandheader + band_data_hex
# Cast raster to string before passing it to the DB
return result.decode()
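# Illustrative round trip (assumes a GDALRaster instance `rast`; not part
# of the original module):
#   hexstr = to_pgraster(rast)
#   parsed = from_pgraster(hexstr)  # dict with 'srid', 'bands', 'scale', ...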
|
scottsievert/swix
|
refs/heads/master
|
python_testing/testing.py
|
6
|
from __future__ import division
from pylab import *
from sklearn import datasets
from sklearn import svm
#import cv2
def svmTest():
def sk_learn():
data = datasets.load_digits()
N_test = int(1050)
x_train = data['data'][:-N_test]
y_train = data['target'][:-N_test]
x_test = data['data'][-N_test:]
y_test = data['target'][-N_test:]
np.savetxt("csvs/x_train.csv", x_train, delimiter=",")#@asdjkk
np.savetxt("csvs/y_train.csv", y_train, delimiter=",", newline=",")
np.savetxt("csvs/x_test.csv", x_test, delimiter=",")
np.savetxt("csvs/y_test.csv", y_test, delimiter=",", newline=",")
ml = svm.LinearSVC()
ml = ml.fit(x_train, y_train)
yhat_test = ml.predict(x_test)
print argwhere(abs(yhat_test - y_test) < 0.5).shape[0] / y_test.shape[0]
data = datasets.load_digits()
N_test = int(1050)
x_train = data['data'][:-N_test]
y_train = data['target'][:-N_test]
x_test = data['data'][-N_test:]
y_test = data['target'][-N_test:]
x_train = asarray(x_train, dtype=float32)
y_train = asarray(y_train, dtype=float32)
x_test = asarray(x_test, dtype=float32)
    import cv2  # needed here; the top-level "import cv2" is commented out
    params = dict(kernel_type=cv2.SVM_SIGMOID, svm_type=cv2.SVM_C_SVC)
cv_svm = cv2.SVM()
cv_svm.train(x_train, y_train, params=params)
yhat = cv_svm.predict_all(x_test)
print "Percent correct:", argwhere(abs(yhat.flat[:] - y_test) < 0.5).shape[0] / yhat.shape[0]
def pinvTest():
M = 3
N = 4
x = arange(M*N).reshape(M,N)
y = pinv(x)
def kronTest():
A = array([1, 2, 3, 4, 5, 6]).reshape(2, 3)
B = array([3, 2, 5, 0, 1, 2]).reshape(3,2)
print kron(A, B)
def convolveTest():
    from scipy.signal import fftconvolve
    x = arange(10)
    k = ones(4) / 4
    y = fftconvolve(x, k, mode='same')
    print np.around(y, decimals=3)
|
adlius/osf.io
|
refs/heads/develop
|
website/identifiers/clients/datacite.py
|
6
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import re
import datetime
from website.identifiers.clients.base import AbstractIdentifierClient
from website import settings
from datacite import DataCiteMDSClient, schema40
logger = logging.getLogger(__name__)
class DataCiteClient(AbstractIdentifierClient):
def __init__(self, base_url, prefix, client=None):
self.base_url = base_url
self.prefix = prefix
self._client = client or DataCiteMDSClient(
url=self.base_url,
username=settings.DATACITE_USERNAME,
password=settings.DATACITE_PASSWORD,
prefix=self.prefix
)
def build_metadata(self, node):
"""Return the formatted datacite metadata XML as a string.
"""
data = {
'identifier': {
'identifier': self.build_doi(node),
'identifierType': 'DOI',
},
'creators': [
{'creatorName': user.fullname,
'givenName': user.given_name,
'familyName': user.family_name} for user in node.visible_contributors
],
'titles': [
{'title': node.title}
],
'publisher': 'Open Science Framework',
'publicationYear': str(datetime.datetime.now().year),
'resourceType': {
'resourceType': 'Project',
'resourceTypeGeneral': 'Text'
}
}
if node.description:
data['descriptions'] = [{
'descriptionType': 'Abstract',
'description': node.description
}]
if node.node_license:
data['rightsList'] = [{
'rights': node.node_license.name,
'rightsURI': node.node_license.url
}]
# Validate dictionary
assert schema40.validate(data)
# Generate DataCite XML from dictionary.
return schema40.tostring(data)
def build_doi(self, object):
return settings.DOI_FORMAT.format(prefix=self.prefix, guid=object._id)
def get_identifier(self, identifier):
        return self._client.doi_get(identifier)
def create_identifier(self, node, category):
if category == 'doi':
metadata = self.build_metadata(node)
resp = self._client.metadata_post(metadata)
# Typical response: 'OK (10.70102/FK2osf.io/cq695)' to doi 10.70102/FK2osf.io/cq695
doi = re.match(r'OK \((?P<doi>[a-zA-Z0-9 .\/]{0,})\)', resp).groupdict()['doi']
if settings.DATACITE_MINT_DOIS:
self._client.doi_post(doi, node.absolute_url)
return {'doi': doi}
else:
raise NotImplementedError('Creating an identifier with category {} is not supported'.format(category))
def update_identifier(self, node, category):
if not node.is_public or node.is_deleted:
if category == 'doi':
doi = self.build_doi(node)
self._client.metadata_delete(doi)
return {'doi': doi}
else:
raise NotImplementedError('Updating metadata not supported for {}'.format(category))
else:
return self.create_identifier(node, category)
|
dimara/synnefo
|
refs/heads/develop
|
snf-cyclades-app/synnefo/db/migrations/0093_auto__add_field_ipaddress_ipversion.py
|
10
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'IPAddress.ipversion'
db.add_column('db_ipaddress', 'ipversion',
self.gf('django.db.models.fields.IntegerField')(null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'IPAddress.ipversion'
db.delete_column('db_ipaddress', 'ipversion')
models = {
'db.backend': {
'Meta': {'ordering': "['clustername']", 'object_name': 'Backend'},
'clustername': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'ctotal': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'dfree': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'disk_templates': ('synnefo.db.fields.SeparatedValuesField', [], {'null': 'True'}),
'drained': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'dtotal': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'hypervisor': ('django.db.models.fields.CharField', [], {'default': "'kvm'", 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'unique': 'True'}),
'mfree': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'mtotal': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'offline': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password_hash': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'pinst_cnt': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'port': ('django.db.models.fields.PositiveIntegerField', [], {'default': '5080'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'db.backendnetwork': {
'Meta': {'unique_together': "(('network', 'backend'),)", 'object_name': 'BackendNetwork'},
'backend': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'networks'", 'on_delete': 'models.PROTECT', 'to': "orm['db.Backend']"}),
'backendjobid': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'backendjobstatus': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'}),
'backendlogmsg': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'backendopcode': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'}),
'backendtime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1, 1, 1, 0, 0)'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mac_prefix': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'network': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'backend_networks'", 'on_delete': 'models.PROTECT', 'to': "orm['db.Network']"}),
'operstate': ('django.db.models.fields.CharField', [], {'default': "'PENDING'", 'max_length': '30'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'db.bridgepooltable': {
'Meta': {'object_name': 'BridgePoolTable'},
'available_map': ('django.db.models.fields.TextField', [], {'default': "''"}),
'base': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'offset': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'reserved_map': ('django.db.models.fields.TextField', [], {'default': "''"}),
'size': ('django.db.models.fields.IntegerField', [], {})
},
'db.flavor': {
'Meta': {'unique_together': "(('cpu', 'ram', 'disk', 'disk_template'),)", 'object_name': 'Flavor'},
'cpu': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'disk': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'disk_template': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ram': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'db.ipaddress': {
'Meta': {'unique_together': "(('network', 'address', 'deleted'),)", 'object_name': 'IPAddress'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'floating_ip': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ipversion': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'network': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ips'", 'on_delete': 'models.PROTECT', 'to': "orm['db.Network']"}),
'nic': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ips'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['db.NetworkInterface']"}),
'serial': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ips'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['db.QuotaHolderSerial']"}),
'subnet': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ips'", 'on_delete': 'models.PROTECT', 'to': "orm['db.Subnet']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'userid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'})
},
'db.ipaddresslog': {
'Meta': {'object_name': 'IPAddressLog'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'address': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'allocated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'network_id': ('django.db.models.fields.IntegerField', [], {}),
'released_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'server_id': ('django.db.models.fields.IntegerField', [], {})
},
'db.ippooltable': {
'Meta': {'object_name': 'IPPoolTable'},
'available_map': ('django.db.models.fields.TextField', [], {'default': "''"}),
'base': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'offset': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'reserved_map': ('django.db.models.fields.TextField', [], {'default': "''"}),
'size': ('django.db.models.fields.IntegerField', [], {}),
'subnet': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ip_pools'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': "orm['db.Subnet']"})
},
'db.macprefixpooltable': {
'Meta': {'object_name': 'MacPrefixPoolTable'},
'available_map': ('django.db.models.fields.TextField', [], {'default': "''"}),
'base': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'offset': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'reserved_map': ('django.db.models.fields.TextField', [], {'default': "''"}),
'size': ('django.db.models.fields.IntegerField', [], {})
},
'db.network': {
'Meta': {'object_name': 'Network'},
'action': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '32', 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'drained': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'external_router': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'flavor': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'floating_ip_pool': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'mac_prefix': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'machines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['db.VirtualMachine']", 'through': "orm['db.NetworkInterface']", 'symmetrical': 'False'}),
'mode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'serial': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'network'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['db.QuotaHolderSerial']"}),
'state': ('django.db.models.fields.CharField', [], {'default': "'PENDING'", 'max_length': '32'}),
'tags': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'userid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'})
},
'db.networkinterface': {
'Meta': {'object_name': 'NetworkInterface'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'device_owner': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'firewall_profile': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'mac': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'machine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'nics'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': "orm['db.VirtualMachine']"}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'null': 'True'}),
'network': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'nics'", 'on_delete': 'models.PROTECT', 'to': "orm['db.Network']"}),
'security_groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['db.SecurityGroup']", 'null': 'True', 'symmetrical': 'False'}),
'state': ('django.db.models.fields.CharField', [], {'default': "'ACTIVE'", 'max_length': '32'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'userid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'})
},
'db.quotaholderserial': {
'Meta': {'ordering': "['serial']", 'object_name': 'QuotaHolderSerial'},
'accept': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'pending': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'resolved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'serial': ('django.db.models.fields.BigIntegerField', [], {'primary_key': 'True', 'db_index': 'True'})
},
'db.securitygroup': {
'Meta': {'object_name': 'SecurityGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'db.subnet': {
'Meta': {'object_name': 'Subnet'},
'cidr': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'dhcp': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'dns_nameservers': ('synnefo.db.fields.SeparatedValuesField', [], {'null': 'True'}),
'gateway': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'host_routes': ('synnefo.db.fields.SeparatedValuesField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ipversion': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'null': 'True'}),
'network': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subnets'", 'on_delete': 'models.PROTECT', 'to': "orm['db.Network']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'db.virtualmachine': {
'Meta': {'object_name': 'VirtualMachine'},
'action': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '30', 'null': 'True'}),
'backend': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'virtual_machines'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': "orm['db.Backend']"}),
'backend_hash': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'backendjobid': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'backendjobstatus': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'}),
'backendlogmsg': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'backendopcode': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'}),
'backendtime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1, 1, 1, 0, 0)'}),
'buildpercentage': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'flavor': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['db.Flavor']", 'on_delete': 'models.PROTECT'}),
'hostid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'imageid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'operstate': ('django.db.models.fields.CharField', [], {'default': "'BUILD'", 'max_length': '30'}),
'serial': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'virtual_machine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['db.QuotaHolderSerial']"}),
'suspended': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'task': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'task_job_id': ('django.db.models.fields.BigIntegerField', [], {'null': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'userid': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'})
},
'db.virtualmachinediagnostic': {
'Meta': {'ordering': "['-created']", 'object_name': 'VirtualMachineDiagnostic'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'machine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'diagnostics'", 'to': "orm['db.VirtualMachine']"}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'source_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'db.virtualmachinemetadata': {
'Meta': {'unique_together': "(('meta_key', 'vm'),)", 'object_name': 'VirtualMachineMetadata'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'meta_key': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'meta_value': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'vm': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'metadata'", 'to': "orm['db.VirtualMachine']"})
}
}
complete_apps = ['db']
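# A minimal usage note (not part of the generated migration): with South
# installed, this schema change would typically be applied and rolled back
# from the command line; the manage.py invocation below is an assumption.
#
#   python manage.py migrate db 0093   # forwards(): adds IPAddress.ipversion
#   python manage.py migrate db 0092   # backwards(): drops the column again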
|
ellisonbg/altair
|
refs/heads/master
|
altair/vega/data.py
|
2
|
import pandas as pd
from toolz.curried import curry, pipe
from ..utils.core import sanitize_dataframe
from ..utils.data import (
MaxRowsError, sample, to_csv, to_json, to_values, check_data_type
)
@curry
def limit_rows(data, max_rows=5000):
"""Raise MaxRowsError if the data model has more than max_rows."""
if not isinstance(data, (list, pd.DataFrame)):
raise TypeError('Expected dict or DataFrame, got: {}'.format(type(data)))
if len(data) > max_rows:
raise MaxRowsError('The number of rows in your dataset is greater than the max of {}'.format(max_rows))
return data
@curry
def default_data_transformer(data):
    """Default transformer: enforce the row limit, then inline the data as values."""
    return pipe(data, limit_rows, to_values)
__all__ = (
'MaxRowsError',
'curry',
'default_data_transformer',
'limit_rows',
'pipe',
'sanitize_dataframe',
'sample',
'to_csv',
'to_json',
'to_values',
'check_data_type'
)
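if __name__ == "__main__":
    # A minimal usage sketch, assuming a small in-memory DataFrame (the data
    # below is illustrative, not part of this module).
    df = pd.DataFrame({"x": [1, 2, 3], "y": [4.0, 5.0, 6.0]})
    print(default_data_transformer(df))  # {'values': [{'x': 1, 'y': 4.0}, ...]}
    try:
        limit_rows(df, max_rows=2)
    except MaxRowsError:
        print("limit_rows enforced the 2-row cap")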
|
favll/pogom
|
refs/heads/master
|
pogom/pgoapi/protos/POGOProtos/Networking/Requests/RequestType_pb2.py
|
6
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Networking/Requests/RequestType.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Networking/Requests/RequestType.proto',
package='POGOProtos.Networking.Requests',
syntax='proto3',
serialized_pb=_b('\n0POGOProtos/Networking/Requests/RequestType.proto\x12\x1ePOGOProtos.Networking.Requests*\x9d\r\n\x0bRequestType\x12\x10\n\x0cMETHOD_UNSET\x10\x00\x12\x11\n\rPLAYER_UPDATE\x10\x01\x12\x0e\n\nGET_PLAYER\x10\x02\x12\x11\n\rGET_INVENTORY\x10\x04\x12\x15\n\x11\x44OWNLOAD_SETTINGS\x10\x05\x12\x1b\n\x17\x44OWNLOAD_ITEM_TEMPLATES\x10\x06\x12\"\n\x1e\x44OWNLOAD_REMOTE_CONFIG_VERSION\x10\x07\x12\x0f\n\x0b\x46ORT_SEARCH\x10\x65\x12\r\n\tENCOUNTER\x10\x66\x12\x11\n\rCATCH_POKEMON\x10g\x12\x10\n\x0c\x46ORT_DETAILS\x10h\x12\x0c\n\x08ITEM_USE\x10i\x12\x13\n\x0fGET_MAP_OBJECTS\x10j\x12\x17\n\x13\x46ORT_DEPLOY_POKEMON\x10n\x12\x17\n\x13\x46ORT_RECALL_POKEMON\x10o\x12\x13\n\x0fRELEASE_POKEMON\x10p\x12\x13\n\x0fUSE_ITEM_POTION\x10q\x12\x14\n\x10USE_ITEM_CAPTURE\x10r\x12\x11\n\rUSE_ITEM_FLEE\x10s\x12\x13\n\x0fUSE_ITEM_REVIVE\x10t\x12\x10\n\x0cTRADE_SEARCH\x10u\x12\x0f\n\x0bTRADE_OFFER\x10v\x12\x12\n\x0eTRADE_RESPONSE\x10w\x12\x10\n\x0cTRADE_RESULT\x10x\x12\x16\n\x12GET_PLAYER_PROFILE\x10y\x12\x11\n\rGET_ITEM_PACK\x10z\x12\x11\n\rBUY_ITEM_PACK\x10{\x12\x10\n\x0c\x42UY_GEM_PACK\x10|\x12\x12\n\x0e\x45VOLVE_POKEMON\x10}\x12\x14\n\x10GET_HATCHED_EGGS\x10~\x12\x1f\n\x1b\x45NCOUNTER_TUTORIAL_COMPLETE\x10\x7f\x12\x15\n\x10LEVEL_UP_REWARDS\x10\x80\x01\x12\x19\n\x14\x43HECK_AWARDED_BADGES\x10\x81\x01\x12\x11\n\x0cUSE_ITEM_GYM\x10\x85\x01\x12\x14\n\x0fGET_GYM_DETAILS\x10\x86\x01\x12\x15\n\x10START_GYM_BATTLE\x10\x87\x01\x12\x0f\n\nATTACK_GYM\x10\x88\x01\x12\x1b\n\x16RECYCLE_INVENTORY_ITEM\x10\x89\x01\x12\x18\n\x13\x43OLLECT_DAILY_BONUS\x10\x8a\x01\x12\x16\n\x11USE_ITEM_XP_BOOST\x10\x8b\x01\x12\x1b\n\x16USE_ITEM_EGG_INCUBATOR\x10\x8c\x01\x12\x10\n\x0bUSE_INCENSE\x10\x8d\x01\x12\x18\n\x13GET_INCENSE_POKEMON\x10\x8e\x01\x12\x16\n\x11INCENSE_ENCOUNTER\x10\x8f\x01\x12\x16\n\x11\x41\x44\x44_FORT_MODIFIER\x10\x90\x01\x12\x13\n\x0e\x44ISK_ENCOUNTER\x10\x91\x01\x12!\n\x1c\x43OLLECT_DAILY_DEFENDER_BONUS\x10\x92\x01\x12\x14\n\x0fUPGRADE_POKEMON\x10\x93\x01\x12\x19\n\x14SET_FAVORITE_POKEMON\x10\x94\x01\x12\x15\n\x10NICKNAME_POKEMON\x10\x95\x01\x12\x10\n\x0b\x45QUIP_BADGE\x10\x96\x01\x12\x19\n\x14SET_CONTACT_SETTINGS\x10\x97\x01\x12\x16\n\x11SET_BUDDY_POKEMON\x10\x98\x01\x12\x15\n\x10GET_BUDDY_WALKED\x10\x99\x01\x12\x15\n\x10GET_ASSET_DIGEST\x10\xac\x02\x12\x16\n\x11GET_DOWNLOAD_URLS\x10\xad\x02\x12\x1c\n\x17GET_SUGGESTED_CODENAMES\x10\x91\x03\x12\x1d\n\x18\x43HECK_CODENAME_AVAILABLE\x10\x92\x03\x12\x13\n\x0e\x43LAIM_CODENAME\x10\x93\x03\x12\x0f\n\nSET_AVATAR\x10\x94\x03\x12\x14\n\x0fSET_PLAYER_TEAM\x10\x95\x03\x12\x1b\n\x16MARK_TUTORIAL_COMPLETE\x10\x96\x03\x12\x16\n\x11LOAD_SPAWN_POINTS\x10\xf4\x03\x12\x14\n\x0f\x43HECK_CHALLENGE\x10\xd8\x04\x12\x15\n\x10VERIFY_CHALLENGE\x10\xd9\x04\x12\t\n\x04\x45\x43HO\x10\x9a\x05\x12\x1b\n\x16\x44\x45\x42UG_UPDATE_INVENTORY\x10\xbc\x05\x12\x18\n\x13\x44\x45\x42UG_DELETE_PLAYER\x10\xbd\x05\x12\x17\n\x12SFIDA_REGISTRATION\x10\xa0\x06\x12\x15\n\x10SFIDA_ACTION_LOG\x10\xa1\x06\x12\x18\n\x13SFIDA_CERTIFICATION\x10\xa2\x06\x12\x11\n\x0cSFIDA_UPDATE\x10\xa3\x06\x12\x11\n\x0cSFIDA_ACTION\x10\xa4\x06\x12\x11\n\x0cSFIDA_DOWSER\x10\xa5\x06\x12\x12\n\rSFIDA_CAPTURE\x10\xa6\x06\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_REQUESTTYPE = _descriptor.EnumDescriptor(
name='RequestType',
full_name='POGOProtos.Networking.Requests.RequestType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='METHOD_UNSET', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PLAYER_UPDATE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_PLAYER', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_INVENTORY', index=3, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOWNLOAD_SETTINGS', index=4, number=5,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOWNLOAD_ITEM_TEMPLATES', index=5, number=6,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOWNLOAD_REMOTE_CONFIG_VERSION', index=6, number=7,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FORT_SEARCH', index=7, number=101,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ENCOUNTER', index=8, number=102,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CATCH_POKEMON', index=9, number=103,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FORT_DETAILS', index=10, number=104,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ITEM_USE', index=11, number=105,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_MAP_OBJECTS', index=12, number=106,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FORT_DEPLOY_POKEMON', index=13, number=110,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FORT_RECALL_POKEMON', index=14, number=111,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RELEASE_POKEMON', index=15, number=112,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_POTION', index=16, number=113,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_CAPTURE', index=17, number=114,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_FLEE', index=18, number=115,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_REVIVE', index=19, number=116,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TRADE_SEARCH', index=20, number=117,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TRADE_OFFER', index=21, number=118,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TRADE_RESPONSE', index=22, number=119,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TRADE_RESULT', index=23, number=120,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_PLAYER_PROFILE', index=24, number=121,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_ITEM_PACK', index=25, number=122,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BUY_ITEM_PACK', index=26, number=123,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BUY_GEM_PACK', index=27, number=124,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EVOLVE_POKEMON', index=28, number=125,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_HATCHED_EGGS', index=29, number=126,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ENCOUNTER_TUTORIAL_COMPLETE', index=30, number=127,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LEVEL_UP_REWARDS', index=31, number=128,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHECK_AWARDED_BADGES', index=32, number=129,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_GYM', index=33, number=133,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_GYM_DETAILS', index=34, number=134,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='START_GYM_BATTLE', index=35, number=135,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ATTACK_GYM', index=36, number=136,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RECYCLE_INVENTORY_ITEM', index=37, number=137,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='COLLECT_DAILY_BONUS', index=38, number=138,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_XP_BOOST', index=39, number=139,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_EGG_INCUBATOR', index=40, number=140,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_INCENSE', index=41, number=141,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_INCENSE_POKEMON', index=42, number=142,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INCENSE_ENCOUNTER', index=43, number=143,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ADD_FORT_MODIFIER', index=44, number=144,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DISK_ENCOUNTER', index=45, number=145,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='COLLECT_DAILY_DEFENDER_BONUS', index=46, number=146,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UPGRADE_POKEMON', index=47, number=147,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SET_FAVORITE_POKEMON', index=48, number=148,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NICKNAME_POKEMON', index=49, number=149,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EQUIP_BADGE', index=50, number=150,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SET_CONTACT_SETTINGS', index=51, number=151,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SET_BUDDY_POKEMON', index=52, number=152,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_BUDDY_WALKED', index=53, number=153,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_ASSET_DIGEST', index=54, number=300,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_DOWNLOAD_URLS', index=55, number=301,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_SUGGESTED_CODENAMES', index=56, number=401,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHECK_CODENAME_AVAILABLE', index=57, number=402,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CLAIM_CODENAME', index=58, number=403,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SET_AVATAR', index=59, number=404,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SET_PLAYER_TEAM', index=60, number=405,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MARK_TUTORIAL_COMPLETE', index=61, number=406,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LOAD_SPAWN_POINTS', index=62, number=500,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHECK_CHALLENGE', index=63, number=600,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='VERIFY_CHALLENGE', index=64, number=601,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ECHO', index=65, number=666,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DEBUG_UPDATE_INVENTORY', index=66, number=700,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DEBUG_DELETE_PLAYER', index=67, number=701,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_REGISTRATION', index=68, number=800,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_ACTION_LOG', index=69, number=801,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_CERTIFICATION', index=70, number=802,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_UPDATE', index=71, number=803,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_ACTION', index=72, number=804,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_DOWSER', index=73, number=805,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_CAPTURE', index=74, number=806,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=85,
serialized_end=1778,
)
_sym_db.RegisterEnumDescriptor(_REQUESTTYPE)
RequestType = enum_type_wrapper.EnumTypeWrapper(_REQUESTTYPE)
METHOD_UNSET = 0
PLAYER_UPDATE = 1
GET_PLAYER = 2
GET_INVENTORY = 4
DOWNLOAD_SETTINGS = 5
DOWNLOAD_ITEM_TEMPLATES = 6
DOWNLOAD_REMOTE_CONFIG_VERSION = 7
FORT_SEARCH = 101
ENCOUNTER = 102
CATCH_POKEMON = 103
FORT_DETAILS = 104
ITEM_USE = 105
GET_MAP_OBJECTS = 106
FORT_DEPLOY_POKEMON = 110
FORT_RECALL_POKEMON = 111
RELEASE_POKEMON = 112
USE_ITEM_POTION = 113
USE_ITEM_CAPTURE = 114
USE_ITEM_FLEE = 115
USE_ITEM_REVIVE = 116
TRADE_SEARCH = 117
TRADE_OFFER = 118
TRADE_RESPONSE = 119
TRADE_RESULT = 120
GET_PLAYER_PROFILE = 121
GET_ITEM_PACK = 122
BUY_ITEM_PACK = 123
BUY_GEM_PACK = 124
EVOLVE_POKEMON = 125
GET_HATCHED_EGGS = 126
ENCOUNTER_TUTORIAL_COMPLETE = 127
LEVEL_UP_REWARDS = 128
CHECK_AWARDED_BADGES = 129
USE_ITEM_GYM = 133
GET_GYM_DETAILS = 134
START_GYM_BATTLE = 135
ATTACK_GYM = 136
RECYCLE_INVENTORY_ITEM = 137
COLLECT_DAILY_BONUS = 138
USE_ITEM_XP_BOOST = 139
USE_ITEM_EGG_INCUBATOR = 140
USE_INCENSE = 141
GET_INCENSE_POKEMON = 142
INCENSE_ENCOUNTER = 143
ADD_FORT_MODIFIER = 144
DISK_ENCOUNTER = 145
COLLECT_DAILY_DEFENDER_BONUS = 146
UPGRADE_POKEMON = 147
SET_FAVORITE_POKEMON = 148
NICKNAME_POKEMON = 149
EQUIP_BADGE = 150
SET_CONTACT_SETTINGS = 151
SET_BUDDY_POKEMON = 152
GET_BUDDY_WALKED = 153
GET_ASSET_DIGEST = 300
GET_DOWNLOAD_URLS = 301
GET_SUGGESTED_CODENAMES = 401
CHECK_CODENAME_AVAILABLE = 402
CLAIM_CODENAME = 403
SET_AVATAR = 404
SET_PLAYER_TEAM = 405
MARK_TUTORIAL_COMPLETE = 406
LOAD_SPAWN_POINTS = 500
CHECK_CHALLENGE = 600
VERIFY_CHALLENGE = 601
ECHO = 666
DEBUG_UPDATE_INVENTORY = 700
DEBUG_DELETE_PLAYER = 701
SFIDA_REGISTRATION = 800
SFIDA_ACTION_LOG = 801
SFIDA_CERTIFICATION = 802
SFIDA_UPDATE = 803
SFIDA_ACTION = 804
SFIDA_DOWSER = 805
SFIDA_CAPTURE = 806
DESCRIPTOR.enum_types_by_name['RequestType'] = _REQUESTTYPE
# @@protoc_insertion_point(module_scope)
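# A minimal usage sketch (not part of the generated module): the protobuf
# EnumTypeWrapper supports name/number lookups in both directions.
#
#   RequestType.Name(2)               # -> 'GET_PLAYER'
#   RequestType.Value('FORT_SEARCH')  # -> 101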
|
gustavo-guimaraes/siga
|
refs/heads/master
|
backend/venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/charade/big5prober.py
|
2930
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import Big5SMModel
class Big5Prober(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(Big5SMModel)
self._mDistributionAnalyzer = Big5DistributionAnalysis()
self.reset()
def get_charset_name(self):
return "Big5"
|
cheery/essence
|
refs/heads/master
|
treemode/argon/keyboard.py
|
5
|
import pygame
bindings = {
pygame.K_0: "0",
pygame.K_GREATER: "greater",
pygame.K_RALT: "ralt",
pygame.K_1: "1",
pygame.K_HASH: "hash",
pygame.K_RCTRL: "rctrl",
pygame.K_2: "2",
pygame.K_HELP: "help",
pygame.K_RETURN: "return",
pygame.K_3: "3",
pygame.K_HOME: "home",
pygame.K_RIGHT: "right",
pygame.K_4: "4",
pygame.K_INSERT: "insert",
pygame.K_RIGHTBRACKET: "rightbracket",
pygame.K_5: "5",
pygame.K_KP0: "kp0",
pygame.K_RIGHTPAREN: "rightparen",
pygame.K_6: "6",
pygame.K_KP1: "kp1",
pygame.K_RMETA: "rmeta",
pygame.K_7: "7",
pygame.K_KP2: "kp2",
pygame.K_RSHIFT: "rshift",
pygame.K_8: "8",
pygame.K_KP3: "kp3",
pygame.K_RSUPER: "rsuper",
pygame.K_9: "9",
pygame.K_KP4: "kp4",
pygame.K_SCROLLOCK: "scrollock",
pygame.K_AMPERSAND: "ampersand",
pygame.K_KP5: "kp5",
pygame.K_SEMICOLON: "semicolon",
pygame.K_ASTERISK: "asterisk",
pygame.K_KP6: "kp6",
pygame.K_SLASH: "slash",
pygame.K_AT: "at",
pygame.K_KP7: "kp7",
pygame.K_SPACE: "space",
pygame.K_BACKQUOTE: "backquote",
pygame.K_KP8: "kp8",
pygame.K_SYSREQ: "sysreq",
pygame.K_BACKSLASH: "backslash",
pygame.K_KP9: "kp9",
pygame.K_TAB: "tab",
pygame.K_BACKSPACE: "backspace",
pygame.K_KP_DIVIDE: "kp_divide",
pygame.K_UNDERSCORE: "underscore",
pygame.K_BREAK: "break",
pygame.K_KP_ENTER: "kp_enter",
pygame.K_UNKNOWN: "unknown",
pygame.K_CAPSLOCK: "capslock",
pygame.K_KP_EQUALS: "kp_equals",
pygame.K_UP: "up",
pygame.K_CARET: "caret",
pygame.K_KP_MINUS: "kp_minus",
pygame.K_a: "a",
pygame.K_CLEAR: "clear",
pygame.K_KP_MULTIPLY: "kp_multiply",
pygame.K_b: "b",
pygame.K_COLON: "colon",
pygame.K_KP_PERIOD: "kp_period",
pygame.K_c: "c",
pygame.K_COMMA: "comma",
pygame.K_KP_PLUS: "kp_plus",
pygame.K_d: "d",
pygame.K_DELETE: "delete",
pygame.K_LALT: "lalt",
pygame.K_e: "e",
pygame.K_DOLLAR: "dollar",
pygame.K_LAST: "last",
pygame.K_f: "f",
pygame.K_DOWN: "down",
pygame.K_LCTRL: "lctrl",
pygame.K_g: "g",
pygame.K_END: "end",
pygame.K_LEFT: "left",
pygame.K_h: "h",
pygame.K_EQUALS: "equals",
pygame.K_LEFTBRACKET: "leftbracket",
pygame.K_i: "i",
pygame.K_ESCAPE: "escape",
pygame.K_LEFTPAREN: "leftparen",
pygame.K_j: "j",
pygame.K_EURO: "euro",
pygame.K_LESS: "less",
pygame.K_k: "k",
pygame.K_EXCLAIM: "exclaim",
pygame.K_LMETA: "lmeta",
pygame.K_l: "l",
pygame.K_F1: "f1",
pygame.K_LSHIFT: "lshift",
pygame.K_m: "m",
pygame.K_F10: "f10",
pygame.K_LSUPER: "lsuper",
pygame.K_n: "n",
pygame.K_F11: "f11",
pygame.K_MENU: "menu",
pygame.K_o: "o",
pygame.K_F12: "f12",
pygame.K_MINUS: "minus",
pygame.K_p: "p",
pygame.K_F13: "f13",
pygame.K_MODE: "mode",
pygame.K_q: "q",
pygame.K_F14: "f14",
pygame.K_NUMLOCK: "numlock",
pygame.K_r: "r",
pygame.K_F15: "f15",
pygame.K_PAGEDOWN: "pagedown",
pygame.K_s: "s",
pygame.K_F2: "f2",
pygame.K_PAGEUP: "pageup",
pygame.K_t: "t",
pygame.K_F3: "f3",
pygame.K_PAUSE: "pause",
pygame.K_u: "u",
pygame.K_F4: "f4",
pygame.K_PERIOD: "period",
pygame.K_v: "v",
pygame.K_F5: "f5",
pygame.K_PLUS: "plus",
pygame.K_w: "w",
pygame.K_F6: "f6",
pygame.K_POWER: "power",
pygame.K_x: "x",
pygame.K_F7: "f7",
pygame.K_PRINT: "print",
pygame.K_y: "y",
pygame.K_F8: "f8",
pygame.K_QUESTION: "question",
pygame.K_z: "z",
pygame.K_F9: "f9",
pygame.K_QUOTE: "quote",
pygame.K_FIRST: "first",
pygame.K_QUOTEDBL: "quotedbl",
}
modifier_bindings = [
(pygame.KMOD_ALT, "alt"),
(pygame.KMOD_LSHIFT, "lshift"),
(pygame.KMOD_RCTRL, "rctrl"),
(pygame.KMOD_CAPS, "caps"),
(pygame.KMOD_META, "meta"),
(pygame.KMOD_RMETA, "rmeta"),
(pygame.KMOD_CTRL, "ctrl"),
(pygame.KMOD_MODE, "mode"),
(pygame.KMOD_RSHIFT, "rshift"),
(pygame.KMOD_LALT, "lalt"),
(pygame.KMOD_NONE, "none"),
(pygame.KMOD_SHIFT, "shift"),
(pygame.KMOD_LCTRL, "lctrl"),
(pygame.KMOD_NUM, "num"),
(pygame.KMOD_LMETA, "lmeta"),
(pygame.KMOD_RALT, "ralt"),
]
def parse_modifiers(mod):
    """Yield the symbolic name of every modifier bit set in a pygame bitmask."""
    for mask, name in modifier_bindings:
        if mod & mask:
            yield name
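if __name__ == "__main__":
    # A minimal usage sketch (the bitmask below is illustrative): turn a
    # pygame modifier mask into the symbolic names used by this module.
    mod = pygame.KMOD_LSHIFT
    print("%s + %s" % (bindings[pygame.K_a], sorted(parse_modifiers(mod))))
    # e.g. prints: a + ['lshift', 'shift']  (KMOD_SHIFT includes the LSHIFT bit)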
|
aljex/iTerm2
|
refs/heads/Aljex
|
tests/esctest/tests/apc.py
|
25
|
from esc import NUL, ST, S7C1T, S8C1T
import escargs
import esccmd
import escio
from escutil import AssertScreenCharsInRectEqual, knownBug, optionRequired
from esctypes import Rect
class APCTests(object):
@knownBug(terminal="iTerm2", reason="Not implemented.")
def test_APC_Basic(self):
esccmd.APC()
escio.Write("xyz")
escio.Write(ST)
escio.Write("A")
AssertScreenCharsInRectEqual(Rect(1, 1, 3, 1),
[ "A" + NUL * 2 ])
@knownBug(terminal="iTerm2", reason="8-bit controls not implemented.")
@optionRequired(terminal="xterm", option=escargs.DISABLE_WIDE_CHARS)
def test_APC_8bit(self):
escio.use8BitControls = True
escio.Write(S8C1T)
esccmd.APC()
escio.Write("xyz")
escio.Write(ST)
escio.Write("A")
escio.Write(S7C1T)
escio.use8BitControls = False
AssertScreenCharsInRectEqual(Rect(1, 1, 3, 1),
[ "A" + NUL * 2 ])
|
komsas/OpenUpgrade
|
refs/heads/master
|
addons/crm_profiling/wizard/open_questionnaire.py
|
44
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class open_questionnaire_line(osv.osv_memory):
_name = 'open.questionnaire.line'
_rec_name = 'question_id'
_columns = {
'question_id': fields.many2one('crm_profiling.question','Question', required=True),
'answer_id': fields.many2one('crm_profiling.answer', 'Answer'),
'wizard_id': fields.many2one('open.questionnaire', 'Questionnaire'),
}
class open_questionnaire(osv.osv_memory):
_name = 'open.questionnaire'
_columns = {
'questionnaire_id': fields.many2one('crm_profiling.questionnaire', 'Questionnaire name'),
'question_ans_ids': fields.one2many('open.questionnaire.line', 'wizard_id', 'Question / Answers'),
}
def default_get(self, cr, uid, fields, context=None):
if context is None: context = {}
res = super(open_questionnaire, self).default_get(cr, uid, fields, context=context)
questionnaire_id = context.get('questionnaire_id', False)
if questionnaire_id and 'question_ans_ids' in fields:
query = """
select question as question_id from profile_questionnaire_quest_rel where questionnaire = %s"""
cr.execute(query, (questionnaire_id,))
result = cr.dictfetchall()
res.update(question_ans_ids=result)
return res
def questionnaire_compute(self, cr, uid, ids, context=None):
""" Adds selected answers in partner form """
model = context.get('active_model')
answers = []
if model == 'res.partner':
data = self.browse(cr, uid, ids[0], context=context)
for d in data.question_ans_ids:
if d.answer_id:
answers.append(d.answer_id.id)
self.pool[model]._questionnaire_compute(cr, uid, answers, context=context)
return {'type': 'ir.actions.act_window_close'}
def build_form(self, cr, uid, ids, context=None):
""" Dynamically generates form according to selected questionnaire """
models_data = self.pool.get('ir.model.data')
result = models_data._get_id(cr, uid, 'crm_profiling', 'open_questionnaire_form')
res_id = models_data.browse(cr, uid, result, context=context).res_id
datas = self.browse(cr, uid, ids[0], context=context)
context.update({'questionnaire_id': datas.questionnaire_id.id})
return {
'name': _('Questionnaire'),
'view_type': 'form',
'view_mode': 'form',
'res_model': 'open.questionnaire',
'type': 'ir.actions.act_window',
'views': [(res_id,'form')],
'target': 'new',
'context': context
}
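# A minimal usage sketch (not part of the module; ids and context values are
# illustrative): the wizard is normally launched from a partner form action.
#
#   wiz = self.pool['open.questionnaire']
#   act = wiz.build_form(cr, uid, [wiz_id],
#                        context={'active_model': 'res.partner'})
#   # 'act' is an ir.actions.act_window dict opening the dynamic form view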
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
mohanprasath/Course-Work
|
refs/heads/master
|
data_analysis/uh_data_analysis_with_python/hy-data-analysis-with-python-spring-2020/part05-e01_split_date_continues/test/test_split_date_continues.py
|
1
|
#!/usr/bin/env python3
import unittest
from unittest.mock import patch
import numpy as np
import pandas as pd
from tmc import points
from tmc.utils import load, get_out, patch_helper
module_name="src.split_date_continues"
split_date_continues = load(module_name, "split_date_continues")
main = load(module_name, "main")
ph = patch_helper(module_name)
@points('p05-01.1')
class SplitDateContinues(unittest.TestCase):
# @classmethod
# def setUpClass(cls):
# cls.df = split_date_continues()
def setUp(self):
self.df = split_date_continues()
def test_shape(self):
self.assertEqual(self.df.shape, (37128, 25), msg="Incorrect shape!")
def test_columns(self):
np.testing.assert_array_equal(self.df.columns[:6],
['Weekday', 'Day', 'Month', 'Year', 'Hour', 'Auroransilta'],
err_msg="First six column names were incorrect!")
def test_dtypes(self):
np.testing.assert_array_equal(self.df.dtypes[:6],
[object, int, int, int, int, float],
err_msg="Incorrect column types in first six columns!")
def test_content(self):
value = self.df.loc[0, "Auroransilta"]
self.assertTrue(np.isnan(value),
msg="Incorrect value on row 0 column Auroransilta, expected NaN got %f!" % value)
self.assertEqual(self.df.loc[0, "Baana"], 8.0,
msg="Incorrect value on row 0 column Baana!")
def test_calls(self):
with patch(ph("split_date_continues"), wraps=split_date_continues) as psplit,\
patch(ph("pd.read_csv"), wraps=pd.read_csv) as prc,\
patch(ph("pd.concat"), wraps=pd.concat) as pconcat:
main()
psplit.assert_called_once()
prc.assert_called_once()
pconcat.assert_called()
if __name__ == '__main__':
unittest.main()
|
sarvex/django
|
refs/heads/master
|
django/contrib/staticfiles/templatetags/__init__.py
|
12133432
| |
chewable/django
|
refs/heads/master
|
tests/regressiontests/templates/__init__.py
|
12133432
| |
chugunovyar/factoryForBuild
|
refs/heads/master
|
env/lib/python2.7/site-packages/django/contrib/sitemaps/management/commands/__init__.py
|
12133432
| |
EricssonResearch/calvin-base
|
refs/heads/master
|
calvinextras/calvinsys/io/servomotor/raspberry_pi/__init__.py
|
12133432
| |
apexdatasolutions/VistA
|
refs/heads/master
|
Scripts/Testing/PyUnit/RPCBrokerCheck.py
|
5
|
#---------------------------------------------------------------------------
# Copyright 2013 The Open Source Electronic Health Record Agent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
## RPC Broker Check
##
'''
This command makes sure that the RPC Broker is listening before
running the tests created for the SSEP
@copyright The Open Source Electronic Health Record Agent
@license http://www.apache.org/licenses/LICENSE-2.0
'''
import sys
import os
import time
def CheckRPCListener(VistA, tcp_host, tcp_port):
    """Ask VistA (via CALL^XWBTCPMT) whether an RPC listener answers at host:port."""
    VistA.send('D CALL^XWBTCPMT\r')
VistA.expect('IP ADDRESS')
VistA.send(tcp_host+ '\r')
VistA.expect('PORT')
VistA.send(str(tcp_port)+ '\r')
index = VistA.expect(['Success','Failed'])
return index
if __name__ == "__main__":
import argparse
curDir = os.path.dirname(os.path.abspath(__file__))
scriptDir = os.path.normpath(os.path.join(curDir, "../../"))
if scriptDir not in sys.path:
sys.path.append(scriptDir)
from VistATestClient import createTestClientArgParser,VistATestClientFactory
# Arg Parser to get address and port of RPC Listener along with a log file
# Inherits the connection arguments of the testClientParser
testClientParser = createTestClientArgParser()
    ssepTestParser = argparse.ArgumentParser(
        description='Test the M2M broker via XML files',
        parents=[testClientParser])
ssepTestParser.add_argument("-ha",required=True,dest='host',
help='Address of the host where RPC Broker is listening')
ssepTestParser.add_argument("-hp",required=True,dest='port',
help='Port of the host machine where RPC Broker is listening')
    # Parsed arguments are kept at module scope so each test can reach the
    # host address and port of the RPC listener.
    results = ssepTestParser.parse_args()
testClient = VistATestClientFactory.createVistATestClientWithArgs(results)
assert testClient
with testClient:
# If checkresult == 0, RPC listener is set up correctly and tests should be run
# else, don't bother running the tests
print "Testing connection to RPC Listener on host: " + results.host + " and port: " + results.port
checkresult = CheckRPCListener(testClient.getConnection(),results.host,results.port)
if checkresult == 0:
print "Connection Successful"
else:
print "Connection Unsuccessful"
|
TheWardoctor/Wardoctors-repo
|
refs/heads/master
|
script.module.covenant/lib/resources/lib/sources/en/mzmovies.py
|
7
|
# -*- coding: utf-8 -*-
'''
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib, urlparse, re
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import source_utils
from resources.lib.modules import dom_parser
from resources.lib.modules import directstream
from resources.lib.modules import cfscrape
class source:
def __init__(self):
self.priority = 0
self.language = ['en']
self.domains = ['mehlizmovies.com']
self.base_link = 'https://www.mehlizmovies.com/'
self.search_link = '?s=%s'
self.search_link2 = '/search/%s/feed/rss2/'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = self.__search([localtitle] + source_utils.aliases_to_array(aliases), year)
            if not url and title != localtitle:
                url = self.__search([title] + source_utils.aliases_to_array(aliases), year)
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
url = self.__search([localtvshowtitle] + source_utils.aliases_to_array(aliases), year)
            if not url and tvshowtitle != localtvshowtitle:
                url = self.__search([tvshowtitle] + source_utils.aliases_to_array(aliases), year)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if not url:
return
url = urlparse.urljoin(self.base_link, url)
scraper = cfscrape.create_scraper()
data = scraper.get(url).content
data = client.parseDOM(data, 'ul', attrs={'class': 'episodios'})
links = client.parseDOM(data, 'div', attrs={'class': 'episodiotitle'})
sp = zip(client.parseDOM(data, 'div', attrs={'class': 'numerando'}), client.parseDOM(links, 'a', ret='href'))
            Sea_Epi = '%dx%d' % (int(season), int(episode))
for i in sp:
sep = i[0]
if sep == Sea_Epi:
url = source_utils.strip_domain(i[1])
return url
except:
return
def __search(self, titles, year):
try:
query = self.search_link % (urllib.quote_plus(cleantitle.getsearch(titles[0])))
query = urlparse.urljoin(self.base_link, query)
t = cleantitle.get(titles[0])
scraper = cfscrape.create_scraper()
data = scraper.get(query).content
#data = client.request(query, referer=self.base_link)
data = client.parseDOM(data, 'div', attrs={'class': 'result-item'})
r = dom_parser.parse_dom(data, 'div', attrs={'class': 'title'})
r = zip(dom_parser.parse_dom(r, 'a'), dom_parser.parse_dom(data, 'span', attrs={'class': 'year'}))
url = []
for i in range(len(r)):
title = cleantitle.get(r[i][0][1])
title = re.sub('(\d+p|4k|3d|hd|season\d+)','',title)
y = r[i][1][1]
link = r[i][0][0]['href']
if 'season' in title: continue
if t == title and y == year:
if 'season' in link:
url.append(source_utils.strip_domain(link))
print url[0]
return url[0]
else: url.append(source_utils.strip_domain(link))
return url
except:
return
def sources(self, url, hostDict, hostprDict):
sources = []
try:
if not url:
return sources
links = self.links_found(url)
hostdict = hostDict + hostprDict
for url in links:
try:
                    valid, host = source_utils.is_host_valid(url, hostdict)
                    if 'mehliz' in url:
                        host = 'MZ'
                        direct = True
                        urls = self.mz_server(url)
                    elif 'ok.ru' in url:
                        host = 'vk'
                        direct = True
                        urls = directstream.odnoklassniki(url)
                    else:
                        direct = False
                        urls = [{'quality': 'SD', 'url': url}]
for x in urls:
sources.append({'source': host, 'quality': x['quality'], 'language': 'en',
'url': x['url'], 'direct': direct, 'debridonly': False})
except:
pass
return sources
except:
return sources
def links_found(self,urls):
try:
scraper = cfscrape.create_scraper()
links = []
if type(urls) is list:
for item in urls:
query = urlparse.urljoin(self.base_link, item)
r = scraper.get(query).content
data = client.parseDOM(r, 'div', attrs={'id': 'playex'})
data = client.parseDOM(data, 'div', attrs={'id': 'option-\d+'})
links += client.parseDOM(data, 'iframe', ret='src')
print links
else:
query = urlparse.urljoin(self.base_link, urls)
r = scraper.get(query).content
data = client.parseDOM(r, 'div', attrs={'id': 'playex'})
data = client.parseDOM(data, 'div', attrs={'id': 'option-\d+'})
links += client.parseDOM(data, 'iframe', ret='src')
return links
except:
return urls
def mz_server(self,url):
try:
scraper = cfscrape.create_scraper()
urls = []
data = scraper.get(url).content
data = re.findall('''file:\s*["']([^"']+)",label:\s*"(\d{3,}p)"''', data, re.DOTALL)
for url, label in data:
label = source_utils.label_to_quality(label)
if label == 'SD': continue
urls.append({'url': url, 'quality': label})
return urls
except:
return url
def resolve(self, url):
return url
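# A minimal usage sketch (not part of the add-on; the host lists are
# illustrative): resolve a movie page and collect stream candidates.
#
#   s = source()
#   movie_url = s.movie('tt0133093', 'The Matrix', 'The Matrix', [], '1999')
#   streams = s.sources(movie_url, hostDict=['ok.ru'], hostprDict=[])
#   # each entry: {'source', 'quality', 'language', 'url', 'direct', 'debridonly'}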
|
mKeRix/home-assistant
|
refs/heads/dev
|
homeassistant/components/sonarr/config_flow.py
|
6
|
"""Config flow for Sonarr."""
import logging
from typing import Any, Dict, Optional
from sonarr import Sonarr, SonarrAccessRestricted, SonarrError
import voluptuous as vol
from homeassistant.config_entries import CONN_CLASS_LOCAL_POLL, ConfigFlow, OptionsFlow
from homeassistant.const import (
CONF_API_KEY,
CONF_HOST,
CONF_PORT,
CONF_SSL,
CONF_VERIFY_SSL,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .const import (
CONF_BASE_PATH,
CONF_UPCOMING_DAYS,
CONF_WANTED_MAX_ITEMS,
DEFAULT_BASE_PATH,
DEFAULT_PORT,
DEFAULT_SSL,
DEFAULT_UPCOMING_DAYS,
DEFAULT_VERIFY_SSL,
DEFAULT_WANTED_MAX_ITEMS,
)
from .const import DOMAIN # pylint: disable=unused-import
_LOGGER = logging.getLogger(__name__)
async def validate_input(hass: HomeAssistantType, data: dict) -> bool:
    """Validate that the user input allows us to connect.

    Data has the keys from DATA_SCHEMA with values provided by the user.
    """
session = async_get_clientsession(hass)
sonarr = Sonarr(
host=data[CONF_HOST],
port=data[CONF_PORT],
api_key=data[CONF_API_KEY],
base_path=data[CONF_BASE_PATH],
tls=data[CONF_SSL],
verify_ssl=data[CONF_VERIFY_SSL],
session=session,
)
await sonarr.update()
return True
class SonarrConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Sonarr."""
VERSION = 1
CONNECTION_CLASS = CONN_CLASS_LOCAL_POLL
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return SonarrOptionsFlowHandler(config_entry)
async def async_step_import(
self, user_input: Optional[ConfigType] = None
) -> Dict[str, Any]:
"""Handle a flow initiated by configuration file."""
return await self.async_step_user(user_input)
async def async_step_user(
self, user_input: Optional[ConfigType] = None
) -> Dict[str, Any]:
"""Handle a flow initiated by the user."""
if user_input is None:
return self._show_setup_form()
if CONF_VERIFY_SSL not in user_input:
user_input[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL
try:
await validate_input(self.hass, user_input)
except SonarrAccessRestricted:
return self._show_setup_form({"base": "invalid_auth"})
except SonarrError:
return self._show_setup_form({"base": "cannot_connect"})
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
return self.async_abort(reason="unknown")
return self.async_create_entry(title=user_input[CONF_HOST], data=user_input)
def _show_setup_form(self, errors: Optional[Dict] = None) -> Dict[str, Any]:
"""Show the setup form to the user."""
data_schema = {
vol.Required(CONF_HOST): str,
vol.Required(CONF_API_KEY): str,
vol.Optional(CONF_BASE_PATH, default=DEFAULT_BASE_PATH): str,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): bool,
}
if self.show_advanced_options:
data_schema[
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL)
] = bool
return self.async_show_form(
step_id="user", data_schema=vol.Schema(data_schema), errors=errors or {},
)
class SonarrOptionsFlowHandler(OptionsFlow):
"""Handle Sonarr client options."""
def __init__(self, config_entry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input: Optional[ConfigType] = None):
"""Manage Sonarr options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
options = {
vol.Optional(
CONF_UPCOMING_DAYS,
default=self.config_entry.options.get(
CONF_UPCOMING_DAYS, DEFAULT_UPCOMING_DAYS
),
): int,
vol.Optional(
CONF_WANTED_MAX_ITEMS,
default=self.config_entry.options.get(
CONF_WANTED_MAX_ITEMS, DEFAULT_WANTED_MAX_ITEMS
),
): int,
}
return self.async_show_form(step_id="init", data_schema=vol.Schema(options))
|