gt
stringclasses 1
value | context
stringlengths 2.49k
119k
|
|---|---|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import copy
import os
import shutil
import subprocess
import sys
import time
import mock
import pytest
import requests
from vcr import VCR
from yagocd import Yagocd
from yagocd.session import Session
TESTING_VERSIONS = [
('16.1.0', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('16.2.1', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('16.3.0', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('16.6.0', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('16.7.0', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('16.8.0', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('16.9.0', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('16.10.0', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('16.11.0', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('16.12.0', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('17.1.0', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('17.2.0', 'gocd-server-deprecated', 'gocd-agent-deprecated'),
('v17.3.0', 'gocd-server', 'gocd-agent-alpine-3.5'),
('v17.5.0', 'gocd-server', 'gocd-agent-alpine-3.5'),
]
@pytest.fixture(scope='session')
def tests_dir():
    """Absolute path of the directory that contains this test module."""
    here = os.path.realpath(__file__)
    return os.path.dirname(here)
@pytest.fixture()
def mock_session():
    """A patched yagocd Session carrying canned server url/version values."""
    patched = mock.patch('yagocd.session.Session').start()
    patched.server_url = 'http://example.com'
    patched.server_version = '999.999.999'
    return patched
@pytest.fixture(scope="class")
def session_fixture():
    """A real Session pointed at the locally running GoCD server."""
    opts = copy.deepcopy(Yagocd.DEFAULT_OPTIONS)
    opts['server'] = 'http://localhost:8153/'
    return Session(auth=('admin', '12345'), options=opts)
# Root directory holding the recorded VCR cassettes, one subdirectory per
# GoCD version. NOTE(review): this calls the ``tests_dir`` fixture function
# directly at import time -- recent pytest versions forbid calling fixtures
# directly; confirm against the pinned pytest version.
root_cassette_library_dir = os.path.join(tests_dir(), 'fixtures/cassettes')
@pytest.fixture()
def my_vcr(gocd_docker):
    """VCR recorder whose cassette directory is keyed by the GoCD version."""
    cassette_dir = os.path.join(root_cassette_library_dir, gocd_docker)
    return VCR(
        cassette_library_dir=cassette_dir,
        path_transformer=VCR.ensure_suffix('.yaml'),
    )
# Names used for the docker containers running the GoCD server and agent.
SERVER_CONTAINER_NAME = 'yagocd-server'
AGENT_CONTAINER_NAME = 'yagocd-agent'
# Directory of this file; its docker/ subdirectory is mounted into the
# server container as /workspace.
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
def pytest_addoption(parser):
    """Register the command line options that control docker usage.

    :param parser: the pytest option parser supplied by the hook machinery.
    """
    parser.addoption(
        "--use-docker", action="store_true", default=False,
        help=(
            "This flag controls execution of the GoCD server in Docker container. "
            "It's possible to remove some cassettes and use this flag to generate new ones."
        )
    )
    parser.addoption(
        "--fresh-run", action="store_true", default=False,
        help=(
            # BUG FIX: the two fragments previously concatenated without a
            # separating space ("...deleted.You have...").
            "Execute fresh run of tests: all cassettes will be deleted. "
            "You have to explicitly set `--use-docker` flag if you would like to use docker."
        )
    )
@pytest.fixture(scope='session')
def use_docker(request):
    """Whether the tests should spin up GoCD in docker containers."""
    flag = request.config.getoption("--use-docker")
    return flag
@pytest.fixture(scope='session')
def fresh_run(request):
    """Whether recorded cassettes should be wiped before the run."""
    flag = request.config.getoption("--fresh-run")
    return flag
@pytest.fixture(scope="session", params=TESTING_VERSIONS)
def gocd_docker(request, use_docker, fresh_run):
    # Each parametrized run gets one (version, server image, agent image)
    # triple from TESTING_VERSIONS.
    version_tag, server_image, agent_image = request.param
    if use_docker:
        start_docker_server(server_image=server_image, version_tag=version_tag)
        start_docker_agent(agent_image=agent_image, version_tag=version_tag)
        wait_till_started()

        # Tear the containers down once the whole test session finishes.
        def fin():
            stop_containers()
        request.addfinalizer(fin)
    if fresh_run:
        # Drop previously recorded cassettes for this version so that they
        # get re-recorded against the live server.
        cassette_library_dir = os.path.join(root_cassette_library_dir, version_tag)
        if os.path.exists(cassette_library_dir):
            print("Removing existing cassettes from '{}'...".format(cassette_library_dir))  # noqa
            shutil.rmtree(cassette_library_dir)
    # NOTE(review): request.param is the whole tuple here, so the banner
    # prints the triple rather than just the version tag -- confirm intended.
    print(  # noqa
        '\n',
        '*' * 47 + '\n',
        '********** Using GoCD version {} **********\n'.format(request.param),
        '*' * 47 + '\n',
    )
    return version_tag
def start_docker_server(server_image, version_tag):
    """Start the GoCD server container unless one is already running."""
    print('Starting GoCD server in docker container [{} version]...'.format(version_tag))  # noqa
    # `docker ps --quiet --filter` prints nothing when no container with
    # this name is running.
    output = subprocess.check_output([
        "/usr/local/bin/docker",
        "ps",
        "--quiet",
        "--filter=name={container_name}".format(
            container_name=SERVER_CONTAINER_NAME),
    ])
    if not output:
        subprocess.check_call([
            "/usr/local/bin/docker",
            "run",
            "-p=8153:8153",
            "-p=8154:8154",
            "--detach",
            # Mount the docker/ helper scripts next to this file as /workspace.
            "--volume={current_dir}/docker:/workspace".format(
                current_dir=CURRENT_DIR
            ),
            "--workdir=/workspace",
            "--name={container_name}".format(
                container_name=SERVER_CONTAINER_NAME
            ),
            "gocd/{server_image}:{tag}".format(server_image=server_image, tag=version_tag),
            "/bin/bash",
            "-c",
            "'/workspace/bootstrap.sh'",
        ])
def start_docker_agent(agent_image, version_tag):
    """Start the GoCD agent container unless one is already running."""
    print('Starting GoCD agent in docker container [{} version]...'.format(version_tag))  # noqa
    # Empty output means no agent container with this name exists yet.
    output = subprocess.check_output([
        "/usr/local/bin/docker",
        "ps",
        "--quiet",
        "--filter=name={container_name}".format(
            container_name=AGENT_CONTAINER_NAME),
    ])
    if not output:
        subprocess.check_call([
            "/usr/local/bin/docker",
            "run",
            "--detach",
            # Make the server reachable from the agent as "go-server".
            "--link={server_container_name}:go-server".format(
                server_container_name=SERVER_CONTAINER_NAME
            ),
            "--name={container_name}".format(
                container_name=AGENT_CONTAINER_NAME
            ),
            # NOTE(review): hard-coded docker bridge IP of the server --
            # breaks if docker assigns a different address.
            "-eGO_SERVER_URL=https://172.17.0.2:8154/go",
            "gocd/{agent_image}:{tag}".format(agent_image=agent_image, tag=version_tag),
        ])
def wait_till_started(poll_interval=5):
    """Block until the GoCD server answers HTTP on localhost:8153.

    Polls the server root URL until a TCP connection succeeds, printing a
    progress dot for every failed attempt.

    :param poll_interval: seconds to sleep between connection attempts
        (new optional parameter; defaults to the previous hard-coded 5).
    """
    sys.stdout.write('Waiting for availability of GoCD server in Docker.')
    # BUG FIX: without flushing, the progress message and dots are buffered
    # and never show up while the loop is waiting.
    sys.stdout.flush()
    while True:
        try:
            requests.get("http://localhost:8153/go")
        except requests.exceptions.ConnectionError:
            sys.stdout.write('.')
            sys.stdout.flush()
            time.sleep(poll_interval)
        else:
            break
def stop_containers():
    """Force-remove the GoCD server and agent containers."""
    command = [
        "/usr/local/bin/docker",
        "rm",
        "-f",
        SERVER_CONTAINER_NAME,
        AGENT_CONTAINER_NAME,
    ]
    subprocess.check_call(command)
|
|
# -*- test-case-name: twisted.test.test_plugin -*-
# Copyright (c) 2005 Divmod, Inc.
# See LICENSE for details.
"""
Plugin system for Twisted.
@author: U{Jp Calderone<mailto:exarkun@twistedmatrix.com>}
@author: U{Glyph Lefkowitz<mailto:glyph@twistedmatrix.com>}
"""
from __future__ import generators
import os, errno
from zope.interface import Interface, providedBy
try:
import cPickle as pickle
except ImportError:
import pickle
from twisted.python.components import getAdapterFactory
from twisted.python.reflect import namedAny
from twisted.python.win32 import ERROR_FILE_NOT_FOUND, ERROR_PATH_NOT_FOUND
from twisted.python.win32 import ERROR_INVALID_NAME, WindowsError
from twisted.python import log
# Prefer a getmtime() that always yields float timestamps regardless of the
# interpreter-wide os.stat_float_times() setting; fall back to the plain
# os.path.getmtime when stat_float_times is unavailable.
try:
    from os import stat_float_times
    from os.path import getmtime as _getmtime

    def getmtime(x):
        # Temporarily force float stat times, then restore the prior mode.
        sft = stat_float_times()
        stat_float_times(True)
        try:
            return _getmtime(x)
        finally:
            stat_float_times(sft)
except:  # NOTE(review): bare except also hides unrelated failures
    from os.path import getmtime
class IPlugin(Interface):
    """Interface that must be implemented by all plugins.

    Only objects which implement this interface will be considered for
    return by C{getPlugins}. To be useful, plugins should also
    implement some other application-specific interface.
    """
class ITestPlugin(Interface):
    """A plugin for use by the plugin system's unit tests.

    Do not use this.
    """


class ITestPlugin2(Interface):
    """See L{ITestPlugin}.
    """
class CachedPlugin(object):
    """A plugin record stored in a dropin cache entry.

    Carries enough metadata to describe a plugin without importing its
    module; the real object is imported lazily by L{load}.
    """

    def __init__(self, dropin, name, description, provided):
        self.dropin = dropin
        self.name = name
        self.description = description
        self.provided = provided
        # Register this plugin on its owning dropin.
        self.dropin.plugins.append(self)

    def __repr__(self):
        provided_names = ', '.join([i.__name__ for i in self.provided])
        return '<CachedPlugin %r/%r (provides %r)>' % (
            self.name, self.dropin.moduleName, provided_names)

    def load(self):
        """Import and return the actual plugin object."""
        return namedAny('%s.%s' % (self.dropin.moduleName, self.name))

    def __conform__(self, interface, registry=None, default=None):
        """Adapt this record to C{interface}, loading the plugin on demand."""
        for candidate in self.provided:
            if candidate.isOrExtends(interface):
                return self.load()
            if getAdapterFactory(candidate, interface, None) is not None:
                return interface(self.load(), default)
        return default

    # backwards compat HOORJ
    getComponent = __conform__
class CachedDropin(object):
    """Cached metadata about one plugin module (a "dropin")."""

    def __init__(self, moduleName, description):
        # Fully qualified module name and its docstring-derived description.
        self.moduleName = moduleName
        self.description = description
        # Populated by CachedPlugin.__init__, which appends itself here.
        self.plugins = []
def _generateCacheEntry(provider):
    """Build a L{CachedDropin} describing C{provider} (a plugin module).

    Every module attribute adaptable to IPlugin becomes a CachedPlugin;
    CachedPlugin.__init__ links each one onto the dropin.
    """
    dropin = CachedDropin(provider.__name__,
                          provider.__doc__)
    for k, v in provider.__dict__.iteritems():  # Python 2 dict API
        # Adapt the attribute to IPlugin; None means "not a plugin".
        plugin = IPlugin(v, None)
        if plugin is not None:
            # Instantiation registers the plugin on `dropin`; the local
            # variable itself is otherwise unused.
            cachedPlugin = CachedPlugin(dropin, k, v.__doc__, list(providedBy(plugin)))
    return dropin
# dict.fromkeys is missing on very old Pythons; provide a fallback.
try:
    fromkeys = dict.fromkeys
except AttributeError:
    def fromkeys(keys, value=None):
        d = {}
        for k in keys:
            d[k] = value
        return d

# File extensions that may contain loadable plugin modules.
_exts = fromkeys(['.py', '.so', '.pyd', '.dll'])
def getCache(module):
    """Collect plugin metadata for every directory on C{module.__path__}.

    Each directory keeps a pickled "dropin.cache" file mapping module name
    to L{CachedDropin}. Stale or missing entries are regenerated by
    importing the plugin module, and the cache file is rewritten when dirty.

    @return: dict mapping plugin module name to L{CachedDropin}.
    """
    topcache = {}
    for p in module.__path__:
        dropcache = os.path.join(p, "dropin.cache")
        # Load the existing cache; any failure (missing file, unpickling
        # error) simply marks the cache dirty and starts from scratch.
        try:
            cache = pickle.load(file(dropcache))
            lastCached = getmtime(dropcache)
            dirtyCache = False
        except:
            cache = {}
            lastCached = 0
            dirtyCache = True

        try:
            dropinNames = os.listdir(p)
        except WindowsError, e:
            # WindowsError is an OSError subclass, so if not for this clause
            # the OSError clause below would be handling these. Windows
            # error codes aren't the same as POSIX error codes, so we need
            # to handle them differently.

            # Under Python 2.5 on Windows, WindowsError has a winerror
            # attribute and an errno attribute. The winerror attribute is
            # bound to the Windows error code while the errno attribute is
            # bound to a translation of that code to a perhaps equivalent
            # POSIX error number.

            # Under Python 2.4 on Windows, WindowsError only has an errno
            # attribute. It is bound to the Windows error code.

            # For simplicity of code and to keep the number of paths through
            # this suite minimal, we grab the Windows error code under
            # either version.

            # Furthermore, attempting to use os.listdir on a non-existent
            # path in Python 2.4 will result in a Windows error code of
            # ERROR_PATH_NOT_FOUND. However, in Python 2.5,
            # ERROR_FILE_NOT_FOUND results instead. -exarkun
            err = getattr(e, 'winerror', e.errno)
            if err in (ERROR_PATH_NOT_FOUND, ERROR_FILE_NOT_FOUND):
                continue
            elif err == ERROR_INVALID_NAME:
                log.msg("Invalid path %r in search path for %s" % (p, module.__name__))
                continue
            else:
                raise
        except OSError, ose:
            # Skip search path entries that simply don't exist.
            if ose.errno not in (errno.ENOENT, errno.ENOTDIR):
                raise
            else:
                continue
        else:
            # Map candidate module name -> mtime for every plugin source
            # file in this directory.
            pys = {}
            for dropinName in dropinNames:
                moduleName, moduleExt = os.path.splitext(dropinName)
                if moduleName != '__init__' and moduleExt in _exts:
                    pyFile = os.path.join(p, dropinName)
                    try:
                        pys[moduleName] = getmtime(pyFile)
                    except:
                        log.err()

            # Regenerate entries that are newer than the cache file or
            # missing from it entirely.
            for moduleName, lastChanged in pys.iteritems():
                if lastChanged >= lastCached or moduleName not in cache:
                    dirtyCache = True
                    try:
                        provider = namedAny(module.__name__ + '.' + moduleName)
                    except:
                        log.err()
                    else:
                        entry = _generateCacheEntry(provider)
                        cache[moduleName] = entry

            # Evict cache entries whose source file has disappeared.
            for moduleName in cache.keys():
                if moduleName not in pys:
                    dirtyCache = True
                    del cache[moduleName]

            topcache.update(cache)

            if dirtyCache:
                # Rewrite the cache atomically: dump to a sibling ".new"
                # file, then rename over the old cache.
                newCacheData = pickle.dumps(cache, 2)
                tmpCacheFile = dropcache + ".new"
                try:
                    stage = 'opening'
                    f = file(tmpCacheFile, 'wb')
                    stage = 'writing'
                    f.write(newCacheData)
                    stage = 'closing'
                    f.close()
                    stage = 'renaming'
                    os.rename(tmpCacheFile, dropcache)
                except (OSError, IOError), e:
                    # A large number of errors can occur here. There's nothing we
                    # can really do about any of them, but they are also non-fatal
                    # (they only slow us down by preventing results from being
                    # cached). Notify the user of the error, but proceed as if it
                    # had not occurred.
                    log.msg("Error %s plugin cache file %r (%r): %r" % (
                        stage, tmpCacheFile, dropcache, os.strerror(e.errno)))

    return topcache
import twisted.plugins
def getPlugins(interface, package=twisted.plugins):
    """Retrieve all plugins implementing the given interface beneath the given module.

    @param interface: An interface class. Only plugins which
    implement this interface will be returned.

    @param package: A package beneath which plugins are installed. For
    most uses, the default value is correct.

    @return: An iterator of plugins.
    """
    allDropins = getCache(package)
    for dropin in allDropins.itervalues():  # Python 2 dict API
        for plugin in dropin.plugins:
            try:
                # Adapt the cached record to the requested interface; this
                # may import the plugin module (see CachedPlugin.__conform__).
                adapted = interface(plugin, None)
            except:
                # NOTE(review): bare except -- any failure while adapting
                # (including plugin import errors) is logged and skipped.
                log.err()
            else:
                if adapted is not None:
                    yield adapted
# Old, backwards compatible name. Don't use this.
getPlugIns = getPlugins

# Only getPlugins is part of the public API.
__all__ = ['getPlugins']
|
|
# Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
import testtools
import webob.exc
from nova.api.openstack.compute.contrib import hosts as os_hosts
from nova.compute import power_state
from nova.compute import vm_states
from nova import context as context_maker
from nova import db
from nova import exception
from nova import test
from nova.tests import fake_hosts
from nova.tests import utils
def stub_service_get_all(context, disabled=None):
    """Stub for db.service_get_all returning the canned services list."""
    return fake_hosts.SERVICES_LIST
def stub_service_get_by_host_and_topic(context, host_name, topic):
    """Stub lookup of a fake service record by host name and topic.

    Yields the first matching service, or None when nothing matches.
    """
    matches = (service for service in stub_service_get_all(context)
               if service['host'] == host_name and service['topic'] == topic)
    return next(matches, None)
def stub_set_host_enabled(context, host_name, enabled):
    """Simulates three possible behaviours for VM drivers or compute
    drivers when enabling or disabling a host.

    'enabled' means new instances can go to this host
    'disabled' means they can't
    """
    if host_name == "notimplemented":
        # The vm driver for this host doesn't support this feature
        raise NotImplementedError()
    if host_name == "dummydest":
        # The host does not exist
        raise exception.ComputeHostNotFound(host=host_name)
    # 'host_c2' simulates a failure by reporting the opposite outcome;
    # every other host reports exactly what was requested.
    outcome = not enabled if host_name == "host_c2" else enabled
    return "enabled" if outcome else "disabled"
def stub_set_host_maintenance(context, host_name, mode):
    """Stub maintenance toggling: 'host_c1'-style hosts succeed, while
    'host_c2' always reports the opposite mode (a simulated failure)."""
    if host_name == "notimplemented":
        # The vm driver for this host doesn't support this feature
        raise NotImplementedError()
    if host_name == "dummydest":
        # The host does not exist
        raise exception.ComputeHostNotFound(host=host_name)
    effective = not mode if host_name == "host_c2" else mode
    return "on_maintenance" if effective else "off_maintenance"
def stub_host_power_action(context, host_name, action):
    """Stub power handler that simply echoes the requested action back."""
    if host_name == "notimplemented":
        raise NotImplementedError()
    if host_name == "dummydest":
        # The host does not exist
        raise exception.ComputeHostNotFound(host=host_name)
    return action
def _create_instance(**kwargs):
    """Create a test instance in the DB.

    Keyword arguments are forwarded to _create_instance_dict.
    """
    ctxt = context_maker.get_admin_context()
    return db.instance_create(ctxt, _create_instance_dict(**kwargs))
def _create_instance_dict(**kwargs):
    """Create a dictionary for a test instance.

    Any field may be overridden through keyword arguments; 'host' is only
    included when explicitly supplied.
    """
    get = kwargs.get
    inst = {
        'image_ref': 'cedef40a-ed67-4d10-800e-17455edce175',
        'reservation_id': 'r-fakeres',
        'user_id': get('user_id', 'admin'),
        'project_id': get('project_id', 'fake'),
        'instance_type_id': '1',
        'vcpus': get('vcpus', 1),
        'memory_mb': get('memory_mb', 20),
        'root_gb': get('root_gb', 30),
        'ephemeral_gb': get('ephemeral_gb', 30),
        'vm_state': get('vm_state', vm_states.ACTIVE),
        'power_state': get('power_state', power_state.RUNNING),
        'task_state': get('task_state', None),
        'availability_zone': get('availability_zone', None),
        'ami_launch_index': 0,
        'launched_on': get('launched_on', 'dummy'),
    }
    if 'host' in kwargs:
        inst['host'] = get('host')
    return inst
class FakeRequest(object):
    # Minimal request double: an admin context and no query parameters.
    environ = {"nova.context": context_maker.get_admin_context()}
    GET = {}


class FakeRequestWithNovaZone(object):
    # Request double whose query string filters on the "nova" zone.
    environ = {"nova.context": context_maker.get_admin_context()}
    GET = {"zone": "nova"}
class HostTestCase(test.TestCase):
    """Test Case for hosts."""

    def setUp(self):
        super(HostTestCase, self).setUp()
        self.controller = os_hosts.HostController()
        self.hosts_api = self.controller.api
        self.req = FakeRequest()

        # Pretend we have fake_hosts.HOST_LIST in the DB
        self.stubs.Set(db, 'service_get_all',
                       stub_service_get_all)
        # Only hosts in our fake DB exist
        self.stubs.Set(db, 'service_get_by_host_and_topic',
                       stub_service_get_by_host_and_topic)
        # 'host_c1' always succeeds, and 'host_c2'
        self.stubs.Set(self.hosts_api, 'set_host_enabled',
                       stub_set_host_enabled)
        # 'host_c1' always succeeds, and 'host_c2'
        self.stubs.Set(self.hosts_api, 'set_host_maintenance',
                       stub_set_host_maintenance)
        self.stubs.Set(self.hosts_api, 'host_power_action',
                       stub_host_power_action)

    def _test_host_update(self, host, key, val, expected_value):
        # Issue an update with {key: val} and check the value echoed back.
        body = {key: val}
        result = self.controller.update(self.req, host, body)
        self.assertEqual(result[key], expected_value)

    def test_list_hosts(self):
        """Verify that the compute hosts are returned."""
        result = self.controller.index(self.req)
        self.assertIn('hosts', result)
        hosts = result['hosts']
        self.assertEqual(fake_hosts.HOST_LIST, hosts)

    def test_list_hosts_with_zone(self):
        # The zone=nova query parameter filters the host list.
        result = self.controller.index(FakeRequestWithNovaZone())
        self.assertIn('hosts', result)
        hosts = result['hosts']
        self.assertEqual(fake_hosts.HOST_LIST_NOVA_ZONE, hosts)

    def test_disable_host(self):
        # host_c2 simulates a failure, so the status stays the opposite.
        self._test_host_update('host_c1', 'status', 'disable', 'disabled')
        self._test_host_update('host_c2', 'status', 'disable', 'enabled')

    def test_enable_host(self):
        self._test_host_update('host_c1', 'status', 'enable', 'enabled')
        self._test_host_update('host_c2', 'status', 'enable', 'disabled')

    def test_enable_maintenance(self):
        self._test_host_update('host_c1', 'maintenance_mode',
                               'enable', 'on_maintenance')

    def test_disable_maintenance(self):
        self._test_host_update('host_c1', 'maintenance_mode',
                               'disable', 'off_maintenance')

    def _test_host_update_notimpl(self, key, val):
        # Point the DB stub at the special 'notimplemented' host, whose
        # driver raises NotImplementedError (see stubs above).
        def stub_service_get_all_notimpl(self, req):
            return [{'host': 'notimplemented', 'topic': None,
                     'availability_zone': None}]
        self.stubs.Set(db, 'service_get_all',
                       stub_service_get_all_notimpl)
        body = {key: val}
        self.assertRaises(webob.exc.HTTPNotImplemented,
                          self.controller.update,
                          self.req, 'notimplemented', body=body)

    def test_disable_host_notimpl(self):
        self._test_host_update_notimpl('status', 'disable')

    def test_enable_maintenance_notimpl(self):
        self._test_host_update_notimpl('maintenance_mode', 'enable')

    def test_host_startup(self):
        result = self.controller.startup(self.req, "host_c1")
        self.assertEqual(result["power_action"], "startup")

    def test_host_shutdown(self):
        result = self.controller.shutdown(self.req, "host_c1")
        self.assertEqual(result["power_action"], "shutdown")

    def test_host_reboot(self):
        result = self.controller.reboot(self.req, "host_c1")
        self.assertEqual(result["power_action"], "reboot")

    def _test_host_power_action_notimpl(self, method):
        self.assertRaises(webob.exc.HTTPNotImplemented,
                          method, self.req, "notimplemented")

    def test_host_startup_notimpl(self):
        self._test_host_power_action_notimpl(self.controller.startup)

    def test_host_shutdown_notimpl(self):
        self._test_host_power_action_notimpl(self.controller.shutdown)

    def test_host_reboot_notimpl(self):
        self._test_host_power_action_notimpl(self.controller.reboot)

    def test_host_status_bad_host(self):
        # A host given as an argument does not exist.
        self.req.environ["nova.context"].is_admin = True
        dest = 'dummydest'
        with testtools.ExpectedException(webob.exc.HTTPNotFound,
                                         ".*%s.*" % dest):
            self.controller.update(self.req, dest, body={'status': 'enable'})

    def test_host_maintenance_bad_host(self):
        # A host given as an argument does not exist.
        self.req.environ["nova.context"].is_admin = True
        dest = 'dummydest'
        with testtools.ExpectedException(webob.exc.HTTPNotFound,
                                         ".*%s.*" % dest):
            self.controller.update(self.req, dest,
                                   body={'maintenance_mode': 'enable'})

    def test_host_power_action_bad_host(self):
        # A host given as an argument does not exist.
        self.req.environ["nova.context"].is_admin = True
        dest = 'dummydest'
        with testtools.ExpectedException(webob.exc.HTTPNotFound,
                                         ".*%s.*" % dest):
            self.controller.reboot(self.req, dest)

    def test_bad_status_value(self):
        bad_body = {"status": "bad"}
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
                          self.req, "host_c1", bad_body)
        # A prefix of a valid value must be rejected too.
        bad_body2 = {"status": "disablabc"}
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
                          self.req, "host_c1", bad_body2)

    def test_bad_update_key(self):
        bad_body = {"crazy": "bad"}
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
                          self.req, "host_c1", bad_body)

    def test_bad_update_key_and_correct_update_key(self):
        # One unknown key makes the whole body invalid.
        bad_body = {"status": "disable", "crazy": "bad"}
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
                          self.req, "host_c1", bad_body)

    def test_good_update_keys(self):
        body = {"status": "disable", "maintenance_mode": "enable"}
        result = self.controller.update(self.req, 'host_c1', body)
        self.assertEqual(result["host"], "host_c1")
        self.assertEqual(result["status"], "disabled")
        self.assertEqual(result["maintenance_mode"], "on_maintenance")

    def test_show_forbidden(self):
        self.req.environ["nova.context"].is_admin = False
        dest = 'dummydest'
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.controller.show,
                          self.req, dest)
        # Restore admin rights for the tests that follow.
        self.req.environ["nova.context"].is_admin = True

    def test_show_host_not_exist(self):
        # A host given as an argument does not exist.
        self.req.environ["nova.context"].is_admin = True
        dest = 'dummydest'
        with testtools.ExpectedException(webob.exc.HTTPNotFound,
                                         ".*%s.*" % dest):
            self.controller.show(self.req, dest)

    def _create_compute_service(self):
        """Create compute-manager(ComputeNode and Service record)."""
        ctxt = self.req.environ["nova.context"]
        dic = {'host': 'dummy', 'binary': 'nova-compute', 'topic': 'compute',
               'report_count': 0}
        s_ref = db.service_create(ctxt, dic)

        dic = {'service_id': s_ref['id'],
               'vcpus': 16, 'memory_mb': 32, 'local_gb': 100,
               'vcpus_used': 16, 'memory_mb_used': 32, 'local_gb_used': 10,
               'hypervisor_type': 'qemu', 'hypervisor_version': 12003,
               'cpu_info': '', 'stats': ''}
        db.compute_node_create(ctxt, dic)
        return db.service_get(ctxt, s_ref['id'])

    def test_show_no_project(self):
        """No instances are running on the given host."""
        ctxt = context_maker.get_admin_context()
        s_ref = self._create_compute_service()

        result = self.controller.show(self.req, s_ref['host'])

        # With no instances only the three summary rows are present.
        proj = ['(total)', '(used_now)', '(used_max)']
        column = ['host', 'project', 'cpu', 'memory_mb', 'disk_gb']
        self.assertEqual(len(result['host']), 3)
        for resource in result['host']:
            self.assertIn(resource['resource']['project'], proj)
            self.assertEqual(len(resource['resource']), 5)
            self.assertTrue(set(resource['resource'].keys()) == set(column))
        db.service_destroy(ctxt, s_ref['id'])

    def test_show_works_correctly(self):
        """show() works correctly as expected."""
        ctxt = context_maker.get_admin_context()
        s_ref = self._create_compute_service()
        i_ref1 = _create_instance(project_id='p-01', host=s_ref['host'])
        i_ref2 = _create_instance(project_id='p-02', vcpus=3,
                                  host=s_ref['host'])

        result = self.controller.show(self.req, s_ref['host'])

        # Three summary rows plus one row per project with instances.
        proj = ['(total)', '(used_now)', '(used_max)', 'p-01', 'p-02']
        column = ['host', 'project', 'cpu', 'memory_mb', 'disk_gb']
        self.assertEqual(len(result['host']), 5)
        for resource in result['host']:
            self.assertIn(resource['resource']['project'], proj)
            self.assertEqual(len(resource['resource']), 5)
            self.assertTrue(set(resource['resource'].keys()) == set(column))
        db.service_destroy(ctxt, s_ref['id'])
        db.instance_destroy(ctxt, i_ref1['uuid'])
        db.instance_destroy(ctxt, i_ref2['uuid'])
class HostSerializerTest(test.TestCase):
    """Tests for the XML serializers/deserializer of the hosts extension."""

    def setUp(self):
        super(HostSerializerTest, self).setUp()
        self.deserializer = os_hosts.HostUpdateDeserializer()

    def test_index_serializer(self):
        # Each fake host becomes a <host> element with its attributes.
        serializer = os_hosts.HostIndexTemplate()
        text = serializer.serialize(fake_hosts.OS_API_HOST_LIST)

        tree = etree.fromstring(text)

        self.assertEqual('hosts', tree.tag)
        self.assertEqual(len(fake_hosts.HOST_LIST), len(tree))
        for i in range(len(fake_hosts.HOST_LIST)):
            self.assertEqual('host', tree[i].tag)
            self.assertEqual(fake_hosts.HOST_LIST[i]['host_name'],
                             tree[i].get('host_name'))
            self.assertEqual(fake_hosts.HOST_LIST[i]['service'],
                             tree[i].get('service'))
            self.assertEqual(fake_hosts.HOST_LIST[i]['zone'],
                             tree[i].get('zone'))

    def test_update_serializer_with_status(self):
        exemplar = dict(host='host_c1', status='enabled')
        serializer = os_hosts.HostUpdateTemplate()
        text = serializer.serialize(exemplar)

        tree = etree.fromstring(text)

        self.assertEqual('host', tree.tag)
        for key, value in exemplar.items():
            self.assertEqual(value, tree.get(key))

    def test_update_serializer_with_maintenance_mode(self):
        exemplar = dict(host='host_c1', maintenance_mode='enabled')
        serializer = os_hosts.HostUpdateTemplate()
        text = serializer.serialize(exemplar)

        tree = etree.fromstring(text)

        self.assertEqual('host', tree.tag)
        for key, value in exemplar.items():
            self.assertEqual(value, tree.get(key))

    def test_update_serializer_with_maintenance_mode_and_status(self):
        exemplar = dict(host='host_c1',
                        maintenance_mode='enabled',
                        status='enabled')
        serializer = os_hosts.HostUpdateTemplate()
        text = serializer.serialize(exemplar)

        tree = etree.fromstring(text)

        self.assertEqual('host', tree.tag)
        for key, value in exemplar.items():
            self.assertEqual(value, tree.get(key))

    def test_action_serializer(self):
        exemplar = dict(host='host_c1', power_action='reboot')
        serializer = os_hosts.HostActionTemplate()
        text = serializer.serialize(exemplar)

        tree = etree.fromstring(text)

        self.assertEqual('host', tree.tag)
        for key, value in exemplar.items():
            self.assertEqual(value, tree.get(key))

    def test_update_deserializer(self):
        # The XML body is flattened into a plain {key: text} dict.
        exemplar = dict(status='enabled', maintenance_mode='disable')
        intext = """<?xml version='1.0' encoding='UTF-8'?>
<updates>
<status>enabled</status>
<maintenance_mode>disable</maintenance_mode>
</updates>"""
        result = self.deserializer.deserialize(intext)
        self.assertEqual(dict(body=exemplar), result)

    def test_corrupt_xml(self):
        # Malformed XML must surface as MalformedRequestBody, not a parse
        # error from lxml.
        self.assertRaises(
            exception.MalformedRequestBody,
            self.deserializer.deserialize,
            utils.killer_xml_body())
|
|
"""Dataset layout and data preparation.
Currently the following layouts are supported:
- standard
The training, validation and development data are in
train.txt, valid.txt and test.txt. All files are read
sequentially.
- lambada
Like standard, but the training data is stored in an
HDF5 file "train.h5". The training data is read randomly
by taking random spans.
TODO: Unit test SNLI data
"""
import os
import functools
import h5py
import numpy
import logging
logger = logging.getLogger()
import fuel
from fuel.transformers import (
Mapping, Batch, Padding, AgnosticSourcewiseTransformer,
FilterSources, Transformer)
from fuel.schemes import IterationScheme, ConstantScheme, ShuffledExampleScheme
from fuel.streams import DataStream
from fuel.datasets import H5PYDataset
from dictlearn.vocab import Vocabulary
from dictlearn.datasets import TextDataset, SQuADDataset, PutTextTransfomer
from dictlearn.util import str2vec
# We have to pad all the words to contain the same
# number of characters.
MAX_NUM_CHARACTERS = 100
def _str2vec(word):
    # Fixed-width character encoding of a single word (padded/truncated to
    # MAX_NUM_CHARACTERS by str2vec).
    return str2vec(word, MAX_NUM_CHARACTERS)
def vectorize(words):
    """Replaces words with vectors."""
    return list(map(_str2vec, words))
def listify(example):
    """Convert every source of the example tuple into a plain list."""
    converted = []
    for source in example:
        converted.append(list(source))
    return tuple(converted)
def add_bos(bos, source_data):
    # Prepend the beginning-of-sequence token. NOTE(review): relies on list
    # concatenation, which behaves very differently if source_data is a
    # numpy array (elementwise addition) -- callers appear to pass lists of
    # tokens; confirm before changing.
    return [bos] + source_data
def add_eos(eos, source_data):
    """Return `source_data` as a list with the end-of-sequence token appended."""
    result = list(source_data)
    result = result + [eos]
    return result
class SourcewiseMapping(AgnosticSourcewiseTransformer):
    """Apply `mapping` independently to every selected source of a stream."""

    def __init__(self, data_stream, mapping, *args, **kwargs):
        # By default transform every source the wrapped stream produces.
        kwargs.setdefault('which_sources', data_stream.sources)
        super(SourcewiseMapping, self).__init__(
            data_stream, data_stream.produces_examples, *args, **kwargs)
        self._mapping = mapping

    def transform_any_source(self, source_data, _):
        # Second argument (the source name) is intentionally ignored.
        return self._mapping(source_data)
class RandomSpanScheme(IterationScheme):
    """Iteration scheme yielding random fixed-size spans of a dataset.

    Produces an endless stream of slice requests of length `span_size`
    with start positions drawn uniformly from
    [0, dataset_size - span_size).
    """
    requests_examples = True

    def __init__(self, dataset_size, span_size, seed=None):
        self._dataset_size = dataset_size
        self._span_size = span_size
        if not seed:
            # Fall back to fuel's global default seed (note: a seed of 0 is
            # also replaced because of the falsiness test).
            seed = fuel.config.default_seed
        self._rng = numpy.random.RandomState(seed)

    def get_request_iterator(self):
        # As for now this scheme produces an infinite stateless scheme,
        # it can itself play the role of an iterator. If we want to add
        # a state later, this trick will not cut it any more.
        return self

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol; no __next__ alias is defined.
        start = self._rng.randint(0, self._dataset_size - self._span_size)
        return slice(start, start + self._span_size)
class Data(object):
    """Builds the data stream for different parts of the data.

    TODO: refactor, only leave the caching logic.
    """

    # Layouts this class knows how to locate and read.
    _SUPPORTED_LAYOUTS = ('standard', 'lambada', 'squad', 'snli', 'mnli')

    def __init__(self, path, layout, vocab=None):
        """
        :param path: dataset directory, relative to fuel's data path.
        :param layout: one of _SUPPORTED_LAYOUTS.
        :param vocab: optional Vocabulary; when omitted, the default
            vocab.txt from the dataset directory is loaded lazily.
        :raises ValueError: if the layout is unknown.
        """
        self._path = os.path.join(fuel.config.data_path[0], path)
        self._layout = layout
        if self._layout not in self._SUPPORTED_LAYOUTS:
            # BUG FIX: this used to be `raise "layout ..."` -- raising a
            # plain string, which itself raises TypeError at runtime.
            raise ValueError("layout {} is not supported".format(self._layout))
        self._vocab = vocab
        self._dataset_cache = {}

    @property
    def vocab(self):
        """The Vocabulary, loading the default vocab.txt on first access."""
        if not self._vocab:
            logger.debug("Loading default vocab")
            self._vocab = Vocabulary(
                os.path.join(self._path, "vocab.txt"))
        return self._vocab

    def get_dataset_path(self, part):
        """Return the on-disk path of the file holding `part`.

        :raises KeyError: if `part` is unknown for the current layout.
        """
        if self._layout == 'standard':
            part_map = {'train': 'train.txt',
                        'valid': 'valid.txt',
                        'test': 'test.txt',
                        'test_unseen': 'test_unseen.txt'}
        elif self._layout == 'lambada':
            part_map = {'train': 'train.h5',
                        'valid': 'lambada_development_plain_text.txt',
                        'test': 'lambada_test_plain_text.txt'}
        elif self._layout == 'squad':
            part_map = {'train': 'train.h5',
                        'dev': 'dev.h5'}
        elif self._layout == 'snli':
            part_map = {'train': 'train.h5',
                        'valid': 'valid.h5',
                        'test': 'test.h5'}
        elif self._layout == 'mnli':
            part_map = {'train': 'train.h5',
                        'valid_matched': 'valid_matched.h5',
                        'valid_mismatched': 'valid_mismatched.h5'}
        else:
            raise NotImplementedError('Not implemented layout ' + self._layout)
        return os.path.join(self._path, part_map[part])

    def get_dataset(self, part, max_length=None):
        """Return (and cache) the dataset object for `part`.

        Note that `max_length` only takes effect the first time a part is
        requested; subsequent calls return the cached dataset.
        """
        if part not in self._dataset_cache:
            part_path = self.get_dataset_path(part)
            if self._layout == 'lambada' and part == 'train':
                self._dataset_cache[part] = H5PYDataset(part_path, ('train',))
            elif self._layout == 'squad':
                self._dataset_cache[part] = SQuADDataset(part_path, ('all',))
            elif self._layout in ('snli', 'mnli'):
                self._dataset_cache[part] = H5PYDataset(
                    h5py.File(part_path, "r"), ('all',),
                    sources=('sentence1', 'sentence2', 'label',),
                    load_in_memory=True)
            else:
                self._dataset_cache[part] = TextDataset(part_path, max_length)
        return self._dataset_cache[part]

    def get_stream(self, *args, **kwargs):
        """Subclasses build the actual fuel data stream here."""
        raise NotImplementedError()
class LanguageModellingData(Data):
    """Streams for language modelling: BOS insertion, character
    vectorization, optional batching with padding."""

    def get_stream(self, part, batch_size=None, max_length=None, seed=None):
        dataset = self.get_dataset(part, max_length)
        if self._layout == 'lambada' and part == 'train':
            # Lambada training data is sampled as random fixed-length spans
            # out of one big HDF5 array.
            stream = DataStream(
                dataset,
                iteration_scheme=RandomSpanScheme(
                    dataset.num_examples, max_length, seed))
            stream = Mapping(stream, listify)
        else:
            stream = dataset.get_example_stream()

        # Prepend BOS, then replace words by character vectors.
        stream = SourcewiseMapping(stream, functools.partial(add_bos, Vocabulary.BOS))
        stream = SourcewiseMapping(stream, vectorize)
        if not batch_size:
            # Unbatched example stream.
            return stream
        stream = Batch(
            stream,
            iteration_scheme=ConstantScheme(batch_size))
        stream = Padding(stream)
        return stream
def select_random_answer(rng, example):
    """Replace the per-example lists of answer spans by one uniformly
    chosen (begin, end) pair, in place, and return the example."""
    n_spans = len(example['answer_begins'])
    chosen = rng.randint(0, n_spans)
    for key in ('answer_begins', 'answer_ends'):
        example[key] = example[key][chosen]
    return example
def retrieve_and_pad_squad(retrieval, example):
    """Retrieve definitions for the joint contexts+questions batch and split
    the definition map back into per-source maps.

    Question rows are re-based so their batch indices refer to the question
    half of the batch.  Returns a dict of the four new sources.
    """
    n_contexts = len(example['contexts'])
    joint_text = list(example['contexts']) + list(example['questions'])
    defs, def_mask, def_map = retrieval.retrieve_and_pad(joint_text)
    # A def_map row belongs to a context iff its batch index is in the
    # first (context) half of the joint batch.
    is_context = def_map[:, 0] < n_contexts
    offset = numpy.array([n_contexts, 0, 0])
    return {'defs': defs,
            'def_mask': def_mask,
            'contexts_def_map': def_map[is_context],
            'questions_def_map': def_map[~is_context] - offset}
def retrieve_and_pad_snli(retrieval, example):
    """Retrieve definitions for both sentences of an SNLI batch.

    Def-map rows are (batch_index, time_step, def_index); rows belonging to
    sentence2 are re-based to sentence2 batch indices.  Returns the list
    [defs, def_mask, s1_def_map, s2_def_map].
    """
    # TODO(kudkudak): We could joint retrieve retrieve_and_pad_squad and retrieve_and_pad_snli
    # this will be done along lookup refactor
    sent1, sent2, labels = example
    assert labels.ndim == 1
    joint_text = list(sent1) + list(sent2)
    defs, def_mask, def_map = retrieval.retrieve_and_pad(joint_text)
    in_first_half = def_map[:, 0] < len(sent1)
    rebased = def_map[~in_first_half] - numpy.array([len(sent1), 0, 0])
    return [defs, def_mask, def_map[in_first_half], rebased]
def digitize(vocab, source_data):
    """Encode every word sequence in ``source_data`` with ``vocab`` and
    stack the results into a numpy array."""
    encoded = [vocab.encode(sequence) for sequence in source_data]
    return numpy.array(encoded)
def keep_text(example):
    """Expose raw (vectorized) context/question text under *_text sources."""
    return {key + '_text': vectorize(example[key])
            for key in ('contexts', 'questions')}
class ExtractiveQAData(Data):
    """Data pipeline for extractive question answering (SQuAD layout)."""
    def __init__(self, retrieval=None, *args, **kwargs):
        super(ExtractiveQAData, self).__init__(*args, **kwargs)
        # Optional definition-retrieval component; when set, batched streams
        # gain defs/def_mask/*_def_map sources.
        self._retrieval = retrieval

    def get_stream(self, part, batch_size=None, shuffle=False, max_length=None,
                   raw_text=False, q_ids=False, seed=None, dataset=None):
        """Build the QA stream: eos-terminated, digitized, batched, padded.

        :param shuffle: iterate examples in a seeded random order.
        :param q_ids: keep the question-id source (vectorized); otherwise
            it is filtered out.
        :param raw_text: additionally expose undigitized text sources.
        :param dataset: pre-built dataset to use instead of loading ``part``.
        """
        if not seed:
            seed = fuel.config.default_seed
        rng = numpy.random.RandomState(seed)
        if not dataset:
            dataset = self.get_dataset(part)
        if shuffle:
            stream = DataStream(
                dataset,
                iteration_scheme=ShuffledExampleScheme(dataset.num_examples, rng=rng))
        else:
            stream = dataset.get_example_stream()
        if not q_ids:
            stream = FilterSources(stream, [source for source in dataset.sources
                                            if source != 'q_ids'])
        else:
            # BUG FIX: ('q_ids') is just the string 'q_ids', not a one-element
            # tuple; it previously only worked by the accident of substring
            # matching in the which_sources membership test.
            stream = SourcewiseMapping(stream, _str2vec, which_sources=('q_ids',))
        # NOTE(review): raw_text is hard-coded to True here regardless of the
        # ``raw_text`` argument -- looks intentional (digitization happens
        # further down), but worth confirming.
        stream = PutTextTransfomer(stream, dataset, raw_text=True)
        # <eos> is added for two purposes: to serve a sentinel for coattention,
        # and also to ensure the answer span ends at a token
        eos = self.vocab.EOS
        stream = SourcewiseMapping(stream, functools.partial(add_eos, eos),
                                   which_sources=('contexts', 'questions'))
        stream = Mapping(stream, functools.partial(select_random_answer, rng),
                         mapping_accepts=dict)
        if not batch_size:
            if self._retrieval:
                raise NotImplementedError()
            return stream
        if raw_text:
            stream = Mapping(stream, keep_text, mapping_accepts=dict,
                             add_sources=('contexts_text', 'questions_text'))
        stream = Batch(stream, iteration_scheme=ConstantScheme(batch_size))
        if self._retrieval:
            stream = Mapping(
                stream,
                functools.partial(retrieve_and_pad_squad, self._retrieval),
                mapping_accepts=dict,
                add_sources=('defs', 'def_mask', 'contexts_def_map', 'questions_def_map'))
        stream = SourcewiseMapping(stream, functools.partial(digitize, self.vocab),
                                   which_sources=('contexts', 'questions'))
        stream = Padding(
            stream, mask_sources=['contexts', 'questions'] + (['contexts_text'] if raw_text else []))
        return stream
# TODO(kudkudak): Introduce this to Fuel
class FixedMapping(Transformer):
    """Applies a mapping to the data of the wrapped data stream.

    Identical to fuel's ``Mapping`` except for ``get_data``: the appended
    data is concatenated as ``list(data) + image`` so that a tuple of
    original data can be combined with a list produced by the mapping.

    Parameters
    ----------
    data_stream : instance of :class:`DataStream`
        The wrapped data stream.
    mapping : callable
        The mapping to be applied.
    add_sources : tuple of str, optional
        When given, the data produced by the mapping is added to original
        data under source names `add_sources`.
    """
    def __init__(self, data_stream, mapping, add_sources=None, **kwargs):
        super(FixedMapping, self).__init__(
            data_stream, data_stream.produces_examples, **kwargs)
        self.mapping = mapping
        self.add_sources = add_sources
    @property
    def sources(self):
        # Sources of the wrapped stream plus whatever the mapping adds.
        return self.data_stream.sources + (self.add_sources
                                           if self.add_sources else ())
    def get_data(self, request=None):
        # This transformer does not support requests.
        if request is not None:
            raise ValueError
        data = next(self.child_epoch_iterator)
        image = self.mapping(data)
        if not self.add_sources:
            return image
        # This is the fixed line. We need to transform data to list(data) to concatenate the two!
        return tuple(list(data) + image)
class SNLIData(Data):
    """Shuffled, batched SNLI/MNLI stream with optional definition retrieval."""
    def __init__(self, *args, **kwargs):
        super(SNLIData, self).__init__(*args, **kwargs)
        self._retrieval = None
    def set_retrieval(self, retrieval):
        # Injected after construction (not via __init__) so the data object
        # can be built before the retrieval component exists.
        self._retrieval = retrieval
    def get_stream(self, part, batch_size, seed=None, raw_text=False):
        """Build the stream for ``part``; digitizes sentences unless
        ``raw_text`` is set, then pads (adding mask sources)."""
        d = self.get_dataset(part)
        print("Dataset with {} examples".format(d.num_examples))
        it = ShuffledExampleScheme(d.num_examples, rng=numpy.random.RandomState(seed))
        stream = DataStream(d, iteration_scheme=it)
        stream = Batch(stream, iteration_scheme=ConstantScheme(batch_size))
        if self._retrieval:
            stream = FixedMapping(
                stream,
                functools.partial(retrieve_and_pad_snli, self._retrieval),
                add_sources=("defs", "def_mask", "sentence1_def_map", "sentence2_def_map")) # This is because there is bug in Fuel :( Cannot concatenate tuple and list
        if not raw_text:
            stream = SourcewiseMapping(stream, functools.partial(digitize, self.vocab),
                                       which_sources=('sentence1', 'sentence2'))
        stream = Padding(stream, mask_sources=('sentence1', 'sentence2')) # Increases amount of outputs by x2
        return stream
|
|
import urllib.parse
import urllib.error
import time
import warnings
import pprint
from unittest import mock
import io
from openid.message import Message, OPENID_NS, OPENID2_NS, IDENTIFIER_SELECT, \
OPENID1_NS, BARE_NS
from openid import cryptutil, oidutil, kvform
from openid.store.nonce import mkNonce, split as splitNonce
from openid.discover import Service, OPENID_2_0_TYPE, \
OPENID_1_1_TYPE, OPENID_1_0_TYPE, OPENID_IDP_2_0_TYPE, DiscoveryFailure
from openid.consumer import AuthRequest, Response, \
DiffieHellmanSHA1ConsumerSession, Consumer, PlainTextConsumerSession, \
DiffieHellmanSHA256ConsumerSession, ServerError, ProtocolError, \
makeKVPost, NONCE_ARG, AuthenticationError, SetupNeeded, \
validate_fields, validate_return_to
from openid import association
from openid.dh import DiffieHellman
from openid import fetchers
from openid.store import memstore
from openid.extension import Extension
from . import support
from .support import CatchLogs, HTTPResponse
# Canned (assoc_secret, assoc_handle) pairs used as association material
# throughout these tests.
assocs = [
    ('another 20-byte key.', 'Snarky'),
    ('\x00' * 20, 'Zeros'),
]
def mkSuccess(q):
    """Convenience function to create a Response with the given
    arguments, all signed."""
    signed = ['openid.' + key for key in q]
    return Response(Message.fromOpenIDArgs(q), signed)
def parseQuery(qs):
    """Parse a query string into a dict, asserting that no key repeats."""
    pairs = urllib.parse.parse_qsl(qs)
    keys = [key for key, _ in pairs]
    assert len(keys) == len(set(keys))
    return dict(pairs)
def associate(qs, assoc_secret, assoc_handle):
    """Do the server's half of the associate call, using the given
    secret and handle."""
    q = parseQuery(qs)
    assert q['openid.mode'] == 'associate'
    assert q['openid.assoc_type'] == 'HMAC-SHA1'
    # NOTE(review): assoc_secret is accepted but never used -- the MAC key
    # below is a canned value.
    reply_dict = {
        'assoc_type': 'HMAC-SHA1',
        'assoc_handle': assoc_handle,
        'expires_in': '600',
    }
    # Only DH-SHA1 sessions are emulated; 6 args for OpenID 2, 4 for OpenID 1.
    assert q.get('openid.session_type') == 'DH-SHA1'
    assert len(q) == 6 or len(q) == 4
    reply_dict['session_type'] = 'DH-SHA1'
    # Canned DH reply values -- presumably precomputed for create_session's
    # small test modulus; confirm if the modulus ever changes.
    session_answer = {'dh_server_public': b'BPO9HA==', 'enc_mac_key': b'GDLS57xsQHrez41tLAxbVIrpQTk='}
    reply_dict.update(session_answer)
    return kvform.dictToKV(reply_dict)
# Sentinel signature value accepted by GoodAssociation.checkMessageSignature.
GOODSIG = "[A Good Signature]"
class GoodAssociation:
    """Stub association: a signature is valid iff it equals GOODSIG."""
    expiresIn = 3600
    handle = "-blah-"
    def checkMessageSignature(self, message):
        return message.getArg(OPENID_NS, 'sig') == GOODSIG
class GoodAssocStore(memstore.MemoryStore):
    """Store that returns a fresh GoodAssociation for every lookup."""
    def getAssociation(self, server_url, handle=None):
        return GoodAssociation()
def _nsdict(data):
    """Return ``data`` with the OpenID 2 namespace argument filled in
    (values in ``data`` take precedence)."""
    return dict({'openid.ns': OPENID2_NS}, **data)
class TestFetcher(object):
    """Canned HTTP fetcher: serves one GET page plus an associate endpoint.

    ``xxx_todo_changeme`` is an (assoc_secret, assoc_handle) pair (the odd
    name is a 2to3 tuple-parameter artifact kept for compatibility).
    """
    def __init__(self, user_url, user_page, xxx_todo_changeme):
        (assoc_secret, assoc_handle) = xxx_todo_changeme
        self.get_responses = {
            user_url: user_page
        }
        self.assoc_secret = assoc_secret
        self.assoc_handle = assoc_handle
        # Number of association requests this fake "server" has handled.
        self.num_assocs = 0

    def fetch(self, url, body=None, headers=None):
        """Answer GETs from get_responses and POSTed associate calls;
        everything else is a 404."""
        if body is None:
            if url in self.get_responses:
                return HTTPResponse(url, 200, body=self.get_responses[url])
        else:
            if not isinstance(body, bytes):
                # BUG FIX: corrected typo in the error message ("must by").
                raise TypeError('POST data must be bytes')
            body = body.decode('utf-8')
            try:
                body.index('openid.mode=associate')
            except ValueError:
                pass  # fall through to the 404 below
            else:
                assert body.find('DH-SHA1') != -1
                response = associate(
                    body, self.assoc_secret, self.assoc_handle)
                self.num_assocs += 1
                return HTTPResponse(url, 200, body=response)
        raise urllib.error.HTTPError(url, 404, '', {}, io.BytesIO(b'Not found'))
def create_session(type):
    """
    Create custom DH object so tests run quickly.
    """
    # Only DH-SHA1 is supported; the tiny (insecure) modulus keeps the DH
    # computation fast in tests.
    assert type == 'DH-SHA1'
    dh = DiffieHellman(100389557, 2)
    return DiffieHellmanSHA1ConsumerSession(dh)
def _test_success(server_url, user_url, delegate_url, links, immediate=False):
    """End-to-end happy path: begin, redirect and complete an OpenID 1 auth.

    Runs the flow twice to check association reuse, then removes the
    association and runs again to force a new one.  ``immediate`` selects
    checkid_immediate instead of checkid_setup.
    """
    # Normalize all URL arguments from bytes to str.
    if isinstance(server_url, bytes):
        server_url = str(server_url, encoding="utf-8")
    if isinstance(user_url, bytes):
        user_url = str(user_url, encoding="utf-8")
    if isinstance(delegate_url, bytes):
        delegate_url = str(delegate_url, encoding="utf-8")
    if isinstance(links, bytes):
        links = str(links, encoding="utf-8")
    store = memstore.MemoryStore()
    if immediate:
        mode = 'checkid_immediate'
    else:
        mode = 'checkid_setup'
    endpoint = Service([OPENID_1_1_TYPE], server_url, user_url, delegate_url)
    fetcher = TestFetcher(None, None, assocs[0])
    # One complete begin/redirect/complete round trip; patched to use the
    # fast test DH session.
    @mock.patch('openid.consumer.create_session', create_session)
    def run():
        trust_root = str(consumer_url, encoding="utf-8")
        consumer = Consumer({}, store)
        request = consumer.beginWithoutDiscovery(endpoint)
        return_to = str(consumer_url, encoding="utf-8")
        m = request.getMessage(trust_root, return_to, immediate)
        redirect_url = request.redirectURL(trust_root, return_to, immediate)
        if isinstance(redirect_url, bytes):
            redirect_url = str(redirect_url, encoding="utf-8")
        parsed = urllib.parse.urlparse(redirect_url)
        qs = parsed[4]
        q = parseQuery(qs)
        new_return_to = q['openid.return_to']
        del q['openid.return_to']
        expected = {
            'openid.mode': mode,
            'openid.identity': delegate_url,
            'openid.trust_root': trust_root,
            'openid.assoc_handle': fetcher.assoc_handle,
        }
        assert q == expected, pprint.pformat((q, expected))
        # (q, user_url, delegate_url, mode, expected)
        assert new_return_to.startswith(return_to)
        assert redirect_url.startswith(server_url)
        # Fake the server's positive id_res response and complete the auth.
        parsed = urllib.parse.urlparse(new_return_to)
        query = parseQuery(parsed[4])
        query.update({
            'openid.mode': 'id_res',
            'openid.return_to': new_return_to,
            'openid.identity': delegate_url,
            'openid.assoc_handle': fetcher.assoc_handle,
        })
        assoc = store.getAssociation(server_url, fetcher.assoc_handle)
        message = Message.fromPostArgs(query)
        message = assoc.signMessage(message)
        response = consumer.complete(message.toPostArgs(), new_return_to)
        assert response.claimed_id == user_url
    with mock.patch('openid.fetchers.fetch', fetcher.fetch):
        assert fetcher.num_assocs == 0
        run()
        assert fetcher.num_assocs == 1
        # Test that doing it again uses the existing association
        run()
        assert fetcher.num_assocs == 1
        # Another association is created if we remove the existing one
        store.removeAssociation(server_url, fetcher.assoc_handle)
        run()
        assert fetcher.num_assocs == 2
        # Test that doing it again uses the existing association
        run()
        assert fetcher.num_assocs == 2
import unittest
# Canonical endpoint/consumer URLs shared by the success tests below.
http_server_url = b'http://server.example.com/'
consumer_url = b'http://consumer.example.com/'
https_server_url = b'https://server.example.com/'
class TestSuccess(unittest.TestCase, CatchLogs):
    """Full-flow success tests over HTTP, with and without delegation."""
    server_url = http_server_url
    user_url = b'http://www.example.com/user.html'
    delegate_url = b'http://consumer.example.com/user'
    def setUp(self):
        CatchLogs.setUp(self)
        # HTML link fixtures; note server_url/delegate_url are bytes, so the
        # %s interpolation embeds their repr (b'...') -- matches str() usage
        # elsewhere in these tests.
        self.links = '<link rel="openid.server" href="%s" />' % (
            self.server_url,)
        self.delegate_links = ('<link rel="openid.server" href="%s" />'
                               '<link rel="openid.delegate" href="%s" />') % (
            self.server_url, self.delegate_url)
    def tearDown(self):
        CatchLogs.tearDown(self)
    def test_nodelegate(self):
        _test_success(self.server_url, self.user_url,
                      self.user_url, self.links)
    def test_nodelegateImmediate(self):
        _test_success(self.server_url, self.user_url,
                      self.user_url, self.links, True)
    def test_delegate(self):
        _test_success(self.server_url, self.user_url,
                      self.delegate_url, self.delegate_links)
    def test_delegateImmediate(self):
        _test_success(self.server_url, self.user_url,
                      self.delegate_url, self.delegate_links, True)
class TestSuccessHTTPS(TestSuccess):
    """Same success flow, but against an https server URL."""
    server_url = https_server_url
class TestConstruct(unittest.TestCase):
    """Constructor behaviour of Consumer."""
    def setUp(self):
        self.store_sentinel = object()
    def test_construct(self):
        # The store passed in must be kept by identity, not copied.
        oidc = Consumer({}, self.store_sentinel)
        self.assertTrue(oidc.store is self.store_sentinel)
    def test_nostore(self):
        # The store argument is mandatory.
        self.assertRaises(TypeError, Consumer)
class TestIdRes(unittest.TestCase, CatchLogs):
    """Base fixture: consumer with in-memory store and a nonsense endpoint."""
    def setUp(self):
        CatchLogs.setUp(self)
        self.store = memstore.MemoryStore()
        self.consumer = Consumer({}, self.store)
        self.return_to = 'http://unittest/complete'
        # Deliberately meaningless identifiers -- only equality matters here.
        self.consumer_id = "consu"
        self.server_url = "serlie"
        self.server_id = "sirod"
        self.endpoint = Service([OPENID_1_1_TYPE], self.server_url, self.consumer_id, self.server_id)
        # Prime the session as if discovery had already happened.
        self.consumer.session[self.consumer._token_key] = self.endpoint.__dict__
class TestIdResCheckSignature(TestIdRes):
    """_idResCheckSignature with stored associations and stateless fallback."""
    def setUp(self):
        TestIdRes.setUp(self)
        self.assoc = GoodAssociation()
        self.assoc.handle = "{not_dumb}"
        self.store.storeAssociation(self.endpoint.server_url, self.assoc)
        self.message = Message.fromPostArgs({
            'openid.mode': 'id_res',
            'openid.identity': '=example',
            'openid.sig': GOODSIG,
            'openid.assoc_handle': self.assoc.handle,
            'openid.signed': 'mode,identity,assoc_handle,signed',
            'frobboz': 'banzit',
        })
    def test_sign(self):
        # assoc_handle to assoc with good sig
        self.consumer._idResCheckSignature(self.message,
                                           self.endpoint.server_url)
    def test_signFailsWithBadSig(self):
        self.message.setArg(OPENID_NS, 'sig', 'BAD SIGNATURE')
        self.assertRaises(
            AuthenticationError, self.consumer._idResCheckSignature,
            self.message, self.endpoint.server_url)
    @mock.patch('openid.consumer.makeKVPost', lambda *args: {})
    def test_stateless(self):
        # assoc_handle missing assoc, consumer._checkAuth returns goodthings
        self.message.setArg(OPENID_NS, "assoc_handle", "dumbHandle")
        self.consumer._processCheckAuthResponse = (
            lambda response, server_url: True)
        self.consumer._idResCheckSignature(self.message,
                                           self.endpoint.server_url)
    def test_statelessRaisesError(self):
        # assoc_handle missing assoc, consumer._checkAuth returns goodthings
        self.message.setArg(OPENID_NS, "assoc_handle", "dumbHandle")
        self.consumer._checkAuth = lambda unused1, unused2: False
        self.assertRaises(
            AuthenticationError, self.consumer._idResCheckSignature,
            self.message, self.endpoint.server_url)
    @mock.patch('openid.consumer.makeKVPost', lambda *args: {})
    def test_stateless_noStore(self):
        # assoc_handle missing assoc, consumer._checkAuth returns goodthings
        self.message.setArg(OPENID_NS, "assoc_handle", "dumbHandle")
        self.consumer.store = None
        self.consumer._processCheckAuthResponse = (
            lambda response, server_url: True)
        self.consumer._idResCheckSignature(self.message,
                                           self.endpoint.server_url)
    def test_statelessRaisesError_noStore(self):
        # assoc_handle missing assoc, consumer._checkAuth returns goodthings
        self.message.setArg(OPENID_NS, "assoc_handle", "dumbHandle")
        self.consumer._checkAuth = lambda unused1, unused2: False
        self.consumer.store = None
        self.assertRaises(
            AuthenticationError, self.consumer._idResCheckSignature,
            self.message, self.endpoint.server_url)
class TestQueryFormat(TestIdRes):
    """Message.fromPostArgs must reject list-valued query arguments."""
    def test_notAList(self):
        # XXX: should be a Message object test, not a consumer test
        # Value should be a single string. If it's a list, it should generate
        # an exception.
        query = {'openid.mode': ['cancel']}
        with self.assertRaises(TypeError) as caught:
            Message.fromPostArgs(query)
        self.assertIn('values', str(caught.exception))
class Complete(unittest.TestCase):
    """Consumer.complete behaviour for the non-id_res terminal modes."""
    def setUp(self):
        self.consumer = Consumer({}, memstore.MemoryStore())
        self.claimed_id = 'claimed_id'
        service = Service(
            [OPENID_2_0_TYPE], 'http://unittest/server',
            self.claimed_id, self.claimed_id
        )
        self.consumer.session[self.consumer._token_key] = service.__dict__
        self.return_to = 'http://unittest/complete'

    def test_id_res_setup_needed(self):
        setup_url = 'http://unittest/setup'
        query = {'openid.mode': 'id_res', 'openid.user_setup_url': setup_url}
        with self.assertRaises(SetupNeeded) as cm:
            self.consumer.complete(query, self.return_to)
        self.assertEqual(cm.exception.response.setup_url(), setup_url)

    # BUG FIX: test_cancel was previously defined twice in this class; the
    # first definition (which expected a response object with status
    # 'cancel') was silently shadowed by the later one and never ran.  The
    # dead duplicate has been removed, keeping the definition that actually
    # executed.
    def test_cancel(self):
        query = _nsdict({'openid.mode': 'cancel'})
        with self.assertRaises(AuthenticationError):
            self.consumer.complete(query, self.return_to)

    def test_error(self):
        query = _nsdict({
            'openid.mode': 'error',
            'openid.error': 'failed',
            'openid.contact': 'contact',
        })
        with self.assertRaises(AuthenticationError) as cm:
            self.consumer.complete(query, self.return_to)
        self.assertEqual(cm.exception.args[0], 'failed')
        self.assertEqual(cm.exception.response.getArg(OPENID2_NS, 'contact'), 'contact')

    def test_missing_field(self):
        query = _nsdict({'openid.mode': 'id_res'})
        self.assertRaises(AuthenticationError,
                          self.consumer.complete, query, self.return_to)

    def test_no_mode(self):
        self.assertRaises(AuthenticationError,
                          self.consumer.complete, {}, self.return_to)
class TestCompleteMissingSig(unittest.TestCase):
    """complete() must reject id_res responses whose signed list omits a
    required field; a fully-signed response is accepted."""
    def setUp(self):
        self.store = GoodAssocStore()
        self.consumer = Consumer({}, self.store)
        self.server_url = "http://idp.unittest/"
        self.return_to = 'http://unittest/complete'
        claimed_id = 'bogus.claimed'
        # Baseline query with every required field present and signed.
        self.query = _nsdict({
            'openid.mode': 'id_res',
            'openid.return_to': self.return_to,
            'openid.identity': claimed_id,
            'openid.assoc_handle': 'does not matter',
            'openid.sig': GOODSIG,
            'openid.response_nonce': mkNonce(),
            'openid.signed': 'identity,return_to,response_nonce,assoc_handle,claimed_id,op_endpoint',
            'openid.claimed_id': claimed_id,
            'openid.op_endpoint': self.server_url,
        })
        self.endpoint = Service([OPENID_2_0_TYPE], self.server_url, claimed_id, claimed_id)
        self.consumer.session[self.consumer._token_key] = self.endpoint.__dict__
    def test_idResMissingNoSigs(self):
        r = self.consumer.complete(self.query, self.return_to)
        self.assertTrue(r)
    def test_idResNoIdentity(self):
        # An identity-less assertion is valid if the endpoint claims none.
        del self.query['openid.identity']
        del self.query['openid.claimed_id']
        self.endpoint.claimed_id = None
        self.endpoint.local_id = None
        self.query['openid.signed'] = 'return_to,response_nonce,assoc_handle,op_endpoint'
        response = self.consumer.complete(self.query, self.return_to)
        self.assertTrue(response)
    def test_idResMissingIdentitySig(self):
        self.query['openid.signed'] = 'return_to,response_nonce,assoc_handle,claimed_id'
        self.assertRaises(AuthenticationError,
                          self.consumer.complete, self.query, self.return_to
                          )
    def test_idResMissingReturnToSig(self):
        self.query['openid.signed'] = 'identity,response_nonce,assoc_handle,claimed_id'
        self.assertRaises(AuthenticationError,
                          self.consumer.complete, self.query, self.return_to
                          )
    def test_idResMissingAssocHandleSig(self):
        self.query['openid.signed'] = 'identity,response_nonce,return_to,claimed_id'
        self.assertRaises(AuthenticationError,
                          self.consumer.complete, self.query, self.return_to
                          )
    def test_idResMissingClaimedIDSig(self):
        self.query['openid.signed'] = 'identity,response_nonce,return_to,assoc_handle'
        self.assertRaises(AuthenticationError,
                          self.consumer.complete, self.query, self.return_to
                          )
class TestCheckAuthResponse(TestIdRes, CatchLogs):
    """_processCheckAuthResponse: is_valid handling and invalidate_handle."""
    def setUp(self):
        CatchLogs.setUp(self)
        TestIdRes.setUp(self)
    def tearDown(self):
        CatchLogs.tearDown(self)
    def _createAssoc(self):
        # Store an association under the handle 'handle' and verify the
        # round trip through the store.
        issued = time.time()
        lifetime = 1000
        assoc = association.Association(
            'handle', 'secret', issued, lifetime, 'HMAC-SHA1')
        store = self.consumer.store
        store.storeAssociation(self.server_url, assoc)
        assoc2 = store.getAssociation(self.server_url)
        self.assertEqual(assoc, assoc2)
    def test_goodResponse(self):
        """successful response to check_authentication"""
        response = Message.fromOpenIDArgs({'is_valid': 'true'})
        r = self.consumer._processCheckAuthResponse(response, self.server_url)
        self.assertTrue(r)
    def test_missingAnswer(self):
        """check_authentication returns false when server sends no answer"""
        response = Message.fromOpenIDArgs({})
        r = self.consumer._processCheckAuthResponse(response, self.server_url)
        self.assertFalse(r)
    def test_badResponse(self):
        """check_authentication returns false when is_valid is false"""
        response = Message.fromOpenIDArgs({'is_valid': 'false'})
        r = self.consumer._processCheckAuthResponse(response, self.server_url)
        self.assertFalse(r)
    def test_badResponseInvalidate(self):
        """Make sure that the handle is invalidated when is_valid is false
        From "Verifying directly with the OpenID Provider"::
            If the OP responds with "is_valid" set to "true", and
            "invalidate_handle" is present, the Relying Party SHOULD
            NOT send further authentication requests with that handle.
        """
        self._createAssoc()
        response = Message.fromOpenIDArgs({
            'is_valid': 'false',
            'invalidate_handle': 'handle',
        })
        r = self.consumer._processCheckAuthResponse(response, self.server_url)
        self.assertFalse(r)
        self.assertTrue(
            self.consumer.store.getAssociation(self.server_url) is None)
    def test_invalidateMissing(self):
        """invalidate_handle with a handle that is not present"""
        response = Message.fromOpenIDArgs({
            'is_valid': 'true',
            'invalidate_handle': 'missing',
        })
        r = self.consumer._processCheckAuthResponse(response, self.server_url)
        self.assertTrue(r)
        self.failUnlessLogMatches(
            'Received "invalidate_handle"'
        )
    def test_invalidateMissing_noStore(self):
        """invalidate_handle with a handle that is not present"""
        response = Message.fromOpenIDArgs({
            'is_valid': 'true',
            'invalidate_handle': 'missing',
        })
        self.consumer.store = None
        r = self.consumer._processCheckAuthResponse(response, self.server_url)
        self.assertTrue(r)
        self.failUnlessLogMatches(
            'Received "invalidate_handle"',
            'Unexpectedly got invalidate_handle without a store')
    def test_invalidatePresent(self):
        """invalidate_handle with a handle that exists
        From "Verifying directly with the OpenID Provider"::
            If the OP responds with "is_valid" set to "true", and
            "invalidate_handle" is present, the Relying Party SHOULD
            NOT send further authentication requests with that handle.
        """
        self._createAssoc()
        response = Message.fromOpenIDArgs({
            'is_valid': 'true',
            'invalidate_handle': 'handle',
        })
        r = self.consumer._processCheckAuthResponse(response, self.server_url)
        self.assertTrue(r)
        self.assertTrue(
            self.consumer.store.getAssociation(self.server_url) is None)
class TestSetupNeeded(TestIdRes):
    """Setup-needed detection: user_setup_url (OpenID 1) vs the
    setup_needed mode (OpenID 2)."""
    def failUnlessSetupNeeded(self, expected_setup_url, message):
        setup_url = message.setup_url()
        self.assertEqual(expected_setup_url, setup_url)
    def test_setupNeededOpenID1(self):
        """The minimum conditions necessary to trigger Setup Needed"""
        setup_url = 'http://unittest/setup-here'
        message = Message.fromPostArgs({
            'openid.mode': 'id_res',
            'openid.user_setup_url': setup_url,
        })
        self.assertTrue(message.isOpenID1())
        self.failUnlessSetupNeeded(setup_url, message)
    def test_setupNeededOpenID1_extra(self):
        """Extra stuff along with setup_url still trigger Setup Needed"""
        setup_url = 'http://unittest/setup-here'
        message = Message.fromPostArgs({
            'openid.mode': 'id_res',
            'openid.user_setup_url': setup_url,
            'openid.identity': 'bogus',
        })
        self.assertTrue(message.isOpenID1())
        self.failUnlessSetupNeeded(setup_url, message)
    def test_noSetupNeededOpenID1(self):
        """When the user_setup_url is missing on an OpenID 1 message,
        we assume that it's not a cancel response to checkid_immediate"""
        message = Message.fromOpenIDArgs({'mode': 'id_res'})
        self.assertTrue(message.isOpenID1())
        self.assertIsNone(message.setup_url())
    def test_setupNeededOpenID2(self):
        query = _nsdict({'openid.mode': 'setup_needed'})
        self.assertRaises(SetupNeeded,
                          self.consumer.complete, query, None
                          )
    def test_setupNeededDoesntWorkForOpenID1(self):
        # setup_needed is an OpenID 2 mode; on OpenID 1 it must be rejected.
        query = {'openid.mode': 'setup_needed'}
        self.assertIsNone(Message.fromPostArgs(query).setup_url())
        self.assertRaisesRegex(AuthenticationError, 'Mode missing or invalid',
                               self.consumer.complete, query, None,
                               )
    def test_noSetupNeededOpenID2(self):
        message = Message.fromOpenIDArgs({
            'mode': 'id_res',
            'game': 'puerto_rico',
            'ns': OPENID2_NS,
        })
        self.assertTrue(message.isOpenID2())
        self.assertIsNone(message.setup_url())
class FieldValidation(unittest.TestCase):
    """validate_fields positive/negative cases.

    Test methods are generated at class-definition time by the two helpers
    below -- they are ordinary functions called in the class body (hence no
    ``self`` on the factories themselves).
    """
    def mkSuccessTest(openid_args, signed_list):
        # Factory: message built from openid_args with the given signed
        # list must pass validate_fields.
        def test(self):
            message = Message.fromOpenIDArgs(openid_args)
            message.setArg(OPENID_NS, 'signed', ','.join(signed_list))
            self.assertFalse(validate_fields(message))
        return test
    test_openid1Success = mkSuccessTest(
        {'return_to': 'return',
         'assoc_handle': 'assoc handle',
         'sig': 'a signature',
         'identity': 'someone',
         },
        ['return_to', 'identity'])
    test_openid2Success = mkSuccessTest(
        {'ns': OPENID2_NS,
         'return_to': 'return',
         'assoc_handle': 'assoc handle',
         'sig': 'a signature',
         'op_endpoint': 'my favourite server',
         'response_nonce': 'use only once',
         },
        ['return_to', 'response_nonce', 'assoc_handle', 'op_endpoint'])
    test_openid2Success_identifiers = mkSuccessTest(
        {'ns': OPENID2_NS,
         'return_to': 'return',
         'assoc_handle': 'assoc handle',
         'sig': 'a siggnature',
         'claimed_id': 'i claim to be me',
         'identity': 'my server knows me as me',
         'op_endpoint': 'my favourite server',
         'response_nonce': 'use only once',
         },
        ['return_to', 'response_nonce', 'identity',
         'claimed_id', 'assoc_handle', 'op_endpoint'])
    def mkValidationTest(openid_args):
        # Factory: message built from openid_args must be rejected.
        def test(self):
            message = Message.fromOpenIDArgs(openid_args)
            self.assertRaises(AuthenticationError, validate_fields, message)
        return test
    test_openid1Missing_returnToSig = mkValidationTest(
        {'return_to': 'return',
         'assoc_handle': 'assoc handle',
         'sig': 'a signature',
         'identity': 'someone',
         'signed': 'identity',
         })
    test_openid1Missing_identitySig = mkValidationTest(
        {'return_to': 'return',
         'assoc_handle': 'assoc handle',
         'sig': 'a signature',
         'identity': 'someone',
         'signed': 'return_to'
         })
    test_openid2Missing_opEndpointSig = mkValidationTest(
        {'ns': OPENID2_NS,
         'return_to': 'return',
         'assoc_handle': 'assoc handle',
         'sig': 'a signature',
         'identity': 'someone',
         'op_endpoint': 'the endpoint',
         'signed': 'return_to,identity,assoc_handle'
         })
    test_openid1MissingReturnTo = mkValidationTest(
        {'assoc_handle': 'assoc handle',
         'sig': 'a signature',
         'identity': 'someone',
         })
    test_openid1MissingAssocHandle = mkValidationTest(
        {'return_to': 'return',
         'sig': 'a signature',
         'identity': 'someone',
         })
    # XXX: I could go on...
class CheckAuthHappened(Exception):
    """Raised by the mocked _checkAuth to signal that it was invoked."""
    pass
class CheckNonceVerifyTest(TestIdRes, CatchLogs):
    """_idResCheckNonce across protocol versions, store states and
    malformed/missing/replayed nonces."""
    def setUp(self):
        CatchLogs.setUp(self)
        TestIdRes.setUp(self)
    def tearDown(self):
        CatchLogs.tearDown(self)
    def test_openid1Success(self):
        """use consumer-generated nonce"""
        nonce_value = mkNonce()
        query = urllib.parse.urlencode({NONCE_ARG: nonce_value})
        self.return_to = 'http://rt.unittest/?' + query
        self.response = Message.fromOpenIDArgs({'return_to': self.return_to})
        self.response.setArg(BARE_NS, NONCE_ARG, nonce_value)
        self.consumer._idResCheckNonce(self.response, self.endpoint)
        self.failUnlessLogEmpty()
    def test_consumerNonceOpenID2(self):
        """OpenID 2 does not use consumer-generated nonce"""
        self.return_to = 'http://rt.unittest/?nonce=%s' % (mkNonce(),)
        self.response = Message.fromOpenIDArgs(
            {'return_to': self.return_to, 'ns': OPENID2_NS})
        self.assertRaises(AuthenticationError,
                          self.consumer._idResCheckNonce, self.response, self.endpoint
                          )
        self.failUnlessLogEmpty()
    def test_serverNonce(self):
        """use server-generated nonce"""
        self.response = Message.fromOpenIDArgs(
            {'ns': OPENID2_NS, 'response_nonce': mkNonce()})
        self.consumer._idResCheckNonce(self.response, self.endpoint)
        self.failUnlessLogEmpty()
    def test_serverNonceOpenID1(self):
        """OpenID 1 does not use server-generated nonce"""
        self.response = Message.fromOpenIDArgs(
            {'ns': OPENID1_NS,
             'return_to': 'http://return.to/',
             'response_nonce': mkNonce()})
        self.assertRaises(AuthenticationError,
                          self.consumer._idResCheckNonce, self.response, self.endpoint
                          )
        self.failUnlessLogEmpty()
    def test_badNonce(self):
        """remove the nonce from the store
        From "Checking the Nonce"::
            When the Relying Party checks the signature on an assertion, the
            Relying Party SHOULD ensure that an assertion has not yet
            been accepted with the same value for "openid.response_nonce"
            from the same OP Endpoint URL.
        """
        # Consume the nonce first so the check sees it as already used.
        nonce = mkNonce()
        stamp, salt = splitNonce(nonce)
        self.store.useNonce(self.server_url, stamp, salt)
        self.response = Message.fromOpenIDArgs(
            {'response_nonce': nonce,
             'ns': OPENID2_NS,
             })
        self.assertRaises(AuthenticationError,
                          self.consumer._idResCheckNonce, self.response, self.endpoint
                          )
    def test_successWithNoStore(self):
        """When there is no store, checking the nonce succeeds"""
        self.consumer.store = None
        self.response = Message.fromOpenIDArgs(
            {'response_nonce': mkNonce(),
             'ns': OPENID2_NS,
             })
        self.consumer._idResCheckNonce(self.response, self.endpoint)
        self.failUnlessLogEmpty()
    def test_tamperedNonce(self):
        """Malformed nonce"""
        self.response = Message.fromOpenIDArgs(
            {'ns': OPENID2_NS,
             'response_nonce': 'malformed'})
        self.assertRaises(AuthenticationError,
                          self.consumer._idResCheckNonce, self.response, self.endpoint
                          )
    def test_missingNonce(self):
        """no nonce parameter on the return_to"""
        self.response = Message.fromOpenIDArgs(
            {'return_to': self.return_to})
        self.assertRaises(AuthenticationError,
                          self.consumer._idResCheckNonce, self.response, self.endpoint
                          )
# Nonce checking is stubbed out and _checkAuth raises CheckAuthHappened so
# the tests below can detect exactly when the stateless check-auth path is
# taken.
@mock.patch.object(Consumer, '_idResCheckNonce', mock.Mock(return_value=True))
@mock.patch.object(Consumer, '_checkAuth', mock.Mock(side_effect=CheckAuthHappened))
class TestCheckAuthTriggered(TestIdRes, CatchLogs):
    """When does signature verification fall back to check_authentication?"""
    def setUp(self):
        TestIdRes.setUp(self)
        CatchLogs.setUp(self)
    def test_checkAuthTriggered(self):
        # Unknown assoc_handle with an empty store -> stateless check.
        query = {
            'openid.mode': 'id_res',
            'openid.return_to': self.return_to,
            'openid.identity': self.server_id,
            'openid.assoc_handle': 'not_found',
            'openid.sig': GOODSIG,
            'openid.signed': 'identity,return_to',
        }
        try:
            result = self.consumer.complete(query, self.return_to)
        except CheckAuthHappened:
            pass
        else:
            self.fail('_checkAuth did not happen. Result was: %r %s' %
                      (result, self.messages))
    def test_checkAuthTriggeredWithAssoc(self):
        # Store an association for this server that does not match the
        # handle that is in the message
        issued = time.time()
        lifetime = 1000
        assoc = association.Association(
            'handle', 'secret', issued, lifetime, 'HMAC-SHA1')
        self.store.storeAssociation(self.server_url, assoc)
        query = {
            'openid.mode': 'id_res',
            'openid.return_to': self.return_to,
            'openid.identity': self.server_id,
            'openid.assoc_handle': 'not_found',
            'openid.sig': GOODSIG,
            'openid.signed': 'identity,return_to',
        }
        try:
            result = self.consumer.complete(query, self.return_to)
        except CheckAuthHappened:
            pass
        else:
            self.fail('_checkAuth did not happen. Result was: %r' % (result,))
    def test_expiredAssoc(self):
        # Store an expired association for the server with the handle
        # that is in the message
        issued = time.time() - 10
        lifetime = 0
        handle = 'handle'
        assoc = association.Association(
            handle, 'secret', issued, lifetime, 'HMAC-SHA1')
        self.assertTrue(assoc.expiresIn <= 0)
        self.store.storeAssociation(self.server_url, assoc)
        message = Message.fromPostArgs({
            'openid.mode': 'id_res',
            'openid.return_to': self.return_to,
            'openid.identity': self.server_id,
            'openid.assoc_handle': handle,
            'openid.sig': GOODSIG,
            'openid.signed': 'identity,return_to',
        })
        self.assertRaises(
            AuthenticationError,
            self.consumer._idResCheckSignature, message, self.endpoint.server_url,
        )
    def test_newerAssoc(self):
        # With two associations stored, the one actually requested by
        # handle must be used to verify the signature.
        lifetime = 1000
        good_issued = time.time() - 10
        good_handle = 'handle'
        good_assoc = association.Association(
            good_handle, 'secret', good_issued, lifetime, 'HMAC-SHA1')
        self.store.storeAssociation(self.server_url, good_assoc)
        bad_issued = time.time() - 5
        bad_handle = 'handle2'
        bad_assoc = association.Association(
            bad_handle, 'secret', bad_issued, lifetime, 'HMAC-SHA1')
        self.store.storeAssociation(self.server_url, bad_assoc)
        message = Message.fromOpenIDArgs({
            'mode': 'id_res',
            'return_to': self.return_to,
            'identity': self.server_id,
            'assoc_handle': good_handle,
        })
        message = good_assoc.signMessage(message)
        response = self.consumer.complete(message.toPostArgs(), self.return_to)
        self.assertEqual(response.claimed_id, self.consumer_id)
class ReturnTo(unittest.TestCase):
    '''
    Verifying the Return URL parameters.
    From the specification "Verifying the Return URL"::
        To verify that the "openid.return_to" URL matches the URL that is
        processing this assertion:
        - The URL scheme, authority, and path MUST be the same between the
          two URLs.
        - Any query parameters that are present in the "openid.return_to"
          URL MUST also be present with the same values in the
          accepting URL.
    '''
    def test_missing(self):
        # No openid.return_to in the message at all.
        message = Message.fromPostArgs({'openid.mode': 'id_res'})
        self.assertRaises(AuthenticationError,validate_return_to, message, 'http://example.com/')
    def test_bad_url(self):
        # Scheme, authority and path must each match exactly.
        message = Message.fromPostArgs({'openid.return_to': 'http://unittest/complete'})
        self.assertRaises(AuthenticationError,validate_return_to, message, 'http://fraud/complete')
        self.assertRaises(AuthenticationError,validate_return_to, message, 'http://unittest/complete/')
        self.assertRaises(AuthenticationError,validate_return_to, message, 'https://unittest/complete')
    def test_good_args(self):
        message = Message.fromPostArgs({
            'openid.return_to': 'http://example.com/?foo=bar',
            'foo': 'bar',
            'stray': 'value',  # unknown values are okay
        })
        self.assertIsNone(validate_return_to(message, 'http://example.com/'))
    def test_bad_args(self):
        # Mismatched query parameters are reported in the error text.
        message = Message.fromPostArgs({
            'openid.mode': 'id_res',
            'openid.return_to': 'http://example.com/?foo=bar&xxx=yyy',
            'xxx': 'not yyy',
        })
        with self.assertRaises(AuthenticationError) as cm:
            validate_return_to(message, 'http://example.com/')
        self.assertTrue('foo, xxx' in str(cm.exception))
class MockFetcher(object):
    """Fetcher test double: remembers every request, replays one response."""

    def __init__(self, response=None):
        self.response = response
        self.fetches = []

    def fetch(self, url, body=None, headers=None):
        # Record the call so tests can inspect what was requested.
        call = (url, body, headers)
        self.fetches.append(call)
        return self.response
class ExceptionRaisingMockFetcher(object):
    """Fetcher test double whose every fetch raises MyException."""

    class MyException(Exception):
        pass

    def fetch(self, url, body=None, headers=None):
        # Always fail; tests assert the exception propagates unwrapped.
        raise self.MyException('mock fetcher exception')
class TestCheckAuth(unittest.TestCase, CatchLogs):
    """Direct tests of Consumer._checkAuth and _createCheckAuthRequest."""
    def setUp(self):
        CatchLogs.setUp(self)
        self.store = memstore.MemoryStore()
        self.consumer = Consumer({}, self.store)
        # Patch the module-level fetch function; restored in tearDown.
        self._original = fetchers.fetch
        self.fetcher = MockFetcher()
        fetchers.fetch = self.fetcher.fetch
    def tearDown(self):
        CatchLogs.tearDown(self)
        fetchers.fetch = self._original
    def test_error(self):
        # A 404 from the server means check_authentication failed; the
        # failure is logged rather than raised.
        self.fetcher.response = HTTPResponse(
            'http://some_url', 404, {'Hea': 'der'}, b'blah:blah\n')
        query = {'openid.signed': 'stuff',
                 'openid.stuff': 'a value'}
        r = self.consumer._checkAuth(Message.fromPostArgs(query),
                                     http_server_url)
        self.assertFalse(r)
        self.assertTrue(self.messages)
    def test_bad_args(self):
        # Must not blow up on unsigned / unknown-namespace arguments.
        query = {
            'openid.signed': 'foo',
            'closid.foo': 'something',
        }
        consumer = Consumer({}, self.store)
        consumer._checkAuth(Message.fromPostArgs(query), 'does://not.matter')
    def test_signedList(self):
        query = Message.fromOpenIDArgs({
            'mode': 'id_res',
            'sig': 'rabbits',
            'identity': '=example',
            'assoc_handle': 'munchkins',
            'ns.sreg': 'urn:sreg',
            'sreg.email': 'bogus@example.com',
            'signed': 'identity,mode,ns.sreg,sreg.email',
            'foo': 'bar',
        })
        args = self.consumer._createCheckAuthRequest(query)
        self.assertTrue(args.isOpenID1())
        # Every field listed in 'signed' must be carried over.
        for signed_arg in query.getArg(OPENID_NS, 'signed').split(','):
            self.assertTrue(args.getAliasedArg(signed_arg), signed_arg)
    def test_112(self):
        # Regression test (issue 112): an OpenID 2 check_authentication
        # request must preserve all namespaced (e.g. PAPE) arguments and
        # differ from the assertion only by mode=check_authentication.
        args = {
            'openid.assoc_handle': 'fa1f5ff0-cde4-11dc-a183-3714bfd55ca8',
            'openid.claimed_id': 'http://binkley.lan/user/test01',
            'openid.identity': 'http://test01.binkley.lan/',
            'openid.mode': 'id_res',
            'openid.ns': 'http://specs.openid.net/auth/2.0',
            'openid.ns.pape': 'http://specs.openid.net/extensions/pape/1.0',
            'openid.op_endpoint': 'http://binkley.lan/server',
            'openid.pape.auth_policies': 'none',
            'openid.pape.auth_time': '2008-01-28T20:42:36Z',
            'openid.pape.nist_auth_level': '0',
            'openid.response_nonce': '2008-01-28T21:07:04Z99Q=',
            'openid.return_to': 'http://binkley.lan:8001/process?janrain_nonce=2008-01-28T21%3A07%3A02Z0tMIKx',
            'openid.sig': 'YJlWH4U6SroB1HoPkmEKx9AyGGg=',
            'openid.signed': 'assoc_handle,identity,response_nonce,return_to,claimed_id,op_endpoint,pape.auth_time,ns.pape,pape.nist_auth_level,pape.auth_policies'
        }
        self.assertEqual(OPENID2_NS, args['openid.ns'])
        incoming = Message.fromPostArgs(args)
        self.assertTrue(incoming.isOpenID2())
        car = self.consumer._createCheckAuthRequest(incoming)
        expected_args = args.copy()
        expected_args['openid.mode'] = 'check_authentication'
        expected = Message.fromPostArgs(expected_args)
        self.assertTrue(expected.isOpenID2())
        self.assertEqual(expected, car)
        car_args = car.toPostArgs()
        self.assertEqual(set(expected_args.keys()), set(car_args.keys()))
        self.assertEqual(set(expected_args.values()), set(car_args.values()))
        self.assertEqual(expected_args, car.toPostArgs())
class TestFetchAssoc(unittest.TestCase, CatchLogs):
    """Association fetching when the underlying fetcher raises."""

    def setUp(self):
        CatchLogs.setUp(self)
        self.store = memstore.MemoryStore()
        # Patch the module-level fetch function; restored in tearDown.
        self._original = fetchers.fetch
        self.fetcher = MockFetcher()
        fetchers.fetch = self.fetcher.fetch
        self.consumer = Consumer({}, self.store)

    def tearDown(self):
        # Undo everything setUp did: previously only the fetcher was
        # restored, leaking the CatchLogs handler into later tests
        # (sibling TestCheckAuth restores both).
        CatchLogs.tearDown(self)
        fetchers.fetch = self._original

    def test_error_exception_unwrapped(self):
        """Ensure that exceptions are bubbled through from fetchers
        when making associations
        """
        self.fetcher = ExceptionRaisingMockFetcher()
        with mock.patch('openid.fetchers.fetch', self.fetcher.fetch):
            self.assertRaises(self.fetcher.MyException,
                              makeKVPost,
                              Message.fromPostArgs({'mode': 'associate'}),
                              "http://server_url")
            # exception fetching returns no association
            e = Service()
            e.server_url = 'some://url'
            self.assertRaises(self.fetcher.MyException,
                              self.consumer._getAssociation, e)
            self.assertRaises(self.fetcher.MyException,
                              self.consumer._checkAuth,
                              Message.fromPostArgs({'openid.signed': ''}),
                              'some://url')
class TestResponse(unittest.TestCase):
    """Tests of the success-response accessors (extension args, return_to)."""
    def setUp(self):
        self.endpoint = Service()
        self.endpoint.claimed_id = 'identity_url'
    def test_extensionResponse(self):
        # Unsigned extension lookup returns args keyed by their alias.
        resp = mkSuccess({
            'ns.sreg': 'urn:sreg',
            'ns.unittest': 'urn:unittest',
            'unittest.one': '1',
            'unittest.two': '2',
            'sreg.nickname': 'j3h',
            'return_to': 'return_to',
        })
        utargs = resp.extensionResponse('urn:unittest', False)
        self.assertEqual(utargs, {'one': '1', 'two': '2'})
        sregargs = resp.extensionResponse('urn:sreg', False)
        self.assertEqual(sregargs, {'nickname': 'j3h'})
    def test_extensionResponseSigned(self):
        args = {
            'ns.sreg': 'urn:sreg',
            'ns.unittest': 'urn:unittest',
            'unittest.one': '1',
            'unittest.two': '2',
            'sreg.nickname': 'j3h',
            'sreg.dob': 'yesterday',
            'return_to': 'return_to',
            'signed': 'sreg.nickname,unittest.one,sreg.dob',
        }
        signed_list = ['openid.sreg.nickname',
                       'openid.unittest.one',
                       'openid.sreg.dob']
        # Don't use mkSuccess because it creates an all-inclusive
        # signed list.
        msg = Message.fromOpenIDArgs(args)
        resp = Response(msg, signed_list)
        # All args in this NS are signed, so expect all.
        sregargs = resp.extensionResponse('urn:sreg', True)
        self.assertEqual(sregargs, {
            'nickname': 'j3h',
            'dob': 'yesterday'
        })
        # Not all args in this NS are signed, so expect None when
        # asking for them.
        utargs = resp.extensionResponse('urn:unittest', True)
        self.assertEqual(utargs, None)
    def test_noReturnTo(self):
        resp = mkSuccess({})
        self.assertTrue(resp.getReturnTo() is None)
    def test_returnTo(self):
        resp = mkSuccess({'return_to': 'return_to'})
        self.assertEqual(resp.getReturnTo(), 'return_to')
def _beginWithoutDiscovery(self, service, anonymous=False):
    """Test replacement for ``Consumer.beginWithoutDiscovery``.

    Builds an AuthRequest directly from *service*, records the endpoint
    state in the session, and converts a ``ValueError`` from
    ``setAnonymous`` into a ``ProtocolError``.
    """
    request = AuthRequest(service, None)
    self.endpoint = service
    self.session[self._token_key] = request.endpoint.__dict__
    try:
        request.setAnonymous(anonymous)
    except ValueError as why:
        # Chain the cause so the original ValueError traceback is kept
        # (the bare re-raise discarded the exception context).
        raise ProtocolError(str(why)) from why
    return request
@mock.patch.object(Consumer, 'beginWithoutDiscovery', _beginWithoutDiscovery)
class ConsumerTest(unittest.TestCase):
    """Consumer-level behaviour: association preferences and
    beginWithoutDiscovery side effects (using the test stand-in above).
    """
    def setUp(self):
        self.identity = 'http://identity.url/'
        self.endpoint = Service([], '', self.identity)
        self.store = None
        self.session = {}
        self.consumer = Consumer(self.session, self.store)
    def test_setAssociationPreference(self):
        # Setting preferences replaces the negotiator's allowed types.
        self.consumer.setAssociationPreference([])
        self.assertTrue(isinstance(self.consumer.negotiator,
                                   association.SessionNegotiator))
        self.assertEqual([],
                         self.consumer.negotiator.allowed_types)
        self.consumer.setAssociationPreference([('HMAC-SHA1', 'DH-SHA1')])
        self.assertEqual([('HMAC-SHA1', 'DH-SHA1')],
                         self.consumer.negotiator.allowed_types)
    def test_beginWithoutDiscovery(self):
        # Does this really test anything non-trivial?
        result = self.consumer.beginWithoutDiscovery(self.endpoint)
        # The result is an auth request
        self.assertTrue(isinstance(result, AuthRequest))
        # Side-effect of calling beginWithoutDiscovery is setting the
        # session value to the endpoint attribute of the result
        self.assertTrue(
            self.session[self.consumer._token_key] is result.endpoint.__dict__)
        # The endpoint that we passed in is the endpoint on the auth_request
        self.assertTrue(result.endpoint is self.endpoint)
class Cleanup(unittest.TestCase):
    """complete() must drop the session token whether it succeeds or fails."""
    def setUp(self):
        self.claimed_id = 'http://unittest/identity'
        self.session = {}
        self.consumer = Consumer(self.session, GoodAssocStore())
        self.consumer.session[self.consumer._token_key] = Service([OPENID_1_1_TYPE], '', self.claimed_id).__dict__
        self.return_to = 'http://unittest/complete'
    def test_success_session(self):
        nonce = mkNonce()
        query = {
            'openid.mode': 'id_res',
            'openid.return_to': self.return_to + '?' + urllib.parse.urlencode({NONCE_ARG: nonce}),
            'openid.identity': self.claimed_id,
            NONCE_ARG: nonce,
            'openid.assoc_handle': 'z',
            'openid.signed': 'identity,return_to',
            'openid.sig': GOODSIG,
        }
        self.consumer.complete(query, self.return_to)
        # Token removed after a successful completion.
        self.assertFalse(self.consumer._token_key in self.session)
    def test_failure_session(self):
        self.assertRaises(AuthenticationError,
                          self.consumer.complete, {}, self.return_to
                          )
        # Token removed even when completion fails.
        self.assertFalse(self.consumer._token_key in self.session)
@mock.patch('urllib.request.urlopen', support.urlopen)
class IDPDrivenTest(unittest.TestCase):
    """IdP-driven (directed identity) completion against canned discovery
    responses served by the ``support`` urlopen stub."""
    def setUp(self):
        self.store = GoodAssocStore()
        self.consumer = Consumer({}, self.store)
        self.endpoint = Service([OPENID_IDP_2_0_TYPE], 'http://unittest/')
        self.consumer.session[self.consumer._token_key] = self.endpoint.__dict__
        self.return_to = 'http://unittest/complete'
        # A fully signed id_res assertion; claimed_id points at a canned
        # XRDS document that re-discovery will fetch.
        self.query = _nsdict({
            'openid.mode': 'id_res',
            'openid.return_to': self.return_to,
            'openid.op_endpoint': 'http://www.myopenid.com/server',
            'openid.claimed_id': 'http://unittest/openid2_xrds.xrds',
            'openid.identity': 'http://smoker.myopenid.com/',
            'openid.response_nonce': mkNonce(),
            'openid.assoc_handle': 'z',
            'openid.signed': 'return_to,identity,response_nonce,claimed_id,assoc_handle,op_endpoint',
            'openid.sig': GOODSIG,
        })
    def test_idpDrivenBegin(self):
        # Testing here that the token-handling doesn't explode...
        self.consumer.beginWithoutDiscovery(self.endpoint)
    def test_idpDrivenComplete(self):
        response = self.consumer.complete(self.query, self.return_to)
        self.assertTrue(response)
    def test_idpDrivenCompleteFraud(self):
        # Discovery on this claimed_id yields a different local_id, so
        # the assertion must be rejected.
        self.query['openid.claimed_id'] = 'http://unittest/openid2_xrds_no_local_id.xrds'
        self.assertRaises(AuthenticationError,
                          self.consumer.complete, self.query, self.return_to
                          )
@mock.patch('urllib.request.urlopen', support.urlopen)
class DiscoveryVerification(unittest.TestCase):
    """Verify that assertion contents match the discovered endpoint
    (Consumer._verify_openid1 / _verify_openid2)."""
    def setUp(self):
        self.consumer = Consumer({}, None)
        self.identifier = 'http://unittest/openid2_xrds.xrds'
        self.local_id = 'http://smoker.myopenid.com/'
        self.server_url = 'http://www.myopenid.com/server'
        # Minimal OpenID 1 assertion: identity only.
        self.message1 = Message.fromPostArgs({
            'openid.ns': OPENID1_NS,
            'openid.identity': self.local_id,
        })
        # OpenID 2 assertion matching self.endpoint exactly.
        self.message2 = Message.fromPostArgs({
            'openid.ns': OPENID2_NS,
            'openid.op_endpoint': self.server_url,
            'openid.claimed_id': self.identifier,
            'openid.identity': self.local_id,
        })
        self.endpoint = Service(
            [OPENID_2_0_TYPE],
            self.server_url,
            self.identifier,
            self.local_id,
        )
    def test_prediscovered_match(self):
        self.assertFalse(self.consumer._verify_openid2(self.message2, self.endpoint))
    def test_openid1_prediscovered_match(self):
        self.endpoint.types = [OPENID_1_1_TYPE]
        self.assertFalse(self.consumer._verify_openid1(self.message1, self.endpoint))
    def test_fragment(self):
        # A fragment on claimed_id must not break the match.
        claimed_id = self.identifier + '#fragment'
        self.message2.setArg(OPENID2_NS, 'claimed_id', claimed_id)
        self.assertFalse(self.consumer._verify_openid2(self.message2, self.endpoint))
    def test_prediscovered_wrong_type(self):
        self.assertRaises(
            AuthenticationError,
            self.consumer._verify_openid1, self.message1, self.endpoint
        )
    def test_openid1_no_endpoint(self):
        self.assertRaises(
            AuthenticationError,
            self.consumer._verify_openid1, self.message1, None
        )
    def test_openid2_claimed_id_local_id(self):
        # claimed_id and identity must be both present or both absent.
        variants = [
            {
                'openid.op_endpoint': self.server_url,
                'openid.identity': self.identifier,
            },
            {
                'openid.op_endpoint': self.server_url,
                'openid.claimed_id': self.identifier,
            },
        ]
        for q in variants:
            self.assertRaises(AuthenticationError,
                              self.consumer._verify_openid2,
                              Message.fromPostArgs(_nsdict(q)),
                              self.endpoint,
                              )
    def test_openid2_no_claimed_id(self):
        endpoint = Service([OPENID_IDP_2_0_TYPE], self.server_url)
        self.message2.delArg(OPENID2_NS, 'claimed_id')
        self.message2.delArg(OPENID2_NS, 'identity')
        self.assertFalse(self.consumer._verify_openid2(self.message2, endpoint))
    def test_wrong_info(self):
        # Mismatched server_url or local_id must be rejected.
        endpoints = [
            Service([OPENID_2_0_TYPE], 'wrong', self.identifier, self.local_id),
            Service([OPENID_2_0_TYPE], self.server_url, self.identifier, 'wrong'),
        ]
        for endpoint in endpoints:
            self.assertRaises(AuthenticationError,
                              self.consumer._verify_openid2, self.message2, endpoint
                              )
    def test_rediscover(self):
        # Discovery runs only when no endpoint was supplied.
        with mock.patch('openid.discover.discover') as discover:
            discover.return_value = self.endpoint
            self.consumer._verify_openid2(self.message2, None)
            discover.assert_called_once_with(self.identifier)
            discover.reset_mock()
            self.consumer._verify_openid2(self.message2, self.endpoint)
            self.assertFalse(discover.call_count)
class TestCreateAssociationRequest(unittest.TestCase):
    """Message construction in Consumer._createAssociateRequest."""
    def setUp(self):
        # Minimal endpoint stub: only compat_mode() is consulted.
        class DummyEndpoint(object):
            use_compatibility = False
            def compat_mode(self):
                return self.use_compatibility
        self.endpoint = DummyEndpoint()
        self.consumer = Consumer({}, store=None)
        self.assoc_type = 'HMAC-SHA1'
    def test_noEncryptionSendsType(self):
        # OpenID 2 always sends session_type, even for no-encryption.
        session_type = 'no-encryption'
        session, args = self.consumer._createAssociateRequest(
            self.endpoint, self.assoc_type, session_type)
        self.assertTrue(isinstance(session, PlainTextConsumerSession))
        expected = Message.fromOpenIDArgs(
            {'ns': OPENID2_NS,
             'session_type': session_type,
             'mode': 'associate',
             'assoc_type': self.assoc_type,
             })
        self.assertEqual(expected, args)
    def test_noEncryptionCompatibility(self):
        # OpenID 1 compatibility omits session_type for no-encryption.
        self.endpoint.use_compatibility = True
        session_type = 'no-encryption'
        session, args = self.consumer._createAssociateRequest(
            self.endpoint, self.assoc_type, session_type)
        self.assertTrue(isinstance(session, PlainTextConsumerSession))
        self.assertEqual(Message.fromOpenIDArgs({
            'mode': 'associate',
            'assoc_type': self.assoc_type,
        }), args)
    @mock.patch('openid.consumer.create_session', create_session)
    def test_dhSHA1Compatibility(self):
        self.endpoint.use_compatibility = True
        session_type = 'DH-SHA1'
        session, args = self.consumer._createAssociateRequest(
            self.endpoint, self.assoc_type, session_type)
        self.assertTrue(isinstance(session, DiffieHellmanSHA1ConsumerSession))
        # This is a random base-64 value, so just check that it's
        # present.
        self.assertTrue(args.getArg(OPENID1_NS, 'dh_consumer_public'))
        args.delArg(OPENID1_NS, 'dh_consumer_public')
        # OK, session_type is set here and not for no-encryption
        # compatibility
        expected = Message.fromOpenIDArgs({
            'mode': 'associate',
            'session_type': 'DH-SHA1',
            'assoc_type': self.assoc_type,
            # DH does byte-manipulation and returns bytes
            'dh_modulus': b'BfvStQ==',
            'dh_gen': b'Ag==',
        })
        self.assertEqual(expected, args)
    # XXX: test the other types
class _TestingDiffieHellmanResponseParameters(object):
    """Mixin verifying DH secret extraction from association responses.

    Concrete subclasses set ``session_cls`` (the consumer session class)
    and ``message_namespace`` (OpenID 1 or 2 namespace).
    """
    session_cls = None
    message_namespace = None
    def setUp(self):
        # Pre-compute DH with small prime so tests run quickly.
        self.server_dh = DiffieHellman(100389557, 2)
        self.consumer_dh = DiffieHellman(100389557, 2)
        # base64(btwoc(g ^ xb mod p))
        self.dh_server_public = cryptutil.longToBase64(self.server_dh.public)
        self.secret = cryptutil.randomString(self.session_cls.secret_size)
        self.enc_mac_key = oidutil.toBase64(
            self.server_dh.xorSecret(self.consumer_dh.public,
                                     self.secret,
                                     self.session_cls.hash_func))
        self.consumer_session = self.session_cls(self.consumer_dh)
        self.msg = Message(self.message_namespace)
    def testExtractSecret(self):
        self.msg.setArg(OPENID_NS, 'dh_server_public', self.dh_server_public)
        self.msg.setArg(OPENID_NS, 'enc_mac_key', self.enc_mac_key)
        extracted = self.consumer_session.extractSecret(self.msg)
        self.assertEqual(extracted, self.secret)
    def testAbsentServerPublic(self):
        self.msg.setArg(OPENID_NS, 'enc_mac_key', self.enc_mac_key)
        self.assertRaises(KeyError, self.consumer_session.extractSecret,
                          self.msg)
    def testAbsentMacKey(self):
        self.msg.setArg(OPENID_NS, 'dh_server_public', self.dh_server_public)
        self.assertRaises(KeyError, self.consumer_session.extractSecret,
                          self.msg)
    def testInvalidBase64Public(self):
        self.msg.setArg(OPENID_NS, 'dh_server_public', 'n o t b a s e 6 4.')
        self.msg.setArg(OPENID_NS, 'enc_mac_key', self.enc_mac_key)
        self.assertRaises(ValueError,
                          self.consumer_session.extractSecret,
                          self.msg)
    def testInvalidBase64MacKey(self):
        self.msg.setArg(OPENID_NS, 'dh_server_public', self.dh_server_public)
        self.msg.setArg(OPENID_NS, 'enc_mac_key', 'n o t base 64')
        self.assertRaises(ValueError,
                          self.consumer_session.extractSecret,
                          self.msg)
class TestOpenID1SHA1(_TestingDiffieHellmanResponseParameters,
                      unittest.TestCase):
    """DH-SHA1 response parameters over the OpenID 1 namespace."""
    message_namespace = OPENID1_NS
    session_cls = DiffieHellmanSHA1ConsumerSession
class TestOpenID2SHA1(_TestingDiffieHellmanResponseParameters,
                      unittest.TestCase):
    """DH-SHA1 response parameters over the OpenID 2 namespace."""
    message_namespace = OPENID2_NS
    session_cls = DiffieHellmanSHA1ConsumerSession
# SHA-256 sessions are only testable when the platform's crypto support
# provides SHA-256; otherwise warn instead of silently skipping.
if cryptutil.SHA256_AVAILABLE:
    class TestOpenID2SHA256(_TestingDiffieHellmanResponseParameters,
                            unittest.TestCase):
        session_cls = DiffieHellmanSHA256ConsumerSession
        message_namespace = OPENID2_NS
else:
    warnings.warn("Not running SHA256 association session tests.")
class TestNoStore(unittest.TestCase):
    """Consumer behaviour when constructed with store=None."""

    def setUp(self):
        self.consumer = Consumer({}, None)

    def test_completeNoGetAssoc(self):
        """_getAssociation is never called when the store is None"""
        def notCalled(unused):
            self.fail('This method was unexpectedly called')

        self.consumer._getAssociation = notCalled
        endpoint = Service([], '', 'identity_url')
        auth_request = self.consumer.beginWithoutDiscovery(endpoint)
        # Reaching this point means _getAssociation was never invoked.
class TestConsumerAnonymous(unittest.TestCase):
    def test_beginWithoutDiscoveryAnonymousFail(self):
        """Make sure that ValueError for setting an auth request
        anonymous gets converted to a ProtocolError
        """
        patcher = mock.patch.object(
            AuthRequest, 'setAnonymous', mock.Mock(side_effect=ValueError))
        with patcher:
            consumer = Consumer({}, None)
            with self.assertRaises(ProtocolError):
                consumer.beginWithoutDiscovery(Service([], ''))
class SillyExtension(Extension):
    """Trivial Extension used to exercise AuthRequest.addExtension."""

    ns_uri = 'http://silly.example.com/'
    ns_alias = 'silly'

    def getExtensionArgs(self):
        # Fixed payload; the test compares against this dict.
        return {'i_am': 'silly'}
class TestAddExtension(unittest.TestCase):
    def test_SillyExtension(self):
        """addExtension copies the extension's args into the message."""
        extension = SillyExtension()
        auth_req = AuthRequest(Service(), None)
        auth_req.addExtension(extension)
        observed = auth_req.message.getArgs(extension.ns_uri)
        self.assertEqual(extension.getExtensionArgs(), observed)
@mock.patch('urllib.request.urlopen', support.urlopen)
class TestKVPost(unittest.TestCase):
    """makeKVPost behaviour for 200 / 400 / 500 HTTP responses."""

    def test_200(self):
        r = makeKVPost(Message(), 'http://unittest/message-ok.txt')
        expected_msg = Message.fromOpenIDArgs({'foo': 'bar', 'baz': 'quux'})
        self.assertEqual(expected_msg, r)

    def test_400(self):
        # A 400 carries an OpenID error payload surfaced as ServerError.
        # makeKVPost raises here, so its return value is never produced;
        # don't bind it (was an unused local `r`).
        with self.assertRaises(ServerError) as cm:
            makeKVPost(Message(), 'http://unittest/message-error.txt?status=400')
        self.assertEqual(cm.exception.error_text, 'bonk')
        self.assertEqual(cm.exception.error_code, '7')

    def test_500(self):
        # Non-OpenID server failures propagate as URLError.
        with self.assertRaises(urllib.error.URLError):
            makeKVPost(Message(), 'http://unittest/?status=500')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# EDITING INSTRUCTIONS
# This file was generated from the file
# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto,
# and updates to that file get reflected here through a refresh process.
# For the short term, the refresh process will only be runnable by Google engineers.
#
# The only allowed edits are to method and file documentation. A 3-way
# merge preserves those additions if the generated source changes.
"""Accesses the google.pubsub.v1 Subscriber API."""
import json
import os
import pkg_resources
import platform
from google.gax import api_callable
from google.gax import config
from google.gax import path_template
import google.gax
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import policy_pb2
from google.pubsub.v1 import pubsub_pb2
_PageDesc = google.gax.PageDescriptor
class SubscriberApi(object):
    """
    The service that an application uses to manipulate subscriptions and to
    consume messages from a subscription via the ``Pull`` method.
    """
    SERVICE_ADDRESS = 'pubsub.googleapis.com'
    """The default address of the service."""
    DEFAULT_SERVICE_PORT = 443
    """The default port of the service."""
    # Identifies the code generator and GAX versions in request metadata.
    _CODE_GEN_NAME_VERSION = 'gapic/0.1.0'
    _GAX_VERSION = pkg_resources.get_distribution('google-gax').version
    # Page-streaming config: list_subscriptions pages via
    # page_token/next_page_token over the `subscriptions` field.
    _PAGE_DESCRIPTORS = {
        'list_subscriptions': _PageDesc('page_token', 'next_page_token',
                                        'subscriptions')
    }
    # The scopes needed to make gRPC calls to all of the methods defined in
    # this service
    _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform',
                   'https://www.googleapis.com/auth/pubsub', )
    # Templates for the resource name formats accepted/produced by this API.
    _PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}')
    _SUBSCRIPTION_PATH_TEMPLATE = path_template.PathTemplate(
        'projects/{project}/subscriptions/{subscription}')
    _TOPIC_PATH_TEMPLATE = path_template.PathTemplate(
        'projects/{project}/topics/{topic}')
@classmethod
def project_path(cls, project):
"""Returns a fully-qualified project resource name string."""
return cls._PROJECT_PATH_TEMPLATE.render({'project': project, })
@classmethod
def subscription_path(cls, project, subscription):
"""Returns a fully-qualified subscription resource name string."""
return cls._SUBSCRIPTION_PATH_TEMPLATE.render({
'project': project,
'subscription': subscription,
})
@classmethod
def topic_path(cls, project, topic):
"""Returns a fully-qualified topic resource name string."""
return cls._TOPIC_PATH_TEMPLATE.render({
'project': project,
'topic': topic,
})
@classmethod
def match_project_from_project_name(cls, project_name):
"""Parses the project from a project resource.
Args:
project_name (string): A fully-qualified path representing a project
resource.
Returns:
A string representing the project.
"""
return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project')
@classmethod
def match_project_from_subscription_name(cls, subscription_name):
"""Parses the project from a subscription resource.
Args:
subscription_name (string): A fully-qualified path representing a subscription
resource.
Returns:
A string representing the project.
"""
return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get(
'project')
@classmethod
def match_subscription_from_subscription_name(cls, subscription_name):
"""Parses the subscription from a subscription resource.
Args:
subscription_name (string): A fully-qualified path representing a subscription
resource.
Returns:
A string representing the subscription.
"""
return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get(
'subscription')
@classmethod
def match_project_from_topic_name(cls, topic_name):
"""Parses the project from a topic resource.
Args:
topic_name (string): A fully-qualified path representing a topic
resource.
Returns:
A string representing the project.
"""
return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('project')
@classmethod
def match_topic_from_topic_name(cls, topic_name):
"""Parses the topic from a topic resource.
Args:
topic_name (string): A fully-qualified path representing a topic
resource.
Returns:
A string representing the topic.
"""
return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('topic')
    def __init__(self,
                 service_path=SERVICE_ADDRESS,
                 port=DEFAULT_SERVICE_PORT,
                 channel=None,
                 metadata_transformer=None,
                 ssl_creds=None,
                 scopes=None,
                 client_config=None,
                 app_name='gax',
                 app_version=_GAX_VERSION):
        """Constructor.
        Args:
          service_path (string): The domain name of the API remote host.
          port (int): The port on which to connect to the remote host.
          channel (:class:`grpc.Channel`): A ``Channel`` instance through
            which to make calls.
          ssl_creds (:class:`grpc.ChannelCredentials`): A
            ``ChannelCredentials`` instance for use with an SSL-enabled
            channel.
          client_config (dict):
            A dictionary for call options for each method. See
            :func:`google.gax.construct_settings` for the structure of
            this data. Falls back to the default config if not specified
            or the specified config is missing data points.
          metadata_transformer (Callable[[], list]): A function that creates
            the metadata for requests.
          app_name (string): The codename of the calling service.
          app_version (string): The version of the calling service.
        Returns:
          A SubscriberApi object.
        """
        if scopes is None:
            scopes = self._ALL_SCOPES
        if client_config is None:
            client_config = {}
        # x-goog-api-client metadata identifies the client stack versions.
        goog_api_client = '{}/{} {} gax/{} python/{}'.format(
            app_name, app_version, self._CODE_GEN_NAME_VERSION,
            self._GAX_VERSION, platform.python_version())
        metadata = [('x-goog-api-client', goog_api_client)]
        # Load the packaged defaults; the caller's client_config overrides.
        default_client_config = json.loads(
            pkg_resources.resource_string(
                __name__, 'subscriber_client_config.json').decode())
        defaults = api_callable.construct_settings(
            'google.pubsub.v1.Subscriber',
            default_client_config,
            client_config,
            config.STATUS_CODE_NAMES,
            kwargs={'metadata': metadata},
            page_descriptors=self._PAGE_DESCRIPTORS)
        # Two stubs with the same channel configuration: IAM policy ops
        # and the Subscriber service itself.
        self.iam_policy_stub = config.create_stub(
            iam_policy_pb2.IAMPolicyStub,
            service_path,
            port,
            ssl_creds=ssl_creds,
            channel=channel,
            metadata_transformer=metadata_transformer,
            scopes=scopes)
        self.subscriber_stub = config.create_stub(
            pubsub_pb2.SubscriberStub,
            service_path,
            port,
            ssl_creds=ssl_creds,
            channel=channel,
            metadata_transformer=metadata_transformer,
            scopes=scopes)
        # Wrap each RPC with its retry/timeout/page-streaming settings.
        self._create_subscription = api_callable.create_api_call(
            self.subscriber_stub.CreateSubscription,
            settings=defaults['create_subscription'])
        self._get_subscription = api_callable.create_api_call(
            self.subscriber_stub.GetSubscription,
            settings=defaults['get_subscription'])
        self._list_subscriptions = api_callable.create_api_call(
            self.subscriber_stub.ListSubscriptions,
            settings=defaults['list_subscriptions'])
        self._delete_subscription = api_callable.create_api_call(
            self.subscriber_stub.DeleteSubscription,
            settings=defaults['delete_subscription'])
        self._modify_ack_deadline = api_callable.create_api_call(
            self.subscriber_stub.ModifyAckDeadline,
            settings=defaults['modify_ack_deadline'])
        self._acknowledge = api_callable.create_api_call(
            self.subscriber_stub.Acknowledge, settings=defaults['acknowledge'])
        self._pull = api_callable.create_api_call(
            self.subscriber_stub.Pull, settings=defaults['pull'])
        self._modify_push_config = api_callable.create_api_call(
            self.subscriber_stub.ModifyPushConfig,
            settings=defaults['modify_push_config'])
        self._set_iam_policy = api_callable.create_api_call(
            self.iam_policy_stub.SetIamPolicy,
            settings=defaults['set_iam_policy'])
        self._get_iam_policy = api_callable.create_api_call(
            self.iam_policy_stub.GetIamPolicy,
            settings=defaults['get_iam_policy'])
        self._test_iam_permissions = api_callable.create_api_call(
            self.iam_policy_stub.TestIamPermissions,
            settings=defaults['test_iam_permissions'])
# Service calls
    def create_subscription(self,
                            name,
                            topic,
                            push_config=None,
                            ack_deadline_seconds=0,
                            options=None):
        """
        Creates a subscription to a given topic.
        If the subscription already exists, returns ``ALREADY_EXISTS``.
        If the corresponding topic doesn't exist, returns ``NOT_FOUND``.
        If the name is not provided in the request, the server will assign a random
        name for this subscription on the same project as the topic. Note that
        for REST API requests, you must specify a name.
        Example:
          >>> from google.cloud.gapic.pubsub.v1 import subscriber_api
          >>> api = subscriber_api.SubscriberApi()
          >>> name = api.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
          >>> topic = api.topic_path('[PROJECT]', '[TOPIC]')
          >>> response = api.create_subscription(name, topic)
        Args:
          name (string): The name of the subscription. It must have the format
            ``\"projects/{project}/subscriptions/{subscription}\"``. ``{subscription}`` must
            start with a letter, and contain only letters (``[A-Za-z]``), numbers
            (``[0-9]``), dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``),
            plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters
            in length, and it must not start with ``\"goog\"``.
          topic (string): The name of the topic from which this subscription is receiving messages.
            The value of this field will be ``_deleted-topic_`` if the topic has been
            deleted.
          push_config (:class:`google.pubsub.v1.pubsub_pb2.PushConfig`): If push delivery is used with this subscription, this field is
            used to configure it. An empty ``pushConfig`` signifies that the subscriber
            will pull and ack messages using API methods.
          ack_deadline_seconds (int): This value is the maximum time after a subscriber receives a message
            before the subscriber should acknowledge the message. After message
            delivery but before the ack deadline expires and before the message is
            acknowledged, it is an outstanding message and will not be delivered
            again during that time (on a best-effort basis).
            For pull subscriptions, this value is used as the initial value for the ack
            deadline. To override this value for a given message, call
            ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using
            pull.
            The maximum custom deadline you can specify is 600 seconds (10 minutes).
            For push delivery, this value is also used to set the request timeout for
            the call to the push endpoint.
            If the subscriber never acknowledges the message, the Pub/Sub
            system will eventually redeliver the message.
            If this parameter is 0, a default value of 10 seconds is used.
          options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g, timeout, retries etc.
        Returns:
          A :class:`google.pubsub.v1.pubsub_pb2.Subscription` instance.
        Raises:
          :exc:`google.gax.errors.GaxError` if the RPC is aborted.
          :exc:`ValueError` if the parameters are invalid.
        """
        # The request proto needs a concrete PushConfig; an empty one
        # means "pull delivery" (mutable-default pitfall avoided by
        # using None as the signature default).
        if push_config is None:
            push_config = pubsub_pb2.PushConfig()
        request = pubsub_pb2.Subscription(
            name=name,
            topic=topic,
            push_config=push_config,
            ack_deadline_seconds=ack_deadline_seconds)
        return self._create_subscription(request, options)
def get_subscription(self, subscription, options=None):
"""
Gets the configuration details of a subscription.
Example:
>>> from google.cloud.gapic.pubsub.v1 import subscriber_api
>>> api = subscriber_api.SubscriberApi()
>>> subscription = api.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> response = api.get_subscription(subscription)
Args:
subscription (string): The name of the subscription to get.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.pubsub.v1.pubsub_pb2.Subscription` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription)
return self._get_subscription(request, options)
def list_subscriptions(self, project, page_size=0, options=None):
"""
Lists matching subscriptions.
Example:
>>> from google.cloud.gapic.pubsub.v1 import subscriber_api
>>> from google.gax import CallOptions, INITIAL_PAGE
>>> api = subscriber_api.SubscriberApi()
>>> project = api.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in api.list_subscriptions(project):
>>> # process element
>>> pass
>>>
>>> # Or iterate over results one page at a time
>>> for page in api.list_subscriptions(project, options=CallOptions(page_token=INITIAL_PAGE)):
>>> for element in page:
>>> # process element
>>> pass
Args:
project (string): The name of the cloud project that subscriptions belong to.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.gax.PageIterator` instance. By default, this
is an iterable of :class:`google.pubsub.v1.pubsub_pb2.Subscription` instances.
This object can also be configured to iterate over the pages
of the response through the `CallOptions` parameter.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
request = pubsub_pb2.ListSubscriptionsRequest(
project=project, page_size=page_size)
return self._list_subscriptions(request, options)
def delete_subscription(self, subscription, options=None):
"""
Deletes an existing subscription. All pending messages in the subscription
are immediately dropped. Calls to ``Pull`` after deletion will return
``NOT_FOUND``. After a subscription is deleted, a new one may be created with
the same name, but the new one has no association with the old
subscription, or its topic unless the same topic is specified.
Example:
>>> from google.cloud.gapic.pubsub.v1 import subscriber_api
>>> api = subscriber_api.SubscriberApi()
>>> subscription = api.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> api.delete_subscription(subscription)
Args:
subscription (string): The subscription to delete.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
request = pubsub_pb2.DeleteSubscriptionRequest(
subscription=subscription)
self._delete_subscription(request, options)
def modify_ack_deadline(self,
subscription,
ack_ids,
ack_deadline_seconds,
options=None):
"""
Modifies the ack deadline for a specific message. This method is useful
to indicate that more time is needed to process a message by the
subscriber, or to make the message available for redelivery if the
processing was interrupted. Note that this does not modify the
subscription-level ``ackDeadlineSeconds`` used for subsequent messages.
Example:
>>> from google.cloud.gapic.pubsub.v1 import subscriber_api
>>> api = subscriber_api.SubscriberApi()
>>> subscription = api.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> ack_ids = []
>>> ack_deadline_seconds = 0
>>> api.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds)
Args:
subscription (string): The name of the subscription.
ack_ids (list[string]): List of acknowledgment IDs.
ack_deadline_seconds (int): The new ack deadline with respect to the time this request was sent to
the Pub/Sub system. Must be >= 0. For example, if the value is 10, the new
ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call
was made. Specifying zero may immediately make the message available for
another pull request.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
request = pubsub_pb2.ModifyAckDeadlineRequest(
subscription=subscription,
ack_ids=ack_ids,
ack_deadline_seconds=ack_deadline_seconds)
self._modify_ack_deadline(request, options)
def acknowledge(self, subscription, ack_ids, options=None):
"""
Acknowledges the messages associated with the ``ack_ids`` in the
``AcknowledgeRequest``. The Pub/Sub system can remove the relevant messages
from the subscription.
Acknowledging a message whose ack deadline has expired may succeed,
but such a message may be redelivered later. Acknowledging a message more
than once will not result in an error.
Example:
>>> from google.cloud.gapic.pubsub.v1 import subscriber_api
>>> api = subscriber_api.SubscriberApi()
>>> subscription = api.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> ack_ids = []
>>> api.acknowledge(subscription, ack_ids)
Args:
subscription (string): The subscription whose message is being acknowledged.
ack_ids (list[string]): The acknowledgment ID for the messages being acknowledged that was returned
by the Pub/Sub system in the ``Pull`` response. Must not be empty.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
request = pubsub_pb2.AcknowledgeRequest(
subscription=subscription, ack_ids=ack_ids)
self._acknowledge(request, options)
def pull(self,
subscription,
max_messages,
return_immediately=False,
options=None):
"""
Pulls messages from the server. Returns an empty list if there are no
messages available in the backlog. The server may return ``UNAVAILABLE`` if
there are too many concurrent pull requests pending for the given
subscription.
Example:
>>> from google.cloud.gapic.pubsub.v1 import subscriber_api
>>> api = subscriber_api.SubscriberApi()
>>> subscription = api.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> max_messages = 0
>>> response = api.pull(subscription, max_messages)
Args:
subscription (string): The subscription from which messages should be pulled.
return_immediately (bool): If this is specified as true the system will respond immediately even if
it is not able to return a message in the ``Pull`` response. Otherwise the
system is allowed to wait until at least one message is available rather
than returning no messages. The client may cancel the request if it does
not wish to wait any longer for the response.
max_messages (int): The maximum number of messages returned for this request. The Pub/Sub
system may return fewer than the number specified.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.pubsub.v1.pubsub_pb2.PullResponse` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
request = pubsub_pb2.PullRequest(
subscription=subscription,
max_messages=max_messages,
return_immediately=return_immediately)
return self._pull(request, options)
def modify_push_config(self, subscription, push_config, options=None):
"""
Modifies the ``PushConfig`` for a specified subscription.
This may be used to change a push subscription to a pull one (signified by
an empty ``PushConfig``) or vice versa, or change the endpoint URL and other
attributes of a push subscription. Messages will accumulate for delivery
continuously through the call regardless of changes to the ``PushConfig``.
Example:
>>> from google.cloud.gapic.pubsub.v1 import subscriber_api
>>> from google.pubsub.v1 import pubsub_pb2
>>> api = subscriber_api.SubscriberApi()
>>> subscription = api.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> push_config = pubsub_pb2.PushConfig()
>>> api.modify_push_config(subscription, push_config)
Args:
subscription (string): The name of the subscription.
push_config (:class:`google.pubsub.v1.pubsub_pb2.PushConfig`): The push configuration for future deliveries.
An empty ``pushConfig`` indicates that the Pub/Sub system should
stop pushing messages from the given subscription and allow
messages to be pulled and acknowledged - effectively pausing
the subscription if ``Pull`` is not called.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
request = pubsub_pb2.ModifyPushConfigRequest(
subscription=subscription, push_config=push_config)
self._modify_push_config(request, options)
def set_iam_policy(self, resource, policy, options=None):
"""
Sets the access control policy on the specified resource. Replaces any
existing policy.
Example:
>>> from google.cloud.gapic.pubsub.v1 import subscriber_api
>>> from google.iam.v1 import policy_pb2
>>> api = subscriber_api.SubscriberApi()
>>> resource = api.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> policy = policy_pb2.Policy()
>>> response = api.set_iam_policy(resource, policy)
Args:
resource (string): REQUIRED: The resource for which policy is being specified.
Resource is usually specified as a path, such as,
projects/{project}/zones/{zone}/disks/{disk}.
policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the 'resource'. The size of
the policy is limited to a few 10s of KB. An empty policy is in general a
valid policy but certain services (like Projects) might reject them.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.iam.v1.policy_pb2.Policy` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
request = iam_policy_pb2.SetIamPolicyRequest(
resource=resource, policy=policy)
return self._set_iam_policy(request, options)
def get_iam_policy(self, resource, options=None):
"""
Gets the access control policy for a resource. Is empty if the
policy or the resource does not exist.
Example:
>>> from google.cloud.gapic.pubsub.v1 import subscriber_api
>>> api = subscriber_api.SubscriberApi()
>>> resource = api.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> response = api.get_iam_policy(resource)
Args:
resource (string): REQUIRED: The resource for which policy is being requested. Resource
is usually specified as a path, such as, projects/{project}.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.iam.v1.policy_pb2.Policy` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
return self._get_iam_policy(request, options)
def test_iam_permissions(self, resource, permissions, options=None):
"""
Returns permissions that a caller has on the specified resource.
Example:
>>> from google.cloud.gapic.pubsub.v1 import subscriber_api
>>> api = subscriber_api.SubscriberApi()
>>> resource = api.subscription_path('[PROJECT]', '[SUBSCRIPTION]')
>>> permissions = []
>>> response = api.test_iam_permissions(resource, permissions)
Args:
resource (string): REQUIRED: The resource for which policy detail is being requested.
Resource is usually specified as a path, such as, projects/{project}.
permissions (list[string]): The set of permissions to check for the 'resource'. Permissions with
wildcards (such as '*' or 'storage.*') are not allowed.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
request = iam_policy_pb2.TestIamPermissionsRequest(
resource=resource, permissions=permissions)
return self._test_iam_permissions(request, options)
|
|
#Created on 12 Aug 2014
#@author: neil.butcher
import sqlite3
from .MeasurementDatabase import filename
from .CurrentUnitSetter import setter
from PySide2 import QtCore
class UnitMeasurementException(Exception):
    """Raised when the measurement/unit database is inconsistent or a
    lookup fails (missing base unit, unknown or duplicate measurement name).
    """

    def __init__(self, value):
        # Keep the offending value / message for later inspection.
        self.value = value

    def __str__(self):
        # Historical behaviour: render the repr of the stored value.
        return "%r" % (self.value,)
class Measurement(object):
    """A physical measurement (e.g. length, temperature) backed by rows in
    the sqlite database at ``filename``.

    The database id, the set of Units, and the base unit are all resolved
    lazily on first access and cached on the instance.

    NOTE(review): every method opens a fresh sqlite3 connection and never
    closes it explicitly -- presumably relying on garbage collection.
    Consider ``contextlib.closing`` around the connections; confirm with
    the original author before changing commit/close semantics.
    """
    def __init__(self, name=None):
        # Name used to look the measurement up; any stored alias works.
        self._name = name
        # dict of unit id -> Unit, filled lazily by _units()
        self._unitsCache = None
        # database id of this measurement, filled lazily by _id()
        self._id_cache = None
        # the Unit flagged 'base' in the UNITS table, set by _units()
        self._baseUnitCache = None
    def __repr__(self):
        return "Measurement('" + self._name + "')"
    @property
    def baseUnit(self):
        """
        The base Unit of this measurement -- the unit base values are
        expressed in.

        :rtype: Unit
        """
        # _units() populates _baseUnitCache as a side effect.
        if self._unitsCache is None:
            self._units()
        return self._baseUnitCache
    @property
    def name(self):
        """
        The preferred display name of this measurement. When no name is
        flagged preferred in the database yet, the constructor-supplied
        name is promoted to preferred and returned.

        :rtype: str
        """
        _connection = sqlite3.connect(filename, detect_types=sqlite3.PARSE_DECLTYPES)
        cursor = _connection.execute("SELECT name FROM MEASUREMENTNAMES WHERE measurementID = ? AND preferred = 1 ",
                                     (self._id(),))
        for row in cursor:
            # First preferred row wins.
            return row[0]
        # No preferred name stored: promote the name we were created with.
        # self._id_cache is valid here because self._id() ran above.
        _connection.execute("UPDATE MEASUREMENTNAMES set preferred = 1 where measurementID = ? AND name = ? ",
                            (self._id_cache, self._name))
        _connection.commit()
        return self._name
    def setPreferredName(self, name):
        """
        Make *name* the preferred name for this measurement.

        :type name: str
        """
        if name != self.name:
            # Reading self.name above guarantees _id_cache is populated.
            _connection = sqlite3.connect(filename, detect_types=sqlite3.PARSE_DECLTYPES)
            _connection.execute("UPDATE MEASUREMENTNAMES set preferred = 0 where measurementID = ?",
                                (self._id_cache,))
            _connection.execute("UPDATE MEASUREMENTNAMES set preferred = 1 where measurementID = ? AND name = ? ",
                                (self._id_cache, name))
            _connection.commit()
    def addAlias(self, name):
        """
        Register *name* as an additional, non-preferred alias for this
        measurement.

        :type name: str
        """
        if name != self.name:
            _connection = sqlite3.connect(filename, detect_types=sqlite3.PARSE_DECLTYPES)
            _connection.execute("INSERT INTO MEASUREMENTNAMES VALUES (?,?,?) ",
                                (self._id_cache, name, 0))
            _connection.commit()
    def _units(self):
        # Load all units of this measurement into _unitsCache (id -> Unit)
        # and remember which row is the base unit. Hits the database only
        # on the first call.
        if self._unitsCache is None:
            self._unitsCache = {}
            _connection = sqlite3.connect(filename, detect_types=sqlite3.PARSE_DECLTYPES)
            cursor = _connection.execute("SELECT name , Scale , offset ,id , base FROM UNITS WHERE measurementID = ?",
                                         (self._id(),))
            for row in cursor:
                unit = Unit()
                unit.measurement = self
                unit._name = row[0]
                unit.scale = float(row[1])
                unit.offset = float(row[2])
                unit.id_cache = row[3]
                self._unitsCache[row[3]] = unit
                if row[4] == 1:
                    # 'base' column marks the reference unit.
                    self._baseUnitCache = unit
            if self._baseUnitCache is None:
                raise UnitMeasurementException("There was no unit to act as the base unit for measurement " + self._name)
        return self._unitsCache
    @property
    def units(self):
        # All Units of this measurement, in no guaranteed order.
        return list(self._units().values())
    def _id(self):
        # Resolve the database id for self._name, first against the
        # MEASUREMENTS table, then against the MEASUREMENTNAMES aliases.
        # Raises if the name is ambiguous or unknown.
        if self._id_cache is None:
            _connection = sqlite3.connect(filename, detect_types=sqlite3.PARSE_DECLTYPES)
            cursor = _connection.execute("SELECT id FROM MEASUREMENTS WHERE name = ?", (self._name,))
            for row in cursor:
                if self._id_cache is None:
                    self._id_cache = row[0]
                else:
                    raise UnitMeasurementException("There are multiple measurements with the same name")
            if self._id_cache is None:
                cursor = _connection.execute("SELECT measurementID FROM MEASUREMENTNAMES WHERE name = ?", (self._name,))
                for row in cursor:
                    if self._id_cache is None:
                        self._id_cache = row[0]
                    else:
                        raise UnitMeasurementException("There are multiple measurements with the same name")
            if self._id_cache is None:
                raise UnitMeasurementException("There was no measurements with this name in the database")
        return self._id_cache
    def currentUnit(self, label='normal'):
        """
        The Unit currently selected for this measurement under *label*.
        When no current unit is recorded, the base unit is stored as
        current and returned.

        :type label: str
        :rtype: Unit
        """
        _connection = sqlite3.connect(filename, detect_types=sqlite3.PARSE_DECLTYPES)
        cursor = _connection.execute("SELECT unitID FROM CURRENTUNITS WHERE measurementID = ? AND label = ? ",
                                     (self._id(), label))
        for row in cursor:
            return self._units()[row[0]]
        # Nothing recorded yet: default to the base unit and persist it.
        self._units()
        _connection.execute("INSERT INTO CURRENTUNITS VALUES (?,?,?) ",
                            (self._id_cache, label, self.baseUnit.id_cache))
        _connection.commit()
        return self.baseUnit
    def setCurrentUnit(self, u, label='normal'):
        """
        Record *u* as the current unit for *label*.

        :type u: Unit
        :type label: str
        """
        if u != self.currentUnit(label):
            # currentUnit() above guarantees _id_cache is populated and a
            # CURRENTUNITS row exists, so UPDATE is sufficient here.
            _connection = sqlite3.connect(filename, detect_types=sqlite3.PARSE_DECLTYPES)
            _connection.execute("UPDATE CURRENTUNITS set unitID = ? where measurementID = ? AND label = ? ",
                                (u.id_cache, self._id_cache, label))
            _connection.commit()
    def report(self, base_value, precision=6, label='normal', writeUnit=True):
        """Format *base_value* in the current unit as locale-aware text,
        optionally appending the unit name in parentheses. Non-numeric
        values fall back to plain ``str()``.
        """
        try:
            scaled_value = self.currentUnit(label).scaledValueOf(base_value)
            # NOTE(review): QLocale.toString for floats normally takes
            # (value, format, precision) positionally -- confirm the
            # keyword form works under PySide2.
            text = QtCore.QLocale().toString(scaled_value, precision=precision)
        except (ValueError, TypeError):
            text = str(base_value)
        if writeUnit:
            text = text + ' (' + self.currentUnit(label).name + ')'
        return text
    def scaledValueOf(self, base_float, label='normal'):
        # Convert a base-unit value into the current unit for *label*.
        return self.currentUnit(label).scaledValueOf(base_float)
    def baseValueFrom(self, base_float, label='normal'):
        # Convert a value in the current unit back into the base unit.
        # NOTE(review): the parameter is misleadingly named base_float --
        # it actually receives a *scaled* value.
        return self.currentUnit(label).baseValueFrom(base_float)
class Unit(object):
    """A single unit of a Measurement, defined by a linear mapping to the
    measurement's base unit:

        scaled = base / scale - offset
        base   = (scaled + offset) * scale

    Instances are created and populated by ``Measurement._units()``.
    """
    def __init__(self):
        # Cached preferred name; filled from the database.
        self._name = None
        # Owning Measurement; assigned by Measurement._units().
        self.measurement = None
        # Multiplicative factor relative to the base unit.
        self.scale = 1.0
        # Additive offset applied after scaling (e.g. temperature units).
        self.offset = 0.0
        # Database id of this unit.
        self.id_cache = 0
    def __repr__(self):
        return "Unit(" +str(self.measurement) + ",'" + self.name + "')"
    def scaledValueOf(self, base_float):
        # Convert an absolute value from the base unit into this unit.
        return (base_float / self.scale ) - self.offset
    def baseValueFrom(self, scaled_float):
        # Inverse of scaledValueOf: this unit back to the base unit.
        return (scaled_float + self.offset) * self.scale
    def scaledDeltaValueOf(self, base_float):
        # Scale a *change* in the measurement (rather than an absolute
        # value), so the offset is ignored:
        # e.g. a change of 1 Kelvin == a change of 1 degC.
        return (base_float / self.scale )
    def baseDeltaValueFrom(self, scaled_float):
        # Inverse of scaledDeltaValueOf: a delta in this unit back to a
        # delta in the base unit (offset ignored).
        return scaled_float * self.scale
    @property
    def baseUnit(self):
        """
        The base unit of the owning measurement.

        :rtype: Unit
        """
        return self.measurement.baseUnit
    def currentUnit(self, label='normal'):
        """
        The unit currently selected for the owning measurement.

        :type label: str
        :rtype: Unit
        """
        return self.measurement.currentUnit(label=label)
    def becomeCurrentNormalUnit(self):
        # Make this unit the current 'normal' unit via the shared
        # CurrentUnitSetter instance.
        setter.setMeasurementUnit(self.measurement, self)
    def alias(self):
        # All names recorded for this unit (preferred and otherwise).
        res = []
        _connection = sqlite3.connect(filename, detect_types=sqlite3.PARSE_DECLTYPES)
        cursor = _connection.execute("SELECT name FROM UNITNAMES WHERE unitID = ?",
                                     (self.id_cache,))
        for row in cursor:
            res.append(row[0])
        return res
    def addAlias(self, name):
        """
        Register *name* as an additional, non-preferred alias for this
        unit.

        :type name: str
        """
        if name != self.name:
            _connection = sqlite3.connect(filename, detect_types=sqlite3.PARSE_DECLTYPES)
            _connection.execute("INSERT INTO UNITNAMES VALUES (?,?,?) ",
                                (self.id_cache, name, 0))
            _connection.commit()
    @property
    def name(self):
        """
        The preferred display name of this unit. When no name is flagged
        preferred yet, the cached name is promoted to preferred and
        returned.

        :rtype: str
        """
        _connection = sqlite3.connect(filename, detect_types=sqlite3.PARSE_DECLTYPES)
        cursor = _connection.execute("SELECT name FROM UNITNAMES WHERE unitID = ? AND preferred = ? ",
                                     (self.id_cache, 1))
        for row in cursor:
            return row[0]
        # No preferred row: promote the cached name.
        _connection.execute("UPDATE UNITNAMES set preferred = ? where unitID = ? AND name = ? ",
                            (1, self.id_cache, self._name))
        _connection.commit()
        return self._name
    def setPreferredName(self, name):
        """
        Make *name* the preferred name for this unit.

        :type name: str
        """
        if name != self.name:
            _connection = sqlite3.connect(filename, detect_types=sqlite3.PARSE_DECLTYPES)
            _connection.execute("UPDATE UNITNAMES set preferred = ? where unitID = ?",
                                (0, self.id_cache,))
            _connection.execute("UPDATE UNITNAMES set preferred = ? where unitID = ? AND name = ? ",
                                (1, self.id_cache, name))
            _connection.commit()
|
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Collection
# |
# +--UseCollection
# |
# +--ArchCollection
# |
# +--LocalFlagCollection
#
# MajorArch derives from CollectionWithFlag, which is a subclass of Flag and
# Collection. (maybe CollectionWithFlag and MajorArch should be collapsed?)
"""
Provides the build configuration as special dictionaries that directly
export their namespaces.
"""
import itertools
#conary
from conary.deps import arch, deps
from conary.lib import log
from conary.errors import CvcError
class Flag(dict):
    """A single named boolean build flag.

    A Flag knows its parent Collection, whether it is 'required' when
    converted into a dependency sense, and -- when tracking is enabled --
    whether it has been *used* (evaluated in a boolean context).

    NOTE: this module is Python 2 code (``__nonzero__``, ``itervalues``
    and the ``raise E, msg`` statement form are used throughout).
    """
    def __init__(self, name, parent=None, value=False,
                 required=True, track=False, path=None, platform=False):
        self._name = name
        self._value = value
        self._parent = parent
        # required controls the dependency sense in _getDepSense().
        self._required = required
        # When tracking, boolean evaluation marks the flag as used.
        self._tracking = track
        self._used = False
        # Optional alternate name, set via Collection._addAlias().
        self._alias = None
        self._path = path
        # True when the flag comes from platform definitions.
        self._platform = platform
    def __repr__(self):
        # Short name only; __str__ gives the dotted full name.
        if self._alias:
            return "%s (alias %s): %s" % (self._name, self._alias, self._value)
        else:
            return "%s: %s" % (self._name, self._value)
    def __str__(self):
        if self._alias:
            return "%s (alias %s): %s" % (self._fullName(), self._alias,
                                          self._value)
        else:
            return "%s: %s" % (self._fullName(), self._value)
    def setShortDoc(self, doc):
        # XXX we don't do anything with this documentation currently.
        self._shortDoc = doc
    def setRequired(self, value=True):
        self._required = value
    def setPlatform(self, value=True):
        self._platform = value
    def isPlatformFlag(self):
        return self._platform
    def _set(self, value=True):
        # Set the value without any use-tracking side effects.
        self._value = value
    def _get(self):
        """ Grab value without tracking """
        return self._value
    def _fullName(self):
        # Dotted path from the topmost parent down to this flag.
        return ('.'.join(x._name for x in self._reverseParents())
                + '.' + self._name)
    def _reverseParents(self):
        # Yield ancestors from the topmost parent down to the direct parent.
        if self._parent is not None:
            for parent in self._parent._reverseParents():
                yield parent
            yield self._parent
    def _getDepSense(self):
        # Map (value, required) onto a conary dependency flag sense.
        if self._get():
            if self._required:
                return deps.FLAG_SENSE_REQUIRED
            else:
                return deps.FLAG_SENSE_PREFERRED
        else:
            return deps.FLAG_SENSE_PREFERNOT
    def _toDependency(self):
        """ Returns an actual Dependency Set consisting of only this flag """
        raise NotImplementedError
    def _resetUsed(self):
        self._used = False
    def _trackUsed(self, value):
        self._tracking = value
    # --- boolean operations on Flags ---
    def __nonzero__(self):
        # Python 2 truth hook: reading the value in a boolean context
        # records the flag as used when tracking is on.
        if self._tracking:
            self._setUsed(True)
        return self._value
    def _setUsed(self, used=True):
        self._used = used
    def __eq__(self, other):
        # Flags compare by truth value against other Flags or bools; the
        # bool() calls trigger use-tracking on both sides.
        if not isinstance(other, (Flag, bool)):
            return False
        return bool(self) == bool(other)
    def __ne__(self, other):
        return not self.__eq__(other)
    def __ror__(self, other):
        return bool(self) | other
    def __or__(self, other):
        return self.__ror__(other)
    def __rand__(self, other):
        return bool(self) & other
    def __and__(self, other):
        return self.__rand__(other)
class Collection(dict):
    """A named dict of Flags (or nested Collections) with attribute-style
    access, aliasing, strict/lenient missing-key behaviour, and use
    tracking.

    NOTE: Python 2 code -- ``raise E, msg`` syntax, ``itervalues`` and
    list-returning ``keys()`` are relied upon below.
    """
    def __init__(self, name, parent=None, track=False):
        self._name = name
        self._parent = parent
        # Strict mode raises on unknown keys; see _getNonExistantKey().
        self._strictMode = True
        # Tracking is inherited by flags added via _addFlag().
        self._tracking = track
        # Extra attributes (aliases, arch props) kept outside the dict.
        self._attrs = {}
    def _addAlias(self, realKey, alias):
        """ Add a second way to access the given item.
            Necessary if the actual name for a flag is not a valid
            python identifier.
        """
        if alias in self or alias in self._attrs:
            raise RuntimeError, 'alias is already set'
        elif self[realKey]._alias:
            raise RuntimeError, 'key %s already has an alias' % realKey
        else:
            self._setAttr(alias, self[realKey])
            self[realKey]._alias = alias
    def _setAttr(self, name, value):
        """ A generic way to add a temporary attribute to this collection.
            Attributes stored in this manner will be removed when the
            collection is cleared, but are not tracked like flags.
        """
        self._attrs[name] = value
    def _delAttr(self, name):
        del self._attrs[name]
    def _getAttr(self, name):
        return self._attrs[name]
    def _addFlag(self, key, *args, **kw):
        # Create a child of the collection-specific type, defaulting the
        # child's tracking to this collection's tracking state.
        if 'track' not in kw:
            kw = kw.copy()
            kw['track'] = self._tracking
        dict.__setitem__(self, key, self._collectionType(key, self,
                                                         *args, **kw))
    def __repr__(self):
        return "%s: {%s}" % (self._name,
                             ', '.join((repr(x) for x in self.values())))
    def __nonzero__(self):
        # Collections deliberately have no truth value (only Flags do).
        raise RuntimeError(
            'Cannot compare collection as True/False')
    def _clear(self):
        # NOTE(review): deleting while iterating keys() is safe only under
        # Python 2, where keys() returns a list snapshot.
        for flag in self.keys():
            del self[flag]
        for attr in self._attrs.keys():
            del self._attrs[attr]
    def __getattr__(self, key):
        # Resolution order: instance dict, flags, extra attrs; unknown
        # dunder/underscore names raise, anything else is delegated to
        # _getNonExistantKey (strict vs lenient behaviour).
        if key in self.__dict__:
            return self.__dict__[key]
        if key in self:
            return self[key]
        if key in self._attrs:
            return self._getAttr(key)
        if key[0] == '_':
            raise AttributeError, key
        return self._getNonExistantKey(key)
    def __getitem__(self, key):
        # Extra attrs (aliases) shadow real flag entries.
        if key in self._attrs:
            return self._attrs[key]
        else:
            return dict.__getitem__(self, key)
    def __setattr__(self, key, value):
        # Only private bookkeeping attributes may be assigned; flags are
        # set through _set()/_addFlag(), never by plain assignment.
        if key[0] == '_':
            self.__dict__[key] = value
        else:
            raise RuntimeError, "Cannot set value of flags: %s" % key
    def _getNonExistantKey(self, key):
        """ Method that is called when a nonexistent key is accessed.
            Overridden by subclasses to allow for useful error messages
            or default key values to be supplied """
        raise AttributeError, key
    def _iterAll(self):
        # Depth-first iteration over all leaf Flags.
        for child in self.itervalues():
            if isinstance(child, Collection):
                for flag in child._iterAll():
                    yield flag
            else:
                yield child
    def _setStrictMode(self, value=True):
        """ Strict mode determines whether you receive an error or
            an empty flag upon accessing a nonexistent flag
        """
        self._strictMode = value
    def _reverseParents(self):
        """ Traverse through the parents from the topmost parent down. """
        if self._parent is not None:
            for parent in self._parent._reverseParents():
                yield parent
            yield self._parent
    # -- Tracking Commands --
    def _trackUsed(self, value=True):
        # Enable/disable tracking here and recursively in all children.
        self._tracking = value
        for child in self.itervalues():
            child._trackUsed(value)
    def _resetUsed(self):
        for child in self.itervalues():
            child._resetUsed()
    def _getUsed(self):
        return [ x for x in self._iterUsed() ]
    def _iterUsed(self):
        # Depth-first iteration over leaf Flags that were marked used.
        for child in self.itervalues():
            if isinstance(child, Collection):
                for flag in child._iterUsed():
                    yield flag
            else:
                if child._used:
                    yield child
class CollectionWithFlag(Flag, Collection):
    """A Collection that is itself a Flag: it carries its own boolean
    value *and* holds child flags. Currently only has one child class,
    MajorArch.
    """
    def __init__(self, name, parent, track=False):
        # Initialise both bases explicitly; each keeps its own state.
        Flag.__init__(self, name, parent, track=track)
        Collection.__init__(self, name, parent, track=track)
    def _trackUsed(self, value=True):
        # Tracking applies to the flag itself and to every child.
        Flag._trackUsed(self, value)
        Collection._trackUsed(self, value)
    def _resetUsed(self):
        Flag._resetUsed(self)
        Collection._resetUsed(self)
    def _iterUsed(self):
        # This node first (when used), then the used children.
        if self._used:
            yield self
        for used in Collection._iterUsed(self):
            yield used
    def _iterAll(self):
        # This node first, then every child flag.
        yield self
        for item in Collection._iterAll(self):
            yield item
    def __repr__(self):
        body = ', '.join(repr(v) for v in self.values())
        return "%s: %s {%s}" % (self._name, self._value, body)
class NoSuchUseFlagError(CvcError):
    """Raised in strict mode when an undefined Use.<flag> is accessed."""
    def __init__(self, key):
        # Name of the missing use flag, interpolated into the message.
        self.key = key
    def __str__(self):
        return """
        An unknown use flag, Use.%s, was accessed. The default behavior
        of conary is to complain about the missing flag, since it may be
        a typo. You can add the flag /etc/conary/use/%s, or
        ${HOME}/.conary/use/%s, or use the --unknown-flags option on
        the command line to make conary assume that all unknown flags are
        not relevant to your system.
        """ % ((self.key,) * 3)
class NoSuchArchFlagError(CvcError):
    """Raised in strict mode when an undefined Arch.<arch> is accessed."""
    def __init__(self, key):
        # Name of the missing architecture, interpolated into the message.
        self.key = key
    def __str__(self):
        return """
        An unknown architecture, Arch.%s, was accessed. The default
        behavior of conary is to complain about the missing flag,
        since it may be a typo. You can add the architecture
        /etc/conary/arch/%s or ${HOME}/.conary/arch/%s, or
        use the --unknown-flags option on the command line to make
        conary assume that all unknown flags are not relevant to
        your system.
        """ % ((self.key,) * 3)
class NoSuchSubArchFlagError(CvcError):
    """Raised in strict mode when an undefined Arch.<arch>.<subarch> is
    accessed.
    """
    def __init__(self, majArch, key):
        # Enclosing major architecture and the missing sub-arch name.
        self.majArch = majArch
        self.key = key
    def __str__(self):
        majArch, key = self.majArch, self.key
        return """
        An unknown sub architecture, Arch.%s.%s was accessed. The default
        behavior of conary is to complain about the missing flag, since it
        may be a typo. You can add the subarchitecture /etc/conary/arch/%s
        or $(HOME)/.conary/architecture/%s, or use the --unknown-flags
        option on the command line to make conary assume that all unknown flags are
        not relevant to your system.
        """ % (majArch, key, majArch, majArch)
##########ARCH STUFF HERE######################################
class ArchCollection(Collection):
    """The top-level ``Arch`` collection: holds one MajorArch per known
    architecture, of which at most one is 'current', plus cross-arch
    properties (archProps) such as endianness or word size.
    """
    def __init__(self):
        # Names of the required cross-arch properties; see _setArchProps().
        self._archProps = []
        # Children created via _addFlag() are MajorArch instances.
        self._collectionType = MajorArch
        Collection.__init__(self, 'Arch')
    def _getNonExistantKey(self, key):
        # Strict mode: unknown architecture is an error. Lenient mode:
        # create an untracked, lenient placeholder MajorArch.
        if self._strictMode:
            raise NoSuchArchFlagError(key)
        else:
            self._addFlag(key, track=False)
            self[key]._setStrictMode(False)
            return self[key]
    def _setArch(self, majArch, subArches=None):
        """ Set the current build architecture and subArches.
            All other architectures are set to false, and not
            tracked.
        """
        found = False
        for key in self:
            if key == majArch:
                self[key]._set(True, subArches)
                # Copy the arch's property values up to this collection.
                self._setArchPropValues(self[key])
                found = True
            else:
                self[key]._set(False)
        if not found:
            raise AttributeError, "No Such Arch %s" % majArch
    def _setArchProps(self, *archProps):
        """ Sets the required arch properties.
            archProps are flags at the Arch level that describe
            cross-architecture features, such as endianess or
            whether the arch is 32 or 64 bit oriented.
            For the current definition of required archProps, see flavorCfg.
        """
        # Drop any previously registered properties, then register the
        # new set, defaulting each to False.
        for archProp in self._archProps:
            try:
                self._delAttr(archProp)
            except KeyError:
                pass
        self._archProps = archProps[:]
        for archProp in self._archProps:
            self._setAttr(archProp, False)
    def _setArchPropValues(self, majArch):
        """
        archProps are flags at the Arch level that describe
        cross-architecture features, such as endianess or
        whether the arch is 32 or 64 bit oriented.
        For the current definition of required archProps, see flavorCfg.
        """
        archProps = majArch._archProps.copy()
        # The arch must provide exactly the registered property set.
        extraKeys = tuple(set(archProps.keys()) - set(self._archProps))
        missingKeys = tuple(set(self._archProps) - set(archProps.keys()))
        if extraKeys:
            raise RuntimeError, \
                'Extra arch properties %s provided by %s' % (extraKeys, majArch)
        if missingKeys:
            raise RuntimeError, \
                'Missing arch properties %s not provided by %s' % (missingKeys,
                                                                   majArch)
        for archProp, value in archProps.iteritems():
            self._setAttr(archProp, value)
    def _iterAll(self):
        """ Only iterate over the current architecture. This is
            almost always what you want, otherwise it's easy enough
            to manually go through the architectures
        """
        for child in self.itervalues():
            if child._get():
                for flag in child._iterAll():
                    yield flag
    def _getAttr(self, name):
        currentArch = self.getCurrentArch()
        # when getting an architecture prop like bits64,
        # set the architecture flag if tracking is on
        if currentArch is not None:
            bool(currentArch)
        return Collection._getAttr(self, name)
    def _getMacro(self, key):
        """ return the given macro value, as determined by the active arch flags
        """
        arch = self.getCurrentArch()
        if arch is None:
            return None
        return arch._getMacro(key)
    def _getMacros(self):
        """ return the macros defined by the current architecture
        """
        arch = self.getCurrentArch()
        if arch is None:
            return None
        return arch._getMacros()
    def getCurrentArch(self):
        # The MajorArch whose value is True, or None if none is set.
        for majarch in self.itervalues():
            if majarch._get():
                return majarch
class MajorArch(CollectionWithFlag):
    """A major architecture (e.g. x86): a flag in its own right plus a
    collection of SubArch children, with per-arch properties and macros.
    """
    def __init__(self, name, parent, track=False, archProps=None, macros=None,
                 platform=False):
        # Children created via _addFlag() are SubArch instances.
        self._collectionType = SubArch
        if archProps:
            self._archProps = archProps.copy()
        else:
            self._archProps = {}
        if not macros:
            self._macros = {}
        else:
            self._macros = macros
        self._platform = platform
        CollectionWithFlag.__init__(self, name, parent, track=track)
    def _setUsed(self, used=True):
        CollectionWithFlag._setUsed(self, used)
        # if we are not the current architecture, find
        # the current architecture and set it
        if used and not self._get():
            currentArch = self._parent.getCurrentArch()
            currentArch._setUsed()
    def _getMacro(self, key):
        # Active sub-arches may override the major arch's macro value.
        for subArch in self.itervalues():
            if subArch._get() and key in subArch._macros:
                return subArch._macros[key]
        return self._macros[key]
    def _getMacros(self):
        # Major-arch macros, overlaid with those of active sub-arches.
        macros = self._macros.copy()
        for subArch in self.itervalues():
            if subArch._get():
                macros.update(subArch._macros)
        return macros
    def _getNonExistantKey(self, key):
        # Strict mode: unknown sub-arch is an error; lenient mode creates
        # a placeholder SubArch.
        if self._strictMode:
            raise NoSuchSubArchFlagError(self._name, key)
        else:
            self._addFlag(key)
            return self[key]
    def _set(self, value=True, subArches=None):
        """ Allows you to set the value of this arch, and also set the
            values of the subArches.
            XXX hmmm...should there be a difference between subArches=None,
            and subArches=[]? Maybe this is too complicated, and you should
            just have to set the subarches yourself.
        """
        if not subArches:
            subArches = []
        self._value = value
        # Turn off every sub-arch that was not requested.
        for subArch in self:
            if subArches and subArch in subArches:
                continue
            self[subArch]._set(False)
        # Skip sub-arches that are subsumed by another requested sub-arch.
        subsumed = {}
        for subArch in subArches:
            subsumed.update(dict.fromkeys(self[subArch]._subsumes))
        for subArch in subArches:
            if subArch in subsumed:
                continue
            self[subArch]._set()
    def _toDependency(self, depType=deps.InstructionSetDependency):
        # NOTE(review): the local name 'set' shadows the builtin, and
        # 'sense' is computed but never applied to the Dependency --
        # confirm whether the empty flag list is intentional.
        set = deps.Flavor()
        sense = self._getDepSense()
        dep = deps.Dependency(self._name, [])
        set.addDep(depType, dep)
        return set
    def _trackUsed(self, value=True):
        # Delegates unchanged to the base class.
        CollectionWithFlag._trackUsed(self, value=value)
    def _iterUsed(self):
        # Only the current architecture reports used flags.
        if self._get():
            return CollectionWithFlag._iterUsed(self)
        return []
class SubArch(Flag):
    """A sub-architecture flag (child of a MajorArch).

    subsumes and macros are stored by reference when provided
    (NOTE(review): not copied -- confirm callers do not mutate them).
    """

    def __init__(self, name, parent, track=False, subsumes=None,
                 macros=None):
        if not subsumes:
            self._subsumes = []
        else:
            self._subsumes = subsumes
        if not macros:
            self._macros = {}
        else:
            self._macros = macros
        # Sub-arch flags are always 'required' in the dep sense.
        Flag.__init__(self, name, parent, required=True, track=track)

    def _setUsed(self, used=True):
        """Mark used; if our parent arch is inactive, also mark the
        currently active architecture as used."""
        Flag._setUsed(self, used)
        # if we are not the current architecture, find
        # the current architecture and set it
        if used and not self._parent._get():
            currentArch = self._parent._parent.getCurrentArch()
            currentArch._setUsed()

    def _toDependency(self, depType=deps.InstructionSetDependency):
        """ Creates a Flavor dep set with the subarch in it.
            Also includes any subsumed subarches if the
            value of this subarch is true
            (better comment about why we do that here)
        """
        set = deps.Flavor()
        sense = self._getDepSense()
        depFlags = [ (self._name, sense) ]
        parent = self._parent
        if self._get():
            # Subsumed sub-arches carry the same sense as this one.
            depFlags.extend((parent[x]._name, sense) \
                            for x in self._subsumes)
        # The dependency is named after the parent (major) architecture.
        dep = deps.Dependency(parent._name, depFlags)
        set.addDep(depType, dep)
        return set
####################### USE STUFF HERE ###########################
class UseFlag(Flag):
    """A single Use flag; knows how to express itself as a dependency."""

    def _toDependency(self):
        """Return a Flavor containing this flag as a 'use' dependency."""
        flavor = deps.Flavor()
        flagPairs = [(self._name, self._getDepSense())]
        flavor.addDep(deps.UseDependency,
                      deps.Dependency('use', flagPairs))
        return flavor
class UseCollection(Collection):
    """The collection of Use flags (bound to the module-level ``Use``)."""

    _collectionType = UseFlag

    def __init__(self):
        Collection.__init__(self, 'Use')

    def _getNonExistantKey(self, key):
        """Unknown flag: raise in strict mode, else create on demand."""
        if not self._strictMode:
            self._addFlag(key)
            return self[key]
        raise NoSuchUseFlagError(key)
####################### LOCALFLAG STUFF HERE ###########################
class LocalFlag(Flag):
    """A recipe-local flag whose value may be pinned by a flavor override."""

    def __init__(self, name, parent, track=False, required=False, path=None,
                 platform=False):
        Flag.__init__(self, name, parent, track=track, required=required,
                      path=path, platform=platform)
        self._override = False

    def _set(self, value=True, override=False):
        """Set the flag; plain sets are ignored once an override is active."""
        if override or not self._override:
            self._value = value
            self._override = override

    def _toDependency(self, recipeName):
        """Return a Flavor naming this flag as ``<recipeName>.<flag>``."""
        qualifiedName = '.'.join((recipeName, self._name))
        flavor = deps.Flavor()
        flavor.addDep(
            deps.UseDependency,
            deps.Dependency('use', [(qualifiedName, self._getDepSense())]))
        return flavor
class LocalFlagCollection(Collection):
    """Collection of recipe-local flags (bound to module-level ``Flags``)."""

    def __init__(self):
        self._collectionType = LocalFlag
        Collection.__init__(self, 'Flags')

    def _override(self, key, value):
        """Pin *key* to *value* so later plain _set() calls are ignored."""
        if key not in self:
            self._addFlag(key)
        self[key]._set(value, override=True)

    def _getNonExistantKey(self, key):
        """Local flags are never auto-created on read."""
        raise AttributeError('No such local flag %s' % key)

    def __setattr__(self, key, value):
        # Underscore names are internal attributes; anything else is a
        # flag, created on first assignment.
        if key.startswith('_'):
            self.__dict__[key] = value
            return
        if key not in self:
            self._addFlag(key)
        self[key]._set(value)
####################### Package Local Flags Here ###################
class PackageFlagCollection(Collection):
    """Per-package flag collections, created lazily on first access.

    NOTE(review): the ``track`` constructor argument is accepted but
    never used.
    """

    def __init__(self, track=False):
        self._collectionType = PackageFlagPackageCollection
        Collection.__init__(self, 'PackageFlags')

    def __getitem__(self, key):
        if key in self:
            return Collection.__getitem__(self, key)
        return self._getNonExistantKey(key)

    def _getNonExistantKey(self, key):
        """Create the sub-collection for *key*, non-strict by default."""
        self._addFlag(key)
        newCollection = self[key]
        newCollection._setStrictMode(False)
        return newCollection
class PackageFlagPackageCollection(Collection):
    """Flags belonging to one package; unknown flags appear on demand.

    NOTE(review): ``track`` is accepted but not forwarded to Collection.
    """

    def __init__(self, name, parent, track=False):
        self._collectionType = PackageFlag
        Collection.__init__(self, name, parent)

    def __getitem__(self, key):
        if key in self:
            return Collection.__getitem__(self, key)
        return self._getNonExistantKey(key)

    def _getNonExistantKey(self, key):
        """Create flag *key* on first access."""
        self._addFlag(key)
        return self[key]
class PackageFlag(LocalFlag):
    """Per-package flag; behaves exactly like LocalFlag."""
def allowUnknownFlags(value=True):
    """Toggle strict mode on the global flag collections.

    When *value* is True, unknown Use/Arch flags are created on demand
    instead of raising.
    """
    Use._setStrictMode(not value)
    Arch._setStrictMode(not value)
    for majArch in Arch.values():
        # Bug fix: the loop previously re-applied strict mode to Arch
        # itself instead of to each major architecture.
        majArch._setStrictMode(not value)
def setUsed(flagList):
    """Mark every flag in *flagList* as used (sets _used directly,
    bypassing any _setUsed() hooks)."""
    for flagObj in flagList:
        flagObj._used = True
def resetUsed():
    """Clear the used-flag tracking state on all global collections."""
    for collection in (Use, Arch, LocalFlags):
        collection._resetUsed()
def clearFlags():
    """ Remove all build flags so that the set of flags can
        be repopulated
    """
    for collection in (Use, Arch, LocalFlags, PackageFlags):
        collection._clear()
def clearLocalFlags():
    """Remove all recipe-local flags (Use/Arch/PackageFlags untouched)."""
    LocalFlags._clear()
def track(value=True):
    """Enable (or disable) used-flag tracking on Arch, Use and LocalFlags."""
    for collection in (Arch, Use, LocalFlags):
        collection._trackUsed(value)
def iterAll():
    """Iterate over every flag in every collection, used or not."""
    allIterators = (Arch._iterAll(), Use._iterAll(),
                    LocalFlags._iterAll(), PackageFlags._iterAll())
    return itertools.chain(*allIterators)
def getUsed():
    """Return a list of all flags currently marked as used."""
    # list() replaces the redundant identity comprehension.
    return list(iterUsed())
def iterUsed():
    """Iterate the used flags from Arch, Use and LocalFlags."""
    usedIterators = (Arch._iterUsed(), Use._iterUsed(),
                     LocalFlags._iterUsed())
    return itertools.chain(*usedIterators)
def usedFlagsToFlavor(recipeName):
    """Build a Flavor from the flags used so far; local flags are
    expressed relative to *recipeName*."""
    return createFlavor(recipeName, iterUsed())
def allFlagsToFlavor(recipeName):
    """Build a Flavor from every known flag; local flags are expressed
    relative to *recipeName*."""
    return createFlavor(recipeName, iterAll())
def localFlagsToFlavor(recipeName):
    """Build a Flavor from the recipe-local flags of *recipeName* only."""
    return createFlavor(recipeName, LocalFlags._iterAll())
def platformFlagsToFlavor(recipeName=None):
    """Build a Flavor from every platform flag in Use, PackageFlags and
    LocalFlags.

    recipeName may be None when no local flags are involved.
    """
    # Comprehension replaces the manual append loop.
    flags = [flag
             for flag in itertools.chain(Use._iterAll(),
                                         PackageFlags._iterAll(),
                                         LocalFlags._iterAll())
             if flag.isPlatformFlag()]
    return createFlavor(recipeName, flags, error=False)
def createFlavor(recipeName, *flagIterables, **kw):
    """ create a dependency set consisting of all of the flags in the
        given flagIterables.  Note that is a broad category that includes
        lists, iterators, etc. RecipeName is the recipe which local flags
        should be relative to, can be set to None if there are definitely
        no local flags in the flagIterables.

        Keyword arguments:
        targetDep -- use TargetInstructionSetDependency for arch deps
                     instead of InstructionSetDependency (default False)
    """
    targetDep = kw.pop('targetDep', False)
    if targetDep:
        depType = deps.TargetInstructionSetDependency
    else:
        depType = deps.InstructionSetDependency
    # Cleanup: removed dead locals (majArch, archFlags, subsumed,
    # useFlags) and renamed the accumulator so it no longer shadows the
    # builtin `set`.
    flavor = deps.Flavor()
    for flag in itertools.chain(*flagIterables):
        flagType = type(flag)
        if flagType == MajorArch:
            # Inactive major architectures contribute nothing.
            if not flag._get():
                continue
            flavor.union(flag._toDependency(depType=depType))
        elif flagType == SubArch:
            flavor.union(flag._toDependency(depType=depType))
        elif flagType == UseFlag:
            flavor.union(flag._toDependency())
        elif flagType == LocalFlag:
            # Local flags are meaningless without a recipe to scope them.
            assert(recipeName)
            flavor.union(flag._toDependency(recipeName))
        elif flagType == PackageFlag:
            flavor.union(flag._toDependency(flag._parent._name))
    return flavor
crossFlavor = deps.parseFlavor('cross')
def setBuildFlagsFromFlavor(recipeName, flavor, error=True, warn=False,
                            useCross=True):
    """ Sets the truth of the build Flags based on the build flavor.
        All the set build flags must already exist.  Flags not mentioned
        in this flavor will be untouched.
        XXX should this create flags as well as set them?  Problem with that
        is that we don't know whether the flag is required or not based
        on the flavor; we would only be able to do as half-baked job

        recipeName -- recipe that local (dotted) flags are relative to;
                      may be None when no local flags are expected
        error      -- raise on unknown Use flags / missing trove name
        warn       -- (when not error) log unknown Use flags instead
        useCross   -- prefer target-IS deps over plain IS deps when both
                      are present
    """
    crossCompiling = False
    for depGroup in flavor.getDepClasses().values():
        if isinstance(depGroup, deps.UseDependency):
            for dep in depGroup.getDeps():
                for flag, sense in dep.flags.iteritems():
                    # REQUIRED/PREFERRED senses mean "on"; anything else
                    # (disallowed/not-preferred) means "off".
                    if sense in (deps.FLAG_SENSE_REQUIRED,
                                 deps.FLAG_SENSE_PREFERRED):
                        value = True
                    else:
                        value = False
                    # see if there is a . -- indicating this is a
                    # local flag
                    if flag == 'cross':
                        crossCompiling = True
                    parts = flag.split('.',1)
                    if len(parts) == 1:
                        # Plain (global) Use flag.
                        try:
                            Use[flag]._set(value)
                        except KeyError:
                            if error:
                                raise AttributeError(
                                            "No Such Use Flag %s" % flag)
                            elif warn:
                                log.warning(
                                    'ignoring unknown Use flag %s' % flag)
                            continue
                    else:
                        # Dotted flag: package-local.
                        packageName, flag = parts
                        PackageFlags[packageName][flag]._set(value)
                        if recipeName:
                            if packageName == recipeName:
                                # local flag values set from a build flavor
                                # are overrides -- the recipe should not
                                # change these values
                                LocalFlags._override(flag, value)
                        elif error:
                            raise RuntimeError('Trying to set a flavor with '
                                               'localflag %s when no trove '
                                               'name was given' % flag)
        elif isinstance(depGroup, (deps.InstructionSetDependency,
                                   deps.TargetInstructionSetDependency)):
            if isinstance(depGroup, deps.InstructionSetDependency):
                hasTargetISDep = flavor.getDepClasses().get(
                                            deps.DEP_CLASS_TARGET_IS, None)
                if hasTargetISDep and useCross:
                    # use target instruction set dependency instead
                    continue
            elif not useCross:
                continue
            # NOTE(review): `found` is assigned but never used.
            found = False
            try:
                majorArch = arch.getMajorArch(depGroup.getDeps())
            except arch.IncompatibleInstructionSets, e:
                raise RuntimeError(str(e))
            if majorArch is None:
                # No IS deps?
                return
            # Collect the sub-arches the flavor turns on for this arch.
            subarches = []
            for (flag, sense) in majorArch.flags.iteritems():
                if sense in (deps.FLAG_SENSE_REQUIRED,
                             deps.FLAG_SENSE_PREFERRED):
                    subarches.append(flag)
            Arch._setArch(majorArch.name, subarches)
    Arch._setAttr('crossCompiling', crossCompiling)
def setArchFlags(name, flavor):
    """Make use.Arch match *flavor*: clear every arch flag first, then
    apply the flavor (unknown flags are tolerated)."""
    for archFlag in Arch._iterAll():
        archFlag._set(False)
    setBuildFlagsFromFlavor(name, flavor, error=False)
# Module-level singleton flag collections shared by the build code.
Arch = ArchCollection()
Use = UseCollection()
LocalFlags = LocalFlagCollection()
PackageFlags = PackageFlagCollection()
|
|
##################################################################
# BrundleFuzzClient.py
# The core (Python) reads the feedback information from
# the PinTool (C++) from the shared memory
##################################################################
import sys
import os
import mmap
import subprocess
from array import array
import logging
import logging.handlers
from datetime import datetime
from ConfigParser import SafeConfigParser
try:
import cPickle as pickle
except:
import pickle
# This is necessary because a MutationObject will
# be unserialized from the server.
# It needs to exist in this namespace
from helpers.common import MutationObject
from helpers.utils import Utils
from helpers.crash_analysis import CrashAnalysis
from helpers.rpc_client import BrundleFuzzRpcClient
from helpers.fileops import FileOperations
from helpers.aesthetics import Aesthetics
# Some nice named constants
# Priority assigned to a MutationObject whose input crashed the target.
CAUSED_CRASH = 3
class BrundleFuzzClient(object):
    """Fuzzing client core.

    Pulls serialized MutationObjects from the RPC server, writes each
    mutation to disk, runs the target under PIN, reads the execution
    bitmap the PinTool wrote into shared memory, and sends the
    annotated MutationObject back to the server.
    """
    def __init__(self):
        self.debug = False
        self.root_dir = os.path.dirname(os.path.abspath(__file__))
        self.mutations_dir = os.path.join(self.root_dir, 'mutations')
        self.hangs_dir = os.path.join(self.root_dir, 'hangs')
        self.cfg = self._initialize_config()
        self.ml = self._initialize_logging()
        self.mo = None  # MutationObject currently under evaluation
        # Shared memory
        self.shm = None
        self.shm_size = 0
        self.bitmap_size = 65536
        self.fd = 0
        # PIN command line
        self.cmd_l = []
        # Setup helpers
        self.ae = Aesthetics(self)
        self.utils = Utils(self)
        self.fileops = FileOperations(self)
        self.crash_analysis = CrashAnalysis(self)
        self.rpc_client = BrundleFuzzRpcClient(self)
        self._initialize_shared_memory()
        self._initialize_pin_cmd()

    def _initialize_config(self):
        """
        This config will be shared with helper
        modules via the parent attribute
        """
        cfg = SafeConfigParser()
        cfg.read('config.ini')
        return cfg

    def _initialize_logging(self):
        """
        Printing to console is dirty; log to a rotating file instead.
        """
        main_logger = logging.getLogger('main')
        log_filename = os.path.join('logs', 'log.txt')
        main_logger.setLevel(logging.DEBUG)
        # One active 1 MB log plus one rotated backup
        # (comment previously claimed 5 backups; backupCount is 1)
        handler = logging.handlers.RotatingFileHandler(
            log_filename,
            maxBytes = 1024 * 1024,
            backupCount = 1
            )
        main_logger.addHandler(handler)
        return main_logger

    def _initialize_shared_memory(self):
        """
        This is the IPC channel between us (Python)
        and the PinTool (C/C++)
        """
        s_uint32 = self.utils.get_size_uint32()
        shm_name = "/tmp/NaFlSharedMemory"
        self.shm_size = self.bitmap_size * s_uint32  # architecture dependent :)
        self.fd = open(shm_name, 'a+b')
        # "Stretch" the file to be mapped
        self.fd.write("\x00" * self.shm_size)

    def _initialize_pin_cmd(self):
        """
        Initializes fuzzing parameters with
        information stored in a config file
        """
        self.cmd_l.append(self.cfg.get('pin_info', 'pin_bat'))
        self.cmd_l.append('-t')
        self.cmd_l.append(self.cfg.get('pin_info', 'pintool'))
        self.cmd_l.append('-timer')
        self.cmd_l.append(self.cfg.get('pin_info', 'timeout'))
        self.cmd_l.append('-module')
        self.cmd_l.append(self.cfg.get('target_info', 'module').lower())
        self.cmd_l.append('--')
        self.cmd_l.append(self.cfg.get('target_info', 'filename'))
        # Parse the cmd options
        try:
            _options = self.cfg.get('target_info', 'cmd_options')
            for _cmd in _options.split():
                self.cmd_l.append(_cmd)
        except Exception:
            # Narrowed from a bare except: a missing cmd_options entry is
            # expected and non-fatal.
            self.ml.info('[.] No command line options found.')
        self.debug = self.cfg.getboolean('runtime', 'debug')

    def _run_under_pin(self, input_filename):
        """
        Runs the given file under PIN and
        gets the bitmap representing execution
        @returns: current execution bitmap
        """
        self.cmd_l.append(input_filename)
        subprocess.call(self.cmd_l, shell = False)
        self.cmd_l.pop()  # remove the filename from cmd :)
        # The PinTool has written its feedback into
        # the shared memory. Time to read it.
        self.fd.seek(0)  # file-like interface
        # This coerces somehow the bitmap to an array of ulong's
        curr_bitmap = array('L', self.fd.read(self.shm_size))  # C ulong (4 bytes)
        return curr_bitmap

    def _fuzzing_loop(self):
        """
        Fuzzing Loop.
        This loops (maybe indefinitely) creating several
        fuzzing processes
        """
        iteration_nr = 0
        while True:
            # subprocess.call() is blocking, exactly what I need :)
            # Execution continues when this subprocess returns, either:
            # * instrumented process exits
            # * instrumented process crashes
            # * timeout expires (implemented in PinTool)
            if iteration_nr % 10 == 0:
                # Every 10th iteration is reserved for maintenance and
                # does not fuzz (placeholder for now).
                self.ae.m_info("* Iteration #%d" % iteration_nr)
                self.ae.m_info("* PLACEHOLDER. PERIODIC MAINTENANCE PROCESSES")
                iteration_nr += 1
                continue
            # Mutation objects are read from the queue
            smo = self.rpc_client.poll_mutation_queue()
            self.mo = pickle.loads(smo)
            if self.mo:
                input_filename = self.mo.filename
                data = self.mo.data
                input_path_filename = os.path.join(self.mutations_dir, input_filename)
                with open(input_path_filename, 'wb') as f:
                    f.write(data)
                # Run with the newly created file under PIN
                curr_bitmap = self._run_under_pin(input_path_filename)
            else:
                self.ae.m_alert("Problem getting MutationObject from server")
                self.ae.m_alert("Continuing...")
                continue
            #####################################################
            # Check if this was a crash on client side
            # This way I can analyze it inmediately
            #####################################################
            if curr_bitmap[0] == 0x41414141 \
                    and curr_bitmap[1] == 0x42424242:
                # Restore these first bytes to more appropriate values
                curr_bitmap[0] = 0
                curr_bitmap[1] = 0
                self.ml.info('**** CRASH ****' * 4)
                self.ml.info(input_filename)
                # Bug fix: this was a no-op comparison (==); the crash
                # priority must be assigned to the mutation object.
                self.mo.priority = CAUSED_CRASH
                # Analyzes the crash (and saves it, if determined interesting)
                # This sets the MutationObject crash_data attribute
                cmd = [self.cfg.get('target_info', 'filename'), input_filename]
                self.crash_analysis.analyze_crash(cmd)
            # The bitmap regarding the current execution
            self.mo.arr = curr_bitmap
            # Delete the temporary file from disk
            if os.path.exists(input_path_filename):
                os.remove(input_path_filename)
            # Information is sent back to the server
            self.rpc_client.send_evaluation(self.mo)
            iteration_nr += 1

    def run(self):
        """
        This prepares the run and starts the fuzzing loop
        """
        victim_filename = self.cfg.get('target_info', 'filename')
        self.ml.info("")
        self.ml.info("=" * 80)
        self.ml.info("Fuzzing initiated from the command line.")
        self.ml.info("Started fuzzing: %s" % victim_filename)
        self.ml.info("Timestamp: %s" % str(datetime.now()))
        try:
            self._fuzzing_loop()  # never returns
        except KeyboardInterrupt:
            self.ae.m_alert("""
            ============================================
            ===                                      ===
            === Fuzzing cancelled by user (Ctrl + C) ===
            === Exiting...                           ===
            ===                                      ===
            ============================================
            """)
            self.fd.close()
            self.rpc_client.connection.close()
            sys.exit(1)
def main():
    """Entry point; deliberately kept to the bare minimum."""
    BrundleFuzzClient().run()


if __name__ == '__main__':
    main()
|
|
#!/bin/env python2
import os
import WikiwhoRelationships
from copy import deepcopy
import operator
from sys import argv,exit
import getopt
from wmf import dump
from structures.Revision import Revision
import re
import datetime
#from django.utils import simplejson
def getStatsOfFile(revisions, order, relations, tags):
    """Compute per-revision statistics for one wiki article.

    revisions -- dict: revision id -> Revision object
    order     -- ordered sequence of (rev_id, vandalism) tuples
    relations -- dict: revision wikipedia_id -> relation object holding
                 per-source-revision token counts (revert, deleted,
                 reintroduced, redeleted, self_* and added)
    tags      -- dict: talk-page timestamp -> list of tag-change dicts
                 ({"tagname": ..., "type": "addition"/"removal"})

    Returns a list of dicts, one per revision, with gini/ownership and
    conflict metrics plus article-state tags.  Revisions flagged as
    vandalism contribute an all-zero record.
    """
    statData = []
    editDistSum = {}
    totalEditCount = 0  # NOTE(review): never updated or read below
    all_authors = set([])
    all_editors = []
    prev_revision = None
    outgoing_negative_actions = {}
    incoming_negative_actions = {}
    self_reintroductions = []
    self_supports = []
    all_antagonized_editors = []
    all_supported_editors = []
    for (rev_id, vandalism) in order:
        if (vandalism):
            # Vandalised revisions get an all-zero stats record.
            data = {'revision' : rev_id,
                    'wikiGini V1':0,
                    #'wikiGini V2':0,
                    #'totalLength': 0,
                    'lengthChange' : 0,
                    'editRapidness': 0,
                    'antagonizedEditors': 0,
                    'antagonized_editors_avg_w1' : 0,
                    'antagonisticActions': 0,
                    'supportedEditors': 0,
                    'supported_editors_avg_w1' : 0,
                    'supportiveActions': 0,
                    #'tokenActions': 0,
                    'giniEditorship': 0,
                    'giniEditorship_w1': 0,
                    'giniOutgoingNegativeActions': 0,
                    'giniIncomingNegativeActions': 0,
                    'selfReintroductionRatio': 0,
                    'selfReintroductionRatio_avg_w1':0,
                    'selfSupportRatio': 0,
                    'selfSupportRatio_avg_w1': 0,
                    'statEditors': 0,
                    #'vandalism': 1,
                    'maintained': 0,
                    'npov': 0,
                    'goodArticle': 0,
                    'featuredArticle': 0,
                    'disputed': 0,
                    'protected': 0}
            statData.append(data)
            #prev_revision = rev_id
            continue
        # Compute authorship distribution information
        revision = revisions[rev_id]
        relation = relations[revision.wikipedia_id]
        authors = getAuthorshipDataFromRevision(revision)
        all_authors.update(set(authors))
        authDistSum = sumAuthDist(authors)
        sortedAuthDistSum = sorted(authDistSum.iteritems(), key=operator.itemgetter(1))
        totalWordCount = len(authors)
        totalAuthorCountAS = len(sortedAuthDistSum)
        # Compute editorship distribution information
        all_editors.append(revision.contributor_name)
        editDistSum = sumAuthDist(all_editors)
        sortedEditDistSum = sorted(editDistSum.iteritems(), key=operator.itemgetter(1))
        # editorship with different windows
        window1 = 50
        #window2 = 20
        # editorship with window1
        editDistSum_w1 = None
        if (len(all_editors)>=window1):
            editDistSum_w1 = sumAuthDist(all_editors[len(all_editors)-window1:])
            sortedEditDistSum_w1 = sorted(editDistSum_w1.iteritems(), key=operator.itemgetter(1))
        # editorship with window2
        #editDistSum_w2 = None
        #if (len(all_editors)>=window2):
        #    editDistSum_w2 = sumAuthDist(all_editors[len(all_editors)-window2:])
        #    sortedEditDistSum_w2 = sorted(editDistSum_w2.iteritems(), key=operator.itemgetter(1))
        # Compute wikigini: V1
        # (standard rank-weighted Gini over the sorted ownership counts)
        i = 1
        res = 0
        for tup in sortedAuthDistSum:
            res = res + (i * tup[1])
            i = i + 1
        wikiGini = ((2.0 * res)/ (len(sortedAuthDistSum) * totalWordCount)) - ((len(sortedAuthDistSum) + 1.0 ) / len(sortedAuthDistSum))
        # Compute wikigini: V2
        # (ranks start so that authors with no surviving words count as zeros)
        i = len(all_authors) - len(sortedAuthDistSum) + 1
        res = 0
        for tup in sortedAuthDistSum:
            res = res + (i * tup[1])
            i = i + 1
        wikiGini2 = ((2.0 * res)/ (len(all_authors) * totalWordCount)) - ((len(all_authors) + 1.0 ) / len(all_authors))
        # Compute length change in percentage
        if (prev_revision == None):
            lengthChange = 0
        else:
            lengthChange = ((revision.length - revisions[prev_revision].length) / float(revisions[prev_revision].length))
        # Compute edit rapidness
        if (prev_revision == None):
            editRapidness = 0
        else:
            # presumably timestamps are in seconds, giving hours here --
            # TODO confirm against the Revision class
            editRapidness = (revision.timestamp - revisions[prev_revision].timestamp) / 3600.0
        # antagonized_editors: Revert actions + delete actions in revision (distinct editors)
        antagonized_editors = set([])
        for elem in relation.revert.keys():
            antagonized_editors.add(revisions[elem].contributor_id)
        for elem in relation.deleted.keys():
            antagonized_editors.add(revisions[elem].contributor_id)
        all_antagonized_editors.append(len(antagonized_editors))
        antagonized_editors_avg_w1 = 0
        if (len(all_antagonized_editors) >= window1):
            antagonized_editors_avg_w1 = sum(all_antagonized_editors[len(all_antagonized_editors)-window1:]) / float(window1)
        # antagonistic_actions: Revert actions + delete actions in revision (number of tokens)
        antagonistic_actions = 0
        for elem in relation.revert.keys():
            antagonistic_actions = antagonistic_actions + relation.revert[elem]
        for elem in relation.deleted.keys():
            antagonistic_actions = antagonistic_actions + relation.deleted[elem]
        # supported_editors: reintroductions + redeletes (distinct editors)
        supported_editors = set([])
        for elem in relation.reintroduced.keys():
            supported_editors.add(revisions[elem].contributor_id)
        for elem in relation.redeleted.keys():
            supported_editors.add(revisions[elem].contributor_id)
        all_supported_editors.append(len(supported_editors))
        supported_editors_avg_w1 = 0
        if (len(all_supported_editors) >= window1):
            supported_editors_avg_w1 = sum(all_supported_editors[len(all_supported_editors)-window1:]) / float(window1)
        # supportive actions: reintroductions + redeletes (number of tokens)
        supportive_actions = 0
        for elem in relation.reintroduced.keys():
            supportive_actions = supportive_actions + relation.reintroduced[elem]
        for elem in relation.redeleted.keys():
            supportive_actions = supportive_actions + relation.redeleted[elem]
        # total number of token actions
        tokenActions = 0
        for elem in relation.deleted.keys():
            tokenActions = tokenActions + relation.deleted[elem]
        for elem in relation.reintroduced.keys():
            tokenActions = tokenActions + relation.reintroduced[elem]
        for elem in relation.redeleted.keys():
            tokenActions = tokenActions + relation.redeleted[elem]
        for elem in relation.revert.keys():
            tokenActions = tokenActions + relation.revert[elem]
        tokenActions = tokenActions + relation.added
        # Compute gini editorship
        i = 1
        res = 0
        for tup in sortedEditDistSum:
            res = res + (i * tup[1])
            i = i + 1
        giniEditorship = ((2.0 * res)/ (len(sortedEditDistSum) * len(all_editors))) - ((len(sortedEditDistSum) + 1.0 ) / len(sortedEditDistSum))
        # Compute gini editorship with window 1
        giniEditorship_w1 = 0
        if (editDistSum_w1 != None):
            i = 1
            res = 0
            for tup in sortedEditDistSum_w1:
                res = res + (i * tup[1])
                i = i + 1
            giniEditorship_w1 = ((2.0 * res)/ (len(sortedEditDistSum_w1) * window1)) - ((len(sortedEditDistSum_w1) + 1.0 ) / len(sortedEditDistSum_w1))
        #giniEditorship_w2 = 0
        #if (editDistSum_w2 != None):
        #    i = 1
        #    res = 0
        #    for tup in sortedEditDistSum_w2:
        #        res = res + (i * tup[1])
        #        i = i + 1
        #    giniEditorship_w2 = ((2.0 * res)/ (len(sortedEditDistSum_w2) * window2)) - ((len(sortedEditDistSum_w2) + 1.0 ) / len(sortedEditDistSum_w2))
        # Computing gini of outgoing negative actions
        #if (revision.contributor_name in outgoing_negative_actions.keys()):
        if (revision.contributor_name in outgoing_negative_actions.keys()):
            outgoing_negative_actions.update({revision.contributor_name: outgoing_negative_actions[revision.contributor_name] + antagonistic_actions})
        else:
            outgoing_negative_actions.update({revision.contributor_name: antagonistic_actions})
        sortedNegDistSum = sorted(outgoing_negative_actions.iteritems(), key=operator.itemgetter(1))
        i = 1
        res = 0
        for tup in sortedNegDistSum:
            res = res + (i * tup[1])
            i = i + 1
        giniOutgoingNegativeActions = 0
        if (sum(outgoing_negative_actions.values()) > 0):
            # len(all_editors) represent the number of revisions
            giniOutgoingNegativeActions = ((2.0 * res)/ (len(sortedNegDistSum) * sum(outgoing_negative_actions.values()))) - ((len(sortedNegDistSum) + 1.0 ) / len(sortedNegDistSum))
        #print rev_id , sortedNegDistSum, giniOutgoingNegativeActions
        #print "giniOutgoingNegativeActions", giniOutgoingNegativeActions, "outgoing_negative_actions", outgoing_negative_actions
        # Computing gini of incoming negative actions
        for elem in relation.revert.keys():
            if elem in incoming_negative_actions.keys():
                incoming_negative_actions.update({elem : incoming_negative_actions[elem] + relation.revert[elem]})
            else:
                incoming_negative_actions.update({elem : relation.revert[elem]})
        for elem in relation.deleted.keys():
            if elem in incoming_negative_actions.keys():
                incoming_negative_actions.update({elem : incoming_negative_actions[elem] + relation.deleted[elem]})
            else:
                incoming_negative_actions.update({elem : relation.deleted[elem]})
        sortedNegDistSum = sorted(incoming_negative_actions.iteritems(), key=operator.itemgetter(1))
        i = 1
        res = 0
        for tup in sortedNegDistSum:
            res = res + (i * tup[1])
            i = i + 1
        giniIncomingNegativeActions = 0
        if (sum(incoming_negative_actions.values()) > 0):
            # len(all_editors) represent the number of revisions
            giniIncomingNegativeActions = ((2.0 * res)/ (len(sortedNegDistSum) * sum(incoming_negative_actions.values()))) - ((len(sortedNegDistSum) + 1.0 ) / len(sortedNegDistSum))
        # self-reintroduction ratio
        all_actions = float(relation.added + sum(relation.deleted.values()) + sum(relation.redeleted.values()) + sum(relation.reintroduced.values()) + sum(relation.revert.values()) + sum(relation.self_reintroduced.values()) + sum(relation.self_redeleted.values()) + sum(relation.self_deleted.values()) + sum(relation.self_revert.values()))
        selfReintroductionRatio = 0
        if (all_actions != 0):
            selfReintroductionRatio = sum(relation.self_reintroduced.values()) / all_actions
        self_reintroductions.append(selfReintroductionRatio)
        selfReintroductionRatio_avg_w1 = 0
        if (len(self_reintroductions) >= window1):
            selfReintroductionRatio_avg_w1 = sum(self_reintroductions[len(self_reintroductions)-window1:]) / float(window1)
        # self-supported actions ration
        selfSupportRatio = 0
        if (all_actions != 0):
            selfSupportRatio = (sum(relation.self_reintroduced.values()) + sum(relation.self_redeleted.values())) / all_actions
        self_supports.append(selfSupportRatio)
        selfSupportRatio_avg_w1 = 0
        # NOTE(review): the slice below indexes self_supports with
        # len(self_reintroductions); both lists grow in lockstep so the
        # values coincide, but confirm the mixed sum is intended.
        if (len(self_reintroductions) >= window1):
            selfSupportRatio_avg_w1 = (sum(self_reintroductions[len(self_reintroductions)-window1:]) + sum(self_supports[len(self_reintroductions)-window1:])) / float(window1)
        # Update editor stats
        statEditors = {}
        for a in authors:
            statEditors.update({a : {'wordOwnership' : authDistSum[a]/float(totalWordCount)}})
        #print rev_id, statEditors, statEditors[(rev_id, revision.contributor_name) ]
        positiveActions = 0
        negativeActions = 0
        if (all_actions != 0):
            positiveActions = ((sum(relation.redeleted.values()) + sum(relation.reintroduced.values()))) / all_actions
            negativeActions = ((sum(relation.deleted.values()) + sum(relation.revert.values()))) / all_actions
        # NOTE(review): statEditors is keyed by plain author names, so
        # this (rev_id, name) tuple test looks like it can never succeed
        # and the else-branch always runs -- verify whether the intent
        # was `revision.contributor_name in statEditors`.
        if ((rev_id, revision.contributor_name) in statEditors.keys()):
            statEditors[revision.contributor_name].update({'positiveActions' : positiveActions})
            statEditors[revision.contributor_name].update({'negativeActions' : negativeActions})
        else:
            statEditors.update({revision.contributor_name : {'wordOwnership' : 0}})
            statEditors[revision.contributor_name ].update({'add' : relation.added})
            statEditors[revision.contributor_name ].update({'positiveActions' : positiveActions})
            statEditors[revision.contributor_name ].update({'negativeActions' : negativeActions})
        # Compute maintained tag
        maintained = 0
        npov = 0
        good_article = 0
        featured_article = 0
        disputed = 0
        timestamps = tags.keys()
        timestamps.sort()
        # Replay talk-page tag additions/removals up to this revision's
        # timestamp to determine the current article state.
        for talk_ts in timestamps:
            if talk_ts <= revision.timestamp:
                for t in tags[talk_ts]:
                    # Handling "maintained" tag
                    if (t["tagname"] == "maintained") and (t["type"] == "addition"):
                        maintained = 1
                    elif (t["tagname"] == "maintained") and (t["type"] == "removal"):
                        maintained = 0
                    # Handling "npov" tag
                    elif (t["tagname"] == "npov") and (t["type"] == "addition"):
                        npov = 1
                    elif (t["tagname"] == "npov") and (t["type"] == "removal"):
                        npov = 0
                    # Handling "good article" tag
                    elif (t["tagname"] == "good article") and (t["type"] == "addition"):
                        good_article = 1
                    elif (t["tagname"] == "good article") and (t["type"] == "removal"):
                        good_article = 0
                    # Handling "featured article" tag
                    elif (t["tagname"] == "featured article") and (t["type"] == "addition"):
                        featured_article = 1
                    elif (t["tagname"] == "featured article") and (t["type"] == "removal"):
                        featured_article = 0
                    # Handling "disputed" tag
                    elif (t["tagname"] == "disputed") and (t["type"] == "addition"):
                        disputed = 1
                    elif (t["tagname"] == "disputed") and (t["type"] == "removal"):
                        disputed = 0
        #################################################################################
        #print "revision.content", revision.content
        # A plain substring match, in addition to the template regexps below.
        if "featured article" in revision.content:
            #print "revision.content", revision.content
            featured_article = 1
        protected = 0
        # Template-based tag detection in the revision text itself.
        reglist = list()
        reglist.append({"tagname": "good article", "regexp": re.compile('\{\{(articleissues\|((?:(?!\}\}).)*\||)|multiple issues\|((?:(?!\}\}).)*\||)|)good article((\||=)(?:(?!\}\}).)*|)\}\}', re.IGNORECASE)})
        #reglist.append({"tagname": "featured article", "regexp": re.compile('\{\{(articleissues\|((?:(?!\}\}).)*\||)|multiple issues\|((?:(?!\}\}).)*\||)|)featured article((\||=)(?:(?!\}\}).)*|)\}\}', re.IGNORECASE)})
        #reglist.append({"tagname": "featured article", "regexp": re.compile('\|currentstatus=FA', re.IGNORECASE)})
        reglist.append({"tagname": "npov", "regexp": re.compile('\{\{(articleissues\|((?:(?!\}\}).)*\||)|multiple issues\|((?:(?!\}\}).)*\||)|)(pov|npov)((\||=)(?:(?!\}\}).)*|)\}\}', re.IGNORECASE)})
        reglist.append({"tagname": "disputed", "regexp": re.compile('\{\{(articleissues\|((?:(?!\}\}).)*\||)|multiple issues\|((?:(?!\}\}).)*\||)|)disputed((\||=)(?:(?!\}\}).)*|)\}\}', re.IGNORECASE)})
        #re_user = re.compile('({{|\[\[)user.*?[:|](.*?)[}/\]|]', re.IGNORECASE)
        reglist.append({"tagname": "protected", "regexp": re.compile('\{\{(articleissues\|((?:(?!\}\}).)*\||)|multiple issues\|((?:(?!\}\}).)*\||)|)(pp-pc.?)((\||=)(?:(?!\}\}).)*|)\}\}', re.IGNORECASE)})
        for regexp in reglist:
            m = regexp["regexp"].search(revision.content)
            if m:
                if regexp["tagname"] == "disputed":
                    disputed = 1
                elif (regexp["tagname"] == "good article"):
                    good_article = 1
                elif (regexp["tagname"] == "npov"):
                    npov = 1
                elif (regexp["tagname"] == "protected"):
                    protected = 1
        data = {'revision' : revision.wikipedia_id,
                'wikiGini V1':wikiGini,
                #'wikiGini V2':wikiGini2,
                #'totalLength': (revision.length)/(1024.0*1024.0),
                'lengthChange' : lengthChange,
                'editRapidness': editRapidness,
                'antagonizedEditors': len(antagonized_editors),
                'antagonized_editors_avg_w1' : antagonized_editors_avg_w1,
                'antagonisticActions': antagonistic_actions,
                'supportedEditors': len(supported_editors),
                'supported_editors_avg_w1' : supported_editors_avg_w1,
                'supportiveActions': supportive_actions,
                #'tokenActions': tokenActions,
                'giniEditorship': giniEditorship,
                'giniEditorship_w1': giniEditorship_w1,
                'giniOutgoingNegativeActions': giniOutgoingNegativeActions,
                'giniIncomingNegativeActions': giniIncomingNegativeActions,
                'selfReintroductionRatio': selfReintroductionRatio,
                'selfReintroductionRatio_avg_w1':selfReintroductionRatio_avg_w1,
                'selfSupportRatio': selfSupportRatio,
                'selfSupportRatio_avg_w1': selfSupportRatio_avg_w1,
                'statEditors': statEditors,
                #'vandalism': 0,
                'maintained': maintained,
                'npov': npov,
                'goodArticle': good_article,
                'featuredArticle': featured_article,
                'disputed': disputed,
                'protected': protected}
        statData.append(data)
        #editorData = {'revision:', revision.wikipedia_id}
        #if (prev_revision != None):
        #    print "timestamp", revision.timestamp, revisions[prev_revision].timestamp, revision.timestamp - revisions[prev_revision].timestamp
        prev_revision = rev_id
    return statData
def saveStatsToFile(filename, stats):
    """Write *stats* (a string) to *filename*, truncating any existing
    file.

    Uses a context manager so the file handle is closed even if the
    write raises (the original leaked the handle on error).
    """
    with open(filename, "w") as text_file:
        text_file.write(stats)
def sumAuthDist(authors):
    """Count how many words each author contributed.

    :param authors: iterable of author ids, one entry per word.
    :return: dict mapping author id -> occurrence count.
    """
    wordCount = {}
    for author in authors:
        # dict.get avoids the original's `author in wordCount.keys()`
        # test, which built and scanned a key list on every iteration
        # (O(n) per word under Python 2).
        wordCount[author] = wordCount.get(author, 0) + 1
    return wordCount
def getTagDatesFromPage(file_name):
    """Track when quality/dispute templates were added to or removed from a page.

    Scans every revision in the dump for {{maintained}}, {{good article}},
    {{pov}}/{{npov}} and {{disputed}} templates (standalone or wrapped in
    {{articleissues}}/{{multiple issues}}), plus the '|currentstatus=FA'
    marker for featured articles.

    :param file_name: path of the XML (talk page) dump to read.
    :return: tuple (listOfTagChanges, all_contributors).
        listOfTagChanges maps a revision timestamp to the list of
        addition/removal events recorded at that time;
        all_contributors maps each tag name to
        {timestamp: {"rev", "user", "date"}} entries for additions.
    """
    # Compile regexp
    reglist = list()
    reglist.append({"tagname": "maintained", "regexp": re.compile('\{\{(articleissues\|((?:(?!\}\}).)*\||)|multiple issues\|((?:(?!\}\}).)*\||)|)maintained((\||=)(?:(?!\}\}).)*|)\}\}', re.IGNORECASE)})
    reglist.append({"tagname": "good article", "regexp": re.compile('\{\{(articleissues\|((?:(?!\}\}).)*\||)|multiple issues\|((?:(?!\}\}).)*\||)|)good article((\||=)(?:(?!\}\}).)*|)\}\}', re.IGNORECASE)})
    reglist.append({"tagname": "npov", "regexp": re.compile('\{\{(articleissues\|((?:(?!\}\}).)*\||)|multiple issues\|((?:(?!\}\}).)*\||)|)(pov|npov)((\||=)(?:(?!\}\}).)*|)\}\}', re.IGNORECASE)})
    reglist.append({"tagname": "disputed", "regexp": re.compile('\{\{(articleissues\|((?:(?!\}\}).)*\||)|multiple issues\|((?:(?!\}\}).)*\||)|)disputed((\||=)(?:(?!\}\}).)*|)\}\}', re.IGNORECASE)})
    # Extracts the user name(s) mentioned inside a matched template.
    re_user = re.compile('({{|\[\[)user.*?[:|](.*?)[}/\]|]', re.IGNORECASE)
    # Access the file.  (Unused Revision() scratch objects removed.)
    dumpIterator = dump.Iterator(file_name)
    listOfTagChanges = {}
    all_contributors = {"maintained": {}, "good article": {}, "featured article": {}, "npov": {}, "disputed": {}}
    # Iterate over the pages.
    for page in dumpIterator.readPages():
        # Tags seen in the previous revision, to detect transitions.
        prev_matched = list()
        for revision in page.readRevisions():
            revision.wikipedia_id = int(revision.getId())
            revision.timestamp = revision.getTimestamp()
            # Some revisions don't have contributor.
            if (revision.getContributor() != None):
                revision.contributor_id = revision.getContributor().getId()
                revision.contributor_name = revision.getContributor().getUsername()
            else:
                revision.contributor_id = 'Not Available'
                # BUG FIX: was misspelled 'contribur_name', which left
                # contributor_name unset and crashed below whenever an
                # anonymous revision matched a template.
                revision.contributor_name = 'Not Available'
            text_curr = revision.getText()
            if(text_curr):
                text_curr = text_curr.encode('utf-8')
                text_curr = text_curr.lower()
            else:
                # Skip revisions without text (e.g. deleted).
                continue
            matched = list()
            aux = list()
            for regexp in reglist:
                m = regexp["regexp"].search(text_curr)
                if m:
                    # Split out every user name mentioned in the template.
                    mc = re_user.split(m.group(0))
                    i = 2
                    users = []
                    users.append(mc[2])
                    while (i+3 < len(mc)):
                        users.append(mc[i+3])
                        i = i +3
                    matched.append(regexp["tagname"])
                    aux.append((regexp["tagname"], users))
            if "|currentstatus=FA" in text_curr:
                matched.append("featured article")
                # BUG FIX: tuple was reversed (name, tag); every other aux
                # entry is (tag, contributor) and the unpacking below uses
                # the first element as the all_contributors key.
                aux.append(("featured article", revision.contributor_name))
            # Calculate additions
            for (match, contributor) in aux:
                if not (match in prev_matched):
                    if not (revision.timestamp in listOfTagChanges.keys()):
                        listOfTagChanges[revision.timestamp] = list()
                    listOfTagChanges[revision.timestamp].append({"rev": revision.wikipedia_id, "type": "addition", "tagname": match, "wikiname": revision.contributor_name, "timestamp": revision.timestamp, "date": datetime.datetime.fromtimestamp(int(revision.timestamp)).strftime('%Y-%m-%d %H:%M:%S')})
                    all_contributors[match].update({revision.timestamp : {"rev": revision.wikipedia_id, "user":contributor, "date":datetime.datetime.fromtimestamp(int(revision.timestamp)).strftime('%Y-%m-%d %H:%M:%S')}})
            # Calculate removals
            for match in prev_matched:
                if not (match in matched):
                    if not (revision.timestamp in listOfTagChanges.keys()):
                        listOfTagChanges[revision.timestamp] = list()
                    listOfTagChanges[revision.timestamp].append({"rev": revision.wikipedia_id, "type": "removal", "tagname": match, "wikiname": revision.contributor_name, "timestamp": revision.timestamp, "date": datetime.datetime.fromtimestamp(int(revision.timestamp)).strftime('%Y-%m-%d %H:%M:%S')})
            prev_matched = matched
    return listOfTagChanges, all_contributors
def getAuthorshipDataFromRevision(revision):
    """Collect the author id of every word in *revision*, in document order.

    :param revision: Wikiwho Revision with ordered_paragraphs /
        paragraphs / sentences / words structure.
    :return: flat list of word author ids.
    """
    author_ids = []
    for para_hash in revision.ordered_paragraphs:
        # Work on a deep copy so the pops below never disturb the revision.
        para_versions = deepcopy(revision.paragraphs[para_hash])
        para = para_versions.pop(0)
        for sent_hash in para.ordered_sentences:
            # A repeated hash consumes the next queued sentence version.
            sent = para.sentences[sent_hash].pop(0)
            author_ids.extend(word.author_id for word in sent.words)
    return author_ids
def printStats(stats):
    """Render per-revision statistics as a JS-style variable assignment.

    Builds one NVD3-like series per metric, each point being
    {"x": index, "y": metric value, "z": revision id}, and returns
    the whole structure as the string ``example = {...};``.

    :param stats: list of per-revision stat dicts (see getStatsOfFile).
    :return: string suitable for inclusion in a chart HTML page.
    """
    # (label shown in the chart, key in each stats element, start disabled?)
    series_spec = [
        ("Avg. editors disagreed with (window=50)", 'antagonized_editors_avg_w1', False),
        ("Length Change (%)", 'lengthChange', True),
        ("Edit Rapidness (h.)", 'editRapidness', True),
        ("Gini outgoing dis. actions", 'giniOutgoingNegativeActions', True),
        ("Gini incoming dis. actions", 'giniIncomingNegativeActions', True),
        ("Gini editorship", 'giniEditorship', True),
        ("Gini editorship (window=50)", 'giniEditorship_w1', True),
        ("Template:Maintained", 'maintained', True),
        ("Template:NPOV", 'npov', True),
        ("Template:Good Article", 'goodArticle', True),
        ("Template:Featured Article", 'featuredArticle', True),
        ("Template:Disputed", 'disputed', True),
        ("Template:Protected", 'protected', True),
    ]
    series = []
    for label, field, hidden in series_spec:
        points = [{"x": idx, "y": elem[field], "z": elem["revision"]}
                  for idx, elem in enumerate(stats)]
        serie = {"key": label, "values": points}
        if hidden:
            # Same marker string the chart library expects.
            serie["disabled"] = "true"
        series.append(serie)
    finalStats = {'revisions': series}
    return "example = " + str(finalStats) + ";"
def printStatsCSV(stats):
    """Serialize per-revision statistics to CSV rows.

    Bug fixes relative to the original: the bare ``",".join(...)`` and
    ``"\\n".join(lines)`` expressions discarded their results, joining
    would have raised TypeError on non-string cells, the header named
    only two of the eight emitted columns, and dozens of accumulator
    lists were declared but never used.

    :param stats: list of per-revision stat dicts (see getStatsOfFile).
    :return: list of comma-separated row strings, header row first.
    """
    # Column order matches the cells emitted per revision below.
    header = ["revision", "wikiGini V1", "lengthChange", "editRapidness",
              "antagonizedEditors", "antagonized_editors_avg_w1",
              "antagonisticActions", "supportedEditors"]
    fields = header[1:]
    lines = [",".join(header)]
    for elem in stats:
        row = [elem["revision"]] + [elem[f] for f in fields]
        # Cells may be ints/floats; stringify before joining.
        lines.append(",".join(str(cell) for cell in row))
    return lines
def printTags(contributors, revisions, order, file_name):
    """Write, per revision, the maintainer list in effect at that time.

    :param contributors: dict as returned by getTagDatesFromPage; only the
        "maintained" entry is consulted.
    :param revisions: dict mapping revision id -> revision (``.timestamp`` read).
    :param order: iterable of (rev_id, vandalism_flag) pairs.
    :param file_name: output path for the tab-separated file.
    """
    maintainers = contributors["maintained"]
    # sorted() works on both Python 2 and 3; the original sorted a
    # dict.keys() result in place, which fails on Python 3 views.
    timestamps = sorted(maintainers.keys())
    # Context manager guarantees the handle is closed even on error.
    with open(file_name, "w") as csv_file:
        for (rev_id, vandalism) in order:
            users = None
            # Keep the most recent maintainer entry at or before this revision.
            for talk_ts in timestamps:
                if not vandalism and talk_ts <= revisions[rev_id].timestamp:
                    users = maintainers[talk_ts]["user"]
            if users is not None:
                csv_file.write(str(rev_id) + "\t" + "maintained" + "\t " + "\t".join(users) + "\n")
def main(my_argv):
    """Parse the command-line arguments of the wikistats script.

    :param my_argv: argument list in ``sys.argv[1:]`` form.
    :return: tuple (inputfile, output); output is None when -o was not given.

    Bug fix: -h/--help were dispatched in the option loop below but never
    declared in the getopt option strings, so ``-h`` raised GetoptError
    (and printed the usage line) instead of showing the help text.
    Print calls use the parenthesized single-argument form, which behaves
    identically under Python 2 and 3.
    """
    inputfile = ''
    output = None
    if len(my_argv) <= 3:
        try:
            opts, _ = getopt.getopt(my_argv, "hi:", ["help", "ifile="])
        except getopt.GetoptError:
            print('Usage: wikistats.py -i <inputfile> [-o <output>]')
            exit(2)
    else:
        try:
            opts, _ = getopt.getopt(my_argv, "hi:o:", ["help", "ifile=", "output="])
        except getopt.GetoptError:
            print('Usage: wikistats.py -i <inputfile> [-o <output>]')
            exit(2)
    for opt, arg in opts:
        if opt in ('-h', "--help"):
            print("wikistats")
            print("")
            print('Usage: wikistats.py -i <inputfile> [-rev <revision_id>]')
            print("-i --ifile File to analyze")
            print("-o --output Type of output. Options: 'json', 'table'. If not specified, JSON is the default.")
            print("-h --help This help.")
            exit()
        elif opt in ("-i", "--ifile"):
            inputfile = arg
        elif opt in ("-o", "--output"):
            output = arg
    return (inputfile, output)
if __name__ == '__main__':
    # Entry point of the wikistats script (Python 2: print statements).
    # `argv`, `os`, `WikiwhoRelationships` and `getStatsOfFile` come from
    # imports/definitions earlier in the file -- assumes `from sys import
    # argv` at the top; TODO confirm.
    (file_name, output) = main(argv[1:])
    # Compute distribution information
    (revisions, order, relations) = WikiwhoRelationships.analyseArticle(file_name)
    # Compute file name of talk page data file
    # (same directory as the article dump, prefixed "talk_").
    talkPageFileName = os.path.join(os.path.dirname(file_name), "talk_" + os.path.basename(file_name))
    tags, contributors = getTagDatesFromPage(talkPageFileName)
    # Maintainer list per revision, next to the input file.
    printTags(contributors, revisions, order, file_name.replace(".xml", "_maintainers.csv"))
    if (output == None or output == 'json'):
        # Compute statistics
        stats = getStatsOfFile(revisions, order, relations, tags)
        print printStats(stats)
    elif (output == 'table'):
        WikiwhoRelationships.printRelationships(relations, order)
    elif (output== 'csv'):
        # Compute statistics
        stats = getStatsOfFile(revisions, order, relations, tags)
        print printStatsCSV(stats)
    else:
        print "Output format", output, "not supported"
    #print stats
    #time2 = time()
    #pos = file_name.rfind("/")
    #print file_name[pos+1: len(file_name)-len(".xml")], time2-time1
    #printRelationships(relations, order)
    #printRevision(revisions[11])
    #print "Execution time:", time2-time1
#saveStatsToFile("/home/jurkan/Dokumente/Informatik/ciseminar/software/nethackstats.json", getStatsOfFile("/home/jurkan/Dokumente/Informatik/ciseminar/software/nethack.xml"))
|
|
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import sys
import unittest
import TestSCons
import SCons.Variables
import SCons.Subst
import SCons.Warnings
class Environment(object):
    """Minimal stand-in for an SCons construction environment.

    Implements just enough of the mapping protocol for the Variables
    tests, plus subst() delegating to SCons.Subst.  The backing store is
    kept as the attribute ``dict`` for compatibility.
    """
    def __init__(self):
        self.dict = {}
    def subst(self, x):
        """Substitute construction variables in *x* against our dict."""
        return SCons.Subst.scons_subst(x, self, gvars=self.dict)
    def __setitem__(self, key, value):
        self.dict[key] = value
    def __getitem__(self, key):
        return self.dict[key]
    def __contains__(self, key):
        return key in self.dict
    def has_key(self, key):
        """Python-2-style membership test kept for old call sites."""
        return key in self.dict
def check(key, value, env):
    """Validator used throughout these tests: *value* must convert to 54."""
    expected = 6 * 9
    assert int(value) == expected, "key %s = %s" % (key, repr(value))
# Check saved option file by executing and comparing against
# the expected dictionary
def checkSave(file, expected):
    """Execute the saved options *file* and compare its locals to *expected*.

    :param file: path of a Python options file produced by Variables.Save().
    :param expected: dict the file's local bindings must equal exactly.
    :raises AssertionError: when the executed locals differ from *expected*.
    """
    gdict = {}
    ldict = {}
    # The exec() call form works on both Python 2 and 3; the original used
    # the Python-2-only `exec ... in g, l` statement.  The 'rU' open mode
    # was also removed in Python 3.11, so open in plain text mode, and the
    # context manager closes the handle the original leaked.
    with open(file, 'r') as f:
        exec(f.read(), gdict, ldict)
    assert expected == ldict, "%s\n...not equal to...\n%s" % (expected, ldict)
class VariablesTestCase(unittest.TestCase):
    """Exercise the core SCons.Variables.Variables API: keys/Add/AddVariables,
    Update(), Save(), help-text generation and option aliases.  Relies on the
    local Environment/check/checkSave helpers defined above."""
    def test_keys(self):
        """Test the Variables.keys() method"""
        opts = SCons.Variables.Variables()
        opts.Add('VAR1')
        opts.Add('VAR2',
                 'THE answer to THE question',
                 "42",
                 check,
                 lambda x: int(x) + 12)
        keys = list(opts.keys())
        assert keys == ['VAR1', 'VAR2'], keys
    def test_Add(self):
        """Test adding to a Variables object"""
        opts = SCons.Variables.Variables()
        opts.Add('VAR')
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "42",
                 check,
                 lambda x: int(x) + 12)
        o = opts.options[0]
        assert o.key == 'VAR'
        assert o.help == ''
        assert o.default is None
        assert o.validator is None
        assert o.converter is None
        o = opts.options[1]
        assert o.key == 'ANSWER'
        assert o.help == 'THE answer to THE question'
        assert o.default == "42"
        o.validator(o.key, o.converter(o.default), {})
        # Variable names containing '/', '-' or '.' must be rejected
        # with a UserError.
        def test_it(var, opts=opts):
            exc_caught = None
            try:
                opts.Add(var)
            except SCons.Errors.UserError:
                exc_caught = 1
            assert exc_caught, "did not catch UserError for '%s'" % var
        test_it('foo/bar')
        test_it('foo-bar')
        test_it('foo.bar')
    def test_AddVariables(self):
        """Test adding a list of options to a Variables object"""
        opts = SCons.Variables.Variables()
        opts.AddVariables(('VAR2',),
                          ('ANSWER2',
                           'THE answer to THE question',
                           "42",
                           check,
                           lambda x: int(x) + 12))
        o = opts.options[0]
        assert o.key == 'VAR2', o.key
        assert o.help == '', o.help
        assert o.default is None, o.default
        assert o.validator is None, o.validator
        assert o.converter is None, o.converter
        o = opts.options[1]
        assert o.key == 'ANSWER2', o.key
        assert o.help == 'THE answer to THE question', o.help
        assert o.default == "42", o.default
        o.validator(o.key, o.converter(o.default), {})
    def test_Update(self):
        """Test updating an Environment"""
        # Test that a default value is validated correctly.
        # (check() expects 54 = converter(42); see helpers above.)
        test = TestSCons.TestSCons()
        file = test.workpath('custom.py')
        opts = SCons.Variables.Variables(file)
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "42",
                 check,
                 lambda x: int(x) + 12)
        env = Environment()
        opts.Update(env)
        assert env['ANSWER'] == 54
        env = Environment()
        opts.Update(env, {})
        assert env['ANSWER'] == 54
        # Test that a bad value from the file is used and
        # validation fails correctly.
        test = TestSCons.TestSCons()
        file = test.workpath('custom.py')
        test.write('custom.py', 'ANSWER=54')
        opts = SCons.Variables.Variables(file)
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "42",
                 check,
                 lambda x: int(x) + 12)
        env = Environment()
        exc_caught = None
        try:
            opts.Update(env)
        except AssertionError:
            exc_caught = 1
        assert exc_caught, "did not catch expected assertion"
        env = Environment()
        exc_caught = None
        try:
            opts.Update(env, {})
        except AssertionError:
            exc_caught = 1
        assert exc_caught, "did not catch expected assertion"
        # Test that a good value from the file is used and validated.
        test = TestSCons.TestSCons()
        file = test.workpath('custom.py')
        test.write('custom.py', 'ANSWER=42')
        opts = SCons.Variables.Variables(file)
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "10",
                 check,
                 lambda x: int(x) + 12)
        env = Environment()
        opts.Update(env)
        assert env['ANSWER'] == 54
        env = Environment()
        opts.Update(env, {})
        assert env['ANSWER'] == 54
        # Test that a bad value from an args dictionary passed to
        # Update() is used and validation fails correctly.
        test = TestSCons.TestSCons()
        file = test.workpath('custom.py')
        test.write('custom.py', 'ANSWER=10')
        opts = SCons.Variables.Variables(file)
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "12",
                 check,
                 lambda x: int(x) + 12)
        env = Environment()
        exc_caught = None
        try:
            opts.Update(env, {'ANSWER':'54'})
        except AssertionError:
            exc_caught = 1
        assert exc_caught, "did not catch expected assertion"
        # Test that a good value from an args dictionary
        # passed to Update() is used and validated.
        test = TestSCons.TestSCons()
        file = test.workpath('custom.py')
        test.write('custom.py', 'ANSWER=10')
        opts = SCons.Variables.Variables(file)
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "12",
                 check,
                 lambda x: int(x) + 12)
        env = Environment()
        opts.Update(env, {'ANSWER':'42'})
        assert env['ANSWER'] == 54
        # Test against a former bug. If we supply a converter,
        # but no default, the value should *not* appear in the
        # Environment if no value is specified in the options file
        # or args.
        test = TestSCons.TestSCons()
        file = test.workpath('custom.py')
        opts = SCons.Variables.Variables(file)
        opts.Add('ANSWER',
                 help='THE answer to THE question',
                 converter=str)
        env = Environment()
        opts.Update(env, {})
        assert 'ANSWER' not in env
        # Test that a default value of None is all right.
        test = TestSCons.TestSCons()
        file = test.workpath('custom.py')
        opts = SCons.Variables.Variables(file)
        opts.Add('ANSWER',
                 "This is the answer",
                 None,
                 check)
        env = Environment()
        opts.Update(env, {})
        assert 'ANSWER' not in env
    def test_args(self):
        """Test updating an Environment with arguments overridden"""
        # Test that a bad (command-line) argument is used
        # and the validation fails correctly.
        test = TestSCons.TestSCons()
        file = test.workpath('custom.py')
        test.write('custom.py', 'ANSWER=42')
        opts = SCons.Variables.Variables(file, {'ANSWER':54})
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "42",
                 check,
                 lambda x: int(x) + 12)
        env = Environment()
        exc_caught = None
        try:
            opts.Update(env)
        except AssertionError:
            exc_caught = 1
        assert exc_caught, "did not catch expected assertion"
        # Test that a good (command-line) argument is used and validated.
        test = TestSCons.TestSCons()
        file = test.workpath('custom.py')
        test.write('custom.py', 'ANSWER=54')
        opts = SCons.Variables.Variables(file, {'ANSWER':42})
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "54",
                 check,
                 lambda x: int(x) + 12)
        env = Environment()
        opts.Update(env)
        assert env['ANSWER'] == 54
        # Test that a (command-line) argument is overridden by a dictionary
        # supplied to Update() and the dictionary value is validated correctly.
        test = TestSCons.TestSCons()
        file = test.workpath('custom.py')
        test.write('custom.py', 'ANSWER=54')
        opts = SCons.Variables.Variables(file, {'ANSWER':54})
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "54",
                 check,
                 lambda x: int(x) + 12)
        env = Environment()
        opts.Update(env, {'ANSWER':42})
        assert env['ANSWER'] == 54
    def test_Save(self):
        """Testing saving Variables"""
        test = TestSCons.TestSCons()
        cache_file = test.workpath('cached.options')
        opts = SCons.Variables.Variables()
        # Normalizes 'y'/'n' strings to 1/0; other values pass through.
        def bool_converter(val):
            if val in [1, 'y']: val = 1
            if val in [0, 'n']: val = 0
            return val
        # test saving out empty file
        opts.Add('OPT_VAL',
                 'An option to test',
                 21,
                 None,
                 None)
        opts.Add('OPT_VAL_2',
                 default='foo')
        opts.Add('OPT_VAL_3',
                 default=1)
        opts.Add('OPT_BOOL_0',
                 default='n',
                 converter=bool_converter)
        opts.Add('OPT_BOOL_1',
                 default='y',
                 converter=bool_converter)
        opts.Add('OPT_BOOL_2',
                 default=0,
                 converter=bool_converter)
        env = Environment()
        opts.Update(env, {'OPT_VAL_3' : 2})
        assert env['OPT_VAL'] == 21, env['OPT_VAL']
        assert env['OPT_VAL_2'] == 'foo', env['OPT_VAL_2']
        assert env['OPT_VAL_3'] == 2, env['OPT_VAL_3']
        assert env['OPT_BOOL_0'] == 0, env['OPT_BOOL_0']
        assert env['OPT_BOOL_1'] == 1, env['OPT_BOOL_1']
        assert env['OPT_BOOL_2'] == '0', env['OPT_BOOL_2']
        env['OPT_VAL_2'] = 'bar'
        env['OPT_BOOL_0'] = 0
        env['OPT_BOOL_1'] = 1
        env['OPT_BOOL_2'] = 2
        opts.Save(cache_file, env)
        # Only values differing from their defaults get written out.
        checkSave(cache_file, { 'OPT_VAL_2' : 'bar',
                                'OPT_VAL_3' : 2,
                                'OPT_BOOL_2' : 2})
        # Test against some old bugs
        class Foo(object):
            def __init__(self, x):
                self.x = x
            def __str__(self):
                return self.x
        test = TestSCons.TestSCons()
        cache_file = test.workpath('cached.options')
        opts = SCons.Variables.Variables()
        opts.Add('THIS_USED_TO_BREAK',
                 'An option to test',
                 "Default")
        opts.Add('THIS_ALSO_BROKE',
                 'An option to test',
                 "Default2")
        opts.Add('THIS_SHOULD_WORK',
                 'An option to test',
                 Foo('bar'))
        env = Environment()
        opts.Update(env, { 'THIS_USED_TO_BREAK' : "Single'Quotes'In'String",
                           'THIS_ALSO_BROKE' : "\\Escape\nSequences\t",
                           'THIS_SHOULD_WORK' : Foo('baz') })
        opts.Save(cache_file, env)
        checkSave(cache_file, { 'THIS_USED_TO_BREAK' : "Single'Quotes'In'String",
                                'THIS_ALSO_BROKE' : "\\Escape\nSequences\t",
                                'THIS_SHOULD_WORK' : 'baz' })
    def test_GenerateHelpText(self):
        """Test generating the default format help text"""
        opts = SCons.Variables.Variables()
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "42",
                 check,
                 lambda x: int(x) + 12)
        opts.Add('B',
                 'b - alpha test',
                 "42",
                 check,
                 lambda x: int(x) + 12)
        opts.Add('A',
                 'a - alpha test',
                 "42",
                 check,
                 lambda x: int(x) + 12)
        env = Environment()
        opts.Update(env, {})
        expect = """
ANSWER: THE answer to THE question
    default: 42
    actual: 54
B: b - alpha test
    default: 42
    actual: 54
A: a - alpha test
    default: 42
    actual: 54
"""
        text = opts.GenerateHelpText(env)
        assert text == expect, text
        expectAlpha = """
A: a - alpha test
    default: 42
    actual: 54
ANSWER: THE answer to THE question
    default: 42
    actual: 54
B: b - alpha test
    default: 42
    actual: 54
"""
        # NOTE(review): `cmp` is a Python-2-only builtin; this relies on
        # the old cmp-style sort interface -- confirm intended interpreter.
        text = opts.GenerateHelpText(env, sort=cmp)
        assert text == expectAlpha, text
    def test_FormatVariableHelpText(self):
        """Test generating custom format help text"""
        opts = SCons.Variables.Variables()
        # Custom formatter replacing the default help layout.
        def my_format(env, opt, help, default, actual, aliases):
            return '%s %s %s %s %s\n' % (opt, default, actual, help, aliases)
        opts.FormatVariableHelpText = my_format
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "42",
                 check,
                 lambda x: int(x) + 12)
        opts.Add('B',
                 'b - alpha test',
                 "42",
                 check,
                 lambda x: int(x) + 12)
        opts.Add('A',
                 'a - alpha test',
                 "42",
                 check,
                 lambda x: int(x) + 12)
        env = Environment()
        opts.Update(env, {})
        expect = """\
ANSWER 42 54 THE answer to THE question ['ANSWER']
B 42 54 b - alpha test ['B']
A 42 54 a - alpha test ['A']
"""
        text = opts.GenerateHelpText(env)
        assert text == expect, text
        expectAlpha = """\
A 42 54 a - alpha test ['A']
ANSWER 42 54 THE answer to THE question ['ANSWER']
B 42 54 b - alpha test ['B']
"""
        # NOTE(review): py2-only `cmp` again; see test_GenerateHelpText.
        text = opts.GenerateHelpText(env, sort=cmp)
        assert text == expectAlpha, text
    def test_Aliases(self):
        """Test option aliases"""
        # test alias as a tuple
        opts = SCons.Variables.Variables()
        opts.AddVariables(
            (('ANSWER', 'ANSWERALIAS'),
             'THE answer to THE question',
             "42"),
            )
        env = Environment()
        opts.Update(env, {'ANSWER' : 'answer'})
        assert 'ANSWER' in env
        env = Environment()
        opts.Update(env, {'ANSWERALIAS' : 'answer'})
        # The alias resolves to the canonical name in the environment.
        assert 'ANSWER' in env and 'ANSWERALIAS' not in env
        # test alias as a list
        opts = SCons.Variables.Variables()
        opts.AddVariables(
            (['ANSWER', 'ANSWERALIAS'],
             'THE answer to THE question',
             "42"),
            )
        env = Environment()
        opts.Update(env, {'ANSWER' : 'answer'})
        assert 'ANSWER' in env
        env = Environment()
        opts.Update(env, {'ANSWERALIAS' : 'answer'})
        assert 'ANSWER' in env and 'ANSWERALIAS' not in env
class UnknownVariablesTestCase(unittest.TestCase):
    """Tests for UnknownVariables(): tracking of args that Update() saw but
    for which no option was declared, including late Add() of such names."""
    def test_unknown(self):
        """Test the UnknownVariables() method"""
        opts = SCons.Variables.Variables()
        opts.Add('ANSWER',
                 'THE answer to THE question',
                 "42")
        args = {
            'ANSWER' : 'answer',
            'UNKNOWN' : 'unknown',
        }
        env = Environment()
        opts.Update(env, args)
        r = opts.UnknownVariables()
        assert r == {'UNKNOWN' : 'unknown'}, r
        assert env['ANSWER'] == 'answer', env['ANSWER']
    def test_AddOptionUpdatesUnknown(self):
        """Test updating of the 'unknown' dict"""
        opts = SCons.Variables.Variables()
        opts.Add('A',
                 'A test variable',
                 "1")
        args = {
            'A' : 'a',
            'ADDEDLATER' : 'notaddedyet',
        }
        env = Environment()
        opts.Update(env,args)
        r = opts.UnknownVariables()
        assert r == {'ADDEDLATER' : 'notaddedyet'}, r
        assert env['A'] == 'a', env['A']
        # Declaring the name afterwards must remove it from 'unknown'
        # on the next Update().
        opts.Add('ADDEDLATER',
                 'An option not present initially',
                 "1")
        args = {
            'A' : 'a',
            'ADDEDLATER' : 'added',
        }
        opts.Update(env, args)
        r = opts.UnknownVariables()
        assert len(r) == 0, r
        assert env['ADDEDLATER'] == 'added', env['ADDEDLATER']
    def test_AddOptionWithAliasUpdatesUnknown(self):
        """Test updating of the 'unknown' dict (with aliases)"""
        opts = SCons.Variables.Variables()
        opts.Add('A',
                 'A test variable',
                 "1")
        args = {
            'A' : 'a',
            'ADDEDLATERALIAS' : 'notaddedyet',
        }
        env = Environment()
        opts.Update(env,args)
        r = opts.UnknownVariables()
        assert r == {'ADDEDLATERALIAS' : 'notaddedyet'}, r
        assert env['A'] == 'a', env['A']
        # Late declaration via an alias must also clear the 'unknown' entry.
        opts.AddVariables(
            (('ADDEDLATER', 'ADDEDLATERALIAS'),
             'An option not present initially',
             "1"),
            )
        args['ADDEDLATERALIAS'] = 'added'
        opts.Update(env, args)
        r = opts.UnknownVariables()
        assert len(r) == 0, r
        assert env['ADDEDLATER'] == 'added', env['ADDEDLATER']
if __name__ == "__main__":
    # Collect every test_* method of each case class into one suite.
    suite = unittest.TestSuite()
    for case_class in (VariablesTestCase, UnknownVariablesTestCase):
        for method_name in unittest.getTestCaseNames(case_class, 'test_'):
            suite.addTest(case_class(method_name))
    # Mirror the runner's success/failure in the process exit status.
    if not unittest.TextTestRunner().run(suite).wasSuccessful():
        sys.exit(1)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
|
# -*- coding: utf-8 -*-
import itertools
import json
import os
import six
import ntpath
import warnings
from pysnow.query_builder import QueryBuilder
from .legacy_exceptions import (
NoRequestExecuted,
MultipleResults,
NoResults,
InvalidUsage,
UnexpectedResponse,
MissingResult,
)
class LegacyRequest(object):
    """Prepares, performs and validates a single legacy ServiceNow request.

    Wraps a `requests` session and knows how to build table / attachment /
    stats URLs, serialize sysparm queries and validate/unpack ServiceNow
    responses.
    """

    base_path = "api/now"

    def __init__(self, method, table, **kwargs):
        """Takes arguments used to perform a HTTP request

        :param method: HTTP request method
        :param table: table to operate on
        :param kwargs: must contain `base_url`, `request_params`,
            `raise_on_empty` and `session`; `query` is required for
            GET/DELETE methods
        """
        self.method = method
        self.table = table
        self.url_link = None  # Updated when a linked request is iterated on
        self.base_url = kwargs.pop("base_url")
        self.request_params = kwargs.pop("request_params")
        self.raise_on_empty = kwargs.pop("raise_on_empty")
        self.session = kwargs.pop("session")

        self._last_response = None

        # Only record-reading/deleting operations carry a user query.
        if method in ("GET", "DELETE"):
            self.query = kwargs.pop("query")

    @property
    def last_response(self):
        """Return _last_response after making sure an inner `requests.request` has been performed

        :raise:
            :NoRequestExecuted: If no request has been executed
        :return:
            - last response
        """
        if self._last_response is None:
            raise NoRequestExecuted("%s hasn't been executed" % self)
        return self._last_response

    @last_response.setter
    def last_response(self, response):
        """Sets last_response property

        :param response: `requests.request` response
        """
        self._last_response = response

    @property
    def count(self):
        """Return the number of records the current query would yield."""
        # NOTE(review): this permanently adds sysparm_count to
        # self.request_params, so it leaks into later requests made with
        # the same object — behavior preserved for backward compatibility.
        self.request_params.update({"sysparm_count": True})
        response = self.session.get(
            self._get_stats_url(),
            params=self._get_formatted_query(
                fields=[], limit=None, order_by=[], offset=None
            ),
        )

        content = self._get_content(response)

        return int(content["stats"]["count"])

    @property
    def status_code(self):
        """Return last_response.status_code after making sure an inner `requests.request` has been performed

        :return: status_code of last_response
        """
        return self.last_response.status_code

    def _all_inner(self, fields, limit, order_by, offset):
        """Yield each page of records for the query, following pagination links.

        :return: generator producing one validated record list per page
        """
        response = self.session.get(
            self._get_table_url(),
            params=self._get_formatted_query(fields, limit, order_by, offset),
        )
        yield self._get_content(response)
        # Follow "next" pagination links until the server stops sending them.
        while "next" in response.links:
            self.url_link = response.links["next"]["url"]
            response = self.session.get(self.url_link)
            yield self._get_content(response)

    def get_all(self, fields=None, limit=None, order_by=None, offset=None):
        """DEPRECATED - see get_multiple()"""
        warnings.warn(
            "get_all() is deprecated, please use get_multiple() instead",
            DeprecationWarning,
        )
        return self.get_multiple(fields, limit, order_by, offset)

    def get_multiple(self, fields=None, limit=None, order_by=None, offset=None):
        """Chain all pages yielded by the _all_inner() generator into one result.

        The response can be sorted by passing a list of fields to order_by.

        Example:
        get_multiple(order_by=['category', '-created_on']) would sort the category field in ascending order,
        with a secondary sort by created_on in descending order.

        :param fields: List of fields to return in the result
        :param limit: Limits the number of records returned
        :param order_by: Sort response based on certain fields
        :param offset: A number of records to skip before returning records (for pagination)
        :return:
            - Iterable chain object
        """
        # Fresh lists instead of the old shared mutable defaults.
        fields = [] if fields is None else fields
        order_by = [] if order_by is None else order_by
        return itertools.chain.from_iterable(
            self._all_inner(fields, limit, order_by, offset)
        )

    def get_one(self, fields=None):
        """Convenience function for queries returning only one result. Validates response before returning.

        :param fields: List of fields to return in the result
        :raise:
            :MultipleResults: if more than one match is found
        :return:
            - Record content, or {} when nothing matched
        """
        fields = [] if fields is None else fields
        response = self.session.get(
            self._get_table_url(),
            params=self._get_formatted_query(
                fields, limit=None, order_by=[], offset=None
            ),
        )

        content = self._get_content(response)

        if len(content) > 1:
            raise MultipleResults("Multiple results for get_one()")
        if len(content) == 0:
            return {}

        return content[0]

    def insert(self, payload):
        """Inserts a new record with the payload passed as an argument

        :param payload: The record to create (dict)
        :return:
            - Created record
        """
        response = self.session.post(self._get_table_url(), data=json.dumps(payload))
        return self._get_content(response)

    def delete(self):
        """Deletes the queried record and returns response content after response validation

        :raise:
            :NoResults: if query returned no results
            :MultipleResults: if query returned more than one result (currently not supported)
        :return:
            - Delete response content (Generally always {'Success': True})
        """
        try:
            result = self.get_one()
            if "sys_id" not in result:
                raise NoResults()
        except MultipleResults:
            raise MultipleResults("Deletion of multiple records is not supported")
        except NoResults as e:
            e.args = ("Cannot delete a non-existing record",)
            raise

        response = self.session.delete(self._get_table_url(sys_id=result["sys_id"]))
        return self._get_content(response)

    def update(self, payload):
        """Updates the queried record with `payload` and returns the updated record after validating the response

        :param payload: Payload to update the record with
        :raise:
            :NoResults: if query returned no results
            :MultipleResults: if query returned more than one result (currently not supported)
            :InvalidUsage: if payload is not a dict
        :return:
            - The updated record
        """
        try:
            result = self.get_one()
            if "sys_id" not in result:
                raise NoResults()
        except MultipleResults:
            raise MultipleResults("Update of multiple records is not supported")
        except NoResults as e:
            e.args = ("Cannot update a non-existing record",)
            raise

        if not isinstance(payload, dict):
            raise InvalidUsage("Update payload must be of type dict")

        response = self.session.put(
            self._get_table_url(sys_id=result["sys_id"]), data=json.dumps(payload)
        )
        return self._get_content(response)

    def clone(self, reset_fields=None):
        """Clones the queried record

        :param reset_fields: List of fields to reset in the clone
        :raise:
            :NoResults: if query returned no results
            :MultipleResults: if query returned more than one result (currently not supported)
            :UnexpectedResponse: informs the user about what likely went wrong
        :return:
            - The cloned record
        """
        reset_fields = [] if reset_fields is None else reset_fields
        if not isinstance(reset_fields, list):
            raise InvalidUsage("reset_fields must be a `list` of fields")

        try:
            response = self.get_one()
            if "sys_id" not in response:
                raise NoResults()
        except MultipleResults:
            raise MultipleResults("Cloning multiple records is not supported")
        except NoResults as e:
            e.args = ("Cannot clone a non-existing record",)
            raise

        payload = {}
        # Iterate over fields in the result
        for field in response:
            # Ignore fields in reset_fields
            if field in reset_fields:
                continue

            item = response[field]
            # Reference fields come back as a dict carrying a 'value'
            # (the referenced sys_id); keep only that value so the insert
            # payload stays flat.
            if isinstance(item, dict) and "value" in item:
                payload[field] = item["value"]
            else:
                payload[field] = item

        try:
            return self.insert(payload)
        except UnexpectedResponse as e:
            if e.status_code == 403:
                # User likely attempted to clone a record without resetting a unique field
                e.args = (
                    "Unable to create clone. Make sure unique fields has been reset.",
                )
            raise

    def attach(self, file):
        """Attaches the queried record with `file` and returns the response after validating the response

        :param file: File to attach to the record
        :raise:
            :NoResults: if query returned no results
            :MultipleResults: if query returned more than one result (currently not supported)
            :InvalidUsage: if `file` is not an existing regular file
        :return:
            - The attachment record metadata
        """
        try:
            result = self.get_one()
            if "sys_id" not in result:
                raise NoResults()
        except MultipleResults:
            raise MultipleResults(
                "Attaching a file to multiple records is not supported"
            )
        except NoResults:
            raise NoResults("Attempted to attach file to a non-existing record")

        if not os.path.isfile(file):
            raise InvalidUsage(
                "Attachment '%s' must be an existing regular file" % file
            )

        # Open inside a context manager so the handle is closed even when
        # the upload request raises (the previous code leaked it).
        with open(file, "rb") as attachment:
            response = self.session.post(
                self._get_attachment_url("upload"),
                data={
                    "table_name": self.table,
                    "table_sys_id": result["sys_id"],
                    "file_name": ntpath.basename(file),
                },
                files={"file": attachment},
                headers={"content-type": None},  # Temporarily override header
            )
        return self._get_content(response)

    def _get_content(self, response):
        """Checks for errors in the response. Returns response content, in bytes.

        :param response: response object
        :raise:
            :UnexpectedResponse: if the server responded with an unexpected response
            :NoResults: if the result set is empty and raise_on_empty is set
            :MissingResult: if the payload lacks the expected 'result' key
        :return:
            - ServiceNow response content
        """
        method = response.request.method
        self.last_response = response

        server_error = {"summary": None, "details": None}

        try:
            content_json = response.json()

            if "error" in content_json:
                e = content_json["error"]
                if "message" in e:
                    server_error["summary"] = e["message"]
                if "detail" in e:
                    server_error["details"] = e["detail"]
        except ValueError:
            # Body was not JSON; treat it as empty content below.
            content_json = {}

        if method == "DELETE":
            # Make sure the delete operation returned the expected response
            if response.status_code == 204:
                return {"success": True}
            else:
                raise UnexpectedResponse(
                    204,
                    response.status_code,
                    method,
                    server_error["summary"],
                    server_error["details"],
                )
        # Make sure the POST operation returned the expected response
        elif method == "POST" and response.status_code != 201:
            raise UnexpectedResponse(
                201,
                response.status_code,
                method,
                server_error["summary"],
                server_error["details"],
            )

        # It seems that Helsinki and later returns status 200 instead of 404 on empty result sets
        if (
            "result" in content_json and len(content_json["result"]) == 0
        ) or response.status_code == 404:
            if self.raise_on_empty is True:
                raise NoResults("Query yielded no results")
        elif "error" in content_json:
            raise UnexpectedResponse(
                200,
                response.status_code,
                method,
                server_error["summary"],
                server_error["details"],
            )

        if "result" not in content_json:
            raise MissingResult(
                "The request was successful but the content didn't contain the expected 'result'"
            )

        return content_json["result"]

    def _get_table_url(self, **kwargs):
        """Return the table API URL for this request's table."""
        return self._get_url("table", item=self.table, **kwargs)

    def _get_attachment_url(self, action):
        """Return the attachment API URL for the given action."""
        return self._get_url("attachment", item=action)

    def _get_stats_url(self):
        """Return the stats API URL for this request's table."""
        return self._get_url("stats", item=self.table)

    def _get_url(self, resource, item, sys_id=None):
        """Takes table and sys_id (if present), and returns a URL

        :param resource: API resource
        :param item: API resource item
        :param sys_id: Record sys_id
        :return:
            - url string
        """
        url_str = "%(base_url)s/%(base_path)s/%(resource)s/%(item)s" % (
            {
                "base_url": self.base_url,
                "base_path": self.base_path,
                "resource": resource,
                "item": item,
            }
        )

        if sys_id:
            return "%s/%s" % (url_str, sys_id)

        return url_str

    def _get_formatted_query(self, fields, limit, order_by, offset):
        """
        Converts the query to a ServiceNow-interpretable format

        :param fields: list of fields to request
        :param limit: maximum number of records, or None
        :param order_by: list of field names; '-' prefix means descending
        :param offset: records to skip, or None
        :raise:
            :InvalidUsage: if fields/order_by are not lists or the query
                has an unsupported type
        :return:
            - ServiceNow query (dict of request params)
        """
        if not isinstance(order_by, list):
            raise InvalidUsage("Argument order_by should be a `list` of fields")

        if not isinstance(fields, list):
            raise InvalidUsage("Argument fields should be a `list` of fields")

        if isinstance(self.query, QueryBuilder):
            sysparm_query = str(self.query)
        elif isinstance(self.query, dict):  # Dict-type query
            sysparm_query = "^".join(
                ["%s=%s" % (k, v) for k, v in six.iteritems(self.query)]
            )
        elif isinstance(self.query, six.string_types):  # String-type query
            sysparm_query = self.query
        else:
            raise InvalidUsage(
                "Query must be instance of %s, %s or %s" % (QueryBuilder, str, dict)
            )

        for field in order_by:
            if field[0] == "-":
                sysparm_query += "^ORDERBYDESC%s" % field[1:]
            else:
                sysparm_query += "^ORDERBY%s" % field

        params = {"sysparm_query": sysparm_query}
        params.update(self.request_params)

        if limit is not None:
            params.update(
                {"sysparm_limit": limit, "sysparm_suppress_pagination_header": True}
            )

        if offset is not None:
            params.update({"sysparm_offset": offset})

        if len(fields) > 0:
            params.update({"sysparm_fields": ",".join(fields)})

        return params
|
|
import os
from stone.backend import CodeBackend
from stone.ir import (
is_void_type,
is_struct_type
)
from go_helpers import (
HEADER,
fmt_type,
fmt_var,
generate_doc,
)
class GoClientBackend(CodeBackend):
    """Stone backend that emits a Go ``client.go`` per API namespace.

    For each namespace that declares at least one route it writes:
    a ``Client`` interface listing every route, an ``apiImpl`` type
    implementing each route, a per-route ``<Route>APIError`` wrapper,
    and a ``New`` constructor.  All strings emitted via ``self.emit``
    are Go source text, so their exact content is load-bearing.
    """

    def generate(self, api):
        """Entry point: emit one client file per non-empty namespace."""
        for namespace in api.namespaces.values():
            # Namespaces without routes get no client file at all.
            if len(namespace.routes) > 0:
                self._generate_client(namespace)

    def _generate_client(self, namespace):
        """Write <target>/<namespace>/client.go for one namespace."""
        file_name = os.path.join(self.target_folder_path, namespace.name,
                                 'client.go')
        with self.output_to_relative_path(file_name):
            # License/header boilerplate shared by all generated files.
            self.emit_raw(HEADER)
            self.emit()
            self.emit('package %s' % namespace.name)
            self.emit()
            self.emit('// Client interface describes all routes in this namespace')
            with self.block('type Client interface'):
                for route in namespace.routes:
                    generate_doc(self, route)
                    self.emit(self._generate_route_signature(namespace, route))
            self.emit()

            self.emit('type apiImpl dropbox.Context')
            # One implementation method (plus error wrapper) per route.
            for route in namespace.routes:
                self._generate_route(namespace, route)
            self.emit('// New returns a Client implementation for this namespace')
            with self.block('func New(c dropbox.Config) Client'):
                self.emit('ctx := apiImpl(dropbox.NewContext(c))')
                self.emit('return &ctx')

    def _generate_route_signature(self, namespace, route):
        """Return the Go method signature string for one route.

        The shape depends on the route style: download routes add a
        ``content io.ReadCloser`` result, upload routes take a
        ``content io.Reader`` argument; void arg/result types drop the
        corresponding parameter/return.
        """
        req = fmt_type(route.arg_data_type, namespace)
        res = fmt_type(route.result_data_type, namespace, use_interface=True)
        fn = fmt_var(route.name)
        style = route.attrs.get('style', 'rpc')

        arg = '' if is_void_type(route.arg_data_type) else 'arg {req}'
        ret = '(err error)' if is_void_type(route.result_data_type) else \
            '(res {res}, err error)'
        signature = '{fn}(' + arg + ') ' + ret
        if style == 'download':
            # Download routes also stream the file content back.
            signature = '{fn}(' + arg + \
                ') (res {res}, content io.ReadCloser, err error)'
        elif style == 'upload':
            # Upload routes take the file content as an extra argument.
            signature = '{fn}(' + arg + ', content io.Reader) ' + ret
            if is_void_type(route.arg_data_type):
                signature = '{fn}(content io.Reader) ' + ret
        return signature.format(fn=fn, req=req, res=res)

    def _generate_route(self, namespace, route):
        """Emit the APIError wrapper type and apiImpl method for a route."""
        out = self.emit
        fn = fmt_var(route.name)
        err = fmt_type(route.error_data_type, namespace)

        # Per-route error wrapper so callers can type-assert on failures.
        out('//%sAPIError is an error-wrapper for the %s route' %
            (fn, route.name))
        with self.block('type {fn}APIError struct'.format(fn=fn)):
            out('dropbox.APIError')
            out('EndpointError {err} `json:"error"`'.format(err=err))
        out()

        signature = 'func (dbx *apiImpl) ' + self._generate_route_signature(
            namespace, route)
        with self.block(signature):
            if route.deprecated is not None:
                # Runtime warning in the generated Go code, not here.
                out('log.Printf("WARNING: API `%s` is deprecated")' % fn)
                if route.deprecated.by is not None:
                    out('log.Printf("Use API `%s` instead")' % fmt_var(route.deprecated.by.name))
                out()

            out('cli := dbx.Client')
            out()

            # Body is assembled in four phases: request construction,
            # HTTP round-trip, response decoding, error handling.
            self._generate_request(namespace, route)
            self._generate_post()
            self._generate_response(route)
            ok_check = 'if resp.StatusCode == http.StatusOK'
            if fn == "Download":
                # Range downloads legitimately return 206.
                ok_check += ' || resp.StatusCode == http.StatusPartialContent'
            with self.block(ok_check):
                self._generate_result(route)
            self._generate_error_handling(route)

        out()

    def _generate_request(self, namespace, route):
        """Emit Go code that marshals the argument and builds the request.

        Chooses the request body and headers based on host (api/content),
        style (rpc/upload/download) and auth mode.
        """
        out = self.emit
        auth = route.attrs.get('auth', '')
        host = route.attrs.get('host', 'api')
        style = route.attrs.get('style', 'rpc')

        body = 'nil'
        if not is_void_type(route.arg_data_type):
            out('dbx.Config.LogDebug("arg: %v", arg)')
            out('b, err := json.Marshal(arg)')
            with self.block('if err != nil'):
                out('return')
            out()
            if host != 'content':
                # API-host requests carry the JSON arg as the body ...
                body = 'bytes.NewReader(b)'
        if style == 'upload':
            # ... while upload routes send the file content instead.
            body = 'content'

        headers = {}
        if not is_void_type(route.arg_data_type):
            if host == 'content' or style in ['upload', 'download']:
                # Content-host routes pass the JSON arg via header.
                headers["Dropbox-API-Arg"] = "string(b)"
            else:
                headers["Content-Type"] = '"application/json"'
        if style == 'upload':
            headers["Content-Type"] = '"application/octet-stream"'

        out('headers := map[string]string{')
        # Sorted so generated code is deterministic across runs.
        for k, v in sorted(headers.items()):
            out('\t"{}": {},'.format(k, v))
        out('}')
        if fmt_var(route.name) == "Download":
            out('for k, v := range arg.ExtraHeaders { headers[k] = v }')
        if auth != 'noauth' and auth != 'team':
            with self.block('if dbx.Config.AsMemberID != ""'):
                out('headers["Dropbox-API-Select-User"] = dbx.Config.AsMemberID')
            out()

        authed = 'false' if auth == 'noauth' else 'true'
        out('req, err := (*dropbox.Context)(dbx).NewRequest("{}", "{}", {}, "{}", "{}", headers, {})'.format(
            host, style, authed, namespace.name, route.name, body))
        with self.block('if err != nil'):
            out('return')
        out('dbx.Config.LogInfo("req: %v", req)')
        out()

    def _generate_post(self):
        """Emit the Go code that performs the HTTP round-trip."""
        out = self.emit
        out('resp, err := cli.Do(req)')
        with self.block('if err != nil'):
            out('return')
        out()
        out('dbx.Config.LogInfo("resp: %v", resp)')

    def _generate_response(self, route):
        """Emit Go code that extracts the response body.

        Download routes keep the body as the streamed content and read the
        JSON result from the Dropbox-API-Result header; everything else
        reads (and closes) the body directly.
        """
        out = self.emit
        style = route.attrs.get('style', 'rpc')
        if style == 'download':
            out('body := []byte(resp.Header.Get("Dropbox-API-Result"))')
            out('content = resp.Body')
        else:
            out('defer resp.Body.Close()')
            with self.block('body, err := ioutil.ReadAll(resp.Body);'
                            'if err != nil'):
                out('return')
        out()
        out('dbx.Config.LogDebug("body: %v", body)')

    def _generate_error_handling(self, route):
        """Emit Go code mapping HTTP error statuses to typed errors.

        409 unmarshals into the per-route APIError; 400/500 wrap the raw
        body; anything else is unmarshaled into a generic APIError.
        """
        out = self.emit
        style = route.attrs.get('style', 'rpc')
        with self.block('if resp.StatusCode == http.StatusConflict'):
            # If style was download, body was assigned to a header.
            # Need to re-read the response body to parse the error
            if style == 'download':
                out('defer resp.Body.Close()')
                with self.block('body, err = ioutil.ReadAll(resp.Body);'
                                'if err != nil'):
                    out('return')
            out('var apiError %sAPIError' % fmt_var(route.name))
            with self.block('err = json.Unmarshal(body, &apiError);'
                            'if err != nil'):
                out('return')
            out('err = apiError')
            out('return')
        out('var apiError dropbox.APIError')
        with self.block("if resp.StatusCode == http.StatusBadRequest || "
                        "resp.StatusCode == http.StatusInternalServerError"):
            out('apiError.ErrorSummary = string(body)')
            out('err = apiError')
            out('return')
        with self.block('err = json.Unmarshal(body, &apiError);'
                        'if err != nil'):
            out('return')
        out('err = apiError')
        out('return')

    def _generate_result(self, route):
        """Emit Go code that unmarshals the success body into ``res``.

        Struct results with enumerated subtypes go through an intermediate
        union type and a tag switch; plain results unmarshal directly;
        void results emit nothing.
        """
        out = self.emit
        if is_struct_type(route.result_data_type) and \
                route.result_data_type.has_enumerated_subtypes():
            out('var tmp %sUnion' % fmt_var(route.result_data_type.name, export=False))
            with self.block('err = json.Unmarshal(body, &tmp);'
                            'if err != nil'):
                out('return')
            with self.block('switch tmp.Tag'):
                for t in route.result_data_type.get_enumerated_subtypes():
                    with self.block('case "%s":' % t.name, delim=(None, None)):
                        self.emit('res = tmp.%s' % fmt_var(t.name))
        elif not is_void_type(route.result_data_type):
            with self.block('err = json.Unmarshal(body, &res);'
                            'if err != nil'):
                out('return')
        out()
        out('return')
|
|
# Copyright (c) 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.i18n import _
from sahara.plugins.cdh.v5_7_0 import plugin_utils as pu
from sahara.plugins.cdh import validation
from sahara.plugins import exceptions as ex
from sahara.plugins import utils as u
class ValidatorV570(validation.Validator):
    """Cluster-topology validator for the CDH 5.7.0 plugin.

    Each ``_*_validation`` helper checks the instance counts of one
    service's processes and raises a plugin exception on the first
    violation found — the order of checks therefore determines which
    error surfaces to the user.
    """

    # Plugin utility helpers shared by all validations.
    PU = pu.PluginUtilsV570()

    @classmethod
    def validate_cluster_creating(cls, cluster):
        """Run base validation plus every CDH-5.7.0-specific check."""
        super(ValidatorV570, cls).validate_cluster_creating(cluster)
        cls._hdfs_ha_validation(cluster)
        cls._yarn_ha_validation(cluster)
        cls._flume_validation(cluster)
        cls._sentry_validation(cluster)
        cls._solr_validation(cluster)
        cls._sqoop_validation(cluster)
        cls._hbase_indexer_validation(cluster)
        cls._impala_validation(cluster)
        cls._kms_validation(cluster)

    @classmethod
    def _hdfs_ha_validation(cls, cluster):
        """Validate HDFS HA: journal nodes (odd count >= 3), ZK, anti-affinity."""
        jn_count = cls._get_inst_count(cluster, 'HDFS_JOURNALNODE')
        zk_count = cls._get_inst_count(cluster, 'ZOOKEEPER_SERVER')
        require_anti_affinity = cls.PU.c_helper.get_required_anti_affinity(
            cluster)
        # Any journal node present means HDFS HA is being requested.
        if jn_count > 0:
            if jn_count < 3:
                raise ex.InvalidComponentCountException('HDFS_JOURNALNODE',
                                                        _('not less than 3'),
                                                        jn_count)
            if not jn_count % 2:
                # Quorum journal manager needs an odd member count.
                raise ex.InvalidComponentCountException('HDFS_JOURNALNODE',
                                                        _('be odd'), jn_count)
            if zk_count < 1:
                raise ex.RequiredServiceMissingException('ZOOKEEPER',
                                                         required_by='HDFS HA')
            if require_anti_affinity:
                if 'HDFS_SECONDARYNAMENODE' not in\
                        cls._get_anti_affinity(cluster):
                    raise ex.NameNodeHAConfigurationError(
                        _('HDFS_SECONDARYNAMENODE should be enabled '
                          'in anti_affinity.'))
                if 'HDFS_NAMENODE' not in cls._get_anti_affinity(cluster):
                    raise ex.NameNodeHAConfigurationError(
                        _('HDFS_NAMENODE should be enabled in anti_affinity.'))

    @classmethod
    def _yarn_ha_validation(cls, cluster):
        """Validate YARN HA: at most one standby RM, plus RM/ZK presence."""
        rm_count = cls._get_inst_count(cluster, 'YARN_RESOURCEMANAGER')
        zk_count = cls._get_inst_count(cluster, 'ZOOKEEPER_SERVER')
        stdb_rm_count = cls._get_inst_count(cluster, 'YARN_STANDBYRM')
        require_anti_affinity = cls.PU.c_helper.get_required_anti_affinity(
            cluster)
        if stdb_rm_count > 1:
            raise ex.InvalidComponentCountException(
                'YARN_STANDBYRM', _('0 or 1'), stdb_rm_count)
        # A standby RM present means YARN HA is being requested.
        if stdb_rm_count > 0:
            if rm_count < 1:
                raise ex.RequiredServiceMissingException(
                    'YARN_RESOURCEMANAGER', required_by='RM HA')
            if zk_count < 1:
                raise ex.RequiredServiceMissingException(
                    'ZOOKEEPER', required_by='RM HA')
            if require_anti_affinity:
                if 'YARN_RESOURCEMANAGER' not in\
                        cls._get_anti_affinity(cluster):
                    raise ex.ResourceManagerHAConfigurationError(
                        _('YARN_RESOURCEMANAGER should be enabled in '
                          'anti_affinity.'))
                if 'YARN_STANDBYRM' not in cls._get_anti_affinity(cluster):
                    raise ex.ResourceManagerHAConfigurationError(
                        _('YARN_STANDBYRM should be'
                          ' enabled in anti_affinity.'))

    @classmethod
    def _flume_validation(cls, cluster):
        """Flume agents require at least one HDFS datanode."""
        a_count = cls._get_inst_count(cluster, 'FLUME_AGENT')
        dn_count = cls._get_inst_count(cluster, 'HDFS_DATANODE')

        if a_count >= 1:
            if dn_count < 1:
                raise ex.RequiredServiceMissingException(
                    'HDFS_DATANODE', required_by='FLUME_AGENT')

    @classmethod
    def _sentry_validation(cls, cluster):
        """At most one Sentry server; it needs a datanode and ZooKeeper."""
        snt_count = cls._get_inst_count(cluster, 'SENTRY_SERVER')
        dn_count = cls._get_inst_count(cluster, 'HDFS_DATANODE')
        zk_count = cls._get_inst_count(cluster, 'ZOOKEEPER_SERVER')

        if snt_count > 1:
            raise ex.InvalidComponentCountException(
                'SENTRY_SERVER', _('0 or 1'), snt_count)
        if snt_count == 1:
            if dn_count < 1:
                raise ex.RequiredServiceMissingException(
                    'HDFS_DATANODE', required_by='SENTRY_SERVER')
            if zk_count < 1:
                raise ex.RequiredServiceMissingException(
                    'ZOOKEEPER', required_by='SENTRY_SERVER')

    @classmethod
    def _solr_validation(cls, cluster):
        """Solr servers need a datanode and ZooKeeper."""
        slr_count = cls._get_inst_count(cluster, 'SOLR_SERVER')
        zk_count = cls._get_inst_count(cluster, 'ZOOKEEPER_SERVER')
        dn_count = cls._get_inst_count(cluster, 'HDFS_DATANODE')

        if slr_count >= 1:
            if dn_count < 1:
                raise ex.RequiredServiceMissingException(
                    'HDFS_DATANODE', required_by='SOLR_SERVER')
            if zk_count < 1:
                raise ex.RequiredServiceMissingException(
                    'ZOOKEEPER', required_by='SOLR_SERVER')

    @classmethod
    def _sqoop_validation(cls, cluster):
        """At most one Sqoop server; needs datanode, nodemanager, jobhistory."""
        s2s_count = cls._get_inst_count(cluster, 'SQOOP_SERVER')
        dn_count = cls._get_inst_count(cluster, 'HDFS_DATANODE')
        hs_count = cls._get_inst_count(cluster, 'YARN_JOBHISTORY')
        nm_count = cls._get_inst_count(cluster, 'YARN_NODEMANAGER')

        if s2s_count > 1:
            raise ex.InvalidComponentCountException(
                'SQOOP_SERVER', _('0 or 1'), s2s_count)
        if s2s_count == 1:
            if dn_count < 1:
                raise ex.RequiredServiceMissingException(
                    'HDFS_DATANODE', required_by='SQOOP_SERVER')
            if nm_count < 1:
                raise ex.RequiredServiceMissingException(
                    'YARN_NODEMANAGER', required_by='SQOOP_SERVER')
            if hs_count != 1:
                raise ex.RequiredServiceMissingException(
                    'YARN_JOBHISTORY', required_by='SQOOP_SERVER')

    @classmethod
    def _hbase_indexer_validation(cls, cluster):
        """HBase indexers need datanode, ZooKeeper, Solr and HBase master."""
        lhbi_count = cls._get_inst_count(cluster, 'HBASE_INDEXER')
        zk_count = cls._get_inst_count(cluster, 'ZOOKEEPER_SERVER')
        dn_count = cls._get_inst_count(cluster, 'HDFS_DATANODE')
        slr_count = cls._get_inst_count(cluster, 'SOLR_SERVER')
        hbm_count = cls._get_inst_count(cluster, 'HBASE_MASTER')

        if lhbi_count >= 1:
            if dn_count < 1:
                raise ex.RequiredServiceMissingException(
                    'HDFS_DATANODE', required_by='HBASE_INDEXER')
            if zk_count < 1:
                raise ex.RequiredServiceMissingException(
                    'ZOOKEEPER', required_by='HBASE_INDEXER')
            if slr_count < 1:
                raise ex.RequiredServiceMissingException(
                    'SOLR_SERVER', required_by='HBASE_INDEXER')
            if hbm_count < 1:
                raise ex.RequiredServiceMissingException(
                    'HBASE_MASTER', required_by='HBASE_INDEXER')

    @classmethod
    def _impala_validation(cls, cluster):
        """Validate Impala: single catalog/statestore, impalad on every datanode."""
        ics_count = cls._get_inst_count(cluster, 'IMPALA_CATALOGSERVER')
        iss_count = cls._get_inst_count(cluster, 'IMPALA_STATESTORE')
        id_count = cls._get_inst_count(cluster, 'IMPALAD')
        dn_count = cls._get_inst_count(cluster, 'HDFS_DATANODE')
        hms_count = cls._get_inst_count(cluster, 'HIVE_METASTORE')

        if ics_count > 1:
            raise ex.InvalidComponentCountException('IMPALA_CATALOGSERVER',
                                                    _('0 or 1'), ics_count)
        if iss_count > 1:
            raise ex.InvalidComponentCountException('IMPALA_STATESTORE',
                                                    _('0 or 1'), iss_count)
        if ics_count == 1:
            datanode_ng = u.get_node_groups(cluster, "HDFS_DATANODE")
            impalad_ng = u.get_node_groups(cluster, "IMPALAD")
            # IMPALAD must be co-located with every datanode: compare the
            # node-group id sets rather than just counting instances.
            datanodes = set(ng.id for ng in datanode_ng)
            impalads = set(ng.id for ng in impalad_ng)

            if datanodes != impalads:
                raise ex.InvalidClusterTopology(
                    _("IMPALAD must be installed on every HDFS_DATANODE"))

            if iss_count != 1:
                raise ex.RequiredServiceMissingException(
                    'IMPALA_STATESTORE', required_by='IMPALA')
            if id_count < 1:
                raise ex.RequiredServiceMissingException(
                    'IMPALAD', required_by='IMPALA')
            if dn_count < 1:
                raise ex.RequiredServiceMissingException(
                    'HDFS_DATANODE', required_by='IMPALA')
            if hms_count < 1:
                raise ex.RequiredServiceMissingException(
                    'HIVE_METASTORE', required_by='IMPALA')

    @classmethod
    def _kms_validation(cls, cluster):
        """At most one Key Management Server instance is allowed."""
        kms_count = cls._get_inst_count(cluster, 'KMS')
        if kms_count > 1:
            raise ex.InvalidComponentCountException('KMS',
                                                    _('0 or 1'), kms_count)

    @classmethod
    def _get_anti_affinity(cls, cluster):
        """Return the cluster's anti-affinity process list."""
        return cluster.anti_affinity
|
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from requests import exceptions
import six
from heat.common import exception
from heat.common.i18n import _
from heat.common import template_format
from heat.common import urlfetch
from heat.engine import attributes
from heat.engine import environment
from heat.engine import properties
from heat.engine.resources import stack_resource
from heat.engine import template
def generate_class(name, template_name, env):
    """Build a TemplateResource subclass for one template file.

    The template is read from the local filesystem, parsed, and its
    parameters/outputs become the subclass' property and attribute
    schemas.
    """
    raw_template = TemplateResource.get_template_file(template_name, ('file',))
    parsed = template.Template(template_format.parse(raw_template))
    properties_schema, attributes_schema = TemplateResource.get_schemas(
        parsed, env.param_defaults)
    return type(name, (TemplateResource,), {
        'properties_schema': properties_schema,
        'attributes_schema': attributes_schema,
    })
class TemplateResource(stack_resource.StackResource):
'''
A resource implemented by a nested stack.
This implementation passes resource properties as parameters to the nested
stack. Outputs of the nested stack are exposed as attributes of this
resource.
'''
def __init__(self, name, json_snippet, stack):
    """Initialize the resource and derive schemas from its template.

    :param name: resource name
    :param json_snippet: the resource definition from the stack template
    :param stack: the parent stack this resource belongs to
    """
    self._parsed_nested = None  # cache for the parsed nested template
    self.stack = stack
    # Set by _get_resource_info()/_generate_schema() when the template
    # cannot be resolved or fetched; checked before schema generation.
    self.validation_exception = None

    tri = self._get_resource_info(json_snippet)

    # Placeholder schemas so the base class can initialize before the
    # real ones are generated below.
    self.properties_schema = {}
    self.attributes_schema = {}

    # run Resource.__init__() so we can call self.nested()
    super(TemplateResource, self).__init__(name, json_snippet, stack)
    self.resource_info = tri
    if self.validation_exception is None:
        self._generate_schema(self.t)
def _get_resource_info(self, rsrc_defn):
    """Look up template-resource registry info for this definition.

    On a failed lookup, records a validation error instead of raising;
    otherwise caches the template name, resource type and the URL
    schemes the template may be fetched from.
    """
    tri = self.stack.env.get_resource_info(
        rsrc_defn.resource_type,
        resource_name=rsrc_defn.name,
        registry_type=environment.TemplateResourceInfo)
    if tri is None:
        self.validation_exception = ValueError(_(
            'Only Templates with an extension of .yaml or '
            '.template are supported'))
        return tri

    self.template_name = tri.template_name
    self.resource_type = tri.name
    # User-provided templates may not be read from the local filesystem.
    self.allowed_schemes = (
        ('http', 'https') if tri.user_resource
        else ('http', 'https', 'file'))
    return tri
@staticmethod
def get_template_file(template_name, allowed_schemes):
    """Fetch the template body, mapping fetch errors to TemplateNotFound.

    :param template_name: URL or path of the template
    :param allowed_schemes: URL schemes permitted for the fetch
    """
    try:
        return urlfetch.get(template_name, allowed_schemes=allowed_schemes)
    except (IOError, exceptions.RequestException) as r_exc:
        msg = _('Could not fetch remote template '
                '"%(name)s": %(exc)s') % {'name': template_name,
                                          'exc': six.text_type(r_exc)}
        raise exception.TemplateNotFound(message=msg)
@staticmethod
def get_schemas(tmpl, param_defaults):
    """Return (properties_schema, attributes_schema) derived from tmpl.

    Template parameters become the property schema; template outputs
    become the attribute schema.
    """
    props = properties.Properties.schema_from_params(
        tmpl.param_schemata(param_defaults))
    attrs = attributes.Attributes.schema_from_outputs(tmpl[tmpl.OUTPUTS])
    return props, attrs
def _generate_schema(self, definition):
    """(Re)build property/attribute schemas from the nested template.

    :param definition: the resource definition whose properties are
        resolved against the regenerated schema
    """
    # Drop any cached parse so child_template() re-reads the template.
    self._parsed_nested = None
    try:
        tmpl = template.Template(self.child_template())
    except (exception.TemplateNotFound, ValueError) as download_error:
        # Record the failure and continue with an empty template so the
        # resource object stays usable; validation surfaces the error.
        self.validation_exception = download_error
        tmpl = template.Template(
            {"HeatTemplateFormatVersion": "2012-12-12"})

    # re-generate the properties and attributes from the template.
    self.properties_schema, self.attributes_schema = self.get_schemas(
        tmpl, self.stack.env.param_defaults)

    self.properties = definition.properties(self.properties_schema,
                                            self.context)
    self.attributes = attributes.Attributes(self.name,
                                            self.attributes_schema,
                                            self._resolve_attribute)
def child_params(self):
    '''
    :return: parameter values for our nested stack based on our properties
    '''
    params = {}
    for pname, pval in iter(self.properties.props.items()):
        if not pval.implemented():
            continue

        try:
            val = self.properties[pname]
        except ValueError:
            # During INIT a property may not be resolvable yet; fall
            # back to its default value instead of failing.
            if self.action == self.INIT:
                prop = self.properties.props[pname]
                val = prop.get_value(None)
            else:
                raise
        if val is not None:
            # take a list and create a CommaDelimitedList
            if pval.type() == properties.Schema.LIST:
                if len(val) == 0:
                    params[pname] = ''
                elif isinstance(val[0], dict):
                    # A list of dicts is flattened into the AWS-style
                    # '.member.<index>.<key>=<value>' comma-joined form.
                    flattened = []
                    for (count, item) in enumerate(val):
                        for (ik, iv) in iter(item.items()):
                            mem_str = '.member.%d.%s=%s' % (count, ik, iv)
                            flattened.append(mem_str)
                    params[pname] = ','.join(flattened)
                else:
                    params[pname] = ','.join(val)
            else:
                # for MAP, the JSON param takes either a collection or
                # string, so just pass it on and let the param validate
                # as appropriate
                params[pname] = val

    return params
def child_template(self):
    """Return the parsed child template, parsing and caching it on
    first use.
    """
    if self._parsed_nested:
        return self._parsed_nested
    self._parsed_nested = template_format.parse(self.template_data())
    return self._parsed_nested
def regenerate_info_schema(self, definition):
    """Re-resolve the resource info for *definition* and rebuild the
    property/attribute schemata from it.
    """
    self._get_resource_info(definition)
    self._generate_schema(definition)
def implementation_signature(self):
    """Return the implementation signature, ensuring the schemata are
    freshly generated from this resource's definition first.
    """
    self._generate_schema(self.t)
    return super(TemplateResource, self).implementation_signature()
def template_data(self):
    """Return the raw child template data, fetching and caching it.

    Lookup order (we want to have the latest possible template):
    1. look in files
    2. try download
    3. look in the db (the already-created nested stack)

    On success the data is cached back into the stack's files and the
    resource type is registered against the template name. Raises the
    download error (or a generic ValueError) if nothing was found.
    """
    reported_excp = None
    t_data = self.stack.t.files.get(self.template_name)

    if not t_data and self.template_name.endswith((".yaml", ".template")):
        try:
            t_data = self.get_template_file(self.template_name,
                                            self.allowed_schemes)
        except exception.TemplateNotFound as err:
            reported_excp = err

    if t_data is None:
        # Fall back to the template recorded in the existing nested
        # stack, if one was already created.
        if self.nested() is not None:
            t_data = jsonutils.dumps(self.nested().t.t)

    if t_data is not None:
        self.stack.t.files[self.template_name] = t_data
        self.stack.t.env.register_class(self.resource_type,
                                        self.template_name)
        return t_data

    if reported_excp is None:
        reported_excp = ValueError(_('Unknown error retrieving %s') %
                                   self.template_name)
    raise reported_excp
def _validate_against_facade(self, facade_cls):
    """Check interface compatibility between this provider and a facade.

    Ensures that every required facade property exists here (with a
    matching type), that this provider requires no property unknown to
    the facade, and that every facade attribute is present.

    :raises exception.StackValidationFailed: on any mismatch.
    """
    facade_schemata = properties.schemata(facade_cls.properties_schema)

    for n, fs in facade_schemata.items():
        if fs.required and n not in self.properties_schema:
            msg = (_("Required property %(n)s for facade %(type)s "
                     "missing in provider") % {'n': n, 'type': self.type()})
            raise exception.StackValidationFailed(message=msg)

        if n not in self.properties_schema:
            continue
        ps = self.properties_schema.get(n)
        if fs.allowed_param_prop_type() != ps.type:
            # Type mismatch
            msg = (_("Property %(n)s type mismatch between facade %(type)s"
                     " (%(fs_type)s) and provider (%(ps_type)s)") % {
                         'n': n, 'type': self.type(),
                         'fs_type': fs.type, 'ps_type': ps.type})
            raise exception.StackValidationFailed(message=msg)

    for n, ps in self.properties_schema.items():
        if ps.required and n not in facade_schemata:
            # Required property for template not present in facade
            msg = (_("Provider requires property %(n)s "
                     "unknown in facade %(type)s") % {
                         'n': n, 'type': self.type()})
            raise exception.StackValidationFailed(message=msg)

    for attr in facade_cls.attributes_schema:
        if attr not in self.attributes_schema:
            msg = (_("Attribute %(attr)s for facade %(type)s "
                     "missing in provider") % {
                         'attr': attr, 'type': self.type()})
            raise exception.StackValidationFailed(message=msg)
def validate(self):
    """Validate the template resource.

    Re-raises any error captured during schema generation, checks that
    the template data is retrievable, and, when a facade resource type
    is registered, validates interface compatibility against it.
    """
    if self.validation_exception is not None:
        msg = six.text_type(self.validation_exception)
        raise exception.StackValidationFailed(message=msg)

    try:
        self.template_data()
    except ValueError as ex:
        msg = _("Failed to retrieve template data: %s") % ex
        raise exception.StackValidationFailed(message=msg)

    cri = self.stack.env.get_resource_info(
        self.type(),
        resource_name=self.name,
        registry_type=environment.ClassResourceInfo)

    # If we're using an existing resource type as a facade for this
    # template, check for compatibility between the interfaces.
    if cri is not None and not isinstance(self, cri.get_class()):
        facade_cls = cri.get_class()
        self._validate_against_facade(facade_cls)

    return super(TemplateResource, self).validate()
def handle_adopt(self, resource_data=None):
    """Adopt an existing nested stack using the child template/params."""
    return self.create_with_template(self.child_template(),
                                     self.child_params(),
                                     adopt_data=resource_data)
def handle_create(self):
    """Create the nested stack from the child template and parameters."""
    return self.create_with_template(self.child_template(),
                                     self.child_params())
def metadata_update(self, new_metadata=None):
    '''
    Refresh the metadata if new_metadata is None
    '''
    # Only a refresh is supported; explicitly supplied metadata is a
    # no-op here.
    if new_metadata is None:
        self.metadata_set(self.t.metadata())
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
    """Update the nested stack with the current child template/params.

    The diff arguments are unused; the whole child stack is re-applied.
    """
    return self.update_with_template(self.child_template(),
                                     self.child_params())
def handle_delete(self):
    """Delete the nested stack."""
    return self.delete_nested()
def FnGetRefId(self):
    """Return the reference ID for this resource.

    Before the nested stack exists, the resource name is used. If the
    nested stack defines an ``OS::stack_id`` output that value wins;
    otherwise the nested stack's ARN identifier is returned.
    """
    if self.nested() is None:
        return six.text_type(self.name)

    if 'OS::stack_id' in self.nested().outputs:
        return self.nested().output('OS::stack_id')

    return self.nested().identifier().arn()
def FnGetAtt(self, key, *path):
    """Resolve attribute *key* (with optional sub-*path*) for this
    resource.

    ``resource.<name>[.<attr>...]`` keys are resolved against the named
    resource inside the nested stack; any other key is looked up in the
    nested stack's outputs. Returns None while no nested stack exists.
    """
    stack = self.nested()
    if stack is None:
        return None

    def _inner_resource(res_name):
        # Look the resource up in the nested stack, translating a
        # missing name into ResourceNotFound.
        if self.nested() is not None:
            try:
                return self.nested()[res_name]
            except KeyError:
                raise exception.ResourceNotFound(
                    resource_name=res_name,
                    stack_name=self.nested().name)

    # first look for explicit resource.x.y
    if key.startswith('resource.'):
        npath = key.split(".", 2)[1:] + list(path)
        target = _inner_resource(npath[0])
        if len(npath) > 1:
            return target.FnGetAtt(*npath[1:])
        return target.FnGetRefId()

    # then look for normal outputs
    if key in stack.outputs:
        return attributes.select_from_attribute(stack.output(key), path)

    # otherwise the key must be wrong.
    raise exception.InvalidTemplateAttribute(resource=self.name,
                                             key=key)
|
|
# -*- test-case-name: twisted.conch.test.test_telnet -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Telnet protocol implementation.
@author: Jean-Paul Calderone
"""
import struct
from zope.interface import implements
from twisted.internet import protocol, interfaces as iinternet, defer
from twisted.python import log
# --- LINEMODE MODE subnegotiation: mask bits for the MODE byte ---
MODE = chr(1)
EDIT = 1
TRAPSIG = 2
MODE_ACK = 4
SOFT_TAB = 8
LIT_ECHO = 16

# Characters gleaned from the various (and conflicting) RFCs. Not all of these are correct.

# --- NVT control characters ---
NULL = chr(0)            # No operation.
BEL = chr(7)             # Produces an audible or
                         # visible signal (which does
                         # NOT move the print head).
BS = chr(8)              # Moves the print head one
                         # character position towards
                         # the left margin.
HT = chr(9)              # Moves the printer to the
                         # next horizontal tab stop.
                         # It remains unspecified how
                         # either party determines or
                         # establishes where such tab
                         # stops are located.
LF = chr(10)             # Moves the printer to the
                         # next print line, keeping the
                         # same horizontal position.
VT = chr(11)             # Moves the printer to the
                         # next vertical tab stop. It
                         # remains unspecified how
                         # either party determines or
                         # establishes where such tab
                         # stops are located.
FF = chr(12)             # Moves the printer to the top
                         # of the next page, keeping
                         # the same horizontal position.
CR = chr(13)             # Moves the printer to the left
                         # margin of the current line.

# --- Negotiable option codes ---
ECHO = chr(1)            # User-to-Server: Asks the server to send
                         # Echos of the transmitted data.
SGA = chr(3)             # Suppress Go Ahead. Go Ahead is silly
                         # and most modern servers should suppress
                         # it.
NAWS = chr(31)           # Negotiate About Window Size. Indicate that
                         # information about the size of the terminal
                         # can be communicated.
LINEMODE = chr(34)       # Allow line buffering to be
                         # negotiated about.

# --- Telnet command codes (follow an IAC byte) ---
SE = chr(240)            # End of subnegotiation parameters.
NOP = chr(241)           # No operation.
DM = chr(242)            # "Data Mark": The data stream portion
                         # of a Synch. This should always be
                         # accompanied by a TCP Urgent
                         # notification.
BRK = chr(243)           # NVT character Break.
IP = chr(244)            # The function Interrupt Process.
AO = chr(245)            # The function Abort Output
AYT = chr(246)           # The function Are You There.
EC = chr(247)            # The function Erase Character.
EL = chr(248)            # The function Erase Line
GA = chr(249)            # The Go Ahead signal.
SB = chr(250)            # Indicates that what follows is
                         # subnegotiation of the indicated
                         # option.
WILL = chr(251)          # Indicates the desire to begin
                         # performing, or confirmation that
                         # you are now performing, the
                         # indicated option.
WONT = chr(252)          # Indicates the refusal to perform,
                         # or continue performing, the
                         # indicated option.
DO = chr(253)            # Indicates the request that the
                         # other party perform, or
                         # confirmation that you are expecting
                         # the other party to perform, the
                         # indicated option.
DONT = chr(254)          # Indicates the demand that the
                         # other party stop performing,
                         # or confirmation that you are no
                         # longer expecting the other party
                         # to perform, the indicated option.
IAC = chr(255)           # Data Byte 255. Introduces a
                         # telnet command.

# --- LINEMODE (RFC 1184) subnegotiation sub-commands and SLC codes ---
LINEMODE_MODE = chr(1)
LINEMODE_EDIT = chr(1)
LINEMODE_TRAPSIG = chr(2)
LINEMODE_MODE_ACK = chr(4)
LINEMODE_SOFT_TAB = chr(8)
LINEMODE_LIT_ECHO = chr(16)
LINEMODE_FORWARDMASK = chr(2)
LINEMODE_SLC = chr(3)
LINEMODE_SLC_SYNCH = chr(1)
LINEMODE_SLC_BRK = chr(2)
LINEMODE_SLC_IP = chr(3)
LINEMODE_SLC_AO = chr(4)
LINEMODE_SLC_AYT = chr(5)
LINEMODE_SLC_EOR = chr(6)
LINEMODE_SLC_ABORT = chr(7)
LINEMODE_SLC_EOF = chr(8)
LINEMODE_SLC_SUSP = chr(9)
LINEMODE_SLC_EC = chr(10)
LINEMODE_SLC_EL = chr(11)
LINEMODE_SLC_EW = chr(12)
LINEMODE_SLC_RP = chr(13)
LINEMODE_SLC_LNEXT = chr(14)
LINEMODE_SLC_XON = chr(15)
LINEMODE_SLC_XOFF = chr(16)
LINEMODE_SLC_FORW1 = chr(17)
LINEMODE_SLC_FORW2 = chr(18)
LINEMODE_SLC_MCL = chr(19)
LINEMODE_SLC_MCR = chr(20)
LINEMODE_SLC_MCWL = chr(21)
LINEMODE_SLC_MCWR = chr(22)
LINEMODE_SLC_MCBOL = chr(23)
LINEMODE_SLC_MCEOL = chr(24)
LINEMODE_SLC_INSRT = chr(25)
LINEMODE_SLC_OVER = chr(26)
LINEMODE_SLC_ECR = chr(27)
LINEMODE_SLC_EWR = chr(28)
LINEMODE_SLC_EBOL = chr(29)
LINEMODE_SLC_EEOL = chr(30)

# SLC level/flag bytes.
LINEMODE_SLC_DEFAULT = chr(3)
LINEMODE_SLC_VALUE = chr(2)
LINEMODE_SLC_CANTCHANGE = chr(1)
LINEMODE_SLC_NOSUPPORT = chr(0)
LINEMODE_SLC_LEVELBITS = chr(3)
LINEMODE_SLC_ACK = chr(128)
LINEMODE_SLC_FLUSHIN = chr(64)
LINEMODE_SLC_FLUSHOUT = chr(32)
LINEMODE_EOF = chr(236)
LINEMODE_SUSP = chr(237)
LINEMODE_ABORT = chr(238)
class ITelnetProtocol(iinternet.IProtocol):
    """The interface an application protocol must provide to sit on top
    of a telnet transport and take part in option negotiation.
    """

    def unhandledCommand(command, argument):
        """A command was received but not understood.
        """

    def unhandledSubnegotiation(bytes):
        """A subnegotiation command was received but not understood.
        """

    def enableLocal(option):
        """Enable the given option locally.

        This should enable the given option on this side of the
        telnet connection and return True. If False is returned,
        the option will be treated as still disabled and the peer
        will be notified.
        """

    def enableRemote(option):
        """Indicate whether the peer should be allowed to enable this option.

        Returns True if the peer should be allowed to enable this option,
        False otherwise.
        """

    def disableLocal(option):
        """Disable the given option locally.

        Unlike enableLocal, this method cannot fail. The option must be
        disabled.
        """

    def disableRemote(option):
        """Indicate that the peer has disabled this option.
        """
class ITelnetTransport(iinternet.ITransport):
    """The interface a telnet transport offers the application protocol:
    initiating option negotiation (do/dont/will/wont) and sending
    subnegotiation requests.
    """

    def do(option):
        """
        Indicate a desire for the peer to begin performing the given option.

        Returns a Deferred that fires with True when the peer begins performing
        the option, or fails with L{OptionRefused} when the peer refuses to
        perform it. If the peer is already performing the given option, the
        Deferred will fail with L{AlreadyEnabled}. If a negotiation regarding
        this option is already in progress, the Deferred will fail with
        L{AlreadyNegotiating}.

        Note: It is currently possible that this Deferred will never fire,
        if the peer never responds, or if the peer believes the option to
        already be enabled.
        """

    def dont(option):
        """
        Indicate a desire for the peer to cease performing the given option.

        Returns a Deferred that fires with True when the peer ceases performing
        the option. If the peer is not performing the given option, the
        Deferred will fail with L{AlreadyDisabled}. If negotiation regarding
        this option is already in progress, the Deferred will fail with
        L{AlreadyNegotiating}.

        Note: It is currently possible that this Deferred will never fire,
        if the peer never responds, or if the peer believes the option to
        already be disabled.
        """

    def will(option):
        """
        Indicate our willingness to begin performing this option locally.

        Returns a Deferred that fires with True when the peer agrees to allow us
        to begin performing this option, or fails with L{OptionRefused} if the
        peer refuses to allow us to begin performing it. If the option is
        already enabled locally, the Deferred will fail with L{AlreadyEnabled}.
        If negotiation regarding this option is already in progress, the
        Deferred will fail with L{AlreadyNegotiating}.

        Note: It is currently possible that this Deferred will never fire,
        if the peer never responds, or if the peer believes the option to
        already be enabled.
        """

    def wont(option):
        """
        Indicate that we will stop performing the given option.

        Returns a Deferred that fires with True when the peer acknowledges
        we have stopped performing this option. If the option is already
        disabled locally, the Deferred will fail with L{AlreadyDisabled}.
        If negotiation regarding this option is already in progress,
        the Deferred will fail with L{AlreadyNegotiating}.

        Note: It is currently possible that this Deferred will never fire,
        if the peer never responds, or if the peer believes the option to
        already be disabled.
        """

    def requestNegotiation(about, bytes):
        """
        Send a subnegotiation request.

        @param about: A byte indicating the feature being negotiated.
        @param bytes: Any number of bytes containing specific information
            about the negotiation being requested. No values in this string
            need to be escaped, as this function will escape any value which
            requires it.
        """
class TelnetError(Exception):
    """Base class for all telnet-related errors in this module."""
    pass
class NegotiationError(TelnetError):
def __str__(self):
return self.__class__.__module__ + '.' + self.__class__.__name__ + ':' + repr(self.args[0])
class OptionRefused(NegotiationError):
    """The peer refused to enable an option we requested."""
    pass

class AlreadyEnabled(NegotiationError):
    """Negotiation was requested for an option that is already enabled."""
    pass

class AlreadyDisabled(NegotiationError):
    """Negotiation was requested for an option that is already disabled."""
    pass

class AlreadyNegotiating(NegotiationError):
    """Negotiation about this option is already in progress."""
    pass
class TelnetProtocol(protocol.Protocol):
    """Base class for telnet application protocols.

    Provides do-nothing implementations of every L{ITelnetProtocol}
    callback; subclasses override the ones they care about.
    """
    implements(ITelnetProtocol)

    def unhandledCommand(self, command, argument):
        # Ignore commands with no installed handler.
        pass

    def unhandledSubnegotiation(self, command, bytes):
        # Ignore subnegotiations with no installed handler.
        pass

    def enableLocal(self, option):
        pass

    def enableRemote(self, option):
        pass

    def disableLocal(self, option):
        pass

    def disableRemote(self, option):
        pass
class Telnet(protocol.Protocol):
    """
    @ivar commandMap: A mapping of bytes to callables. When a
        telnet command is received, the command byte (the first byte
        after IAC) is looked up in this dictionary. If a callable is
        found, it is invoked with the argument of the command, or None
        if the command takes no argument. Values should be added to
        this dictionary if commands wish to be handled. By default,
        only WILL, WONT, DO, and DONT are handled. These should not
        be overridden, as this class handles them correctly and
        provides an API for interacting with them.

    @ivar negotiationMap: A mapping of bytes to callables. When
        a subnegotiation command is received, the command byte (the
        first byte after SB) is looked up in this dictionary. If
        a callable is found, it is invoked with the argument of the
        subnegotiation. Values should be added to this dictionary if
        subnegotiations are to be handled. By default, no values are
        handled.

    @ivar options: A mapping of option bytes to their current
        state. This state is likely of little use to user code.
        Changes should not be made to it.

    @ivar state: A string indicating the current parse state. It
        can take on the values "data", "escaped", "command", "newline",
        "subnegotiation", and "subnegotiation-escaped". Changes
        should not be made to it.

    @ivar transport: This protocol's transport object.
    """

    # One of a lot of things
    state = 'data'

    def __init__(self):
        self.options = {}
        self.negotiationMap = {}
        self.commandMap = {
            WILL: self.telnet_WILL,
            WONT: self.telnet_WONT,
            DO: self.telnet_DO,
            DONT: self.telnet_DONT}

    def _write(self, bytes):
        # Single funnel for raw writes to the transport.
        self.transport.write(bytes)

    class _OptionState:
        """
        Represents the state of an option on both sides of a telnet
        connection.

        @ivar us: The state of the option on this side of the connection.
        @ivar him: The state of the option on the other side of the
            connection.
        """
        class _Perspective:
            """
            Represents the state of an option on side of the telnet
            connection. Some options can be enabled on a particular side of
            the connection (RFC 1073 for example: only the client can have
            NAWS enabled). Other options can be enabled on either or both
            sides (such as RFC 1372: each side can have its own flow control
            state).

            @ivar state: C{'yes'} or C{'no'} indicating whether or not this
                option is enabled on one side of the connection.

            @ivar negotiating: A boolean tracking whether negotiation about
                this option is in progress.

            @ivar onResult: When negotiation about this option has been
                initiated by this side of the connection, a L{Deferred}
                which will fire with the result of the negotiation. C{None}
                at other times.
            """
            state = 'no'
            negotiating = False
            onResult = None

            def __str__(self):
                # A '*' suffix marks an in-progress negotiation.
                return self.state + ('*' * self.negotiating)

        def __init__(self):
            self.us = self._Perspective()
            self.him = self._Perspective()

        def __repr__(self):
            return '<_OptionState us=%s him=%s>' % (self.us, self.him)

    def getOptionState(self, opt):
        # Lazily create the per-option state record.
        return self.options.setdefault(opt, self._OptionState())

    # Low-level senders for the four negotiation commands.
    def _do(self, option):
        self._write(IAC + DO + option)

    def _dont(self, option):
        self._write(IAC + DONT + option)

    def _will(self, option):
        self._write(IAC + WILL + option)

    def _wont(self, option):
        self._write(IAC + WONT + option)

    def will(self, option):
        """Indicate our willingness to enable an option.
        """
        s = self.getOptionState(option)
        if s.us.negotiating or s.him.negotiating:
            return defer.fail(AlreadyNegotiating(option))
        elif s.us.state == 'yes':
            return defer.fail(AlreadyEnabled(option))
        else:
            s.us.negotiating = True
            s.us.onResult = d = defer.Deferred()
            self._will(option)
            return d

    def wont(self, option):
        """Indicate we are not willing to enable an option.
        """
        s = self.getOptionState(option)
        if s.us.negotiating or s.him.negotiating:
            return defer.fail(AlreadyNegotiating(option))
        elif s.us.state == 'no':
            return defer.fail(AlreadyDisabled(option))
        else:
            s.us.negotiating = True
            s.us.onResult = d = defer.Deferred()
            self._wont(option)
            return d

    def do(self, option):
        """Request that the peer begin performing the given option."""
        s = self.getOptionState(option)
        if s.us.negotiating or s.him.negotiating:
            return defer.fail(AlreadyNegotiating(option))
        elif s.him.state == 'yes':
            return defer.fail(AlreadyEnabled(option))
        else:
            s.him.negotiating = True
            s.him.onResult = d = defer.Deferred()
            self._do(option)
            return d

    def dont(self, option):
        """Request that the peer stop performing the given option."""
        s = self.getOptionState(option)
        if s.us.negotiating or s.him.negotiating:
            return defer.fail(AlreadyNegotiating(option))
        elif s.him.state == 'no':
            return defer.fail(AlreadyDisabled(option))
        else:
            s.him.negotiating = True
            s.him.onResult = d = defer.Deferred()
            self._dont(option)
            return d

    def requestNegotiation(self, about, bytes):
        """
        Send a negotiation message for the option C{about} with C{bytes} as the
        payload.

        @see: L{ITelnetTransport.requestNegotiation}
        """
        # Double any IAC bytes in the payload so they are not taken
        # as command introducers.
        bytes = bytes.replace(IAC, IAC * 2)
        self._write(IAC + SB + about + bytes + IAC + SE)

    def dataReceived(self, data):
        """Parse incoming bytes, splitting application data from telnet
        commands/subnegotiations via the state machine described on the
        class (states: data, escaped, command, newline, subnegotiation,
        subnegotiation-escaped).
        """
        appDataBuffer = []

        for b in data:
            if self.state == 'data':
                if b == IAC:
                    self.state = 'escaped'
                elif b == '\r':
                    self.state = 'newline'
                else:
                    appDataBuffer.append(b)
            elif self.state == 'escaped':
                if b == IAC:
                    # Escaped IAC IAC means a literal 0xff data byte.
                    appDataBuffer.append(b)
                    self.state = 'data'
                elif b == SB:
                    self.state = 'subnegotiation'
                    self.commands = []
                elif b in (NOP, DM, BRK, IP, AO, AYT, EC, EL, GA):
                    self.state = 'data'
                    if appDataBuffer:
                        self.applicationDataReceived(''.join(appDataBuffer))
                        del appDataBuffer[:]
                    self.commandReceived(b, None)
                elif b in (WILL, WONT, DO, DONT):
                    self.state = 'command'
                    self.command = b
                else:
                    raise ValueError("Stumped", b)
            elif self.state == 'command':
                self.state = 'data'
                command = self.command
                del self.command
                if appDataBuffer:
                    self.applicationDataReceived(''.join(appDataBuffer))
                    del appDataBuffer[:]
                self.commandReceived(command, b)
            elif self.state == 'newline':
                self.state = 'data'
                if b == '\n':
                    appDataBuffer.append('\n')
                elif b == '\0':
                    appDataBuffer.append('\r')
                elif b == IAC:
                    # IAC isn't really allowed after \r, according to the
                    # RFC, but handling it this way is less surprising than
                    # delivering the IAC to the app as application data.
                    # The purpose of the restriction is to allow terminals
                    # to unambiguously interpret the behavior of the CR
                    # after reading only one more byte.  CR LF is supposed
                    # to mean one thing (cursor to next line, first column),
                    # CR NUL another (cursor to first column).  Absent the
                    # NUL, it still makes sense to interpret this as CR and
                    # then apply all the usual interpretation to the IAC.
                    appDataBuffer.append('\r')
                    self.state = 'escaped'
                else:
                    appDataBuffer.append('\r' + b)
            elif self.state == 'subnegotiation':
                if b == IAC:
                    self.state = 'subnegotiation-escaped'
                else:
                    self.commands.append(b)
            elif self.state == 'subnegotiation-escaped':
                if b == SE:
                    self.state = 'data'
                    commands = self.commands
                    del self.commands
                    if appDataBuffer:
                        self.applicationDataReceived(''.join(appDataBuffer))
                        del appDataBuffer[:]
                    self.negotiate(commands)
                else:
                    self.state = 'subnegotiation'
                    self.commands.append(b)
            else:
                raise ValueError("How'd you do this?")

        if appDataBuffer:
            self.applicationDataReceived(''.join(appDataBuffer))

    def connectionLost(self, reason):
        # Fail any outstanding negotiation Deferreds on either side.
        for state in self.options.values():
            if state.us.onResult is not None:
                d = state.us.onResult
                state.us.onResult = None
                d.errback(reason)
            if state.him.onResult is not None:
                d = state.him.onResult
                state.him.onResult = None
                d.errback(reason)

    def applicationDataReceived(self, bytes):
        """Called with application-level data.
        """

    def unhandledCommand(self, command, argument):
        """Called for commands for which no handler is installed.
        """

    def commandReceived(self, command, argument):
        # Dispatch through commandMap, falling back to unhandledCommand.
        cmdFunc = self.commandMap.get(command)
        if cmdFunc is None:
            self.unhandledCommand(command, argument)
        else:
            cmdFunc(argument)

    def unhandledSubnegotiation(self, command, bytes):
        """Called for subnegotiations for which no handler is installed.
        """

    def negotiate(self, bytes):
        # First subnegotiation byte selects the handler; the rest is
        # its payload.
        command, bytes = bytes[0], bytes[1:]
        cmdFunc = self.negotiationMap.get(command)
        if cmdFunc is None:
            self.unhandledSubnegotiation(command, bytes)
        else:
            cmdFunc(bytes)

    # The telnet_WILL/WONT/DO/DONT handlers below dispatch on the
    # (state, negotiating) pair of the relevant perspective via the
    # willMap/wontMap/doMap/dontMap tables.
    def telnet_WILL(self, option):
        s = self.getOptionState(option)
        self.willMap[s.him.state, s.him.negotiating](self, s, option)

    def will_no_false(self, state, option):
        # He is unilaterally offering to enable an option.
        if self.enableRemote(option):
            state.him.state = 'yes'
            self._do(option)
        else:
            self._dont(option)

    def will_no_true(self, state, option):
        # Peer agreed to enable an option in response to our request.
        state.him.state = 'yes'
        state.him.negotiating = False
        d = state.him.onResult
        state.him.onResult = None
        d.callback(True)
        assert self.enableRemote(option), "enableRemote must return True in this context (for option %r)" % (option,)

    def will_yes_false(self, state, option):
        # He is unilaterally offering to enable an already-enabled option.
        # Ignore this.
        pass

    def will_yes_true(self, state, option):
        # This is a bogus state.  It is here for completeness.  It will
        # never be entered.
        assert False, "will_yes_true can never be entered, but was called with %r, %r" % (state, option)

    willMap = {('no', False): will_no_false, ('no', True): will_no_true,
               ('yes', False): will_yes_false, ('yes', True): will_yes_true}

    def telnet_WONT(self, option):
        s = self.getOptionState(option)
        self.wontMap[s.him.state, s.him.negotiating](self, s, option)

    def wont_no_false(self, state, option):
        # He is unilaterally demanding that an already-disabled option be/remain disabled.
        # Ignore this (although we could record it and refuse subsequent enable attempts
        # from our side - he can always refuse them again though, so we won't)
        pass

    def wont_no_true(self, state, option):
        # Peer refused to enable an option in response to our request.
        state.him.negotiating = False
        d = state.him.onResult
        state.him.onResult = None
        d.errback(OptionRefused(option))

    def wont_yes_false(self, state, option):
        # Peer is unilaterally demanding that an option be disabled.
        state.him.state = 'no'
        self.disableRemote(option)
        self._dont(option)

    def wont_yes_true(self, state, option):
        # Peer agreed to disable an option at our request.
        state.him.state = 'no'
        state.him.negotiating = False
        d = state.him.onResult
        state.him.onResult = None
        d.callback(True)
        self.disableRemote(option)

    wontMap = {('no', False): wont_no_false, ('no', True): wont_no_true,
               ('yes', False): wont_yes_false, ('yes', True): wont_yes_true}

    def telnet_DO(self, option):
        s = self.getOptionState(option)
        self.doMap[s.us.state, s.us.negotiating](self, s, option)

    def do_no_false(self, state, option):
        # Peer is unilaterally requesting that we enable an option.
        if self.enableLocal(option):
            state.us.state = 'yes'
            self._will(option)
        else:
            self._wont(option)

    def do_no_true(self, state, option):
        # Peer agreed to allow us to enable an option at our request.
        state.us.state = 'yes'
        state.us.negotiating = False
        d = state.us.onResult
        state.us.onResult = None
        d.callback(True)
        self.enableLocal(option)

    def do_yes_false(self, state, option):
        # Peer is unilaterally requesting us to enable an already-enabled option.
        # Ignore this.
        pass

    def do_yes_true(self, state, option):
        # This is a bogus state.  It is here for completeness.  It will never be
        # entered.
        assert False, "do_yes_true can never be entered, but was called with %r, %r" % (state, option)

    doMap = {('no', False): do_no_false, ('no', True): do_no_true,
             ('yes', False): do_yes_false, ('yes', True): do_yes_true}

    def telnet_DONT(self, option):
        s = self.getOptionState(option)
        self.dontMap[s.us.state, s.us.negotiating](self, s, option)

    def dont_no_false(self, state, option):
        # Peer is unilaterally demanding us to disable an already-disabled option.
        # Ignore this.
        pass

    def dont_no_true(self, state, option):
        # Offered option was refused.  Fail the Deferred returned by the
        # previous will() call.
        state.us.negotiating = False
        d = state.us.onResult
        state.us.onResult = None
        d.errback(OptionRefused(option))

    def dont_yes_false(self, state, option):
        # Peer is unilaterally demanding we disable an option.
        state.us.state = 'no'
        self.disableLocal(option)
        self._wont(option)

    def dont_yes_true(self, state, option):
        # Peer acknowledged our notice that we will disable an option.
        state.us.state = 'no'
        state.us.negotiating = False
        d = state.us.onResult
        state.us.onResult = None
        d.callback(True)
        self.disableLocal(option)

    dontMap = {('no', False): dont_no_false, ('no', True): dont_no_true,
               ('yes', False): dont_yes_false, ('yes', True): dont_yes_true}

    def enableLocal(self, option):
        """
        Reject all attempts to enable options.
        """
        return False

    def enableRemote(self, option):
        """
        Reject all attempts to enable options.
        """
        return False

    def disableLocal(self, option):
        """
        Signal a programming error by raising an exception.

        L{enableLocal} must return true for the given value of C{option} in
        order for this method to be called.  If a subclass of L{Telnet}
        overrides enableLocal to allow certain options to be enabled, it must
        also override disableLocal to disable those options.

        @raise NotImplementedError: Always raised.
        """
        raise NotImplementedError(
            "Don't know how to disable local telnet option %r" % (option,))

    def disableRemote(self, option):
        """
        Signal a programming error by raising an exception.

        L{enableRemote} must return true for the given value of C{option} in
        order for this method to be called.  If a subclass of L{Telnet}
        overrides enableRemote to allow certain options to be enabled, it must
        also override disableRemote to disable those options.

        @raise NotImplementedError: Always raised.
        """
        raise NotImplementedError(
            "Don't know how to disable remote telnet option %r" % (option,))
class ProtocolTransportMixin:
    """Mixin forwarding the ITransport API to ``self.transport``, with
    newline translation ('\\n' -> '\\r\\n') on write.
    """

    def write(self, bytes):
        self.transport.write(bytes.replace('\n', '\r\n'))

    def writeSequence(self, seq):
        self.transport.writeSequence(seq)

    def loseConnection(self):
        self.transport.loseConnection()

    def getHost(self):
        return self.transport.getHost()

    def getPeer(self):
        return self.transport.getPeer()
class TelnetTransport(Telnet, ProtocolTransportMixin):
    """
    @ivar protocol: An instance of the protocol to which this
        transport is connected, or None before the connection is
        established and after it is lost.

    @ivar protocolFactory: A callable which returns protocol instances
        which provide L{ITelnetProtocol}.  This will be invoked when a
        connection is established.  It is passed *protocolArgs and
        **protocolKwArgs.

    @ivar protocolArgs: A tuple of additional arguments to
        pass to protocolFactory.

    @ivar protocolKwArgs: A dictionary of additional arguments
        to pass to protocolFactory.
    """

    disconnecting = False

    protocolFactory = None
    protocol = None

    def __init__(self, protocolFactory=None, *a, **kw):
        Telnet.__init__(self)
        if protocolFactory is not None:
            self.protocolFactory = protocolFactory
            self.protocolArgs = a
            self.protocolKwArgs = kw

    def connectionMade(self):
        if self.protocolFactory is not None:
            self.protocol = self.protocolFactory(*self.protocolArgs, **self.protocolKwArgs)
            assert ITelnetProtocol.providedBy(self.protocol)
            try:
                factory = self.factory
            except AttributeError:
                # Not hooked up via a factory; skip factory propagation.
                pass
            else:
                self.protocol.factory = factory
            self.protocol.makeConnection(self)

    def connectionLost(self, reason):
        Telnet.connectionLost(self, reason)
        if self.protocol is not None:
            try:
                self.protocol.connectionLost(reason)
            finally:
                del self.protocol

    # Option callbacks are delegated to the wrapped application protocol.
    def enableLocal(self, option):
        return self.protocol.enableLocal(option)

    def enableRemote(self, option):
        return self.protocol.enableRemote(option)

    def disableLocal(self, option):
        return self.protocol.disableLocal(option)

    def disableRemote(self, option):
        return self.protocol.disableRemote(option)

    def unhandledSubnegotiation(self, command, bytes):
        self.protocol.unhandledSubnegotiation(command, bytes)

    def unhandledCommand(self, command, argument):
        self.protocol.unhandledCommand(command, argument)

    def applicationDataReceived(self, bytes):
        self.protocol.dataReceived(bytes)

    def write(self, data):
        # Escape literal 0xff bytes as IAC IAC before the mixin's
        # newline translation.
        ProtocolTransportMixin.write(self, data.replace('\xff','\xff\xff'))
class TelnetBootstrapProtocol(TelnetProtocol, ProtocolTransportMixin):
    """Telnet protocol that negotiates LINEMODE, NAWS, SGA and ECHO on
    connect and then hands the byte stream to a wrapped protocol built
    by ``protocolFactory``.
    """
    implements()

    protocol = None

    def __init__(self, protocolFactory, *args, **kw):
        self.protocolFactory = protocolFactory
        self.protocolArgs = args
        self.protocolKwArgs = kw

    def connectionMade(self):
        # Install subnegotiation handlers and kick off option negotiation.
        self.transport.negotiationMap[NAWS] = self.telnet_NAWS
        self.transport.negotiationMap[LINEMODE] = self.telnet_LINEMODE

        for opt in (LINEMODE, NAWS, SGA):
            self.transport.do(opt).addErrback(log.err)
        for opt in (ECHO,):
            self.transport.will(opt).addErrback(log.err)

        self.protocol = self.protocolFactory(*self.protocolArgs, **self.protocolKwArgs)

        try:
            factory = self.factory
        except AttributeError:
            # Not hooked up via a factory; skip factory propagation.
            pass
        else:
            self.protocol.factory = factory

        self.protocol.makeConnection(self)

    def connectionLost(self, reason):
        if self.protocol is not None:
            try:
                self.protocol.connectionLost(reason)
            finally:
                del self.protocol

    def dataReceived(self, data):
        self.protocol.dataReceived(data)

    def enableLocal(self, opt):
        if opt == ECHO:
            return True
        elif opt == SGA:
            return True
        else:
            return False

    def enableRemote(self, opt):
        if opt == LINEMODE:
            # Ask the peer to trap signals locally (TRAPSIG mode).
            self.transport.requestNegotiation(LINEMODE, MODE + chr(TRAPSIG))
            return True
        elif opt == NAWS:
            return True
        elif opt == SGA:
            return True
        else:
            return False

    def telnet_NAWS(self, bytes):
        # NAWS is client -> server *only*.  self.protocol will
        # therefore be an ITerminalTransport, the `.protocol'
        # attribute of which will be an ITerminalProtocol.  Maybe.
        # You know what, XXX TODO clean this up.
        if len(bytes) == 4:
            width, height = struct.unpack('!HH', ''.join(bytes))
            self.protocol.terminalProtocol.terminalSize(width, height)
        else:
            log.msg("Wrong number of NAWS bytes")

    linemodeSubcommands = {
        LINEMODE_SLC: 'SLC'}

    def telnet_LINEMODE(self, bytes):
        revmap = {}
        linemodeSubcommand = bytes[0]
        # NOTE(review): deliberately disabled — LINEMODE subnegotiation
        # parsing is incomplete.
        if 0:
            # XXX TODO: This should be enabled to parse linemode subnegotiation.
            getattr(self, 'linemode_' + self.linemodeSubcommands[linemodeSubcommand])(bytes[1:])

    def linemode_SLC(self, bytes):
        # Walk the payload in (function, value, what) triples.
        chunks = zip(*[iter(bytes)]*3)
        for slcFunction, slcValue, slcWhat in chunks:
            # Later, we should parse stuff.
            # NOTE(review): the expression below is an intentional no-op
            # placeholder; the values are computed and discarded.
            'SLC', ord(slcFunction), ord(slcValue), ord(slcWhat)
from twisted.protocols import basic
class StatefulTelnetProtocol(basic.LineReceiver, TelnetProtocol):
    """Line-oriented telnet protocol with a simple state machine: each
    received line is dispatched to ``telnet_<state>``, whose return
    value (if any) becomes the next state.
    """
    delimiter = '\n'

    state = 'Discard'

    def connectionLost(self, reason):
        basic.LineReceiver.connectionLost(self, reason)
        TelnetProtocol.connectionLost(self, reason)

    def lineReceived(self, line):
        oldState = self.state
        newState = getattr(self, "telnet_" + oldState)(line)
        if newState is not None:
            # Only adopt the returned state if the handler did not
            # already change self.state itself.
            if self.state == oldState:
                self.state = newState
            else:
                log.msg("Warning: state changed and new state returned")

    def telnet_Discard(self, line):
        # Default state: ignore all input.
        pass
from twisted.cred import credentials
class AuthenticatingTelnetProtocol(StatefulTelnetProtocol):
    """A protocol which prompts for credentials and attempts to authenticate them.
    Username and password prompts are given (the password is obscured). When the
    information is collected, it is passed to a portal and an avatar implementing
    L{ITelnetProtocol} is requested. If an avatar is returned, it connected to this
    protocol's transport, and this protocol's transport is connected to it.
    Otherwise, the user is re-prompted for credentials.
    """
    # Start in the 'User' state: the first line received is the username.
    state = "User"
    # Avatar protocol; set only after successful authentication.
    protocol = None
    def __init__(self, portal):
        self.portal = portal
    def connectionMade(self):
        self.transport.write("Username: ")
    def connectionLost(self, reason):
        StatefulTelnetProtocol.connectionLost(self, reason)
        if self.protocol is not None:
            # Tear down the authenticated avatar and run its logout callback;
            # drop both references even if either call raises.
            try:
                self.protocol.connectionLost(reason)
                self.logout()
            finally:
                del self.protocol, self.logout
    def telnet_User(self, line):
        # Remember the username; offering to ECHO makes the client stop
        # local echo, which obscures the upcoming password.
        self.username = line
        self.transport.will(ECHO)
        self.transport.write("Password: ")
        return 'Password'
    def telnet_Password(self, line):
        username, password = self.username, line
        del self.username
        def login(ignored):
            creds = credentials.UsernamePassword(username, password)
            d = self.portal.login(creds, None, ITelnetProtocol)
            d.addCallback(self._cbLogin)
            d.addErrback(self._ebLogin)
        # Re-enable client-side echo before attempting the portal login.
        self.transport.wont(ECHO).addCallback(login)
        return 'Discard'
    def _cbLogin(self, ial):
        # Portal login succeeded: wire the avatar protocol to the transport.
        interface, protocol, logout = ial
        assert interface is ITelnetProtocol
        self.protocol = protocol
        self.logout = logout
        self.state = 'Command'
        protocol.makeConnection(self.transport)
        self.transport.protocol = protocol
    def _ebLogin(self, failure):
        # Portal login failed: inform the user and restart the prompt cycle.
        self.transport.write("\nAuthentication failed\n")
        self.transport.write("Username: ")
        self.state = "User"
# Public API of this module. FIX: StatefulTelnetProtocol and
# AuthenticatingTelnetProtocol are defined here but were missing from the
# export list; adding them is backward-compatible (star-imports gain names,
# none are removed).
__all__ = [
    # Exceptions
    'TelnetError', 'NegotiationError', 'OptionRefused',
    'AlreadyNegotiating', 'AlreadyEnabled', 'AlreadyDisabled',
    # Interfaces
    'ITelnetProtocol', 'ITelnetTransport',
    # Other stuff, protocols, etc.
    'Telnet', 'TelnetProtocol', 'TelnetTransport',
    'TelnetBootstrapProtocol', 'StatefulTelnetProtocol',
    'AuthenticatingTelnetProtocol',
    ]
|
|
# -*- coding: utf-8 -*-
"""
Created on oct 20 23:15:24 2015
@author: marios
Script that makes Xgboost scikit-like.
The initial version of the script came from Guido Tapia (or such is his kaggle name!). I have modified it quite a bit though.
the github from where this was retrieved was : https://github.com/gatapia/py_ml_utils
He has done excellent job in making many commonly used algorithms scikit-like
"""
from sklearn.base import BaseEstimator, ClassifierMixin
import sys
from sklearn.cross_validation import StratifiedKFold
import xgboost as xgb
import numpy as np
from scipy.sparse import csr_matrix
class XGBoostClassifier(BaseEstimator, ClassifierMixin):
    """Scikit-learn-style wrapper around the native xgboost training API.

    With ``k_folds >= 2`` the estimator trains one booster per stratified
    fold (early-stopped on the held-out part) and averages their
    predictions at inference time; otherwise a single booster is trained
    on all the data.
    """

    def __init__(self, silent=True,
                 use_buffer=True, num_round=10, num_parallel_tree=1, ntree_limit=0,
                 nthread=None, booster='gbtree',
                 eta=0.3, gamma=0.01,
                 max_depth=6, min_child_weight=1, subsample=1,
                 colsample_bytree=1,
                 l=0, alpha=0, lambda_bias=0, objective='reg:linear',
                 eval_metric='logloss', seed=0, num_class=None,
                 max_delta_step=0, classes_=None,
                 colsample_bylevel=1.0, sketch_eps=0.1, sketch_ratio=2.0,
                 opt_dense_col=1, size_leaf_vector=0.0, min_split_loss=0.0,
                 cache_opt=1, default_direction=0, k_folds=0, early_stopping_rounds=200
                 ):
        assert booster in ['gbtree', 'gblinear']
        assert objective in ['reg:linear', 'reg:logistic',
                             'binary:logistic', 'binary:logitraw', 'multi:softmax',
                             'multi:softprob', 'rank:pairwise', 'count:poisson']
        assert eval_metric in ['rmse', 'mlogloss', 'logloss', 'error',
                               'merror', 'auc', 'ndcg', 'map', 'ndcg@n', 'map@n',
                               'kappa']
        # 'kappa' is not a native xgboost metric; the wrapper historically
        # falls back to the linear booster when it is requested.
        if eval_metric == 'kappa':
            booster = 'gblinear'
        self.silent = silent
        self.use_buffer = use_buffer
        self.num_round = num_round
        self.ntree_limit = ntree_limit
        self.nthread = nthread
        self.booster = booster
        # Parameters for the tree booster.
        self.eta = eta
        self.gamma = gamma
        self.max_depth = max_depth
        self.min_child_weight = min_child_weight
        self.subsample = subsample
        self.colsample_bytree = colsample_bytree
        self.colsample_bylevel = colsample_bylevel
        self.max_delta_step = max_delta_step
        self.num_parallel_tree = num_parallel_tree
        self.min_split_loss = min_split_loss
        self.size_leaf_vector = size_leaf_vector
        self.default_direction = default_direction
        self.opt_dense_col = opt_dense_col
        self.sketch_eps = sketch_eps
        self.sketch_ratio = sketch_ratio
        self.cache_opt = cache_opt  # BUGFIX: this ctor arg was silently dropped
        self.k_folds = k_folds
        self.k_models = []
        self.early_stopping_rounds = early_stopping_rounds
        # Parameters for the linear booster.
        self.l = l
        self.alpha = alpha
        self.lambda_bias = lambda_bias
        # Misc.
        self.objective = objective
        self.eval_metric = eval_metric
        self.seed = seed
        self.num_class = num_class
        self.n_classes_ = num_class
        self.classes_ = classes_

    def set_params(self, random_state=1):
        # NOTE: deliberately narrower than the full sklearn set_params
        # contract -- only the RNG seed is configurable, as in the original.
        self.seed = random_state

    def _scale_weights_inplace(self, X, weighting):
        """Rescale ``weighting`` so that it sums to the number of rows of ``X``.

        NOTE: mutates the caller's sequence on purpose, preserving the
        original behaviour -- ``fit`` relies on the scaled values when it
        later slices ``sample_weight`` per fold.
        """
        total = float(X.shape[0])
        weight_sum = np.sum(weighting)
        for idx in range(len(weighting)):
            weighting[idx] *= total / weight_sum
        return weighting

    def build_matrix(self, X, opt_y=None, weighting=None):
        """Build an ``xgb.DMatrix`` with optional labels and sample weights.

        Missing values are encoded as -999.0, matching the project's data
        convention.
        """
        # BUGFIX: identity checks instead of `== None`, which raises/returns
        # an elementwise array when the argument is a numpy array.
        kwargs = {'missing': -999.0}
        if opt_y is not None:
            kwargs['label'] = np.array(opt_y)
        if weighting is not None:
            kwargs['weight'] = self._scale_weights_inplace(X, weighting)
        return xgb.DMatrix(csr_matrix(X), **kwargs)

    def fit(self, X, y, sample_weight=None):
        """Train a single booster, or ``k_folds`` bagged early-stopped boosters.

        Returns ``self`` to allow sklearn-style chaining.
        """
        self.k_models = []
        X1 = self.build_matrix(X, y, weighting=sample_weight)
        param = {
            'booster': self.booster,
            'objective': self.objective,
            'bst:eta': self.eta,
            'seed': self.seed,
            'bst:max_depth': self.max_depth,
            'bst:min_child_weight': self.min_child_weight,
            'silent': 1,
            'nthread': self.nthread,
            'bst:subsample': self.subsample,
            'gamma': self.gamma,
            'colsample_bytree': self.colsample_bytree,
            'num_parallel_tree': self.num_parallel_tree,
            'colsample_bylevel': self.colsample_bylevel,
            'default_direction': self.default_direction,
            'opt_dense_col': self.opt_dense_col,
            'sketch_eps': self.sketch_eps,
            'sketch_ratio': self.sketch_ratio,
            'size_leaf_vector': self.size_leaf_vector,
        }
        # 'kappa' is computed outside xgboost, so no eval_metric is passed.
        if self.eval_metric != 'kappa':
            param['eval_metric'] = self.eval_metric
        if self.num_class is not None:
            param['num_class'] = self.num_class
        if self.k_folds < 2:
            self.bst = xgb.train(param.items(), X1, self.num_round)
        else:
            # Bagging: one early-stopped booster per stratified fold, so all
            # data is used for training at least once.
            folds = StratifiedKFold(y, n_folds=self.k_folds, shuffle=True,
                                    random_state=self.seed)
            for train_idx, valid_idx in folds:
                if sample_weight is None:  # BUGFIX: was `== None` (fails on arrays)
                    dtrain = xgb.DMatrix(X[train_idx], label=y[train_idx])
                    dvalid = xgb.DMatrix(X[valid_idx], label=y[valid_idx])
                else:
                    dtrain = xgb.DMatrix(X[train_idx], label=y[train_idx],
                                         weight=sample_weight[train_idx])
                    dvalid = xgb.DMatrix(X[valid_idx], label=y[valid_idx],
                                         weight=sample_weight[valid_idx])
                watchlist = [(dtrain, 'train'), (dvalid, 'valid')]
                gbdt = xgb.train(param.items(), dtrain, self.num_round, watchlist,
                                 verbose_eval=False,
                                 early_stopping_rounds=self.early_stopping_rounds)
                self.k_models.append(gbdt)
        return self

    def predict(self, X):
        """Predict with the single booster, or average the k-fold boosters."""
        if self.k_models is not None and len(self.k_models) < 2:
            return self.bst.predict(self.build_matrix(X))
        dtest = xgb.DMatrix(X)
        # BUGFIX: the original iterated `for k in X.shape[0]` (a TypeError on
        # an int) and never returned the averaged predictions.
        preds = None
        for gbdt in self.k_models:
            predsnew = gbdt.predict(
                dtest,
                ntree_limit=(gbdt.best_iteration + 1) * self.num_parallel_tree)
            preds = predsnew if preds is None else preds + predsnew
        return preds / float(len(self.k_models))

    def predict_proba(self, X):
        """Return class-probability estimates of shape (n_samples, n_classes)."""
        try:
            rows = X.shape[0]
        except AttributeError:  # BUGFIX: was a bare except
            rows = len(X)
        if self.k_models is not None and len(self.k_models) < 2:
            predictions = self.bst.predict(self.build_matrix(X))
        else:
            dtest = xgb.DMatrix(X)
            predictions = None
            for gbdt in self.k_models:
                predsnew = gbdt.predict(
                    dtest,
                    ntree_limit=(gbdt.best_iteration + 1) * self.num_parallel_tree)
                # BUGFIX: `predictions == None` is ambiguous for ndarrays.
                predictions = (predsnew if predictions is None
                               else predictions + predsnew)
            predictions = predictions / float(len(self.k_models))
        predictions = np.array(predictions)
        if self.objective == 'multi:softprob':
            return predictions.reshape(rows, self.num_class)
        # Binary case: stack P(class 0) and P(class 1) column-wise.
        return np.vstack([1 - predictions, predictions]).T
|
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
"""This library provides a set of classes and functions that helps train models.
## Optimizers
The Optimizer base class provides methods to compute gradients for a loss and
apply gradients to variables. A collection of subclasses implement classic
optimization algorithms such as GradientDescent and Adagrad.
You never instantiate the Optimizer class itself, but instead instantiate one
of the subclasses.
@@Optimizer
@@GradientDescentOptimizer
@@AdadeltaOptimizer
@@AdagradOptimizer
@@AdagradDAOptimizer
@@MomentumOptimizer
@@AdamOptimizer
@@FtrlOptimizer
@@ProximalGradientDescentOptimizer
@@ProximalAdagradOptimizer
@@RMSPropOptimizer
## Gradient Computation
TensorFlow provides functions to compute the derivatives for a given
TensorFlow computation graph, adding operations to the graph. The
optimizer classes automatically compute derivatives on your graph, but
creators of new Optimizers or expert users can call the lower-level
functions below.
@@gradients
@@AggregationMethod
@@stop_gradient
@@hessians
## Gradient Clipping
TensorFlow provides several operations that you can use to add clipping
functions to your graph. You can use these functions to perform general data
clipping, but they're particularly useful for handling exploding or vanishing
gradients.
@@clip_by_value
@@clip_by_norm
@@clip_by_average_norm
@@clip_by_global_norm
@@global_norm
## Decaying the learning rate
@@exponential_decay
@@inverse_time_decay
@@natural_exp_decay
@@piecewise_constant
@@polynomial_decay
## Moving Averages
Some training algorithms, such as GradientDescent and Momentum often benefit
from maintaining a moving average of variables during optimization. Using the
moving averages for evaluations often improve results significantly.
@@ExponentialMovingAverage
## Coordinator and QueueRunner
See [Threading and Queues](../../how_tos/threading_and_queues/index.md)
for how to use threads and queues. For documentation on the Queue API,
see [Queues](../../api_docs/python/io_ops.md#queues).
@@Coordinator
@@QueueRunner
@@add_queue_runner
@@start_queue_runners
## Distributed execution
See [Distributed TensorFlow](../../how_tos/distributed/index.md) for
more information about how to configure a distributed TensorFlow program.
@@Server
@@Supervisor
@@SessionManager
@@ClusterSpec
@@replica_device_setter
@@MonitoredTrainingSession
@@MonitoredSession
@@SingularMonitoredSession
@@Scaffold
@@SessionCreator
@@ChiefSessionCreator
@@WorkerSessionCreator
## Reading Summaries from Event Files
See [Summaries and
TensorBoard](../../how_tos/summaries_and_tensorboard/index.md) for an
overview of summaries, event files, and visualization in TensorBoard.
@@summary_iterator
## Training Utilities
@@global_step
@@basic_train_loop
@@get_global_step
@@assert_global_step
@@write_graph
@@SessionRunHook
@@LoggingTensorHook
@@StopAtStepHook
@@CheckpointSaverHook
@@NewCheckpointReader
@@StepCounterHook
@@NanLossDuringTrainingError
@@NanTensorHook
@@SummarySaverHook
@@GlobalStepWaiterHook
@@SessionRunArgs
@@SessionRunContext
@@SessionRunValues
@@LooperThread
"""
# pylint: enable=line-too-long
# Optimizers.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys as _sys
from tensorflow.python.ops import io_ops as _io_ops
from tensorflow.python.ops import state_ops as _state_ops
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=g-bad-import-order,unused-import
from tensorflow.python.training.adadelta import AdadeltaOptimizer
from tensorflow.python.training.adagrad import AdagradOptimizer
from tensorflow.python.training.adagrad_da import AdagradDAOptimizer
from tensorflow.python.training.proximal_adagrad import ProximalAdagradOptimizer
from tensorflow.python.training.adam import AdamOptimizer
from tensorflow.python.training.ftrl import FtrlOptimizer
from tensorflow.python.training.momentum import MomentumOptimizer
from tensorflow.python.training.moving_averages import ExponentialMovingAverage
from tensorflow.python.training.optimizer import Optimizer
from tensorflow.python.training.rmsprop import RMSPropOptimizer
from tensorflow.python.training.gradient_descent import GradientDescentOptimizer
from tensorflow.python.training.proximal_gradient_descent import ProximalGradientDescentOptimizer
from tensorflow.python.training.sync_replicas_optimizer import SyncReplicasOptimizerV2
# Utility classes for training.
from tensorflow.python.training.coordinator import Coordinator
from tensorflow.python.training.coordinator import LooperThread
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.training.queue_runner import *
# For the module level doc.
from tensorflow.python.training import input as _input
from tensorflow.python.training.input import *
# pylint: enable=wildcard-import
from tensorflow.python.training.basic_session_run_hooks import LoggingTensorHook
from tensorflow.python.training.basic_session_run_hooks import StopAtStepHook
from tensorflow.python.training.basic_session_run_hooks import CheckpointSaverHook
from tensorflow.python.training.basic_session_run_hooks import StepCounterHook
from tensorflow.python.training.basic_session_run_hooks import NanLossDuringTrainingError
from tensorflow.python.training.basic_session_run_hooks import NanTensorHook
from tensorflow.python.training.basic_session_run_hooks import SummarySaverHook
from tensorflow.python.training.basic_session_run_hooks import GlobalStepWaiterHook
from tensorflow.python.training.basic_loops import basic_train_loop
from tensorflow.python.training.device_setter import replica_device_setter
from tensorflow.python.training.monitored_session import Scaffold
from tensorflow.python.training.monitored_session import MonitoredTrainingSession
from tensorflow.python.training.monitored_session import SessionCreator
from tensorflow.python.training.monitored_session import ChiefSessionCreator
from tensorflow.python.training.monitored_session import WorkerSessionCreator
from tensorflow.python.training.monitored_session import MonitoredSession
from tensorflow.python.training.monitored_session import SingularMonitoredSession
from tensorflow.python.training.saver import Saver
from tensorflow.python.training.saver import checkpoint_exists
from tensorflow.python.training.saver import generate_checkpoint_state_proto
from tensorflow.python.training.saver import get_checkpoint_mtimes
from tensorflow.python.training.saver import get_checkpoint_state
from tensorflow.python.training.saver import latest_checkpoint
from tensorflow.python.training.saver import update_checkpoint_state
from tensorflow.python.training.saver import export_meta_graph
from tensorflow.python.training.saver import import_meta_graph
from tensorflow.python.training.session_run_hook import SessionRunHook
from tensorflow.python.training.session_run_hook import SessionRunArgs
from tensorflow.python.training.session_run_hook import SessionRunContext
from tensorflow.python.training.session_run_hook import SessionRunValues
from tensorflow.python.training.session_manager import SessionManager
from tensorflow.python.training.summary_io import summary_iterator
from tensorflow.python.training.supervisor import Supervisor
from tensorflow.python.training.training_util import write_graph
from tensorflow.python.training.training_util import global_step
from tensorflow.python.training.training_util import get_global_step
from tensorflow.python.training.training_util import assert_global_step
from tensorflow.python.pywrap_tensorflow import do_quantize_training_on_graphdef
from tensorflow.python.pywrap_tensorflow import NewCheckpointReader
# pylint: disable=wildcard-import
# Training data protos.
from tensorflow.core.example.example_pb2 import *
from tensorflow.core.example.feature_pb2 import *
from tensorflow.core.protobuf.saver_pb2 import *
# Utility op. Open Source. TODO(touts): move to nn?
from tensorflow.python.training.learning_rate_decay import *
# pylint: enable=wildcard-import
# Distributed computing support.
from tensorflow.core.protobuf.tensorflow_server_pb2 import ClusterDef
from tensorflow.core.protobuf.tensorflow_server_pb2 import JobDef
from tensorflow.core.protobuf.tensorflow_server_pb2 import ServerDef
from tensorflow.python.training.server_lib import ClusterSpec
from tensorflow.python.training.server_lib import Server
# Symbols whitelisted for export without documentation.
# Names kept in the module namespace even though they are not documented via
# the @@ markers in the module docstring; everything else undocumented is
# stripped by remove_undocumented() below.
_allowed_symbols = [
    # TODO(cwhipkey): review these and move to contrib or expose through
    # documentation.
    "generate_checkpoint_state_proto", # Used internally by saver.
    "checkpoint_exists", # Only used in test?
    "get_checkpoint_mtimes", # Only used in test?
    # Legacy: remove.
    "do_quantize_training_on_graphdef", # At least use grah_def, not graphdef.
    # No uses within tensorflow.
    "queue_runner", # Use tf.train.start_queue_runner etc directly.
    # This is also imported internally.
    # TODO(drpng): document these. The reference in howtos/distributed does
    # not link.
    "SyncReplicasOptimizerV2",
    # Protobufs:
    "BytesList", # from example_pb2.
    "ClusterDef",
    "Example", # from example_pb2
    "Feature", # from example_pb2
    "Features", # from example_pb2
    "FeatureList", # from example_pb2
    "FeatureLists", # from example_pb2
    "FloatList", # from example_pb2.
    "Int64List", # from example_pb2.
    "JobDef",
    "SaverDef", # From saver_pb2.
    "SequenceExample", # from example_pb2.
    "ServerDef",
]
# Include extra modules for docstrings because:
# * Input methods in tf.train are documented in io_ops.
# * Saver methods in tf.train are documented in state_ops.
remove_undocumented(__name__, _allowed_symbols,
                    [_sys.modules[__name__], _io_ops, _state_ops])
|
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright (c) 2015--, The Horizomer Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
# dependencies: scikit-bio >= 0.2.3, < 0.3.0
from unittest import TestCase, main
from shutil import rmtree
from tempfile import mkdtemp
from os import makedirs
from os.path import join, exists
import numpy
import numpy.testing as npt
import pandas as pd
from skbio.util import remove_files
import skbio.io
from horizomer.distance_method import (
preprocess_data,
parse_blast,
normalize_distances,
cluster_distances,
detect_outlier_genes,
launch_blast,
launch_diamond,
distance_method)
class DistanceMethodTests(TestCase):
    """ Tests for distance-method HGT detection """
    def setUp(self):
        """ Set up working directory and test files
        """
        # All fixtures live in a throwaway temp dir removed in tearDown().
        self.working_dir = mkdtemp()
        self.target_proteomes_dir = join(self.working_dir, "DB")
        if not exists(self.target_proteomes_dir):
            makedirs(self.target_proteomes_dir)
        # species 1
        self.species_1_fp = join(self.target_proteomes_dir, "species_1.fasta")
        with open(self.species_1_fp, 'w') as tmp:
            tmp.write(species_1)
        # species 2
        self.species_2_fp = join(self.target_proteomes_dir, "species_2.fasta")
        with open(self.species_2_fp, 'w') as tmp:
            tmp.write(species_2)
        # species 3
        self.species_3_fp = join(self.target_proteomes_dir, "species_3.fasta")
        with open(self.species_3_fp, 'w') as tmp:
            tmp.write(species_3)
        # species 4
        self.species_4_fp = join(self.target_proteomes_dir, "species_4.fasta")
        with open(self.species_4_fp, 'w') as tmp:
            tmp.write(species_4)
        # blast alignments (query vs. all species)
        self.blast_fp = join(self.working_dir, "blast.txt")
        with open(self.blast_fp, 'w') as tmp:
            tmp.write(blast_alignments)
        # phylip-formatted distance matrix (fixed a copy-pasted comment)
        self.phylip_fp = join(self.working_dir, "distances.txt")
        with open(self.phylip_fp, 'w') as tmp:
            tmp.write(phylip_output)
        # list of files to remove
        self.files_to_remove = [self.species_1_fp,
                                self.species_2_fp,
                                self.species_3_fp,
                                self.species_4_fp,
                                self.blast_fp,
                                self.phylip_fp]
    def tearDown(self):
        # Remove fixture files first, then the whole working directory.
        remove_files(self.files_to_remove)
        rmtree(self.working_dir)
    def assert_frames_equal(self, actual, expected, use_close=False):
        """
        Custom assert_frames_equal() function for testing pandas DataFrame.
        See
        http://nbviewer.ipython.org/gist/jiffyclub/ac2e7506428d5e1d587b
        for details.
        Compare DataFrame items by index and column and
        raise AssertionError if any item is not equal.
        Ordering is unimportant, items are compared only by label.
        NaN and infinite values are supported.
        Parameters
        ----------
        actual : pandas.DataFrame
        expected : pandas.DataFrame
        use_close : bool, optional
            If True, use numpy.testing.assert_allclose instead of
            numpy.testing.assert_equal.
        """
        if use_close:
            comp = npt.assert_allclose
        else:
            comp = npt.assert_equal
        assert (isinstance(actual, pd.DataFrame) and
                isinstance(expected, pd.DataFrame)), \
            'Inputs must both be pandas DataFrames.'
        for i, exp_row in expected.iterrows():
            assert i in actual.index, 'Expected row {!r} not found.'.format(i)
            act_row = actual.loc[i]
            # NOTE(review): Series.iteritems() and e.message below are
            # Python-2 / old-pandas idioms -- update if porting to Python 3.
            for j, exp_item in exp_row.iteritems():
                assert j in act_row.index, \
                    'Expected column {!r} not found.'.format(j)
                act_item = act_row[j]
                try:
                    comp(act_item, exp_item)
                except AssertionError as e:
                    raise AssertionError(
                        e.message + '\n\nColumn: {!r}\nRow: {!r}'.format(j, i))
    def test_preprocess_data(self):
        """ Test functionality of preprocess_data()
        """
        gene_map, ref_db, species = preprocess_data(self.working_dir,
                                                    self.target_proteomes_dir,
                                                    ['fa', 'fasta', 'faa'])
        # Bidirectional mapping between original gene IDs and
        # 'speciesIndex_geneIndex' codes.
        gene_map_exp = {'G1_SE001': '0_0', 'G1_SE002': '1_0',
                        'G1_SE003': '2_0', 'G1_SE004': '3_0',
                        '0_0': 'G1_SE001', '1_0': 'G1_SE002',
                        '2_0': 'G1_SE003', '3_0': 'G1_SE004',
                        'G2_SE001': '0_1', 'G2_SE002': '1_1',
                        'G2_SE003': '2_1', 'G2_SE004': '3_1',
                        '0_1': 'G2_SE001', '1_1': 'G2_SE002',
                        '2_1': 'G2_SE003', '3_1': 'G2_SE004',
                        'G3_SE001': '0_2', 'G3_SE002': '1_2',
                        'G3_SE003': '2_2', 'G3_SE004': '3_2',
                        '0_2': 'G3_SE001', '1_2': 'G3_SE002',
                        '2_2': 'G3_SE003', '3_2': 'G3_SE004',
                        'G4_SE001': '0_3', 'G4_SE002': '1_3',
                        'G4_SE003': '2_3', 'G4_SE004': '3_3',
                        '0_3': 'G4_SE001', '1_3': 'G4_SE002',
                        '2_3': 'G4_SE003', '3_3': 'G4_SE004',
                        'G5_SE001': '0_4', 'G5_SE002': '1_4',
                        'G5_SE003': '2_4', 'G5_SE004': '3_4',
                        '0_4': 'G5_SE001', '1_4': 'G5_SE002',
                        '2_4': 'G5_SE003', '3_4': 'G5_SE004'}
        ref_db_exp = {}
        for seq in skbio.io.read(self.species_1_fp, format='fasta'):
            ref_db_exp[seq.metadata['id']] = seq
        for seq in skbio.io.read(self.species_2_fp, format='fasta'):
            ref_db_exp[seq.metadata['id']] = seq
        for seq in skbio.io.read(self.species_3_fp, format='fasta'):
            ref_db_exp[seq.metadata['id']] = seq
        for seq in skbio.io.read(self.species_4_fp, format='fasta'):
            ref_db_exp[seq.metadata['id']] = seq
        num_species_exp = 4
        self.assertDictEqual(gene_map, gene_map_exp)
        self.assertDictEqual(ref_db, ref_db_exp)
        self.assertEqual(species, num_species_exp)
    def test_parse_blast(self):
        """ Test functionality of parse_blast()
        """
        hits_exp = {'G1_SE001': ['G1_SE001', 'G1_SE002', 'G1_SE003',
                                 'G1_SE004'],
                    'G4_SE001': ['G4_SE001', 'G4_SE002', 'G4_SE003',
                                 'G4_SE004'],
                    'G3_SE001': ['G3_SE001', 'G3_SE002', 'G3_SE003',
                                 'G3_SE004'],
                    'G5_SE001': ['G5_SE001', 'G5_SE002', 'G5_SE003',
                                 'G5_SE004'],
                    'G2_SE001': ['G2_SE001', 'G2_SE002', 'G2_SE003',
                                 'G2_SE004']}
        gene_map = {'G1_SE001': '0_0', 'G1_SE002': '1_0', 'G1_SE003': '2_0',
                    'G1_SE004': '3_0', '0_0': 'G1_SE001', '1_0': 'G1_SE002',
                    '2_0': 'G1_SE003', '3_0': 'G1_SE004', 'G2_SE001': '0_1',
                    'G2_SE002': '1_1', 'G2_SE003': '2_1', 'G2_SE004': '3_1',
                    '0_1': 'G2_SE001', '1_1': 'G2_SE002', '2_1': 'G2_SE003',
                    '3_1': 'G2_SE004', 'G3_SE001': '0_2', 'G3_SE002': '1_2',
                    'G3_SE003': '2_2', 'G3_SE004': '3_2', '0_2': 'G3_SE001',
                    '1_2': 'G3_SE002', '2_2': 'G3_SE003', '3_2': 'G3_SE004',
                    'G4_SE001': '0_3', 'G4_SE002': '1_3', 'G4_SE003': '2_3',
                    'G4_SE004': '3_3', '0_3': 'G4_SE001', '1_3': 'G4_SE002',
                    '2_3': 'G4_SE003', '3_3': 'G4_SE004', 'G5_SE001': '0_4',
                    'G5_SE002': '1_4', 'G5_SE003': '2_4', 'G5_SE004': '3_4',
                    '0_4': 'G5_SE001', '1_4': 'G5_SE002', '2_4': 'G5_SE003',
                    '3_4': 'G5_SE004'}
        hits = {}
        parse_blast(self.blast_fp, hits, gene_map)
        self.assertDictEqual(hits, hits_exp)
    def test_normalize_distances(self):
        """ Test functionality of normalize_distances()
        Phylip alignments (row IDs symbolize species_gene):
            2_1     0.000000  0.379562  0.473355  0.521700
            3_1     0.379562  0.000000  0.587981  0.660393
            0_1     0.473355  0.587981  0.000000  0.722046
            1_1     0.521700  0.660393  0.722046  0.000000
        Z-score normalized by rows:
            [[        nan -1.33276108  0.25673322  1.07602786]
             [-1.36991607         nan  0.38082407  0.989092  ]
             [-1.19162122 -0.06375679         nan  1.25537801]
             [-1.3488877   0.30650842  1.04237928         nan]]
        Re-ordered by rows and columns to correspond to ascending species names
            [[        nan  1.25537801 -1.19162122 -0.06375679]
             [ 1.04237928         nan -1.3488877   0.30650842]
             [ 0.25673322  1.07602786         nan -1.33276108]
             [ 0.38082407  0.989092   -1.36991607         nan]]
        """
        num_species = 4
        i = 0
        species_set_dict = {}
        species_set_dict_exp = {'IIII': 1}
        gene_bitvector_map = {}
        gene_bitvector_map_exp = {0: 'IIII'}
        # One gene slot, num_species x num_species distances per gene.
        full_distance_matrix = numpy.zeros(
            shape=(1, num_species, num_species), dtype=float)
        full_distance_matrix_exp = numpy.array(
            [[[numpy.nan, 1.25537801, -1.19162122, -0.06375679],
              [1.04237928, numpy.nan, -1.3488877, 0.30650842],
              [0.25673322, 1.07602786, numpy.nan, -1.33276108],
              [0.38082407, 0.989092, -1.36991607, numpy.nan]]])
        normalize_distances(phylip_fp=self.phylip_fp,
                            full_distance_matrix=full_distance_matrix,
                            num_species=num_species,
                            full_distance_matrix_offset=i,
                            species_set_dict=species_set_dict,
                            gene_bitvector_map=gene_bitvector_map)
        numpy.testing.assert_almost_equal(full_distance_matrix[0][0],
                                          full_distance_matrix_exp[0][0])
        self.assertDictEqual(species_set_dict, species_set_dict_exp)
        self.assertDictEqual(gene_bitvector_map, gene_bitvector_map_exp)
    def test_cluster_distances(self):
        """ Test functionality of cluster_distances()
        """
        species_set_dict = {'IIIIIIII': 100, 'IIOOOIII': 50, 'IIIIIII0': 10,
                            'OIOIIIII': 5, 'IIIOOIII': 8, 'OOOOOIOO': 12}
        gene_clusters_list_exp = [('IIIIIIII', ['IIIIIIII', 'IIIIIII0',
                                                'IIIOOIII', 'OIOIIIII']),
                                  ('IIOOOIII', ['IIOOOIII', 'OOOOOIOO'])]
        gene_clusters_list_act = cluster_distances(
            species_set_dict=species_set_dict, species_set_size=30,
            hamming_distance=2)
        # Order of clusters is not guaranteed, so compare membership both ways.
        self.assertTrue(len(gene_clusters_list_exp),
                        len(gene_clusters_list_act))
        for core_cluster_exp in gene_clusters_list_exp:
            self.assertTrue(core_cluster_exp in gene_clusters_list_act)
        for core_cluster_act in gene_clusters_list_act:
            self.assertTrue(core_cluster_act in gene_clusters_list_exp)
    def test_detect_outlier_genes(self):
        """ Test functionality of detect_outlier_genes()
        """
        species_set = ['IIII']
        gene_bitvector_map = {0: 'IIII', 1: 'IIII', 2: 'IIII',
                              3: 'IIII', 4: 'IIII'}
        # Five genes x 4x4 normalized distance matrices; gene 0 is the
        # planted outlier.
        full_distance_matrix = numpy.array(
            [[[numpy.nan, 1.20467207, 0.03920422, -1.24387629],
              [0.70710678, numpy.nan, -1.41421356, 0.70710678],
              [0.70710678, 1.41421356, numpy.nan, -0.70710678],
              [1.24387629, 1.20467207, 0.03920422, numpy.nan]],
             [[numpy.nan, 1.26889551, -1.175214, -0.09368151],
              [1.16820922, numpy.nan, -1.27436935, 0.10616013],
              [0.50122985, 0.89462587, numpy.nan, -1.39585572],
              [0.55177142, 0.85179386, -1.40356529, numpy.nan]],
             [[numpy.nan, 1.33958186, -1.06239803, -0.27718382],
              [1.2373387, numpy.nan, -0.02558867, -1.21175004],
              [0.284687, 1.05732936, numpy.nan, -1.34201637],
              [0.78533243, 0.62588164, -1.41121407, numpy.nan]],
             [[numpy.nan, 1.38826553, -0.92766886, -0.46059667],
              [1.15415521, numpy.nan, -1.2848518, 0.13069659],
              [0.28409367, 1.05773152, numpy.nan, -1.34182519],
              [0.26316662, 1.0717693, -1.33493592, numpy.nan]],
             [[numpy.nan, 1.25537801, -1.19162122, -0.06375679],
              [1.04237928, numpy.nan, -1.3488877, 0.30650842],
              [0.25673322, 1.07602786, numpy.nan, -1.33276108],
              [0.38082407, 0.989092, -1.36991607, numpy.nan]]])
        outlier_genes_exp = set([0])
        outlier_genes = detect_outlier_genes(
            species_set=species_set,
            gene_bitvector_map=gene_bitvector_map,
            full_distance_matrix=full_distance_matrix,
            stdev_offset=1.5,
            outlier_hgt=0.5,
            num_species=4,
            total_genes=5)
        self.assertSetEqual(outlier_genes, outlier_genes_exp)
    def test_launch_blast(self):
        """Test functionality of launch_blast()
        """
        # Expected BLAST tabular output (13 columns, incl. qcovs).
        align_exp = [{'qseqid': 'G1_SE001',
                      'sseqid': 'G1_SE002',
                      'pident': 60.92,
                      'length': 888,
                      'mismatch': 328,
                      'gapopen': 5,
                      'qstart': 1,
                      'qend': 870,
                      'sstart': 1,
                      'send': 887,
                      'evalue': 0.000000e+00,
                      'bitscore': 1098,
                      'qcovs': 100},
                     {'qseqid': 'G2_SE001',
                      'sseqid': 'G2_SE002',
                      'pident': 53.64,
                      'length': 494,
                      'mismatch': 229,
                      'gapopen': 0,
                      'qstart': 1,
                      'qend': 494,
                      'sstart': 1,
                      'send': 494,
                      'evalue': 0.000000e+00,
                      'bitscore': 566,
                      'qcovs': 100},
                     {'qseqid': 'G3_SE001',
                      'sseqid': 'G3_SE002',
                      'pident': 64.66,
                      'length': 116,
                      'mismatch': 40,
                      'gapopen': 1,
                      'qstart': 1,
                      'qend': 115,
                      'sstart': 1,
                      'send': 116,
                      'evalue': 2.9999999999999994e-56,
                      'bitscore': 164,
                      'qcovs': 100},
                     {'qseqid': 'G4_SE001',
                      'sseqid': 'G4_SE002',
                      'pident': 48.29,
                      'length': 292,
                      'mismatch': 147,
                      'gapopen': 1,
                      'qstart': 1,
                      'qend': 288,
                      'sstart': 1,
                      'send': 292,
                      'evalue': 1.9999999999999996e-106,
                      'bitscore': 305,
                      'qcovs': 100},
                     {'qseqid': 'G5_SE001',
                      'sseqid': 'G5_SE002',
                      'pident': 50.00,
                      'length': 670,
                      'mismatch': 320,
                      'gapopen': 6,
                      'qstart': 2,
                      'qend': 663,
                      'sstart': 1,
                      'send': 663,
                      'evalue': 0.000000e+00,
                      'bitscore': 674,
                      'qcovs': 99}]
        df_exp = pd.DataFrame(align_exp,
                              columns=['qseqid', 'sseqid', 'pident', 'length',
                                       'mismatch', 'gapopen', 'qstart', 'qend',
                                       'sstart', 'send', 'evalue', 'bitscore',
                                       'qcovs'])
        out_file_fp = launch_blast(self.species_1_fp,
                                   self.species_2_fp,
                                   self.working_dir)
        df_act = skbio.io.read(out_file_fp, format='blast+6',
                               into=pd.DataFrame,
                               columns=['qseqid', 'sseqid', 'pident', 'length',
                                        'mismatch', 'gapopen', 'qstart',
                                        'qend', 'sstart', 'send', 'evalue',
                                        'bitscore', 'qcovs'])
        self.assert_frames_equal(df_exp, df_act)
    def test_launch_diamond(self):
        """Test functionality of launch_diamond()
        """
        # Expected DIAMOND tabular output (12 columns, no qcovs).
        align_exp = [{'qseqid': 'G1_SE001',
                      'sseqid': 'G1_SE002',
                      'pident': 60.9,
                      'length': 888,
                      'mismatch': 328,
                      'gapopen': 5,
                      'qstart': 1,
                      'qend': 870,
                      'sstart': 1,
                      'send': 887,
                      'evalue': 0.000000e+00,
                      'bitscore': 1092.8},
                     {'qseqid': 'G2_SE001',
                      'sseqid': 'G2_SE002',
                      'pident': 53.6,
                      'length': 494,
                      'mismatch': 229,
                      'gapopen': 0,
                      'qstart': 1,
                      'qend': 494,
                      'sstart': 1,
                      'send': 494,
                      'evalue': 3.1999999999999995e-160,
                      'bitscore': 550.1},
                     {'qseqid': 'G3_SE001',
                      'sseqid': 'G3_SE002',
                      'pident': 64.7,
                      'length': 116,
                      'mismatch': 40,
                      'gapopen': 1,
                      'qstart': 1,
                      'qend': 115,
                      'sstart': 1,
                      'send': 116,
                      'evalue': 2.2999999999999996e-45,
                      'bitscore': 166.4},
                     {'qseqid': 'G4_SE001',
                      'sseqid': 'G4_SE002',
                      'pident': 48.3,
                      'length': 292,
                      'mismatch': 147,
                      'gapopen': 1,
                      'qstart': 1,
                      'qend': 288,
                      'sstart': 1,
                      'send': 292,
                      'evalue': 4.799999999999999e-84,
                      'bitscore': 296.2},
                     {'qseqid': 'G5_SE001',
                      'sseqid': 'G5_SE002',
                      'pident': 50.1,
                      'length': 669,
                      'mismatch': 319,
                      'gapopen': 6,
                      'qstart': 2,
                      'qend': 662,
                      'sstart': 1,
                      'send': 662,
                      'evalue': 1.4999999999999994e-192,
                      'bitscore': 657.9}]
        df_exp = pd.DataFrame(align_exp,
                              columns=['qseqid', 'sseqid', 'pident', 'length',
                                       'mismatch', 'gapopen', 'qstart', 'qend',
                                       'sstart', 'send', 'evalue', 'bitscore'])
        out_file_fp = launch_diamond(self.species_1_fp,
                                     self.species_2_fp,
                                     self.working_dir,
                                     tmp_dir=self.working_dir)
        df_act = skbio.io.read(out_file_fp, format='blast+6',
                               into=pd.DataFrame,
                               columns=['qseqid', 'sseqid', 'pident',
                                        'length', 'mismatch', 'gapopen',
                                        'qstart', 'qend', 'sstart', 'send',
                                        'evalue', 'bitscore'])
        self.assert_frames_equal(df_exp, df_act)
    def test_distance_method(self):
        """ Test functionality of distance_method_main()
        """
        output_hgt_fp = join(self.working_dir, "hgt_result.txt")
        distance_method(self.species_1_fp,
                        self.target_proteomes_dir,
                        self.working_dir,
                        output_hgt_fp,
                        'diamond')
        # No HGT events are expected for this fixture set.
        hgt_exp = []
        hgt_act = []
        with open(output_hgt_fp, 'r') as output_hgt_f:
            for line in output_hgt_f:
                if line.startswith('#'):
                    continue
                if line not in ['\n', '\r\n']:
                    hgt_act.append(line.strip().split()[0])
        self.assertListEqual(hgt_exp, hgt_act)
    def test_distance_method_pass_alignments(self):
        """ Test functionality of distance_method_main() with alignments
        """
        output_hgt_fp = join(self.working_dir, "hgt_result.txt")
        distance_method(self.species_1_fp,
                        self.target_proteomes_dir,
                        self.working_dir,
                        output_hgt_fp,
                        'diamond',
                        tabular_alignments_fp=self.blast_fp)
        # No HGT events are expected for this fixture set either.
        hgt_exp = []
        hgt_act = []
        with open(output_hgt_fp, 'r') as output_hgt_f:
            for line in output_hgt_f:
                if line.startswith('#'):
                    continue
                if line not in ['\n', '\r\n']:
                    hgt_act.append(line.strip().split()[0])
        self.assertListEqual(hgt_exp, hgt_act)
phylip_output = """ 4
2_1 0.000000 0.379562 0.473355 0.521700
3_1 0.379562 0.000000 0.587981 0.660393
0_1 0.473355 0.587981 0.000000 0.722046
1_1 0.521700 0.660393 0.722046 0.000000
"""
blast_alignments = """G1_SE001 G1_SE001 100.00 862 0 0 1 862 1 \
862 0.0 1803 100
G2_SE001 G2_SE001 100.00 494 0 0 1 494 1 494 0.0 1023 100
G3_SE001 G3_SE001 100.00 115 0 0 1 115 1 115 1e-85 239 100
G4_SE001 G4_SE001 100.00 288 0 0 1 288 1 288 0.0 599 100
G5_SE001 G5_SE001 100.00 663 0 0 1 663 1 663 0.0 1377 100
G1_SE001 G1_SE002 58.11 888 345 6 1 862 1 887 0.0 1048 100
G2_SE001 G2_SE002 53.64 494 229 0 1 494 1 494 0.0 566 100
G3_SE001 G3_SE002 64.66 116 40 1 1 115 1 116 3e-56 164 100
G4_SE001 G4_SE002 48.29 292 147 1 1 288 1 292 2e-106 305 100
G5_SE001 G5_SE002 50.00 670 320 6 2 663 1 663 0.0 674 99
G1_SE001 G1_SE003 66.74 869 280 4 1 862 1 867 0.0 1191 100
G2_SE001 G2_SE003 64.65 495 174 1 1 494 1 495 0.0 655 100
G3_SE001 G3_SE003 68.97 116 35 1 1 115 1 116 2e-59 172 100
G4_SE001 G4_SE003 57.40 277 113 2 1 273 1 276 1e-117 335 95
G5_SE001 G5_SE003 58.01 674 262 9 1 663 1 664 0.0 769 100
G1_SE001 G1_SE004 65.25 872 291 6 1 862 1 870 0.0 1142 100
G2_SE001 G2_SE004 59.92 494 198 0 1 494 1 494 0.0 578 100
G3_SE001 G3_SE004 66.12 121 35 2 1 115 1 121 6e-59 171 100
G4_SE001 G4_SE004 55.23 277 120 1 1 273 1 277 7e-111 318 95
G5_SE001 G5_SE004 56.06 685 270 8 1 663 1 676 0.0 738 100
G1_SE001 G1_SE004 100.00 862 0 0 1 862 1 862 0.0 1803 100
G2_SE001 G2_SE004 59.92 494 198 0 1 494 1 494 0.0 578 100
G3_SE001 G3_SE004 66.12 121 35 2 1 115 1 121 6e-59 171 100
G4_SE001 G4_SE004 55.23 277 120 1 1 273 1 277 7e-111 318 95
G5_SE001 G5_SE004 56.06 685 270 8 1 663 1 676 0.0 738 100
"""
species_1 = """>G1_SE001
FDDSSLLEIFTSNNSNSSFSEPTVQLASYAEADPVEAASLSGILGQCTRVRHMMSSVTREVMPLQSTRSAKYVGPGV
PPFATAGQGGGDEQFKMADTPCKGVKMEKLKWAEDRHKPLVFLIGDAMYLMVPAENKITQYYNGICNGAGEVWDHLF
YKAECLHCFGFVGESVAYGNNGWSVADVGTVGTKGAGYMVYESLHATIPYALNGRQTDGLRLTYEPEDGSMLAANAI
PYGCVGPDCGDIGEVQSYGQMSNLGEYHLATFKLERDKMRVSAKDAKDSEYPVDGQEGFTDSSDGKGVDVYGPGQHA
YARLVVGKRDRQHATLAEMAEDGYADKMEPRCAQQPATINYNAGEVVGEERITTDIIAREYMFTKLTWNKTSPGYNY
VGAVQSTLLDFPGLWTATNVSREEQAKIHHPEGNVPDHLFCQPNNPPRDYPAKLILFLGILTSTIKSPAETWDAGLS
GQDSKIEMVKLHPLYHIDSSYAPMLNKHSSCIGCPTPLMLPPSAGKLLMLRPHEGTTTATESESYDTGSSAKFFLCY
SPDPVEIFGVPMMQAHNYHPKSVWFHLGNVLKHLGGSKDTSWRGLIVHMPRLLLEQLDAFTELGNGNHKYDSEISND
LGTEGLVALKRRILAQAYAAPNANDFYIGHDTLCAPFIASRKIWAWGKTQVSLEKGNAWAHAVLSPWIIKKEVAQGT
AITALIKSRPIDLPGNGIIGTHHDRPIGAMMVSAKAEEALAATAALPTTALAVSYETASDARQGLIGGLHSSPQFAP
AITGLINYLIERTDNVDLHMAFYVLHVGIVPQKYLARKSTRRGTCWDMHQGCLNTACRSLPAPNAQYHIPISKTLTL
TTAMHKTCIDLAKVWLGDAGGPL
>G2_SE001
GKLKNSIIDPWPGDPFAIASSDQTAAVAIVHAASEYISHYGYGYKAQLMLKIDIQESCNANGGAGGGGCQYAWWTAW
ASIFTQSPDVSISQTRVIYFTSAALGLIGFWSMLLGGAFRGGAEAWNAKADLKQAKVKTRKSAFFNNKNELASVPDI
VLPYPADKSDSSSDMKYFGSSMSKKMIAGYTPAAARPRVTITVEELKMSAKAEYLEDLLKNSPHLHGEIGDSTEKML
LSMTAQCKCTATSEALSYKDQKGGRDAAAPKKDFHGSCGVTFPYGCCYPEKKTAADEIVNLALGVCSSNLKVLQRPG
NIKVEAYIDACVVLDGNVKTGDGESRITLDEIHPFSVLLGEGNISKKVTGTHIGSHFDSITIPIGGQFGLAGVELIT
YQADSKDAIGRAYDKKPIILWFQGVAHELGGPIVPAADETIRIPDYITFVEFKHFDPSTSVVCEDDAAKLDENDKER
ANETKVQEEHSLKAVPTRKRLGARAKSIPFEL
>G3_SE001
VVEDNNQGAPGVVQIFYGNGTLHQEDCFSGPQAIGPGDASPGTLIQVVRGRKTHTEFVNALIKGTDNAPTGERVHIQ
WGLLMPPNFLGPEVKTNLYLDKNFKCFKQFGSIQVKAS
>G4_SE001
LNMYVATSHQEFTGQLYDGKKPTPLVDSPPMNDCQRMSWLFMHTLNTRYKSNDLANGEVRLKAQKHVYVQSFKAATA
YASKVILIEVVTLEQVKSSTLALANAFEKISVAVYKQLLRYATVSETTPGSVLQVEVGARDGILFDGEMLVHSDEAN
SIWGLVLYKGSAKSKLHFGYLFPVTAVIGKVTFPKFKRHPNAGYVDGGLPALKMAFTLTFKFSSHFYPRVQDQRFKD
WINVFHVPYFWGDVKKQRALNLGSTLELLNGVVSDPCEYRLLEETGLGGKAKNAVRT
>G5_SE001
DTGFASEDYVGVEWHYTEVIVVLESPKDRRYKSTAFPKEIGCGYGTLAENRSIWERGRTEPNANELISSSPPLVFPP
IMAHAHGNLPPYGQSKWGSVTWYKLLLALIAETYAVLNVLGLTADPLPRRLGVGVTNVCGAHVLFHDEEKSKEGQTT
VKSSLLIDIEGAALKSILQYFLTEASDNGKDTKENLKPCVYEIRWMSEEGSIAVPDYLSATDFACGSGFVFLMMVNK
FGYFEIRDGQICGGEGTLLILVLGQEIPDEKYFMAKGTTFGRNDFESNAMNHQKVVMTILPNNWPISGVKTDTADQI
LDGCFGFIPLPSARKATFAVDSALGTGSHLIKRTGTNAMVIYVVIVLVCATLVPGSPNYLTGIMLSDVQLLVCDALS
DSVKWLFAPTKLLEIKPTYIVTADMHTSTETKAQKDVIGKRMDFASNGLNTRAIKIEFQLLYSMAYGFLGFLVLRAC
TCGKFAQIVNDVCATLVWGDALGYLSATNQNTLEYGTTGHSENTESFELNPYKIENQQTDEAPRIANKIALKRNGAA
AGRMAANYTLGDFYLLTEYSCNNCKVTDGAVFDYATGVERGLDRHTEVQLVTPEPLLTGEAQNKHQLAVRVGWLAYA
QFMAPPIEADVTQTSLLPASVTRGYERSGETGSGFTKTALGNEAGAQ
"""
species_2 = """>G1_SE002
FDDVSLLNIVTSDNSQTKFNVPTVQLSGVLEAEPTELAVLSAILAMCNAVRQILPGVTRDPTDLAKRRTARYVGPGV
PPFATAGQGGGDEKVDMNETPCKGFPIGQLIWAEERDKALVFLNSENVYLLVPSENKTEYLEGICNGATNVWGHLFY
RSDCLHCFALVGDSVPSGNGGWQVTDLGAVGTRATGYMIYEHLQAGIPYALDGLQTAGLRITYAPQVANMLPANAIP
YDCIGPDCGEIGEVLAYGHCSSLGEYHLESFKLERDKLKVSAKSATECEYPVGGTIQFTDTSSASGVDVYGPGHHVY
ERLVVAEKDHQLASFAELADDGHADKVDEACAEGSATIEYSKGEEKGEDVIPTTTLFAKTYMTKTVQRGKTSPGYNY
AGTVKSTLLAMPGMWVADNIAYEEQAKIHHPQGNVPNHLFCNPNNLPRDYPAKLILFLSILTAEIKSPMAVWTAGLS
GQDNRIRLIKLEALWHIDVHCAAPKPLPAPSMYLPCLKKSSIPLGVPDELMLPKATGKLLMLRTSHEKSKIATEFIA
YDSNMSVKFFLCYAPDQVEIYGEPFVQSQKSTPKAVWFHLGAFLKHLAGSKDKAWKGLIVHMPRILIEKLECFYDLG
NGNHNYDSEISNDLGTEGLVTLARRILAEAYAAPNDSEFYIGQDTLCSPFVASRKAWSWGRIQVSLEKGDEHKNAVL
SPWRIKKEVAMGAPITSLKKSGPIGLPANGIVGSHHDKPIKERVVSANATEALAALGALARTQIAASTQIASQEREG
KYGALQMTPKFAYSITGSISYPIERTDKVDILMARYVLHEGIIEQSYLARGSANRGLCWVYGQGCLNTACSPTGPTL
PAPNAQYHLPLSSTLVTESAMGKTCIDPVKGWPGDAGGPL
>G2_SE002
GKLKDSFFHPWPTDPLATSRTSQTSALAIIEMPSSYITHYGYGFWFHKMLKYNFSDSCQAKGGAGDGGCNRDWWTSW
ADCFQQNPDISVAQSRVMYVESAALGLMGFWFLHLGGAFKGGEEKWHVKTDLRCSNVAPKSAGFLKDKQRLASFPDI
RTPKGKDKSDTSTNMNYFGSLLSQRMISGYTPKAGRPRITLIVEELKMAKKVKYLTDFLKMIPHLHHANGDHEEKEL
LGLTPQAKCSATNQFSCFQHSKTGTDPAALKMAFNGSCGFTLSYQACFEYHASAGSAILHITVQICPSGLKVLKRQG
AVKVAASAEYAVVLDGLSKYYDGSSRIIADEHKPLQVLLGAGTLNNGVMGTHWGSHVESMTIAIGGEFGLASVELSS
YQADSCNALGSAYDNKPIILWFQAVAHNLGGPMIPSNNATVRIPQYVSFVEFEHFNPSTGVVCQDDQNRLDQDDRER
GDREGVQESHGLKALPTLKRLSKQALSIAFDL
>G3_SE002
VVEDNSQGAPGVVSVFFGNGTLHQADCFLGPQAVGGGDTHPGTLVQVIRGRKAHTELVNALVGTSNNAPTGDQVHIQ
WGISVTSEPMGPEASNKLFKIDKNFRCFKQFGSAEVWTS
>G4_SE002
MNMYIATADMQFTGKFHDGEKSAPALDAPALKEDEMFQWIFTHKKSTKYDSADLLKGEVQLKDRKHVYIDDFTADAA
FSSKVFVLEVASSKQTKTAALGLRMVLEKSNIAVVKERLRYATVYDKTRGTLLIVGVGYRNGVLFDGEMLVHNEEGN
AIWGLVLAKGDASTLMHVGYIVASASVIVSVTFRKFNRKPNDGYSDGGLPTLKASFSWTFRFCNHFWTEIFDQRLTR
QVQDIIAVISEPFYYSDAKRERKLHLGNTLKVLSGVVSDPCEFKLMDESKLAGAQKTLART
>G5_SE002
TGVESDSYIGEDPHYSNIVTVIDNPNEQKYKTNAFPQSMGCGYGSVAESHKIFGRGRHEPHLKHVMYSSPPFIFPPI
LSTANGNEPAYGQSQRGSAAWYKWLSKVSNVKIPLISSSSAALLVLGLTSLAVPKGLGSGWTAGCGNFVIFHGEEGN
EEGQATVASDLLIGVERGVLRAVLGYYLTESSDNKKDTEEDLRPCIYIIRWPSREGLQSVRNIVKATESAVTPGIVF
FMMISKFATFILGDGKVCGGAGMLLINILGGEIPEVKYFQAKGTTFGYGGFATGSMDHDNTVTVPPNNWPMTGGLTS
IAFQLLSDTFGFVPKPIAQRATFALEEELGTNAQSIKTTDTKALVIYVVGLIVCASLTPGQSHVLHGIILSDVRLVV
CDAASRGVQYAPTRELEIKPTYIFPTDSHESMATQAKTELLGAQGEFAANGLGIEKEHQSDYSFAYGFLGFISFRAG
TCGKFAEIVTDTCKSMVFGAQLRTLWASKETTLEYATDGHTAQSESWPLGPFKVEKRSTDEAKSVATNVGLKVNGED
AARETEEYRLGDFYLLTEYVDNCKVTEGKWLDYASSVEKGSDRHMKVQMIAPKPLISGKGRAGSQASNRVGWLEYRN
HMASPLESEVTRSHLDGACVRRGYDRVGEMGSGLTKNSLNISEAAAH
"""
species_3 = """>G1_SE003
FDDISLLEIVTSDNSNSSLSVATVQLMTYAEALPVEAASLSGQLAQCTTVRHIVSSVSRDVMPLQSTRSARYVGPEV
PPFKTAGQGGGDEEFNMSESPCKGLPMDKLKWAEERHKALVFLLGDAMFLLVPAENKTKWYKGICNGSGEVWDHLFY
KSECLHCFALVGMSVAYGNNGLQVAHVGTVGTKGAGYMIFEWLGAAIPYALNGLQTDGLRITYEPQIGAMLLANIPY
GCIGPDCGDVGEVHNYGQCSNLREYHLATKKLERDKMRVTAKDAKECEYPVEGQEGFTDSSDGSGVDVFGPGSHAFA
RLVVGERDHQHATLAELAEDGYADKMENTCAQGPGTINYNAGEEVGEEVIPTDIIARTYMLKKLQRNKTSPGYNYVG
TVRSTLLAMPGLWTATEVASDEQAKIHHPQGNVPDHLLCKPNSPKQDYPAKLIVFLGILTSTITSPAKVWDAGLSGQ
EDRIELIKLEPLYHINIDYAPLLQRHSICVGCPTPLMVEKSAGKLLMLRTPHEGSTTATEFESYDTGSSVKFFLCTS
PDAIEIYGVPLLQSHDSHPKSVWFHLGNFLKHLGGTKDTSWKGLIVHMPRFVLEKLEAFTQLGKGNHKYDSEISNDL
GTEGLIALTRLILAEASAAPQLNDVYIGQDTLCKPFIASRKAWAWGKIQISLEKGNAWKHSVLSPWIIKKEVAQGAA
VTAKTKSGPIDLPGNGIVGPHKDKPIDCRMVSAHAEEALAALAALPRTTYAVSTETAREAREGLYGALHMSPQFAPA
ITGLLNYIIERTDDVDLLMAFYVLHVGIIEQLYLARKSTNRGLCWDFGQGCLNHACLPAPNAQFHVPISKTLTLETA
LHKTCIDLLKGWLGDSSGAL
>G2_SE003
GKLKDSFFDPWPGDPLATARSHQTAALDIVHAASSYITHYGYGYKSHKMLKYDVSDSCEAKGGAGGGGCQHDGWTTW
AAIFTQSPDISIAQNRVIYFETAALGMMGFWFMELGGVFRGGEEAWTVRAELKCANGAPKKATFFTDKQKLASVPDI
VTPKGVSKFDSSTNMKYMGSVMSKKGLSGYIPAAGRPRITMEVEELKWASKAEYLEDFLKMSPVLHGEIGDHTKKEL
IGLTPQCKCEKTSQALSFKDQKSGVDPAALKKAFGHQSCGISFSYGACFEEKKAGADQIVHLSVQVCSNYLKVLKKQ
HAIKVEAYFESCVELDGLSKYYDGRSPVIIDEHRPLSVLLGAGQLTRKVTGTHIGTHISSMTIAIGGQFGLASVELW
NYQADACDAIGSAYDSEPIILWFQGAAHPLGGPMIPANDETVRVADYVTFVEFEHYDPHTGIVTEDDAAKLDQSERD
RGDETKVQEAHSLEAVPTLKRLGAQAKSVPFEL
>G3_SE003
VVEDNNQGAPGVVYVFHGNGTLHDEDCFTSPRAIGPPDSHPGTLVKVIHGRKAHTEFVNALIGGSENAPTGDRVHIQ
RGIVMPPEPLGPEGQANLDGLDKNFRCLKQLGTAEVRAS
>G4_SE003
LNMYVAKADVQFTGQLHDGKKPSPLFSAPALNDDERLEWVFTHIQSTRYNHGDLQKGQVQLKVRKHVYIQEFEADSA
FSAKIIVLEVKSLESKSAALALKAAFELSNVAVYKQRLRYAKVYAETRGVILLVQVGAREGILFDGAMLIHDDEADV
IVGLALHKGSAQSIMHIGLMFPAAAVIGKVTYCKFTRKPNDGYVNGGLPALKMAFPLSFKFSDHFFDEIQDQAFIRF
VKDWIAIVQVSYFYGDIQKQRKTHLGSTIELLSGVVSDPCEFKFLYFVLLHLRLAITFSEKQLGSLDLLSKMPTLID
ETNLAGADKNLAKT
>G5_SE003
NTGAESVPYVGEEWKYTNVVAILENPESQYYKSSAYPKEIGCAYGGRAPSHSIWERGRHEPNADHLLASSSPLIFAP
VAAHARGDEPPYGQSKWGTLTIHKWLAKTDSKLKLLLLSATYETFCVLGLTAAAVPKDLGKGVTNVCGNVVLFHDAK
GSVEGECTIKSSLLIGIEGAALREILEYFLKQASDKKKDTNTNLKPCLYIIRWASDEGLISVTNYLKATESAIVCGF
VFFMMISKLIYFVFKDGKVCGGEGMVLILILNGEIEEVKYFVAKGTTFGRNGFATNCLSYQGVMTAIPNEWPITGGL
TNTAFQLLDGSFGGVPLPTARKATFANDTSRGTNAQIIKTTDTRSELVIYVVKLIVCSTMNPGQPNLLHGIMLSDNR
LVVCNAASSGVKYTPTKLLEIKPSYYLPGDSHQSTKTKSQKEVIGLRLEFAANGLSIEVEFALSYSFGYGFLGILSL
RAANCGKFAAIVNDTCKTNTWRDQLNVIWAPAEGTLQLATTGQSENTESFDLGPYKVENKHSDEAPRIASKIGLKTN
GIDAGREPEEYAIRGDYYLLTDYCSNLEVTYGKVFDFAAGTEKGLDRHMEVQLITPSPMLSGKGKDQSVRAGWLAYA
NFMAPPLKREVTQSSLDGASVTRGYECHGEMGLSLTTTSLGITERGAQ
"""
species_4 = """>G1_SE004
FDDSSLLEIFTSNNSNSSFSEPTVQLASYAEADPVEAASLSGILGQCTRVRHMMSSVTREVMPLQSTRSAKYVGPGV
PPFATAGQGGGDEQFKMADTPCKGVKMEKLKWAEDRHKPLVFLIGDAMYLMVPAENKITQYYNGICNGAGEVWDHLF
YKAECLHCFGFVGESVAYGNNGWSVADVGTVGTKGAGYMVYESLHATIPYALNGRQTDGLRLTYEPEDGSMLAANAI
PYGCVGPDCGDIGEVQSYGQMSNLGEYHLATFKLERDKMRVSAKDAKDSEYPVDGQEGFTDSSDGKGVDVYGPGQHA
YARLVVGKRDRQHATLAEMAEDGYADKMEPRCAQQPATINYNAGEVVGEERITTDIIAREYMFTKLTWNKTSPGYNY
VGAVQSTLLDFPGLWTATNVSREEQAKIHHPEGNVPDHLFCQPNNPPRDYPAKLILFLGILTSTIKSPAETWDAGLS
GQDSKIEMVKLHPLYHIDSSYAPMLNKHSSCIGCPTPLMLPPSAGKLLMLRPHEGTTTATESESYDTGSSAKFFLCY
SPDPVEIFGVPMMQAHNYHPKSVWFHLGNVLKHLGGSKDTSWRGLIVHMPRLLLEQLDAFTELGNGNHKYDSEISND
LGTEGLVALKRRILAQAYAAPNANDFYIGHDTLCAPFIASRKIWAWGKTQVSLEKGNAWAHAVLSPWIIKKEVAQGT
AITALIKSRPIDLPGNGIIGTHHDRPIGAMMVSAKAEEALAATAALPTTALAVSYETASDARQGLIGGLHSSPQFAP
AITGLINYLIERTDNVDLHMAFYVLHVGIVPQKYLARKSTRRGTCWDMHQGCLNTACRSLPAPNAQYHIPISKTLTL
TTAMHKTCIDLAKVWLGDAGGPL
>G2_SE004
GKLKDSFFDPRPGNPLAVARSHQKAAHAIIHAASNSITHYGYGFRSHKMLKYDVSDAVEAKGGAGGGGCQHDRWKTW
AEIFTQSSDISIAQSRVVYFESAALGLMGFWFMHLGGAFHGGEKAWNVKADLKCANVAPKKATIGRWEVPREGVEDY
FTDEKKQAVFSSTDMKYMGSVMAKKGMSGYTPKAGSRRICVSVEEIKMASDAEYLEEFLKQSPHLRGEIGDHTKKQL
VGMNPQCNCSRTSKALSFKEQSGGVDPAELKKPFHGSCGVTFSYGACFEEKKSGADEIVELQVQTCVSHLKVLKREG
ALKVEAYIESCVVLDGLSKYYDGRSCVVVDEHRPLRVLLGEGTKSKKVTGTHIGTHIKSMTIAIGGQFGLASVESTS
YTADVCDAIGSAYDSKPIVLWFEGAEKEQGGTVIPSNDETARLPDYVTFVEFKHFDPSTGLLCDGVAAKLDQDEKER
GSETKVQEGHALSAVSTLKHLGAQAKSIPFEL
>G3_SE004
VVEDNNQGAPGVVSVFYGDSQLHQEDCFTSPKAIGPGDRHPGTLVQVIRGRKVHTEFTNALIGATEGARTGDRVHIQ
WGIIMEGNAAPPEPLGPESNSNLFGVDKNFRCFKQLGSAEARAS
>G4_SE004
MNMYVATSDIQFSGQVHEGKRPSPLFDSPALGDDKRLRCVVTHIQSTRWDSGDLQKGEVHLKARKHSYIQTLEADSS
FSAKVTVLDIESLSQSKSAALALRARLEKGDVAVYAQRLKSATVYAKTRGTLLLVEVGARMGILYDGEFLIHADEAN
AAVGLVLHKGSAQSVMPIGYLFPPAAVIGKVTFCKFTRAPNDGYVDGELPALKMSFALSFKFTSYFFPEVQDQSFNR
FVKNWIAIVQVAYFYGDIQRQRRTHLPDTIELLSGVVIDPCEGHLLYFVLLHLRLDITYVESRMGSLKQLTKMSTLI
DESNLSGSEKNLTST
>G5_SE004
DTGVESVEYVGEEWHYTTVVPDLENPEKENYKSSTFPKSIGCGFGNLAQSHTIGERGRHEPNQDHLLAQSPPLIFPP
VLAHAKGNEPPYGQSQWGVATWYKWLAKATSKLKLPLIASTYVLLLVLRFSALALPKDLGKGLANVCGNAVLFHDAK
GREENQAIIITSLLITIEGQALREIKDYFLTSAPTNQKDTDTNLQPCVYVMRRASEEGLISVTNYLKATASAIVCGF
VFFMMLAAFVYFKCESGKVCGREGMLQILILGREIWDGKYFLAKKTTFGTNGFATQCMDYQRVLTPLPNEQAAAPGF
CGKDHSWPINAGRTNTAFQLLDGVFGFRSLPKARKATFAVDTALGTNAELIKTADTKSLVIYLVKLITCATLMPGQP
NLLHGIMLCDTRLVVCDAASSGVKFAPCKLLEIKPNYMLPADSHTSTKTKSQKEVLGLRVEFADNGLSIQVEFALSY
SFGYGFLGFLSLRAGHCGTFAEAVNDACETIVWRDRKGVVWATNENSCAVTGHSTDTESFKLGTYNVENNHTDEAPR
IANKIGLKVTGIAAGREPEEYSIGDFYLVTNGCNNVQVIHGKIFDFASGIESGLDRHMEAQLVTPKCLLTGAGKAQV
QLSIRVGWLMYANLMAPPLKEDVHQDSLDGVSVTRGYECSGEMGLGLTETSMGITDAGAH
"""
# Script entry point: run the unittest main() imported/defined earlier in
# this module.
if __name__ == '__main__':
    main()
|
|
#
# epydoc package file
#
# A python documentation Module
# Edward Loper
#
# $Id: __init__.py,v 1.10 2004/03/09 05:02:50 edloper Exp $
#
"""
Markup language support for docstrings. Each submodule defines a
parser for a single markup language. These parsers convert an
object's docstring to a L{ParsedDocstring}, a standard intermediate
representation that can be used to generate output.
C{ParsedDocstring}s support the following operations:
- output generation (L{to_plaintext()<ParsedDocstring.to_plaintext>},
L{to_html()<ParsedDocstring.to_html>}, and
L{to_latex()<ParsedDocstring.to_latex>}).
- Summarization (L{summary()<ParsedDocstring.summary>}).
- Field extraction (L{split_fields()<ParsedDocstring.split_fields>}).
- Index term extraction (L{index_terms()<ParsedDocstring.index_terms>}.
The L{parse()} function provides a single interface to the
C{epydoc.markup} package: it takes a docstring and the name of a
markup language; delegates to the appropriate parser; and returns the
parsed docstring (along with any errors or warnings that were
generated).
The C{ParsedDocstring} output generation methods (C{to_M{format}()})
use a L{DocstringLinker} to link the docstring output with the rest of
the documentation that epydoc generates. C{DocstringLinker}s are
currently responsible for translating two kinds of crossreference:
- index terms (L{translate_indexterm()
<DocstringLinker.translate_indexterm>}).
- identifier crossreferences (L{translate_identifier_xref()
<DocstringLinker.translate_identifier_xref>}).
A parsed docstring's fields can be extracted using the
L{ParsedDocstring.split_fields()} method. This method divides a
docstring into its main body and a list of L{Field}s, each of which
encodes a single field. The field's bodies are encoded as
C{ParsedDocstring}s.
Markup errors are represented using L{ParseError}s. These exception
classes record information about the cause, location, and severity of
each error.
The C{epydoc.markup} module also defines several utility functions,
such as L{wordwrap}, L{plaintext_to_latex}, and L{plaintext_to_html},
which are used by several different markup language parsers.
@sort: parse, ParsedDocstring, Field, DocstringLinker
@group Errors and Warnings: ParseError
@group Utility Functions: wordwrap, plaintext_to_html, plaintext_to_latex,
parse_type_of
@var SCRWIDTH: The default width with which text will be wrapped
when formatting the output of the parser.
@type SCRWIDTH: C{int}
@var _parse_warnings: Used by L{_parse_warn}.
"""
__docformat__ = 'epytext en'
import re, types, sys
from epydoc.imports import import_module
##################################################
## Contents
##################################################
#
# 1. parse() dispatcher
# 2. ParsedDocstring abstract base class
# 3. Field class
# 4. Docstring Linker
# 5. ParseError exceptions
# 6. Misc helpers
#
##################################################
## Dispatcher
##################################################
def parse(docstring, markup='plaintext', errors=None, **options):
"""
Parse the given docstring, and use it to construct a
C{ParsedDocstring}. If any fatal C{ParseError}s are encountered
while parsing the docstring, then the docstring will be rendered
as plaintext, instead.
@type docstring: C{string}
@param docstring: The docstring to encode.
@type markup: C{string}
@param markup: The name of the markup language that is used by
the docstring. If the markup language is not supported, then
the docstring will be treated as plaintext. The markup name
is case-insensitive.
@param errors: A list where any errors generated during parsing
will be stored. If no list is specified, then fatal errors
will generate exceptions, and non-fatal errors will be
ignored.
@type errors: C{list} of L{ParseError}
@rtype: L{ParsedDocstring}
@return: A L{ParsedDocstring} that encodes the contents of
C{docstring}.
@raise ParseError: If C{errors} is C{None} and an error is
encountered while parsing.
"""
# Initialize errors list.
raise_on_error = (errors is None)
if errors == None: errors = []
# Normalize the markup language name.
markup = markup.lower()
# Is the markup language valid?
if not re.match(r'\w+', markup):
_parse_warn('Warning: Bad markup language name: %s' % markup)
return plaintext.parse_docstring(docstring, errors, **options)
# Is the markup language supported?
try: exec('from epydoc.markup.%s import parse_docstring' % markup)
except:
_parse_warn('Warning: Unsupported markup language: %s' % markup)
return plaintext.parse_docstring(docstring, errors, **options)
# Parse the docstring.
try: parsed_docstring = parse_docstring(docstring, errors, **options)
except KeyboardInterrupt: raise
except Exception, e:
errors.append(ParseError('Internal error: %s' % e))
return plaintext.parse_docstring(docstring, errors, **options)
# Check for fatal errors.
fatal_errors = [e for e in errors if e.is_fatal()]
if fatal_errors and raise_on_error: raise fatal_errors[0]
if fatal_errors:
return plaintext.parse_docstring(docstring, errors, **options)
return parsed_docstring
# Warnings that have already been issued (message -> 1); used by
# _parse_warn to make sure each distinct warning is printed only once.
_parse_warnings = {}
def _parse_warn(estr):
    """
    Print the warning message C{estr} to stderr.  If the same message
    has already been printed, then do nothing.

    @type estr: C{string}
    @rtype: C{None}
    """
    global _parse_warnings
    # "in" replaces the deprecated dict.has_key().
    if estr in _parse_warnings: return
    _parse_warnings[estr] = 1
    # Flush a pending softspace so the warning starts on its own line
    # (Python 2 print-statement bookkeeping).
    if sys.stderr.softspace: print >>sys.stderr
    print >>sys.stderr, estr
##################################################
## ParsedDocstring
##################################################
class ParsedDocstring:
    """
    A standard intermediate representation for a parsed docstring,
    suitable for generating output.  Parsed docstrings are produced by
    markup parsers (such as L{epytext.parse} or L{javadoc.parse}).

    C{ParsedDocstring}s support several kinds of operation:
      - output generation (L{to_plaintext()}, L{to_html()}, and
        L{to_latex()}).
      - Summarization (L{summary()}).
      - Field extraction (L{split_fields()}).
      - Index term extraction (L{index_terms()}.

    The output generation methods (C{to_M{format}()}) use a
    L{DocstringLinker} to link the docstring output with the rest
    of the documentation that epydoc generates.

    Subclassing
    ===========
    The only method that a subclass is I{required} to implement is
    L{to_plaintext()}; but it is often useful to override the other
    methods.  The default behavior of each method is described below:
      - C{to_I{format}}: Calls C{to_plaintext}, and uses the string it
        returns to generate verbatim output.
      - C{summary}: Returns C{self} (i.e., the entire docstring).
      - C{split_fields}: Returns C{(self, [])} (i.e., extracts no
        fields).
      - C{index_terms}: Returns C{[]} (i.e., extracts no index terms).

    If and when epydoc adds more output formats, new C{to_I{format}}
    methods will be added to this base class; but they will always
    be given a default implementation.
    """
    def split_fields(self, errors=None):
        """
        Split this docstring into its body and its fields.

        @return: A tuple C{(M{body}, M{fields})}, where C{M{body}} is
            the main body of this docstring, and C{M{fields}} is a list
            of its fields.
        @rtype: C{(L{ParsedDocstring}, list of L{Field})}
        @param errors: A list where any errors generated during
            splitting will be stored.  If no list is specified, then
            errors will be ignored.
        @type errors: C{list} of L{ParseError}
        """
        # By default, a docstring is all body, no fields.
        return self, []

    def summary(self):
        """
        @return: A short summary of this docstring.  Typically, the
            summary consists of the first sentence of the docstring.
        @rtype: L{ParsedDocstring}
        """
        # By default, the summary is the entire docstring.
        return self

    def concatenate(self, other):
        """
        @return: A new parsed docstring containing the concatenation
            of this docstring and C{other}.
        @raise ValueError: If the two parsed docstrings are
            incompatible.
        """
        # Subclasses that support concatenation must override this.
        raise ValueError('Could not concatenate docstrings')

    def __add__(self, other):
        # "doc1 + doc2" delegates to concatenate().
        return self.concatenate(other)

    def to_html(self, docstring_linker, **options):
        """
        Translate this docstring to HTML.

        @param docstring_linker: An HTML translator for crossreference
            links into and out of the docstring.
        @type docstring_linker: L{DocstringLinker}
        @param options: Any extra options for the output.  Unknown
            options are ignored.
        @return: An HTML fragment that encodes this docstring.
        @rtype: C{string}
        """
        # By default, render the plaintext form verbatim.
        escaped = plaintext_to_html(self.to_plaintext(docstring_linker))
        return '<pre class="literalblock">\n%s\n</pre>\n' % escaped

    def to_latex(self, docstring_linker, **options):
        """
        Translate this docstring to LaTeX.

        @param docstring_linker: A LaTeX translator for crossreference
            links into and out of the docstring.
        @type docstring_linker: L{DocstringLinker}
        @param options: Any extra options for the output.  Unknown
            options are ignored.
        @return: A LaTeX fragment that encodes this docstring.
        @rtype: C{string}
        """
        # By default, render the plaintext form verbatim.
        escaped = plaintext_to_latex(self.to_plaintext(docstring_linker))
        return '\\begin{alltt}\n%s\\end{alltt}\n\n' % escaped

    def to_plaintext(self, docstring_linker, **options):
        """
        Translate this docstring to plaintext.

        @param docstring_linker: A plaintext translator for
            crossreference links into and out of the docstring.
        @type docstring_linker: L{DocstringLinker}
        @param options: Any extra options for the output.  Unknown
            options are ignored.
        @return: A plaintext fragment that encodes this docstring.
        @rtype: C{string}
        """
        raise NotImplementedError('ParsedDocstring.to_plaintext()')

    def index_terms(self):
        """
        @return: The list of index terms that are defined in this
            docstring.  Each of these items will be added to the index
            page of the documentation.
        @rtype: C{list} of C{ParsedDocstring}
        """
        # By default, no index terms are defined.
        return []
##################################################
## Fields
##################################################
class Field:
    """
    The contents of a docstring's field.  Docstring fields are used
    to describe specific aspects of an object, such as a parameter of
    a function or the author of a module.  Each field consists of a
    tag, an optional argument, and a body:
      - The tag specifies the type of information that the field
        encodes.
      - The argument specifies the object that the field describes.
        The argument may be C{None} or a C{string}.
      - The body contains the field's information.

    Tags are automatically downcased and stripped; and arguments are
    automatically stripped.
    """
    def __init__(self, tag, arg, body):
        # Normalize the tag (tags are case-insensitive) and the argument.
        self._tag = tag.lower().strip()
        if arg is None:
            self._arg = None
        else:
            self._arg = arg.strip()
        self._body = body

    def tag(self):
        """
        @return: This field's tag.
        @rtype: C{string}
        """
        return self._tag

    def arg(self):
        """
        @return: This field's argument, or C{None} if this field has
            no argument.
        @rtype: C{string} or C{None}
        """
        return self._arg

    def body(self):
        """
        @return: This field's body.
        @rtype: L{ParsedDocstring}
        """
        return self._body

    def __repr__(self):
        if self._arg is None:
            return '<Field @%s: ...>' % self._tag
        return '<Field @%s %s: ...>' % (self._tag, self._arg)
##################################################
## Docstring Linker (resolves crossreferences)
##################################################
class DocstringLinker:
    """
    A translator for crossreference links into and out of a
    C{ParsedDocstring}.  C{DocstringLinker} is used by
    C{ParsedDocstring} to convert these crossreference links into
    appropriate output formats.  For example,
    C{DocstringLinker.to_html} expects a C{DocstringLinker} that
    converts crossreference links to HTML.
    """
    def translate_indexterm(self, indexterm):
        """
        Translate an index term to the appropriate output format.  The
        output will typically include a crossreference anchor.

        @type indexterm: L{ParsedDocstring}
        @param indexterm: The index term to translate.
        @rtype: C{string}
        @return: The translated index term.
        """
        # Abstract: concrete linkers must supply this.
        raise NotImplementedError('DocstringLinker.translate_indexterm()')

    def translate_identifier_xref(self, identifier, label=None):
        """
        Translate a crossreference link to a Python identifier to the
        appropriate output format.  The output will typically include
        a reference or pointer to the crossreference target.

        @type identifier: C{string}
        @param identifier: The name of the Python identifier that
            should be linked to.
        @type label: C{string} or C{None}
        @param label: The label that should be used for the identifier,
            if it's different from the name of the identifier.
        @rtype: C{string}
        @return: The translated crossreference link.
        """
        # Abstract: concrete linkers must supply this.
        raise NotImplementedError('DocstringLinker.translate_xref()')
##################################################
## ParseError exceptions
##################################################
class ParseError(Exception):
    """
    The base class for errors generated while parsing docstrings.

    @ivar _linenum: The line on which the error occured within the
        docstring.  The linenum of the first line is 0.
    @type _linenum: C{int}
    @ivar _offset: The line number where the docstring begins.  This
        offset is added to C{_linenum} when displaying the line number
        of the error.  Default value: 1.
    @type _offset: C{int}
    @ivar _descr: A description of the error.
    @type _descr: C{string}
    @ivar _fatal: True if this is a fatal error.
    @type _fatal: C{boolean}
    """
    def __init__(self, descr, linenum=None, is_fatal=1):
        """
        @type descr: C{string}
        @param descr: A description of the error.
        @type linenum: C{int}
        @param linenum: The line on which the error occured within
            the docstring.  The linenum of the first line is 0.
        @type is_fatal: C{boolean}
        @param is_fatal: True if this is a fatal error.
        """
        self._descr = descr
        self._linenum = linenum
        self._fatal = is_fatal
        self._offset = 1

    def is_fatal(self):
        """
        @return: true if this is a fatal error.  If an error is fatal,
            then epydoc should ignore the output of the parser, and
            parse the docstring as plaintext.
        @rtype: C{boolean}
        """
        return self._fatal

    def linenum(self):
        """
        @return: The line number on which the error occured (including
            any offset).  If the line number is unknown, then return
            C{None}.
        @rtype: C{int} or C{None}
        """
        if self._linenum is None: return None
        else: return self._offset + self._linenum

    def set_linenum_offset(self, offset):
        """
        Set the line number offset for this error.  This offset is the
        line number where the docstring begins.  This offset is added
        to C{_linenum} when displaying the line number of the error.

        @param offset: The new line number offset.
        @type offset: C{int}
        @rtype: C{None}
        """
        self._offset = offset

    def __str__(self):
        """
        Return a string representation of this C{ParseError}.  This
        multi-line string contains a description of the error, and
        specifies where it occured.

        @return: the informal representation of this C{ParseError}.
        @rtype: C{string}
        """
        if self._linenum is not None:
            # repr() replaces the Python-2-only backtick syntax.
            str = '%5s: ' % ('L' + repr(self._linenum + self._offset))
        else:
            str = ' - '
        if self._fatal:
            str += 'Error: '
        else:
            str += 'Warning: '
        return str + wordwrap(self._descr, 7, startindex=len(str))[:-1]

    def __repr__(self):
        """
        Return the formal representation of this C{ParseError}.
        C{ParseError}s have formal representations of the form::
           <ParseError on line 12>

        @return: the formal representation of this C{ParseError}.
        @rtype: C{string}
        """
        if self._linenum is None:
            # Bug fix: the closing '>' was missing in this branch.
            return '<ParseError on line %d>' % self._offset
        else:
            return '<ParseError on line %d>' % (self._linenum + self._offset)

    def __cmp__(self, other):
        """
        Compare two C{ParseError}s, based on their line number.
          - Return -1 if C{self.linenum<other.linenum}
          - Return +1 if C{self.linenum>other.linenum}
          - Return 0 if C{self.linenum==other.linenum}.
        The return value is undefined if C{other} is not a
        ParseError.

        @rtype: C{int}
        """
        if not isinstance(other, ParseError): return -1000
        # NOTE: cmp()/__cmp__ are Python 2 only, like this module.
        return cmp(self._linenum + self._offset,
                   other._linenum + other._offset)
##################################################
## Misc helpers
##################################################
# These are used by multiple markup parsers

# Default screen width, for word-wrapping
SCRWIDTH = 73

def wordwrap(str, indent=0, right=SCRWIDTH, startindex=0):
    """
    Word-wrap the given string.  All sequences of whitespace are
    converted into spaces, and the string is broken up into lines,
    where each line begins with C{indent} spaces, followed by one or
    more (space-deliniated) words whose length is less than
    C{right-indent}.  If a word is longer than C{right-indent}
    characters, then it is put on its own line.

    @param str: The string that should be word-wrapped.
    @type str: C{string}
    @param indent: The left margin of the string.  C{indent} spaces
        will be inserted at the beginning of every line.
    @type indent: C{int}
    @param right: The right margin of the string.
    @type right: C{int}
    @type startindex: C{int}
    @param startindex: The index at which the first line starts.  This
        is useful if you want to include other contents on the first
        line.
    @return: A word-wrapped version of C{str}.
    @rtype: C{string}
    """
    out = ' ' * (indent - startindex)
    column = max(indent, startindex)
    for token in str.split():
        # Break the line if this word would overflow the right margin
        # (unless we are already at the start of a line).
        if column + len(token) > right and column > 0:
            out += '\n' + ' ' * indent
            column = indent
        out += token + ' '
        column += len(token) + 1
    return out.rstrip() + '\n'
def plaintext_to_html(str):
    """
    @return: An HTML string that encodes the given plaintext string.
        In particular, special characters (such as C{'<'} and C{'&'})
        are escaped.
    @rtype: C{string}
    """
    # Bug fix: the replacement strings had been reduced to identity
    # no-ops (the HTML entities were lost), so nothing was escaped.
    # '&' must be escaped first so the entities added below are not
    # themselves re-escaped.
    str = str.replace('&', '&amp;').replace('"', '&quot;')
    str = str.replace('<', '&lt;').replace('>', '&gt;')
    # '@' is escaped so the output cannot be mistaken for epytext markup.
    return str.replace('@', '&#64;')
def plaintext_to_latex(str, nbsp=0, breakany=0):
    """
    @return: A LaTeX string that encodes the given plaintext string.
        In particular, special characters (such as C{'$'} and C{'_'})
        are escaped, and tabs are expanded.
    @rtype: C{string}
    @param breakany: Insert hyphenation marks, so that LaTeX can
        break the resulting string at any point.  This is useful for
        small boxes (e.g., the type box in the variable list table).
    @param nbsp: Replace every space with a non-breaking space
        (C{'~'}).
    """
    # Tag every character with \x01; the tags become hyphenation
    # points at the very end.
    if breakany:
        str = re.sub('(.)', '\\1\1', str)
    # Stash backslashes as \x00 so they survive the escaping below;
    # they become \textbackslash at the end.
    str = str.replace('\\', '\0')
    str = str.expandtabs()
    # Backslash-escape LaTeX's special characters.
    str = re.sub(r'([#$&%_\${}])', r'\\\1', str)
    # Characters that have named LaTeX commands instead of escapes.
    for char, command in (('|', '{\\textbar}'),
                          ('<', '{\\textless}'),
                          ('>', '{\\textgreater}'),
                          ('^', '{\\textasciicircum}'),
                          ('~', '{\\textasciitilde}')):
        str = str.replace(char, command)
    str = str.replace('\0', r'{\textbackslash}')
    # Replace spaces with non-breaking spaces (after the '~' pass above,
    # so the inserted tildes are not themselves rewritten).
    if nbsp:
        str = str.replace(' ', '~')
    # Convert the \x01 tags to hyphenation points.
    if breakany:
        str = str.replace('\1', r'\-')
    return str
def parse_type_of(obj):
    """
    @return: A C{ParsedDocstring} that encodes the type of the given
        object.
    @rtype: L{ParsedDocstring}
    @param obj: The object whose type should be returned as DOM document.
    @type obj: any
    """
    # This is a bit hackish; oh well. :)
    from epydoc.markup.epytext import ParsedEpytextDocstring
    from xml.dom.minidom import Document

    # Build a minimal epytext DOM: <epytext><para>...</para></epytext>.
    doc = Document()
    root = doc.createElement('epytext')
    para = doc.createElement('para')
    doc.appendChild(root)
    root.appendChild(para)

    if type(obj) is types.InstanceType:
        # Old-style class instance (Python 2): link to its class.
        xref = doc.createElement('link')
        name_elt = doc.createElement('name')
        target_elt = doc.createElement('target')
        para.appendChild(xref)
        xref.appendChild(name_elt)
        xref.appendChild(target_elt)
        name_elt.appendChild(doc.createTextNode(str(obj.__class__.__name__)))
        target_elt.appendChild(doc.createTextNode(str(obj.__class__)))
    else:
        # Any other object: render its type name as inline code.
        code = doc.createElement('code')
        para.appendChild(code)
        code.appendChild(doc.createTextNode(type(obj).__name__))
    return ParsedEpytextDocstring(doc)
##################################################
## Sub-module Imports
##################################################
# By default, just import plaintext. That way we don't have to wait
# for other modules (esp restructuredtext) to load if we're not going
# to use them.
import plaintext
|
|
# encoding=utf8
import sys
reload(sys)
# Python 2 hack: re-expose setdefaultencoding (removed from sys after
# startup) so implicit str<->unicode conversions use UTF-8 instead of
# ASCII. Not available (or needed) on Python 3.
sys.setdefaultencoding('utf8')
import requests
import json
import time
from random import randint
import pandas as pd
import os.path
import sys
import codecs
import numpy as np
import re
import urllib2
# Sanity check: show which stdout encoding is in effect.
print sys.stdout.encoding
class GoogleTrendCrawler:
    """Crawl Google Trends time-series JSON for (sampled) people.

    One JSON file per person is written under ``path``; names that fail
    are appended to ``log-fails.txt``. Python 2 only (print statements,
    urllib2 import, implicit unicode handling).
    """
    def __init__(self, path, startyear):
        # path: output directory (created if missing); startyear: lower
        # bound used by crawl_wikipedia_people's birth-year filter.
        self.startyear = startyear
        if not os.path.exists(path):
            os.mkdir(path)
        self.logfilename = path+"log-fails.txt"
        # fetchComponent endpoint returning the TIMESERIES graph data.
        self.html_base = u"http://www.google.com/trends/fetchComponent?q="
        self.query_type = u"&cid=TIMESERIES_GRAPH_0&export=3"
        self.path = path
        # Browser-like User-Agent so requests look like a normal visit.
        self.headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
    def create_sample(self, N, allpeople, path):
        """Draw N random rows and persist them as selected_people.csv."""
        # sample 100 people
        allpeople.reset_index(inplace=True)
        # NOTE(review): np.random.random_integers is INCLUSIVE of the upper
        # bound, so len(allpeople) can be drawn — an out-of-range label for
        # .ix; confirm whether resulting NaN rows are expected downstream.
        people = allpeople.ix[np.random.random_integers(0, len(allpeople), N)]
        people.to_csv(path+"selected_people.csv")
        return people
    def create_stratified_sample(self, N, allpeople, path):
        """Draw N women and N men separately; persist the concatenation."""
        women = allpeople[allpeople["gender"] == "female"]
        women.reset_index(inplace=True)
        men = allpeople[allpeople["gender"] == "male"]
        men.reset_index(inplace=True)
        print "women and men shape: "
        print women.shape
        print men.shape
        women = women.ix[np.random.random_integers(0, len(women), N)]
        #women.dropnan(inplace=True, how=any)
        men = men.ix[np.random.random_integers(0, len(men), N)]
        print women.shape
        print men.shape
        people = women.append(men)
        print people.head()
        print people.shape
        people.to_csv(path+"selected_people.csv")
        return people
    def crawl_wikipedia_people(self, N, alldatafile, birth_lim):
        """Crawl N randomly sampled people from ``alldatafile``.

        When ``birth_lim`` is true, restrict to birth years in
        [self.startyear, 2000]. Reuses a previously saved sample file
        when one exists in self.path.
        """
        allpeople = pd.read_csv(alldatafile, delimiter=",", header=0)
        print allpeople.head(n=5)
        if birth_lim:
            allpeople = allpeople[(allpeople.birth_year >= self.startyear) & (allpeople.birth_year <= 2000)]
        print "start crawling %s randomly selected people"%N
        if os.path.isfile(self.path+"selected_people.csv"):
            print "use selected_people file that lies here %s"%self.path
            people = pd.DataFrame.from_csv(self.path+"selected_people.csv")
        else:
            print "select people"
            people = self.create_sample(N, allpeople, self.path)
        #people = people.shuffle(axis=0)
        #print people.head()
        self.run(people, self.path)
    def crawl_strata_wikipedia_people(self, N, alldatafile, birthstrata, minmax_edition):
        """Like crawl_wikipedia_people, but stratified by gender and
        optionally filtered by birth years 1900-2000 and edition count."""
        allpeople = pd.read_csv(alldatafile, delimiter=",", header=0)
        print allpeople.head(n=5)
        if birthstrata:
            allpeople = allpeople[(allpeople.birth_year >= 1900) & (allpeople.birth_year <= 2000)]
        if len(minmax_edition) == 2:
            min = minmax_edition[0]
            max = minmax_edition[1]
            allpeople = allpeople[(allpeople.edition_count >= min) & (allpeople.edition_count <= max)]
        print "start crawling %s randomly selected people"%N
        if os.path.isfile(self.path+"selected_people.csv"):
            print "use selected_people file that lies here %s"%self.path
            people = pd.DataFrame.from_csv(self.path+"selected_people.csv")
        else:
            print "select people"
            people = self.create_stratified_sample(N, allpeople, self.path)
        self.run(people, self.path)
    def crawl_nobelprize_winner(self):
        """Crawl every person listed in the Nobel prize winner CSV."""
        allpeople = pd.read_csv('data/nobel_identifier_all.csv', delimiter=",", header=None)
        allpeople.columns = ["name", "freebase", "year"]
        print "start crawling %s randomly selected people"%len(allpeople)
        print allpeople.head(n=1)
        print allpeople.shape
        self.run(allpeople, self.path)
    def run(self, people, path):
        """Fetch trends JSON for each person, skipping existing files.

        Names are normalized (parenthesized suffixes, middle initials,
        quoted nicknames and trailing comma clauses removed) before
        querying; any failure is logged to the fail log and crawling
        continues with the next person.
        """
        logfile = open(self.logfilename, 'w+')
        print people.head()
        for ind,vals in people.iterrows():
            #print vals
            name = vals["label"]
            ind = str(vals["index"])
            print name
            print ind
            if str(name).lower() == "nan":
                continue
            if "(" in name:
                #remove additional info from name
                pos = name.find("(")
                name = name[0:pos]
            # remove the letter-dot stuff
            name = re.sub(r'\s+[A-Z]\.\s+', ' ', name)
            # remove quoted stuff e.g. Katharina "kate" Mcking
            name = re.sub(r'\s+"[A-Za-z\s]+"\s+', ' ', name)
            # remove stuff after the comma e.g. James Dean, King from Sweden
            name = re.sub(r',\s*.+', ' ', name)
            #name = name.encode("utf8")
            #name = unicode(name, 'cp1252')
            if os.path.isfile(path+ind+".json"):
                print "found & rename"
                os.rename(path+ind+".json", path+name.replace('/', '')+".json")
            elif os.path.isfile(path+(name.replace('/', ''))+".json"):
                print "found "
                #os.rename(path+(name.replace('/', ''))+".json", path+ind+".json")
            else:
                # make request
                try:
                    #q = u"asdf,qwerty"
                    full_query = self.html_base + name + self.query_type
                    print(full_query)
                    # set header to pretend a user-visit
                    response = requests.get(full_query, headers=self.headers)
                    print(response.status_code)
                    with open(path+name.replace('/', '')+".json", 'w') as outfile:
                        if response.status_code == 200:
                            # no data found
                            outfile.write(response.text.encode("utf8"))
                            outfile.close()
                        elif response.status_code == 203:
                            if response.content.startswith("<!DOCTYPE html>"):
                                # quota limit
                                outfile.close()
                                os.remove(path+name.replace('/', '')+".json")
                                time.sleep(randint(10,30))
                            else:
                                print (response.content)
                                data = json.loads(response.text.encode("utf8"))
                                json.dump(data, outfile)
                except Exception:
                    # Best-effort crawl: record the failing name, keep going.
                    logfile.write("\n%s"%name)
            # wait a random amount of time between requests to avoid bot detection
            #time.sleep(randint(10,10)) #30
        logfile.close()
if __name__ == "__main__":
    # Entry point: crawl trends for 2000 randomly sampled people born
    # between `startyear` and 2000, writing into a year-tagged folder.
    startyear = 1900
    crawler = GoogleTrendCrawler('data/trends-sample-birth'+str(startyear)+'/', startyear)
    crawler.crawl_wikipedia_people(2000, 'data/consolidated_person_data.csv', True)
|
|
"""Classes for representing a Collection+JSON document."""
from __future__ import absolute_import, unicode_literals
import json
__version__ = '0.1.1'
class ArrayProperty(object):
    """A descriptor that converts from any enumerable to a typed Array."""
    def __init__(self, cls, name):
        """Construct a typed array property.
        :param cls type: the type of objects expected in the array
        :param name str: name of the property
        """
        self.cls = cls
        self.name = name
    def __get__(self, instance, owner):
        # Class-level access (instance is None) falls back to the owner.
        target = owner if instance is None else instance
        try:
            return target.__dict__[self.name]
        except KeyError:
            raise AttributeError
    def __set__(self, instance, value):
        # A missing value becomes an empty typed Array.
        items = [] if value is None else value
        instance.__dict__[self.name] = Array(self.cls, self.name, items)
class DictProperty(object):
    """A descriptor that converts to a dictionary containing Arrays or objects of a given type."""
    def __init__(self, cls, name):
        """Construct the dictionary property.
        :param cls type: the expected type of the contained objects
        :param name str: name of the property
        """
        self.cls = cls
        self.name = name
    def __get__(self, instance, owner):
        # Class-level access (instance is None) falls back to the owner.
        target = instance
        if target is None:
            target = owner
        if self.name in target.__dict__:
            return target.__dict__[self.name]
        raise AttributeError
    def __set__(self, instance, vals):
        # None clears the property to an empty dict.
        instance.__dict__[self.name] = {}
        if vals is not None:
            for name, value in vals.items():
                if value is None or isinstance(value, self.cls):
                    instance.__dict__[self.name][name] = value
                elif isinstance(value, dict):
                    # Plain dicts are keyword-expanded into the type.
                    instance.__dict__[self.name][name] = self.cls(**value)
                elif isinstance(value, list):
                    # Lists become typed Arrays of the expected type.
                    # (Removed a dead no-op `self.cls = self.cls` here.)
                    instance.__dict__[self.name][name] = Array(self.cls, None, value)
                else:
                    raise TypeError("Invalid value '%s', "
                                    "expected dict, list or '%s'" % (value,
                                                                     self.cls.__name__))
class TypedProperty(object):
    """A descriptor for assigning only a specific type of instance.
    Additionally supports assigning a dictionary convertable to the type.
    """
    def __init__(self, cls, name):
        """Construct the typed property.
        :param cls type: the type of object expected
        :param name str: name of the property
        """
        self.cls = cls
        self.name = name
    def __get__(self, instance, owner):
        # Class-level access (instance is None) falls back to the owner.
        target = owner if instance is None else instance
        try:
            return target.__dict__[self.name]
        except KeyError:
            raise AttributeError
    def __set__(self, instance, value):
        if value is None or isinstance(value, self.cls):
            converted = value
        elif isinstance(value, dict):
            # Keyword-expand a plain dict into the expected type.
            converted = self.cls(**value)
        else:
            raise TypeError("Invalid value '%s', "
                            "expected dict or '%s'" % (value,
                                                       self.cls.__name__))
        instance.__dict__[self.name] = converted
class ComparableObject(object):
    """Abstract base class for objects implementing equality comparison.
    This class provides default __eq__ and __ne__ implementations.
    """
    def __eq__(self, other):
        """Return True if both instances are equivalent."""
        if type(self) != type(other):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Return True if both instances are not equivalent."""
        if type(self) != type(other):
            return True
        return self.__dict__ != other.__dict__
class Data(ComparableObject):
    """Object representing a Collection+JSON data object."""
    def __init__(self, name, value=None, prompt=None, array=None, object=None):
        self.name = name
        self.value = value
        self.array = array
        self.object = object
        self.prompt = prompt
        # 'value', 'array' and 'object' are mutually exclusive.
        supplied = [item for item in (value, array, object) if item is not None]
        if len(supplied) > 1:
            raise ValueError('Data can only have one of the three properties.')
    def __repr__(self):
        details = "name='%s'" % self.name
        if self.prompt is not None:
            details += " prompt='%s'" % self.prompt
        return "<Data: %s>" % details
    def to_dict(self):
        """Return a dictionary representing a Data object."""
        result = {'name': self.name}
        if self.value is not None:
            result['value'] = self.value
        elif self.array is not None:
            result['array'] = self.array
        elif self.object is not None:
            result['object'] = self.object
        if self.prompt is not None:
            result['prompt'] = self.prompt
        return result
class Link(ComparableObject):
    """Object representing a Collection+JSON link object."""
    def __init__(self, href, rel, name=None, render=None, prompt=None,
                 length=None, inline=None):
        self.href = href
        self.rel = rel
        self.name = name
        self.render = render
        self.prompt = prompt
        self.length = length
        self.inline = inline
    def __repr__(self):
        # Only truthy optional attributes appear in the repr.
        details = "rel='%s'" % self.rel
        for label in ('name', 'render', 'prompt', 'length', 'inline'):
            attr = getattr(self, label)
            if attr:
                details += " %s='%s'" % (label, attr)
        return "<Link: %s>" % details
    def to_dict(self):
        """Return a dictionary representing a Link object."""
        output = {
            'href': self.href,
            'rel': self.rel,
        }
        # Optional attributes are emitted whenever they are not None.
        for label in ('name', 'render', 'prompt', 'length', 'inline'):
            attr = getattr(self, label)
            if attr is not None:
                output[label] = attr
        return output
class Error(ComparableObject):
    """Object representing a Collection+JSON error object."""
    def __init__(self, code=None, message=None, title=None):
        self.code = code
        self.message = message
        self.title = title
    def __repr__(self):
        # repr shows attributes that are not None.
        details = ''
        for label in ('code', 'message', 'title'):
            attr = getattr(self, label)
            if attr is not None:
                details += " %s='%s'" % (label, attr)
        return "<Error%s>" % details
    def to_dict(self):
        """Return a dictionary representing the Error instance."""
        # to_dict emits only truthy attributes.
        output = {}
        for label in ('code', 'message', 'title'):
            attr = getattr(self, label)
            if attr:
                output[label] = attr
        return output
class Template(ComparableObject):
    """Object representing a Collection+JSON template object."""
    data = ArrayProperty(Data, "data")
    @staticmethod
    def from_json(data):
        """Return a template instance parsed from a JSON string.
        Convenience method for parsing 'write' responses,
        which should only contain a template object.
        Raises `ValueError` when no valid document is provided.
        """
        try:
            parsed = json.loads(data)
            kwargs = parsed.get('template')
            if not kwargs:
                raise ValueError
        except ValueError:
            raise ValueError('Not valid Collection+JSON template data.')
        return Template(**kwargs)
    def __init__(self, data=None):
        self.data = data
    def __repr__(self):
        names = [str(entry.name) for entry in self.data]
        return "<Template: data=%s>" % names
    def __getattr__(self, name):
        # Unknown attributes are delegated to the data array.
        return getattr(self.data, name)
    @property
    def properties(self):
        """Return a list of names that can be looked up on the template."""
        return [entry.name for entry in self.data]
    def to_dict(self):
        """Return a dictionary representing a Template object."""
        return {'template': self.data.to_dict()}
class Array(ComparableObject, list):
    """Object representing a Collection+JSON array."""
    def __init__(self, item_class, collection_name, items):
        # item_class: element type; collection_name: key used when
        # serializing (None -> emit the bare list/dict).
        self.item_class = item_class
        self.collection_name = collection_name
        super(Array, self).__init__(self._build_items(items))
    def _build_items(self, items):
        # Coerce entries: instances pass through, dicts are keyword-
        # expanded into item_class, anything else is rejected.
        result = []
        for item in items:
            if isinstance(item, self.item_class):
                result.append(item)
            elif isinstance(item, dict):
                result.append(self.item_class(**item))
            else:
                raise ValueError("Invalid value for %s: %r" % (
                    self.item_class.__name__, item))
        return result
    def __eq__(self, other):
        """Return True if both instances are equivalent."""
        # Equal only when both the attribute dicts (via ComparableObject)
        # and the list contents match.
        return (super(Array, self).__eq__(other) and
                list.__eq__(self, other))
    def __ne__(self, other):
        """Return True if both instances are not equivalent."""
        return (super(Array, self).__ne__(other) or
                list.__ne__(self, other))
    def __getattr__(self, name):
        # Attribute access is a search by item name; a single match is
        # unwrapped, multiple matches come back as a list.
        results = self.find(name=name)
        if not results:
            raise AttributeError
        elif len(results) == 1:
            results = results[0]
        return results
    def _matches(self, name=None, rel=None):
        # Yield items matching name-only, rel-only, or both (see find()).
        for item in self:
            item_name = getattr(item, 'name', None)
            item_rel = getattr(item, 'rel', None)
            if name is not None and item_name == name and rel is None:
                # only searching by name
                yield item
            elif rel is not None and item_rel == rel and name is None:
                # only searching by rel
                yield item
            elif item_name == name and item_rel == rel:
                # searching by name and rel
                yield item
    def find(self, name=None, rel=None):
        """Return a list of items in the array matching name and/or rel.
        If both name and rel parameters are provided, returned items must match
        both properties.
        """
        return list(self._matches(name=name, rel=rel))
    def get(self, name=None, rel=None):
        """Return the first item in the array matching name and/or rel.
        If both name and rel parameters are provided, the returned item must
        match both properties.
        If no item is found, raises ValueError.
        """
        try:
            return next(self._matches(name=name, rel=rel))
        except StopIteration:
            raise ValueError('No matching item found.')
    def to_dict(self):
        """Return a dictionary representing an Array object."""
        # Collections serialize to a dict keyed by href; all other item
        # types serialize to a list of their dicts.
        if self.item_class is Collection:
            data = {
                item.href: item.to_dict() for item in self
            }
        else:
            data = [
                item.to_dict() for item in self
            ]
        if self.collection_name is not None:
            return {
                self.collection_name: data
            }
        return data
class Item(ComparableObject):
    """Object representing a Collection+JSON item object."""
    data = ArrayProperty(Data, "data")
    links = ArrayProperty(Link, "links")
    def __init__(self, href=None, data=None, links=None):
        self.href = href
        self.data = data
        self.links = links
    def __repr__(self):
        return "<Item: href='{0}'>".format(self.href)
    def __getattr__(self, name):
        # Unknown attributes are delegated to the data array.
        return getattr(self.data, name)
    @property
    def properties(self):
        """Return a list of names that can be looked up on the item."""
        return [entry.name for entry in self.data]
    def to_dict(self):
        """Return a dictionary representing an Item object."""
        output = {}
        if self.href:
            output['href'] = self.href
        # data/links serialize under their own collection-name keys.
        for member in (self.data, self.links):
            if member:
                output.update(member.to_dict())
        return output
class Query(ComparableObject):
    """Object representing a Collection+JSON query object."""
    data = ArrayProperty(Data, "data")
    def __init__(self, href, rel, name=None, prompt=None, data=None):
        self.href = href
        self.rel = rel
        self.name = name
        self.prompt = prompt
        self.data = data
    def __repr__(self):
        details = "rel='%s'" % self.rel
        if self.name:
            details += " name='%s'" % self.name
        if self.prompt:
            details += " prompt='%s'" % self.prompt
        return "<Query: %s>" % details
    def to_dict(self):
        """Return a dictionary representing a Query object."""
        output = {
            'href': self.href,
            'rel': self.rel,
        }
        if self.name is not None:
            output['name'] = self.name
        if self.prompt is not None:
            output['prompt'] = self.prompt
        # A non-empty data array serializes under its collection name.
        if self.data:
            output.update(self.data.to_dict())
        return output
class Collection(ComparableObject):
    """Object representing a Collection+JSON document."""
    @staticmethod
    def from_json(data):
        """Return a Collection instance.
        This method parses a json string into a Collection object.
        Raises `ValueError` when no valid document is provided.
        """
        try:
            data = json.loads(data)
            kwargs = data.get('collection')
            if not kwargs:
                raise ValueError
            # Inline collections are nested full documents; parse each
            # one recursively (one level per from_json call).
            if 'inline' in kwargs and kwargs['inline']:
                kwargs['inline'] = [Collection(**data.get('collection'))
                                    for data in kwargs['inline'].values()]
        except ValueError:
            raise ValueError('Not a valid Collection+JSON document.')
        collection = Collection(**kwargs)
        return collection
    def __new__(cls, *args, **kwargs):
        # NOTE(review): these descriptors are (re)assigned on the class for
        # every instantiation; they reference Collection itself, which is
        # presumably why they are not declared in the class body — confirm
        # before refactoring them out of __new__.
        cls.error = TypedProperty(Error, 'error')
        cls.errors = DictProperty(Error, 'errors')
        cls.template = TypedProperty(Template, 'template')
        cls.items = ArrayProperty(Item, 'items')
        cls.links = ArrayProperty(Link, 'links')
        cls.inline = ArrayProperty(Collection, 'inline')
        cls.queries = ArrayProperty(Query, 'queries')
        return super(Collection, cls).__new__(cls)
    def __init__(self, href, links=None, items=None, inline=None, queries=None,
                 template=None, error=None, errors=None, version='1.0'):
        self.version = version
        self.href = href
        self.error = error
        self.errors = errors
        self.template = template
        self.items = items
        self.links = links
        self.inline = inline
        self.queries = queries
    def __repr__(self):
        return "<Collection: version='%s' href='%s'>" % (
            self.version, self.href)
    def __str__(self):
        # str() yields the JSON serialization of the whole document.
        return json.dumps(self.to_dict())
    def to_dict(self):
        """Return a dictionary representing a Collection object."""
        output = {
            'collection': {
                'version': self.version,
                'href': self.href,
            }
        }
        # Each optional member contributes its own serialized key(s);
        # empty members are omitted entirely.
        if self.links:
            output['collection'].update(self.links.to_dict())
        if self.items:
            output['collection'].update(self.items.to_dict())
        if self.inline:
            output['collection'].update(self.inline.to_dict())
        if self.queries:
            output['collection'].update(self.queries.to_dict())
        if self.template:
            output['collection'].update(self.template.to_dict())
        if self.error:
            output['collection'].update(self.error.to_dict())
        if self.errors:
            output['collection']['errors'] = {name : value.to_dict() for name, value in self.errors.items()}
        return output
|
|
from __future__ import unicode_literals
import uuid
from jinja2 import Template
from moto.core import BaseBackend
from moto.core.utils import get_random_hex
class HealthCheck(object):
    """In-memory model of a Route 53 health check."""
    def __init__(self, health_check_id, health_check_args):
        # Defaults: port 80, request interval 30, failure threshold 3.
        self.id = health_check_id
        self.ip_address = health_check_args.get("ip_address")
        self.port = health_check_args.get("port", 80)
        self._type = health_check_args.get("type")
        self.resource_path = health_check_args.get("resource_path")
        self.fqdn = health_check_args.get("fqdn")
        self.search_string = health_check_args.get("search_string")
        self.request_interval = health_check_args.get("request_interval", 30)
        self.failure_threshold = health_check_args.get("failure_threshold", 3)
    @property
    def physical_resource_id(self):
        # CloudFormation physical id is the health check id itself.
        return self.id
    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        # Map CloudFormation property names onto backend argument names;
        # resource_name/region_name are unused by this backend.
        properties = cloudformation_json['Properties']['HealthCheckConfig']
        health_check_args = {
            "ip_address": properties.get('IPAddress'),
            "port": properties.get('Port'),
            "type": properties['Type'],
            "resource_path": properties.get('ResourcePath'),
            "fqdn": properties.get('FullyQualifiedDomainName'),
            "search_string": properties.get('SearchString'),
            "request_interval": properties.get('RequestInterval'),
            "failure_threshold": properties.get('FailureThreshold'),
        }
        health_check = route53_backend.create_health_check(health_check_args)
        return health_check
    def to_xml(self):
        # Render the XML body describing this health check.
        template = Template("""<HealthCheck>
            <Id>{{ health_check.id }}</Id>
            <CallerReference>example.com 192.0.2.17</CallerReference>
            <HealthCheckConfig>
                <IPAddress>{{ health_check.ip_address }}</IPAddress>
                <Port>{{ health_check.port }}</Port>
                <Type>{{ health_check._type }}</Type>
                <ResourcePath>{{ health_check.resource_path }}</ResourcePath>
                <FullyQualifiedDomainName>{{ health_check.fqdn }}</FullyQualifiedDomainName>
                <RequestInterval>{{ health_check.request_interval }}</RequestInterval>
                <FailureThreshold>{{ health_check.failure_threshold }}</FailureThreshold>
                {% if health_check.search_string %}
                    <SearchString>{{ health_check.search_string }}</SearchString>
                {% endif %}
            </HealthCheckConfig>
            <HealthCheckVersion>1</HealthCheckVersion>
        </HealthCheck>""")
        return template.render(health_check=self)
class RecordSet(object):
    """In-memory model of a Route 53 resource record set."""
    def __init__(self, kwargs):
        # kwargs uses the AWS API's CamelCase property names.
        self.name = kwargs.get('Name')
        self._type = kwargs.get('Type')
        self.ttl = kwargs.get('TTL')
        self.records = kwargs.get('ResourceRecords', [])
        self.set_identifier = kwargs.get('SetIdentifier')
        self.weight = kwargs.get('Weight')
        self.region = kwargs.get('Region')
        self.health_check = kwargs.get('HealthCheckId')
        self.hosted_zone_name = kwargs.get('HostedZoneName')
        self.hosted_zone_id = kwargs.get('HostedZoneId')
    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        # Resolve the zone by name when given, otherwise by id.
        properties = cloudformation_json['Properties']
        zone_name = properties.get("HostedZoneName")
        if zone_name:
            hosted_zone = route53_backend.get_hosted_zone_by_name(zone_name)
        else:
            hosted_zone = route53_backend.get_hosted_zone(properties["HostedZoneId"])
        record_set = hosted_zone.add_rrset(properties)
        return record_set
    @classmethod
    def update_from_cloudformation_json(cls, original_resource, new_resource_name, cloudformation_json, region_name):
        # Updates are modeled as delete-then-create.
        cls.delete_from_cloudformation_json(original_resource.name, cloudformation_json, region_name)
        return cls.create_from_cloudformation_json(new_resource_name, cloudformation_json, region_name)
    @classmethod
    def delete_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        # this will break if you changed the zone the record is in, unfortunately
        properties = cloudformation_json['Properties']
        zone_name = properties.get("HostedZoneName")
        if zone_name:
            hosted_zone = route53_backend.get_hosted_zone_by_name(zone_name)
        else:
            hosted_zone = route53_backend.get_hosted_zone(properties["HostedZoneId"])
        try:
            hosted_zone.delete_rrset_by_name(resource_name)
        except KeyError:
            pass
    @property
    def physical_resource_id(self):
        # CloudFormation physical id is the record set's name.
        return self.name
    def to_xml(self):
        # Render the XML fragment describing this record set.
        template = Template("""<ResourceRecordSet>
                <Name>{{ record_set.name }}</Name>
                <Type>{{ record_set._type }}</Type>
                {% if record_set.set_identifier %}
                    <SetIdentifier>{{ record_set.set_identifier }}</SetIdentifier>
                {% endif %}
                {% if record_set.weight %}
                    <Weight>{{ record_set.weight }}</Weight>
                {% endif %}
                {% if record_set.region %}
                    <Region>{{ record_set.region }}</Region>
                {% endif %}
                <TTL>{{ record_set.ttl }}</TTL>
                <ResourceRecords>
                    {% for record in record_set.records %}
                    <ResourceRecord>
                        <Value>{{ record }}</Value>
                    </ResourceRecord>
                    {% endfor %}
                </ResourceRecords>
                {% if record_set.health_check %}
                    <HealthCheckId>{{ record_set.health_check }}</HealthCheckId>
                {% endif %}
            </ResourceRecordSet>""")
        return template.render(record_set=self)
    def delete(self, *args, **kwargs):
        ''' Not exposed as part of the Route 53 API - used for CloudFormation. args are ignored '''
        # Prefer lookup by zone name, falling back to zone id.
        hosted_zone = route53_backend.get_hosted_zone_by_name(self.hosted_zone_name)
        if not hosted_zone:
            hosted_zone = route53_backend.get_hosted_zone(self.hosted_zone_id)
        hosted_zone.delete_rrset_by_name(self.name)
class FakeZone(object):
    """In-memory stand-in for a Route 53 hosted zone."""
    def __init__(self, name, id_, comment=None):
        self.name = name
        self.id = id_
        # `comment` is only set when provided, so callers may probe it
        # with hasattr().
        if comment is not None:
            self.comment = comment
        self.private_zone = False
        self.rrsets = []
    def add_rrset(self, record_set):
        """Append a new record set (given as a property dict)."""
        rrset = RecordSet(record_set)
        self.rrsets.append(rrset)
        return rrset
    def upsert_rrset(self, record_set):
        """Replace the first record set with the same name, or append."""
        replacement = RecordSet(record_set)
        for index, existing in enumerate(self.rrsets):
            if existing.name == replacement.name:
                self.rrsets[index] = replacement
                return replacement
        self.rrsets.append(replacement)
        return replacement
    def delete_rrset_by_name(self, name):
        """Drop every record set whose name matches exactly."""
        self.rrsets = [rrset for rrset in self.rrsets if rrset.name != name]
    def delete_rrset_by_id(self, set_identifier):
        """Drop every record set with the given set identifier."""
        self.rrsets = [rrset for rrset in self.rrsets
                       if rrset.set_identifier != set_identifier]
    def get_record_sets(self, type_filter, name_filter):
        """Return record sets, optionally filtered by type and/or name."""
        return [rrset for rrset in self.rrsets
                if (not type_filter or rrset._type == type_filter) and
                   (not name_filter or rrset.name == name_filter)]
    @property
    def physical_resource_id(self):
        return self.name
    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """CloudFormation hook: create a zone named by Properties.Name."""
        properties = cloudformation_json['Properties']
        return route53_backend.create_hosted_zone(properties["Name"])
class RecordSetGroup(object):
    """Grouping of record sets attached to one hosted zone."""
    def __init__(self, hosted_zone_id, record_sets):
        self.hosted_zone_id = hosted_zone_id
        self.record_sets = record_sets
    @property
    def physical_resource_id(self):
        # CloudFormation physical id is the zone's ARN.
        return "arn:aws:route53:::hostedzone/%s" % self.hosted_zone_id
    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """CloudFormation hook: add each record set to the named zone."""
        properties = cloudformation_json['Properties']
        hosted_zone = route53_backend.get_hosted_zone_by_name(properties["HostedZoneName"])
        record_sets = properties["RecordSets"]
        for record_set in record_sets:
            hosted_zone.add_rrset(record_set)
        return RecordSetGroup(hosted_zone.id, record_sets)
class Route53Backend(BaseBackend):
    """In-memory backend holding Route 53 hosted zones and health checks."""
    def __init__(self):
        # zone id -> FakeZone; health check id -> HealthCheck
        self.zones = {}
        self.health_checks = {}
    def create_hosted_zone(self, name, comment=None):
        """Create a FakeZone with a random hex id and register it."""
        new_id = get_random_hex()
        new_zone = FakeZone(name, new_id, comment=comment)
        self.zones[new_id] = new_zone
        return new_zone
    def get_all_hosted_zones(self):
        """Return all registered zones."""
        return self.zones.values()
    def get_hosted_zone(self, id_):
        """Return the zone with the given id, or None."""
        return self.zones.get(id_)
    def get_hosted_zone_by_name(self, name):
        """Return the first zone whose name matches, or None."""
        for zone in self.get_all_hosted_zones():
            if zone.name == name:
                return zone
    def delete_hosted_zone(self, id_):
        """Remove and return the zone with the given id, or None.

        Uses dict.pop, consistent with delete_health_check below
        (the previous get-then-del did the same in two lookups).
        """
        return self.zones.pop(id_, None)
    def create_health_check(self, health_check_args):
        """Create a HealthCheck with a fresh UUID and register it."""
        health_check_id = str(uuid.uuid4())
        health_check = HealthCheck(health_check_id, health_check_args)
        self.health_checks[health_check_id] = health_check
        return health_check
    def get_health_checks(self):
        """Return all registered health checks."""
        return self.health_checks.values()
    def delete_health_check(self, health_check_id):
        """Remove and return the health check, or None if absent."""
        return self.health_checks.pop(health_check_id, None)
route53_backend = Route53Backend()
|
|
"""Stuff to parse WAVE files.
Usage.
Reading WAVE files:
f = wave.open(file, 'r')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods read(), seek(), and close().
When the setpos() and rewind() methods are not used, the seek()
method is not necessary.
This returns an instance of a class with the following public methods:
getnchannels() -- returns number of audio channels (1 for
mono, 2 for stereo)
getsampwidth() -- returns sample width in bytes
getframerate() -- returns sampling frequency
getnframes() -- returns number of audio frames
getcomptype() -- returns compression type ('NONE' for linear samples)
getcompname() -- returns human-readable version of
compression type ('not compressed' linear samples)
getparams() -- returns a tuple consisting of all of the
above in the above order
getmarkers() -- returns None (for compatibility with the
aifc module)
getmark(id) -- raises an error since the mark does not
exist (for compatibility with the aifc module)
readframes(n) -- returns at most n frames of audio
rewind() -- rewind to the beginning of the audio stream
setpos(pos) -- seek to the specified position
tell() -- return the current position
close() -- close the instance (make it unusable)
The position returned by tell() and the position given to setpos()
are compatible and have nothing to do with the actual position in the
file.
The close() method is called automatically when the class instance
is destroyed.
Writing WAVE files:
f = wave.open(file, 'w')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods write(), tell(), seek(), and
close().
This returns an instance of a class with the following public methods:
setnchannels(n) -- set the number of channels
setsampwidth(n) -- set the sample width
setframerate(n) -- set the frame rate
setnframes(n) -- set the number of frames
setcomptype(type, name)
-- set the compression type and the
human-readable compression type
setparams(tuple)
-- set all parameters at once
tell() -- return current position in output file
    writeframesraw(data)
            -- write audio frames without patching up the
               file header
writeframes(data)
-- write audio frames and patch up the file header
close() -- patch up the file header and close the
output file
You should set the parameters before the first writeframesraw or
writeframes. The total number of frames does not need to be set,
but when it is set to the correct value, the header does not have to
be patched up.
It is best to first set all parameters, perhaps possibly the
compression type, and then write audio frames using writeframesraw.
When all frames have been written, either call writeframes('') or
close() to patch up the sizes in the header.
The close() method is called automatically when the class instance
is destroyed.
"""
import __builtin__
__all__ = ["open", "openfp", "Error"]
class Error(Exception):
    """Exception raised for malformed WAVE files and API misuse."""
    pass
# Only uncompressed PCM is supported.
WAVE_FORMAT_PCM = 0x0001
# array type code per sample width in bytes (index = width);
# 3-byte samples are unsupported (None).
_array_fmts = None, 'b', 'h', None, 'l'
# Determine endian-ness
import struct
# NOTE(review): this bytes-vs-str comparison only works on Python 2;
# on Python 3 it is always False, leaving big_endian = 0.
if struct.pack("h", 1) == "\000\001":
    big_endian = 1
else:
    big_endian = 0
from chunk import Chunk
class Wave_read:
    """A WAV file opened for reading.

    Variables used in this class:
    These variables are available to the user through appropriate
    methods of this class:
    _file -- the open file with methods read(), close(), and seek()
    set through the __init__() method
    _nchannels -- the number of audio channels
    available through the getnchannels() method
    _nframes -- the number of audio frames
    available through the getnframes() method
    _sampwidth -- the number of bytes per audio sample
    available through the getsampwidth() method
    _framerate -- the sampling frequency
    available through the getframerate() method
    _comptype -- the AIFF-C compression type ('NONE' if AIFF)
    available through the getcomptype() method
    _compname -- the human-readable AIFF-C compression type
    available through the getcomptype() method
    _soundpos -- the position in the audio stream
    available through the tell() method, set through the
    setpos() method
    These variables are used internally only:
    _fmt_chunk_read -- 1 iff the FMT chunk has been read
    _data_seek_needed -- 1 iff positioned correctly in audio
    file for readframes()
    _data_chunk -- instantiation of a chunk class for the DATA chunk
    _framesize -- size of one frame in the file
    """
    def initfp(self, file):
        # Parse the RIFF/WAVE header and locate the 'fmt ' and 'data' chunks.
        self._convert = None
        self._soundpos = 0
        # WAV files are little-endian RIFF files.
        self._file = Chunk(file, bigendian = 0)
        if self._file.getname() != 'RIFF':
            raise Error, 'file does not start with RIFF id'
        if self._file.read(4) != 'WAVE':
            raise Error, 'not a WAVE file'
        self._fmt_chunk_read = 0
        self._data_chunk = None
        # Scan the top-level chunks; 'fmt ' must appear before 'data'.
        while 1:
            self._data_seek_needed = 1
            try:
                chunk = Chunk(self._file, bigendian = 0)
            except EOFError:
                break
            chunkname = chunk.getname()
            if chunkname == 'fmt ':
                self._read_fmt_chunk(chunk)
                self._fmt_chunk_read = 1
            elif chunkname == 'data':
                if not self._fmt_chunk_read:
                    raise Error, 'data chunk before fmt chunk'
                self._data_chunk = chunk
                # Frame count is derived from the data chunk size.
                self._nframes = chunk.chunksize / self._framesize
                self._data_seek_needed = 0
                break
            chunk.skip()
        if not self._fmt_chunk_read or not self._data_chunk:
            raise Error, 'fmt chunk and/or data chunk missing'
    def __init__(self, f):
        # f may be a filename or an already-open file object; only files
        # opened here are closed again in close().
        self._i_opened_the_file = None
        if type(f) == type(''):
            f = __builtin__.open(f, 'rb')
            self._i_opened_the_file = f
        # else, assume it is an open file object already
        self.initfp(f)
    def __del__(self):
        self.close()
    #
    # User visible methods.
    #
    def getfp(self):
        return self._file
    def rewind(self):
        # Reset the read position to the first frame.
        self._data_seek_needed = 1
        self._soundpos = 0
    def close(self):
        # Only closes the file if this object opened it in __init__().
        if self._i_opened_the_file:
            self._i_opened_the_file.close()
            self._i_opened_the_file = None
        self._file = None
    def tell(self):
        return self._soundpos
    def getnchannels(self):
        return self._nchannels
    def getnframes(self):
        return self._nframes
    def getsampwidth(self):
        return self._sampwidth
    def getframerate(self):
        return self._framerate
    def getcomptype(self):
        return self._comptype
    def getcompname(self):
        return self._compname
    def getparams(self):
        # All parameters bundled in one tuple, mirroring Wave_write.setparams().
        return self.getnchannels(), self.getsampwidth(), \
               self.getframerate(), self.getnframes(), \
               self.getcomptype(), self.getcompname()
    def getmarkers(self):
        # WAV files have no marker support; present for AIFF API symmetry.
        return None
    def getmark(self, id):
        raise Error, 'no marks'
    def setpos(self, pos):
        # Position is in frames; the actual seek is deferred to readframes().
        if pos < 0 or pos > self._nframes:
            raise Error, 'position not in range'
        self._soundpos = pos
        self._data_seek_needed = 1
    def readframes(self, nframes):
        # Reposition inside the data chunk if rewind()/setpos() was called.
        if self._data_seek_needed:
            self._data_chunk.seek(0, 0)
            pos = self._soundpos * self._framesize
            if pos:
                self._data_chunk.seek(pos, 0)
            self._data_seek_needed = 0
        if nframes == 0:
            return ''
        if self._sampwidth > 1 and big_endian:
            # Samples are stored little-endian, so multi-byte samples must be
            # byte-swapped on big-endian hosts.
            # unfortunately the fromfile() method does not take
            # something that only looks like a file object, so
            # we have to reach into the innards of the chunk object
            import array
            chunk = self._data_chunk
            data = array.array(_array_fmts[self._sampwidth])
            nitems = nframes * self._nchannels
            if nitems * self._sampwidth > chunk.chunksize - chunk.size_read:
                nitems = (chunk.chunksize - chunk.size_read) / self._sampwidth
            data.fromfile(chunk.file.file, nitems)
            # "tell" data chunk how much was read
            chunk.size_read = chunk.size_read + nitems * self._sampwidth
            # do the same for the outermost chunk
            chunk = chunk.file
            chunk.size_read = chunk.size_read + nitems * self._sampwidth
            data.byteswap()
            data = data.tostring()
        else:
            data = self._data_chunk.read(nframes * self._framesize)
        if self._convert and data:
            data = self._convert(data)
        self._soundpos = self._soundpos + len(data) / (self._nchannels * self._sampwidth)
        return data
    #
    # Internal methods.
    #
    def _read_fmt_chunk(self, chunk):
        # The first 14 bytes are common to all WAVE format chunks; only
        # uncompressed PCM (format tag 1) is supported here.
        wFormatTag, self._nchannels, self._framerate, dwAvgBytesPerSec, wBlockAlign = struct.unpack('<hhllh', chunk.read(14))
        if wFormatTag == WAVE_FORMAT_PCM:
            sampwidth = struct.unpack('<h', chunk.read(2))[0]
            # Round bits-per-sample up to whole bytes.
            self._sampwidth = (sampwidth + 7) / 8
        else:
            raise Error, 'unknown format: ' + `wFormatTag`
        self._framesize = self._nchannels * self._sampwidth
        self._comptype = 'NONE'
        self._compname = 'not compressed'
class Wave_write:
    """A WAV file opened for writing.

    Variables used in this class:
    These variables are user settable through appropriate methods
    of this class:
    _file -- the open file with methods write(), close(), tell(), seek()
    set through the __init__() method
    _comptype -- the AIFF-C compression type ('NONE' in AIFF)
    set through the setcomptype() or setparams() method
    _compname -- the human-readable AIFF-C compression type
    set through the setcomptype() or setparams() method
    _nchannels -- the number of audio channels
    set through the setnchannels() or setparams() method
    _sampwidth -- the number of bytes per audio sample
    set through the setsampwidth() or setparams() method
    _framerate -- the sampling frequency
    set through the setframerate() or setparams() method
    _nframes -- the number of audio frames written to the header
    set through the setnframes() or setparams() method
    These variables are used internally only:
    _datalength -- the size of the audio samples written to the header
    _nframeswritten -- the number of frames actually written
    _datawritten -- the size of the audio samples actually written
    """
    def __init__(self, f):
        # f may be a filename or an already-open file object; only files
        # opened here are closed again in close().
        self._i_opened_the_file = None
        if type(f) == type(''):
            f = __builtin__.open(f, 'wb')
            self._i_opened_the_file = f
        self.initfp(f)
    def initfp(self, file):
        # Reset all parameters and bookkeeping counters.
        self._file = file
        self._convert = None
        self._nchannels = 0
        self._sampwidth = 0
        self._framerate = 0
        self._nframes = 0
        self._nframeswritten = 0
        self._datawritten = 0
        self._datalength = 0
    def __del__(self):
        self.close()
    #
    # User visible methods.
    #
    def setnchannels(self, nchannels):
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        if nchannels < 1:
            raise Error, 'bad # of channels'
        self._nchannels = nchannels
    def getnchannels(self):
        if not self._nchannels:
            raise Error, 'number of channels not set'
        return self._nchannels
    def setsampwidth(self, sampwidth):
        # Sample width is in bytes, 1 through 4.
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        if sampwidth < 1 or sampwidth > 4:
            raise Error, 'bad sample width'
        self._sampwidth = sampwidth
    def getsampwidth(self):
        if not self._sampwidth:
            raise Error, 'sample width not set'
        return self._sampwidth
    def setframerate(self, framerate):
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        if framerate <= 0:
            raise Error, 'bad frame rate'
        self._framerate = framerate
    def getframerate(self):
        if not self._framerate:
            raise Error, 'frame rate not set'
        return self._framerate
    def setnframes(self, nframes):
        # Optional; when set correctly the header does not need patching.
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        self._nframes = nframes
    def getnframes(self):
        # Returns frames actually written, not the value set by setnframes().
        return self._nframeswritten
    def setcomptype(self, comptype, compname):
        # Only uncompressed ('NONE') WAV data is supported.
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        if comptype not in ('NONE',):
            raise Error, 'unsupported compression type'
        self._comptype = comptype
        self._compname = compname
    def getcomptype(self):
        return self._comptype
    def getcompname(self):
        return self._compname
    def setparams(self, (nchannels, sampwidth, framerate, nframes, comptype, compname)):
        # Set all parameters at once; the tuple mirrors Wave_read.getparams().
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        self.setnchannels(nchannels)
        self.setsampwidth(sampwidth)
        self.setframerate(framerate)
        self.setnframes(nframes)
        self.setcomptype(comptype, compname)
    def getparams(self):
        if not self._nchannels or not self._sampwidth or not self._framerate:
            raise Error, 'not all parameters set'
        return self._nchannels, self._sampwidth, self._framerate, \
               self._nframes, self._comptype, self._compname
    def setmark(self, id, pos, name):
        # WAV files have no marker support; present for AIFF API symmetry.
        raise Error, 'setmark() not supported'
    def getmark(self, id):
        raise Error, 'no marks'
    def getmarkers(self):
        return None
    def tell(self):
        return self._nframeswritten
    def writeframesraw(self, data):
        # Write audio data without updating the header's length fields.
        self._ensure_header_written(len(data))
        nframes = len(data) / (self._sampwidth * self._nchannels)
        if self._convert:
            data = self._convert(data)
        if self._sampwidth > 1 and big_endian:
            # WAV data is little-endian, so byte-swap multi-byte samples on
            # big-endian hosts before writing.
            import array
            data = array.array(_array_fmts[self._sampwidth], data)
            data.byteswap()
            data.tofile(self._file)
            # len() of an array counts items, not bytes.
            self._datawritten = self._datawritten + len(data) * self._sampwidth
        else:
            self._file.write(data)
            self._datawritten = self._datawritten + len(data)
        self._nframeswritten = self._nframeswritten + nframes
    def writeframes(self, data):
        # Like writeframesraw(), but also patches the header sizes.
        self.writeframesraw(data)
        if self._datalength != self._datawritten:
            self._patchheader()
    def close(self):
        # Finish the header, flush, and close the file if this object
        # opened it.
        if self._file:
            self._ensure_header_written(0)
            if self._datalength != self._datawritten:
                self._patchheader()
            self._file.flush()
            self._file = None
        if self._i_opened_the_file:
            self._i_opened_the_file.close()
            self._i_opened_the_file = None
    #
    # Internal methods.
    #
    def _ensure_header_written(self, datasize):
        # Write the header before the first frame; all mandatory parameters
        # must have been set by then.
        if not self._datawritten:
            if not self._nchannels:
                raise Error, '# channels not specified'
            if not self._sampwidth:
                raise Error, 'sample width not specified'
            if not self._framerate:
                raise Error, 'sampling rate not specified'
            self._write_header(datasize)
    def _write_header(self, initlength):
        # Write the RIFF/WAVE/'fmt '/'data' header, remembering the file
        # offsets of the two length fields so _patchheader() can fix them up.
        self._file.write('RIFF')
        if not self._nframes:
            self._nframes = initlength / (self._nchannels * self._sampwidth)
        self._datalength = self._nframes * self._nchannels * self._sampwidth
        self._form_length_pos = self._file.tell()
        self._file.write(struct.pack('<l4s4slhhllhh4s',
            36 + self._datalength, 'WAVE', 'fmt ', 16,
            WAVE_FORMAT_PCM, self._nchannels, self._framerate,
            self._nchannels * self._framerate * self._sampwidth,
            self._nchannels * self._sampwidth,
            self._sampwidth * 8, 'data'))
        self._data_length_pos = self._file.tell()
        self._file.write(struct.pack('<l', self._datalength))
    def _patchheader(self):
        # Seek back and overwrite the RIFF and data chunk sizes with the
        # amount of data actually written, then restore the file position.
        if self._datawritten == self._datalength:
            return
        curpos = self._file.tell()
        self._file.seek(self._form_length_pos, 0)
        self._file.write(struct.pack('<l', 36 + self._datawritten))
        self._file.seek(self._data_length_pos, 0)
        self._file.write(struct.pack('<l', self._datawritten))
        self._file.seek(curpos, 0)
        self._datalength = self._datawritten
def open(f, mode=None):
if mode is None:
if hasattr(f, 'mode'):
mode = f.mode
else:
mode = 'rb'
if mode in ('r', 'rb'):
return Wave_read(f)
elif mode in ('w', 'wb'):
return Wave_write(f)
else:
raise Error, "mode must be 'r', 'rb', 'w', or 'wb'"
openfp = open # B/W compatibility
|
|
"""Metadata Parsers.
This module contains metadata parsers, that is, parsers that pull out the
metadata and add it to a TarsqiDocument. The only requirements on each parser is
that it defines an __init__() method that takes a dictionary of options and a
parse() method that takes a TarsqiDocument instance.
Current parsers only deal with the DCT.
"""
from __future__ import absolute_import
import re, time, os, sqlite3
import utilities.logger as logger
from library.main import LIBRARY
class MetadataParser(object):
    """This is the minimal metadata parser that is used as a default. It selects
    the DCT from all available sources and picks one of them, or it uses today's
    date if no DCT's are available. Subclasses should override the get_dct()
    method to define specific DCT extraction methods for the document source."""

    def __init__(self, options):
        """Store the processing options. At the moment initialization only uses
        the --dct option if it is present, but this could change. Note that the
        TarsqiDocument does not exist yet when the MetadataParser is
        initialized; it is handed in later via parse()."""
        self.options = options
        # Filled in by parse() once the TarsqiDocument is available.
        self.tarsqidoc = None

    def parse(self, tarsqidoc):
        """Add metadata to the TarsqiDocument: select the DCT (defaulting to
        today) and import processing steps from the source document."""
        self.tarsqidoc = tarsqidoc
        self.tarsqidoc.metadata['dct'] = self.get_dct()
        self._moderate_dct_vals()
        self._import_processing_steps()

    def _moderate_dct_vals(self):
        """A DCT can be expressed in five places: the --dct option or config
        file, the metadata on the TarsqiDocument, the metadata on the
        SourceDoc, and the TagRepositories on both. The first three yield a
        single value or None, the last two yield lists. Earlier sources take
        precedence over later ones: collect all values, store the first one
        (or today's date when there are none) and log a warning when the
        sources disagree."""
        candidates = [self.tarsqidoc.options.dct,
                      self.tarsqidoc.metadata.get('dct'),
                      self.tarsqidoc.sourcedoc.metadata.get('dct'),
                      _get_dct_values(self.tarsqidoc.sourcedoc.tags),
                      _get_dct_values(self.tarsqidoc.tags)]
        dcts = []
        for candidate in candidates:
            if candidate is None:
                # no DCT in the options or in the metadata
                continue
            if isinstance(candidate, list):
                dcts.extend(candidate)
            else:
                dcts.append(candidate)
        if len(set(dcts)) > 1:
            logger.warn("WARNING: more than one DCT value available")
        self.tarsqidoc.metadata['dct'] = dcts[0] if dcts else _get_today()

    def _import_processing_steps(self):
        """Import the processing steps that the TTK-format metadata parser
        recorded on the SourceDoc, placing them before any steps added during
        the current run."""
        imported = self.tarsqidoc.sourcedoc.metadata.get('processing_steps', [])
        imported.extend(self.tarsqidoc.metadata['processing_steps'])
        self.tarsqidoc.metadata['processing_steps'] = imported

    def get_dct(self):
        """Return the document creation time; the default parser has none.
        Subclasses override this with source-specific extraction."""
        return None

    def _get_source(self):
        """A convenience method to lift the SourceDoc out of the tarsqi
        instance."""
        return self.tarsqidoc.sourcedoc

    def _get_tag_content(self, tagname):
        """Return the stripped text content of the first tag named tagname, or
        None (with a warning logged) when the document has no such tag."""
        source = self._get_source()
        try:
            tag = source.tags.find_tags(tagname)[0]
        except IndexError:
            logger.warn("Cannot get the %s tag in this document" % tagname)
            return None
        return source.text[tag.begin:tag.end].strip()
class MetadataParserTTK(MetadataParser):
    """The metadata parser for the TTK format. It currently adds nothing to
    the default MetadataParser."""
class MetadataParserText(MetadataParser):
    """The metadata parser for the plain-text format. It currently adds
    nothing to the default MetadataParser."""
class MetadataParserTimebank(MetadataParser):
    """The parser for Timebank documents. All it does is to overwrite the
    get_dct() method."""

    def get_dct(self):
        """Extracts the document creation time, and returns it as a string of
        the form YYYYMMDD. Depending on the source, the DCT can be found in one
        of the following tags: DOCNO, DATE_TIME, PUBDATE or FILEID."""
        result = self._get_doc_source()
        if result is None:
            # dct defaults to today if we cannot find the DOCNO tag in the
            # document
            return _get_today()
        source_identifier, content = result
        # Every identifier returned by _get_doc_source() is handled below.
        if source_identifier in ('ABC', 'CNN', 'PRI', 'VOA'):
            return content[3:11]
        elif source_identifier == 'AP':
            dct = self._parse_tag_content(r"(?:AP-NR-)?(\d+)-(\d+)-(\d+)",
                                          'FILEID')
            # the DCT format is YYYYMMDD or YYMMDD
            return dct if len(dct) == 8 else '19' + dct
        elif source_identifier in ('APW', 'NYT'):
            return self._parse_tag_content(r"(\d+)/(\d+)/(\d+)", 'DATE_TIME')
        elif source_identifier == 'SJMN':
            pubdate_content = self._get_tag_content('PUBDATE')
            return '19' + pubdate_content
        elif source_identifier == 'WSJ':
            return '19' + content[3:9]
        elif source_identifier in ('ea', 'ed'):
            return '19' + content[2:8]

    def _get_doc_source(self):
        """Return the name of the content provider as well as the content of
        the DOCNO tag that has that information, or None when the provider
        cannot be determined."""
        content = self._get_tag_content('DOCNO')
        content = str(content)  # in case the above returned None
        # 'APW' must be tried before 'AP' since startswith('AP') also matches.
        for source_identifier in ('ABC', 'APW', 'AP', 'CNN', 'NYT', 'PRI',
                                  'SJMN', 'VOA', 'WSJ', 'ea', 'ed'):
            if content.startswith(source_identifier):
                return source_identifier, content
        logger.warn("Could not determine document source from DOCNO tag")
        return None

    def _parse_tag_content(self, regexpr, tagname):
        """Return the YYYYMMDD date parsed from the content of tagname using
        regexpr, which must capture month, day and year (in that order).
        Falls back to today's date when the tag is missing or does not
        match."""
        content_string = self._get_tag_content(tagname)
        # _get_tag_content() returns None for a missing tag; the original
        # code crashed with a TypeError on re.match(None).
        result = None
        if content_string is not None:
            result = re.compile(regexpr).match(content_string)
        if result:
            (month, day, year) = result.groups()
            return "%s%s%s" % (year, month, day)
        else:
            logger.warn("Could not get date from %s tag" % tagname)
            return _get_today()
class MetadataParserATEE(MetadataParser):
    """The parser for ATEE document."""

    def get_dct(self):
        """Return the value attribute of the DATE tag, which every ATEE
        document has."""
        return self.tarsqidoc.sourcedoc.tags.find_tag('DATE').attrs['value']
class MetadataParserRTE3(MetadataParser):
    """The parser for RTE3 documents; no differences with the default parser."""
class MetadataParserDB(MetadataParser):
    """A minimal example parser for cases where the DCT is retrieved from a
    database. It is identical to MetadataParser except for how it gets the
    DCT. This is done by lookup in a database. This here is the simplest
    possible case, and it is quite inefficient. It assumes there is an sqlite
    database at 'TTK_ROOT/data/in/va/dct.sqlite' which was created as
    follows:
    $ sqlite3 dct.sqlite
    sqlite> create table dct (filename TEXT, dct TEXT)
    sqlite> insert into dct values ("test.xml", "1999-12-31");
    The get_dct() method uses this database and the location of the database is
    specified in the config.txt file. The first use case for this were VA
    documents where the DCT was stored externally. To see this in action run
    $ python tarsqi.py --source-format=db data/in/va/test.xml out.xml
    """

    def get_dct(self):
        """Look up the DCT for this file's basename in the database named by
        the 'dct-database' option. Returns None when the file has no entry,
        so that _moderate_dct_vals() can fall back to other sources."""
        fname = os.path.basename(self._get_source().filename)
        db_location = self.options.getopt('dct-database')
        db_connection = sqlite3.connect(db_location)
        # Always release the connection; the original version leaked it and
        # crashed with a TypeError when the filename was not in the table.
        try:
            db_cursor = db_connection.cursor()
            db_cursor.execute('SELECT dct FROM dct WHERE filename=?', (fname,))
            row = db_cursor.fetchone()
        finally:
            db_connection.close()
        if row is None:
            logger.warn("No DCT found in database for %s" % fname)
            return None
        return row[0]
def _get_today():
"""Return today's date in YYYYMMDD format."""
return time.strftime("%Y%m%d", time.localtime())
def _get_dct_values(tag_repository):
    """Return the list of normalized values from all TIMEX3 tags in the
    TagRepository that function as the document creation time."""
    timexes = [t for t in tag_repository.find_tags('TIMEX3')
               if t.attrs.get('functionInDocument') == 'CREATION_TIME']
    values = [t.attrs.get(LIBRARY.timeml.VALUE) for t in timexes]
    return values
|
|
# -*- coding: utf-8 -*-
# Copyright 2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import itertools
import logging
import shlex
import six
from .util import separate_groups
class ClusterBalancer(object):
    """Interface that is used to implement any cluster partition balancing approach.
    :param cluster_topology: The ClusterTopology object that should be acted on.
    :param args: The program arguments.
    """
    def __init__(self, cluster_topology, args=None):
        self.cluster_topology = cluster_topology
        self.args = args
        # Each balancer_args entry may hold several shell-style tokens, so
        # split every entry and flatten the result before parsing.
        if hasattr(args, 'balancer_args'):
            self.parse_args(list(itertools.chain.from_iterable(
                shlex.split(arg) for arg in args.balancer_args
            )))
        else:
            self.parse_args([])
        self.log = logging.getLogger(self.__class__.__name__)
    def parse_args(self, _balancer_args):
        """Parse balancer command line arguments. The default implementation
        accepts none.
        :param _balancer_args: The list of arguments as strings.
        """
        pass
    def rebalance(self):
        """Rebalance partitions across the brokers in the cluster."""
        raise NotImplementedError("Implement in subclass")
    def decommission_brokers(self, broker_ids):
        """Decommission a broker and balance all of its partitions across the cluster.
        :param broker_ids: A list of strings representing valid broker ids in the cluster.
        :raises InvalidBrokerIdError: A broker id is invalid.
        """
        raise NotImplementedError("Implement in subclass")
    def add_replica(self, partition_name, count=1):
        """Add replicas of a partition to the cluster, while maintaining the cluster's balance.
        :param partition_name: (topic_id, partition_id) of the partition to add replicas of.
        :param count: The number of replicas to add.
        :raises InvalidReplicationFactorError: The resulting replication factor is invalid.
        """
        raise NotImplementedError("Implement in subclass")
    def remove_replica(self, partition_name, osr_broker_ids, count=1):
        """Remove replicas of a partition from the cluster, while maintaining the cluster's balance.
        :param partition_name: (topic_id, partition_id) of the partition to remove replicas of.
        :param osr_broker_ids: A set of the partition's out-of-sync broker ids.
        :param count: The number of replicas to remove.
        :raises InvalidReplicationFactorError: The resulting replication factor is invalid.
        """
        raise NotImplementedError("Implement in subclass")
    def score(self):
        """Give the current cluster topology a numerical score.
        The score should be relative to other possible cluster assignments.
        A result of None signifies that this ClusterBalancer cannot assign a score.
        """
        return None
    def rebalance_replicas(
        self,
        max_movement_count=None,
        max_movement_size=None,
    ):
        """Balance replicas across replication-groups.
        :param max_movement_count: The maximum number of partitions to move.
        :param max_movement_size: The maximum total size of the partitions to move.
        :returns: A 2-tuple whose first element is the number of partitions moved
        and whose second element is the total size of the partitions moved.
        """
        movement_count = 0
        movement_size = 0
        for partition in six.itervalues(self.cluster_topology.partitions):
            # Pass the remaining budget down to the per-partition step.
            # NOTE(review): a budget of 0 is falsy and therefore treated as
            # "no limit" here -- confirm callers never pass 0 to mean "none".
            count, size = self._rebalance_partition_replicas(
                partition,
                None if not max_movement_count
                else max_movement_count - movement_count,
                None if not max_movement_size
                else max_movement_size - movement_size,
            )
            movement_count += count
            movement_size += size
        return movement_count, movement_size
    def _rebalance_partition_replicas(
        self,
        partition,
        max_movement_count=None,
        max_movement_size=None,
    ):
        """Rebalance replication groups for given partition."""
        # Separate replication-groups into under and over replicated
        total = partition.replication_factor
        over_replicated_rgs, under_replicated_rgs = separate_groups(
            list(self.cluster_topology.rgs.values()),
            lambda g: g.count_replica(partition),
            total,
        )
        # Move replicas from over-replicated to under-replicated groups
        movement_count = 0
        movement_size = 0
        # Keep moving one replica at a time while both sides exist and the
        # next move would still fit inside both budgets.
        while (
            under_replicated_rgs and over_replicated_rgs
        ) and (
            max_movement_size is None or
            movement_size + partition.size <= max_movement_size
        ) and (
            max_movement_count is None or
            movement_count < max_movement_count
        ):
            # Decide source and destination group
            rg_source = self._elect_source_replication_group(
                over_replicated_rgs,
                partition,
            )
            rg_destination = self._elect_dest_replication_group(
                rg_source.count_replica(partition),
                under_replicated_rgs,
                partition,
            )
            if rg_source and rg_destination:
                # Actual movement of partition
                self.log.debug(
                    'Moving partition {p_name} from replication-group '
                    '{rg_source} to replication-group {rg_dest}'.format(
                        p_name=partition.name,
                        rg_source=rg_source.id,
                        rg_dest=rg_destination.id,
                    ),
                )
                rg_source.move_partition(rg_destination, partition)
                movement_count += 1
                movement_size += partition.size
            else:
                # Groups balanced or cannot be balanced further
                break
            # Re-compute under and over-replicated replication-groups
            over_replicated_rgs, under_replicated_rgs = separate_groups(
                list(self.cluster_topology.rgs.values()),
                lambda g: g.count_replica(partition),
                total,
            )
        return movement_count, movement_size
    def _elect_source_replication_group(
        self,
        over_replicated_rgs,
        partition,
    ):
        """Decide source replication-group based as group with highest replica
        count.
        """
        return max(
            over_replicated_rgs,
            key=lambda rg: rg.count_replica(partition),
        )
    def _elect_dest_replication_group(
        self,
        replica_count_source,
        under_replicated_rgs,
        partition,
    ):
        """Decide destination replication-group based on replica-count."""
        min_replicated_rg = min(
            under_replicated_rgs,
            key=lambda rg: rg.count_replica(partition),
        )
        # Locate under-replicated replication-group with lesser
        # replica count than source replication-group
        # (at least 2 fewer, otherwise moving one replica would merely swap
        # the imbalance between the two groups).
        if min_replicated_rg.count_replica(partition) < replica_count_source - 1:
            return min_replicated_rg
        return None
|
|
import os
import shutil
import sqlite3
import tempfile
import urlparse
import urllib2
import subprocess
import logging
import argparse
import csv
import sys
this_dir = os.path.realpath(os.path.dirname(__file__))
# Add the package root (two levels above this script) to the import path.
# The original joined os.pardir relative to the current working directory,
# which only worked when the script was launched from this directory (and
# left this_dir unused); anchor the join to this_dir instead.
sys.path.append(os.path.realpath(os.path.join(this_dir, os.pardir, os.pardir)))
from geodata.encoding import safe_decode
from geodata.geonames.paths import *
from geodata.file_utils import *
from geodata.log import *
from itertools import islice, chain
log_to_file(sys.stderr)
logger = logging.getLogger('geonames.sqlite')
# Download locations for the GeoNames export dumps.
BASE_URL = 'http://download.geonames.org/export/'
DUMP_URL = urlparse.urljoin(BASE_URL, 'dump/')
ALL_COUNTRIES_ZIP_FILE = 'allCountries.zip'
HIERARCHY_ZIP_FILE = 'hierarchy.zip'
ALTERNATE_NAMES_ZIP_FILE = 'alternateNames.zip'
# The postal-code dump lives under zip/ and reuses the allCountries name.
ZIP_URL = urlparse.urljoin(BASE_URL, 'zip/')
GEONAMES_DUMP_FILES = (ALL_COUNTRIES_ZIP_FILE,
                       HIERARCHY_ZIP_FILE,
                       ALTERNATE_NAMES_ZIP_FILE)
# base_url, local_dir, is_gzipped, local_filename
# NOTE(review): despite the flag name these are zip archives, not gzip; the
# flag only controls whether the download is unpacked -- confirm intent.
GEONAMES_FILES = [(DUMP_URL, '', True, ALL_COUNTRIES_ZIP_FILE),
                  (DUMP_URL, '', True, HIERARCHY_ZIP_FILE),
                  (DUMP_URL, '', True, ALTERNATE_NAMES_ZIP_FILE),
                  (ZIP_URL, 'zip', True, ALL_COUNTRIES_ZIP_FILE),
                  ]
def download_file(url, dest):
    """Download url to the local path dest by shelling out to wget."""
    logger.info('Downloading file from {}'.format(url))
    # Argument-list form avoids any shell interpretation of the URL.
    subprocess.check_call(['wget', url, '-O', dest])
def admin_ddl(admin_level):
    """Build the DDL for the admin{N}_codes lookup table.

    Returns a tuple whose first element is the CREATE TABLE statement and
    whose remaining elements are CREATE INDEX statements.  Besides the
    country code, each level-N table carries the admin codes of all parent
    levels 1..N-1.
    """
    # range() works on both Python 2 and 3; the original used py2-only xrange.
    columns = ['country_code TEXT'] + \
        ['admin{}_code TEXT'.format(i)
         for i in range(1, admin_level)]
    create = '''
    CREATE TABLE admin{level}_codes (
        geonames_id INT,
        code TEXT,
        name TEXT,
        {fields}
    )'''.format(level=admin_level,
                fields=''',
        '''.join(columns))
    indices = (
        '''CREATE INDEX admin{}_code_index ON
        admin{}_codes (code)'''.format(admin_level, admin_level),
        '''CREATE INDEX admin{}_gn_id_index ON
        admin{}_codes (geonames_id)'''.format(admin_level, admin_level),
    )
    return (create, ) + indices
geonames_ddl = {
'geonames': (
'''CREATE TABLE geonames (
geonames_id INT PRIMARY KEY,
name TEXT,
ascii_name TEXT,
alternate_names TEXT,
latitude DOUBLE,
longitude DOUBLE,
feature_class TEXT,
feature_code TEXT,
country_code TEXT,
cc2 TEXT,
admin1_code TEXT,
admin2_code TEXT,
admin3_code TEXT,
admin4_code TEXT,
population LONG DEFAULT 0,
elevation INT,
dem INT,
timezone TEXT,
modification_date TEXT)''',
'''CREATE INDEX feature_code ON
geonames (feature_code)''',
'''CREATE INDEX country_code ON
geonames (country_code)''',
'''CREATE INDEX admin_codes ON
geonames (country_code, admin1_code, admin2_code, admin3_code, admin4_code)''',
),
'alternate_names': (
'''CREATE TABLE alternate_names (
alternate_name_id INT PRIMARY KEY,
geonames_id INT,
iso_language TEXT,
alternate_name TEXT,
is_preferred_name BOOLEAN DEFAULT 0,
is_short_name BOOLEAN DEFAULT 0,
is_colloquial BOOLEAN DEFAULT 0,
is_historic BOOLEAN DEFAULT 0)''',
'''CREATE INDEX geonames_id_index ON
alternate_names (geonames_id)''',
'''CREATE INDEX geonames_id_alt_name_index ON
alternate_names(geonames_id, alternate_name)''',
),
'hierarchy': (
'''CREATE TABLE hierarchy (
parent_id INT,
child_id INT,
type TEXT
);''',
'''CREATE INDEX parent_child_index ON
hierarchy (parent_id, child_id)''',
'''CREATE INDEX child_parent_index ON
hierarchy (child_id, parent_id)''',
),
'postal_codes': (
'''CREATE TABLE postal_codes (
country_code TEXT,
postal_code TEXT,
place_name TEXT,
admin1 TEXT,
admin1_code TEXT,
admin2 TEXT,
admin2_code TEXT,
admin3 TEXT,
admin3_code TEXT,
latitude DOUBLE,
longitude DOUBLE,
accuracy INT
)''',
'''CREATE INDEX post_code_index ON
postal_codes (country_code, postal_code)''',
'''CREATE INDEX postal_code_admins ON
postal_codes (country_code, admin1_code, admin2_code, admin3_code)''',
),
'admin1_codes': admin_ddl(1),
'admin2_codes': admin_ddl(2),
'admin3_codes': admin_ddl(3),
'admin4_codes': admin_ddl(4),
}
# Maps (local subdirectory, dump filename) -> destination table name; the
# keys correspond to the local_dir/local_filename fields of GEONAMES_FILES.
geonames_file_table_map = {
    ('', ALL_COUNTRIES_ZIP_FILE): 'geonames',
    ('', ALTERNATE_NAMES_ZIP_FILE): 'alternate_names',
    ('', HIERARCHY_ZIP_FILE): 'hierarchy',
    ('zip', ALL_COUNTRIES_ZIP_FILE): 'postal_codes',
}
country_codes_create_table = (
'drop table if exists country_codes',
'''
create table country_codes as
select distinct country_code from geonames
where feature_code in ('PCL', 'PCLI', 'PCLIX', 'PCLD', 'PCLF', 'PCLS', 'TERR')
''',
)
proper_countries_create_table = (
'drop table if exists proper_countries',
'''
create table proper_countries as
select * from geonames
where feature_code in ('PCL', 'PCLI', 'PCLIX', 'PCLD', 'PCLF', 'PCLS')
and country_code in (select country_code from country_codes)
''',
)
territories_create_table = (
'drop table if exists territories',
'''
create table territories as
select * from geonames where feature_code = 'TERR'
and country_code not in (select country_code from proper_countries);
''',
)
countries_create_table = (
'drop table if exists countries',
'''
create table countries as
select * from proper_countries
union
select * from territories;
''',
'create index country_geonames_id on countries (geonames_id)',
'create index conntry_country_code on countries (country_code)',
)
country_alises_create_table = (
'drop table if exists country_aliases',
'''
create table country_aliases as
select name, country_code
from countries
union
select alternate_name, country_code
from alternate_names an
join countries c
on c.geonames_id = an.geonames_id
where alternate_name != ''
and iso_language not in ('doi','faac','iata',
'icao','link','post','tcid')
'''
)
country_table_create_statements = list(chain(country_codes_create_table,
proper_countries_create_table,
territories_create_table,
countries_create_table,
country_alises_create_table))
def create_table(conn, table):
    """Drop any existing copy of *table* and recreate it (plus its indices)
    from the DDL registered in geonames_ddl."""
    statements = geonames_ddl[table]
    cursor = conn.cursor()
    cursor.execute('DROP TABLE IF EXISTS {}'.format(table))
    for statement in statements:
        cursor.execute(statement)
    conn.commit()
def batch_iter(iterable, batch_size):
    """Yield successive lists of at most *batch_size* items from *iterable*,
    stopping when the source is exhausted."""
    source_iter = iter(iterable)
    while True:
        chunk = list(islice(source_iter, batch_size))
        if not chunk:
            return
        yield chunk
def populate_admin_table(conn, admin_level):
    """Fill admin{N}_codes from geonames rows whose feature code is ADM{N}.

    The admin{N}_codes table must already exist (see admin_ddl/create_table).
    Column order matches the DDL: geonames_id, code, name, country_code,
    then the parent admin codes admin1..admin{N-1}.
    """
    logging.info('Doing admin level {}'.format(admin_level))
    columns = ['geonames_id',
               'admin{}_code'.format(admin_level),
               'name',
               'country_code']
    # range() works on both Python 2 and 3; the original used py2-only xrange.
    columns.extend(['admin{}_code'.format(i)
                    for i in range(1, admin_level)])
    # Bind the feature code as a parameter; the original embedded it as a
    # "double-quoted" token and relied on SQLite's lenient fallback that
    # treats an unresolvable identifier as a string literal.
    admin_insert_statement = '''
        insert into "admin{}_codes"
        select {}
        from geonames
        where feature_code = ?
        '''.format(admin_level, ','.join(columns))
    conn.execute(admin_insert_statement, ('ADM{}'.format(admin_level),))
    conn.commit()
    logging.info('Done with admin level {}'.format(admin_level))
def import_geonames_table(conn, table, f, batch_size=2000):
    """Bulk-insert the rows of iterable *f* into *table*, committing after
    every *batch_size* rows.  The number of placeholders is taken from the
    first row of each batch."""
    # The doubled braces keep a slot for the placeholder list, filled in
    # per batch below.
    statement = 'INSERT INTO "{}" VALUES ({{}})'.format(table)
    rows = iter(f)
    cursor = conn.cursor()
    i = 0
    while True:
        batch = list(islice(rows, batch_size))
        if not batch:
            break
        placeholders = ','.join(['?'] * len(batch[0]))
        cursor.executemany(statement.format(placeholders), batch)
        conn.commit()
        cursor = conn.cursor()
        logging.info('imported {} batches ({} records)'.format(i + 1, (i + 1) * batch_size))
        i += 1
    cursor.close()
def create_geonames_sqlite_db(temp_dir, db_file=DEFAULT_GEONAMES_DB_PATH):
    """Download every GeoNames dump listed in GEONAMES_FILES into temp_dir,
    import each into its sqlite table in db_file, then build the derived
    country and admin-code tables.

    :param temp_dir: working directory for the downloaded archives.
    :param db_file: path of the sqlite database to create/update.
    """
    conn = sqlite3.connect(db_file)
    logging.info('Created database at {}'.format(db_file))
    for url, directory, is_gzipped, filename in GEONAMES_FILES:
        table = geonames_file_table_map[(directory, filename)]
        create_table(conn, table)
        full_url = urlparse.urljoin(url, filename)
        dest_dir = os.path.join(temp_dir, directory)
        ensure_dir(dest_dir)
        dest_file = os.path.join(dest_dir, filename)
        download_file(full_url, dest_file)
        if is_gzipped:
            unzip_file(dest_file, dest_dir)
            # The extracted text file shares the archive's basename.
            filename = dest_file.replace('.zip', '.txt')
        # Close the dump file when done; the original open() leaked the
        # file handle.
        with open(filename) as dump_file:
            reader = csv.reader(dump_file, delimiter='\t', quotechar=None)
            lines = (map(safe_decode, line) for line in reader)
            import_geonames_table(conn, table, lines)
    logging.info('Creating countries tables')
    for statement in country_table_create_statements:
        conn.execute(statement)
    conn.commit()
    logging.info('Creating admin tables')
    # range() works on both Python 2 and 3; the original used py2-only xrange.
    for admin_level in range(1, 5):
        create_table(conn, 'admin{}_codes'.format(admin_level))
        populate_admin_table(conn, admin_level)
    conn.close()
if __name__ == '__main__':
    # Handle argument parsing here
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--temp-dir',
                        default=tempfile.gettempdir(),
                        help='Temporary work directory')
    parser.add_argument('-o', '--out',
                        default=DEFAULT_GEONAMES_DB_PATH,
                        help='SQLite3 db filename')
    args = parser.parse_args()
    # Downloads the full GeoNames dumps and builds the database end to end.
    create_geonames_sqlite_db(args.temp_dir, args.out)
|
|
import os
from urllib.parse import urljoin, urlparse
import click
import requests
from . import messages
from .utils import create_temp_dir, get_user_agent
class SingleHostSession(requests.Session):
    """A requests session bound to a single host.

    Relative URLs passed to :meth:`request` are resolved against ``host``.
    HTTP(S) proxy settings are picked up from the environment unless
    explicitly overridden via the ``proxies`` keyword argument.
    """

    def __init__(self, host, **kwargs):
        super(SingleHostSession, self).__init__()
        self.debug = kwargs.pop("debug", False)
        self.host = host.rstrip("/")

        env_proxies = {}
        for scheme, env_var in (("http", "HTTP_PROXY"), ("https", "HTTPS_PROXY")):
            if env_var in os.environ:
                env_proxies[scheme] = os.environ[env_var]
        if env_proxies:
            # Explicitly supplied proxies win over environment defaults.
            env_proxies.update(kwargs.get("proxies", {}))
            kwargs["proxies"] = env_proxies

        # Remaining keyword arguments become session attributes.
        for attr, value in kwargs.items():
            setattr(self, attr, value)

    def request(self, method, url, v3_compatibilty=False, *args, **kwargs):
        # NOTE: the misspelled keyword is part of the public interface
        # (see APIV3Request below) and must stay as-is.
        url = urljoin(self.host, url)
        if v3_compatibilty:
            # V3 compatibility hack
            url = url.replace("control", "api", 1)
        return super(SingleHostSession, self).request(
            method, url, *args, **kwargs
        )
class APIRequestError(click.ClickException):
    """Click exception rendered in red on stderr."""

    def show(self, file=None):
        message = self.format_message()
        click.secho(
            "\nError: {}".format(message),
            file=file,
            err=True,
            fg="red",
        )
class APIRequest(object):
    """A single API call performed against a :class:`SingleHostSession`.

    Subclasses configure ``method``, ``url`` and optionally ``headers``;
    instances are callable and return the processed response body.
    """

    network_exception_message = messages.NETWORK_ERROR_MESSAGE
    default_error_message = messages.SERVER_ERROR
    # Default mapping of HTTP status codes to user-facing error messages.
    response_code_error_map = {
        requests.codes.forbidden: messages.AUTH_INVALID_TOKEN,
        requests.codes.unauthorized: messages.AUTH_INVALID_TOKEN,
        requests.codes.not_found: messages.RESOURCE_NOT_FOUND_ANONYMOUS,
        requests.codes.bad_request: messages.BAD_REQUEST,
    }
    method = "GET"
    url = None
    default_headers = {"User-Agent": get_user_agent()}
    headers = {}

    def __init__(
        self,
        session,
        url=None,
        url_kwargs=None,
        data=None,
        files=None,
        *args,
        **kwargs
    ):
        self.session = session
        if url:
            # Allow overriding the class-level URL per instance.
            self.url = url
        self.url_kwargs = url_kwargs or {}
        self.data = data or {}
        self.files = files or {}

    def __call__(self, *args, **kwargs):
        return self.request(*args, **kwargs)

    def get_url(self):
        """Return the request URL with ``url_kwargs`` substituted in."""
        return self.url.format(**self.url_kwargs)

    def get_login(self):
        """Tries to get the login name for the current request"""
        # import done here to prevent circular import
        from . import cloud

        netrc = cloud.WritableNetRC()
        host = urlparse(self.session.host).hostname
        data = netrc.hosts.get(host)
        if data:
            return data[0]
        return False

    def get_error_code_map(self, login=None):
        """Return the status-code -> error-message map for this request.

        Bug fix: this now works on a copy. The original mutated the
        class-level ``response_code_error_map`` in place, so a
        login-specific 404 message from one request leaked into every
        later request of every APIRequest subclass.
        """
        error_map = dict(self.response_code_error_map)
        if login:
            error_map[requests.codes.not_found] = (
                messages.RESOURCE_NOT_FOUND.format(login=login)
            )
        return error_map

    def get_headers(self):
        """Return the default headers merged with subclass overrides."""
        headers = self.default_headers.copy()
        headers.update(self.headers)
        return headers

    def request(self, *args, **kwargs):
        """Perform the HTTP request.

        Network failures are converted into ClickException so the CLI can
        show a friendly message.
        """
        try:
            response = self.session.request(
                self.method,
                self.get_url(),
                data=self.data,
                files=self.files,
                headers=self.get_headers(),
                *args,
                **kwargs
            )
        except (
            requests.exceptions.ConnectionError,
            requests.exceptions.Timeout,
        ) as e:
            raise click.ClickException(messages.NETWORK_ERROR_MESSAGE + str(e))
        return self.verify(response)

    def verify(self, response):
        """Raise APIRequestError on failed responses, else process them."""
        if not response.ok:
            error_msg = self.get_error_code_map(self.get_login()).get(
                response.status_code, self.default_error_message
            )
            response_content = response.text
            if not self.session.debug:
                # Truncate long response bodies unless debugging.
                response_content = response_content[:300]
            if response_content:
                error_msg = "{}\n\n{}".format(error_msg, response_content)
            raise APIRequestError(error_msg)
        return self.process(response)

    def process(self, response):
        """Default processing: decode the JSON body."""
        return response.json()
class RawResponse(object):
    # Mixin: return the raw requests.Response object unprocessed.
    def process(self, response):
        return response
class TextResponse(object):
    # Mixin: return the response body as text.
    def process(self, response):
        return response.text
class JsonResponse(object):
    # Mixin: return the decoded JSON body.
    def process(self, response):
        return response.json()
class DjangoFormMixin(object):
    """Mixin turning Django form validation errors into readable text."""

    success_message = "Request successful"

    def verify(self, response):
        if response.ok:
            return self.success_message
        if response.status_code == requests.codes.bad_request:
            # Flatten the {field: [errors, ...]} payload into an
            # indented bullet list.
            parts = [
                "There was an error submitting your request:\n"
                "-------------------------------------------\n\n"
            ]
            for field, errors in response.json().items():
                parts.append(" - {}\n".format(field))
                for error in errors:
                    parts.append("   - {}\n".format(error))
                parts.append("\n")
            return "".join(parts).strip("\n")
        return super(DjangoFormMixin, self).verify(response)
class FileResponse(object):
    """Mixin that streams the response body to a file and returns its path."""

    def __init__(self, *args, **kwargs):
        self.filename = kwargs.pop("filename", None)
        self.directory = kwargs.pop("directory", None)
        super(FileResponse, self).__init__(*args, **kwargs)

    def process(self, response):
        target_dir = self.directory or create_temp_dir()
        target_name = self.filename or "data.tar.gz"
        dump_path = os.path.join(target_dir, target_name)
        with open(dump_path, "wb") as out:
            for chunk in response.iter_content(chunk_size=1024):
                # filter out keep-alive new chunks
                if not chunk:
                    continue
                out.write(chunk)
                out.flush()
        return dump_path

    def request(self, *args, **kwargs):
        # Stream so large downloads are not buffered in memory.
        kwargs["stream"] = True
        return super(FileResponse, self).request(*args, **kwargs)
# Exchange an access token for a session login.
class LoginRequest(APIRequest):
    default_error_message = messages.AUTH_SERVER_ERROR
    url = "/api/v1/login-with-token/"
    method = "POST"
# Poll the login/track endpoint.
class LoginStatusRequest(APIRequest):
    url = "/track/"
    method = "GET"
# List the websites belonging to the current user.
class ProjectListRequest(APIRequest):
    url = "/api/v1/user-websites/"
# Fetch detail data for a single website.
class ProjectDetailRequest(APIRequest):
    url = "/api/v1/website/{website_id}/detail/"
# Poll deployment progress for a website.
class DeployProjectProgressRequest(JsonResponse, APIRequest):
    url = "/api/v1/website/{website_id}/deploy/"
    method = "GET"
# Fetch the deploy log for a given stage.
# NOTE(review): this URL has no leading slash, so urljoin resolves it
# relative to the current path rather than the host root -- confirm this
# is intentional.
class DeployLogRequest(JsonResponse, APIRequest):
    url = "api/v1/website/{website_id}/deploy-log/{stage}/"
    method = "GET"
# Trigger a deployment for a website.
class DeployProjectRequest(JsonResponse, APIRequest):
    url = "/api/v1/website/{website_id}/deploy/"
    method = "POST"
# Register a new addon; form errors are rendered via DjangoFormMixin.
class RegisterAddonRequest(DjangoFormMixin, JsonResponse, APIRequest):
    url = "/api/v1/addon/register/"
    method = "POST"
    success_message = "Addon successfully registered"
# Upload an addon package.
class UploadAddonRequest(TextResponse, APIRequest):
    url = "/api/v1/apps/"
    method = "POST"
# Upload a boilerplate package.
class UploadBoilerplateRequest(TextResponse, APIRequest):
    url = "/api/v1/boilerplates/"
    method = "POST"
class ProjectLockQueryRequest(APIRequest):
    """Query whether a website is locked; returns the boolean flag."""
    url = "/api/v1/website/{website_id}/lock/"
    method = "GET"

    def process(self, response):
        # Bug fix: ``Response.json()`` takes no positional argument, so the
        # original ``response.json("is_locked")`` raised TypeError. Decode
        # the body first, then extract the flag (mirrors SlugToIDRequest).
        return response.json().get("is_locked")
# Acquire the website lock.
class ProjectLockRequest(TextResponse, APIRequest):
    url = "/api/v1/website/{website_id}/lock/"
    method = "PUT"
# Release the website lock.
class ProjectUnlockRequest(TextResponse, APIRequest):
    url = "/api/v1/website/{website_id}/lock/"
    method = "DELETE"
class SlugToIDRequest(APIRequest):
    """Resolve a website slug to its numeric id."""
    url = "/api/v1/slug-to-id/{website_slug}/"

    def process(self, response):
        payload = response.json()
        return payload.get("id")
class IDToSlugRequest(APIRequest):
    """Resolve a website id to its slug."""
    url = "/api/v1/id-to-slug/{website_id}/"

    def process(self, response):
        payload = response.json()
        return payload.get("slug")
class DownloadBackupRequest(FileResponse, APIRequest):
    """Download the workspace backup tarball (``None`` when none exists)."""
    url = "/api/v1/workspace/{website_slug}/download/backup/"
    headers = {"accept": "application/x-tar-gz"}

    def verify(self, response):
        if response.status_code != requests.codes.not_found:
            return super(DownloadBackupRequest, self).verify(response)
        # A 404 just means no backup has been created yet.
        return None
# Download DB
class DownloadDBRequestRequest(JsonResponse, APIRequest):
    url = "/api/v1/website/{website_id}/download/db/request/"
    method = "POST"
# No class-level url: the progress URL is supplied per instance via the
# ``url`` argument of APIRequest.__init__ (same for the other *Progress
# requests below).
class DownloadDBProgressRequest(JsonResponse, APIRequest):
    method = "GET"
# Download Media
class DownloadMediaRequestRequest(JsonResponse, APIRequest):
    url = "/api/v1/website/{website_id}/download/media/request/"
    method = "POST"
class DownloadMediaProgressRequest(JsonResponse, APIRequest):
    method = "GET"
# Upload DB
class UploadDBRequest(JsonResponse, APIRequest):
    url = "/api/v1/website/{website_id}/upload/db/"
    method = "POST"
class UploadDBProgressRequest(JsonResponse, APIRequest):
    method = "GET"
# Upload Media
class UploadMediaFilesRequest(JsonResponse, APIRequest):
    url = "/api/v1/website/{website_id}/upload/media/"
    method = "POST"
class UploadMediaFilesProgressRequest(JsonResponse, APIRequest):
    method = "GET"
# Environment variables for a deployment stage.
class GetEnvironmentVariablesRequest(JsonResponse, APIRequest):
    url = "/api/v1/website/{website_id}/env/{stage}/environment-variables/"
class GetCustomEnvironmentVariablesRequest(JsonResponse, APIRequest):
    url = "/api/v1/website/{website_id}/env/{stage}/environment-variables/custom/"
class SetCustomEnvironmentVariablesRequest(JsonResponse, APIRequest):
    method = "POST"
    url = "/api/v1/website/{website_id}/env/{stage}/environment-variables/custom/"
# Repository
class RepositoryRequest(JsonResponse, APIRequest):
    url = "/api/v2/repositories/?website={website_id}"
class APIV3Request(APIRequest):
    # Base for v3 endpoints: forces the session's v3 URL rewrite
    # ("control" -> "api"). The misspelled keyword matches
    # SingleHostSession.request and must not be renamed in isolation.
    def request(self, *args, **kwargs):
        return super(APIV3Request, self).request(
            v3_compatibilty=True, *args, **kwargs
        )
# Fetch logs for a v3 environment.
class LogRequest(JsonResponse, APIV3Request):
    url = "/apps/v3/environments/{environment_uuid}/logs/"
    method = "GET"
# Fetch a v3 environment's metadata.
class EnvironmentRequest(JsonResponse, APIV3Request):
    url = "/apps/v3/environments/{environment_uuid}/"
    method = "GET"
|
|
import re
class Assembler:
    """Two-pass assembler emitting 24-bit words.

    Each inst_* handler parses one mnemonic's regex match ``m``, appends the
    current instruction address to ``self.instructions``, and emits encoded
    bytes/words via push8/push24. Operands referring to ``(label)`` are
    queued with ``enqueuelabel`` for later patching at the word slot
    following the opcode word (``inst_addr + 1``). The OPCODES/MODES/
    REGISTERS/HEX/INT/REG/ALPHANUMERIC tables and the push*/enqueuelabel/
    checkInCode helpers are defined elsewhere in this class.
    """
    def inst_CALL(self, m):
        # CALL <hex-addr> | CALL (label): opcode word + 24-bit target word.
        self.checkInCode()
        opcode = self.OPCODES['CALL']
        r_src = 'A'
        r_dest = 'A'
        r_op = 'A'
        if re.match(self.HEX, m.group(2)):
            addr = m.group(2)
        elif re.match("\(" + self.ALPHANUMERIC + "\)", m.group(2)):
            self.enqueuelabel(m.group(2).strip("()"), self.inst_addr+1)
            addr = "0xdeadbe"
        # NOTE(review): no else branch -- an operand matching neither
        # pattern leaves `addr` unbound and push24 below raises NameError.
        # NOTE(review): push24 receives the raw string here, while other
        # handlers pass int(value, 0) -- confirm push24 accepts strings.
        self.instructions.append(self.inst_addr)
        self.push24(opcode << 16)
        self.push24(addr)
        self.inst_addr += 2
    def inst_LD(self, m):
        # LD reg, <reg|value|(label)>: load register from register or
        # immediate; immediate forms emit an extra 24-bit value word.
        self.checkInCode()
        opcode = self.OPCODES['LD']
        mode = ''
        r_dest = 'A'
        r_src = 'A'
        r_op = 'A'
        value = ''
        r_dest = m.group(2).upper()
        if re.match(self.REG, m.group(3)):
            mode = "REGISTER"
            r_src = m.group(3).upper()
        elif re.match(self.HEX + "|" + self.INT, m.group(3)):
            mode = "VALUE"
            value = m.group(3)
        elif re.match("\(" + self.ALPHANUMERIC + "\)", m.group(3)):
            mode = "VALUE"
            self.enqueuelabel(m.group(3).strip("()"), self.inst_addr+1)
            value = "0xdeadbe"
        else:
            self.printerror("Syntax error")
        self.instructions.append(self.inst_addr)
        # Encoding: opcode byte, mode nibble (high), then src/dest nibbles.
        self.push8(opcode)
        self.push8(self.MODES[mode] << 4)
        self.push8((self.REGISTERS[r_src] << 4) | self.REGISTERS[r_dest])
        self.inst_addr += 1
        if value != '':
            self.push24(int(value, 0))
            self.inst_addr += 1
    def inst_DBG(self, m):
        # DBG reg: debug-print a register.
        self.checkInCode()
        opcode = self.OPCODES['DBG']
        r_src = 'A'
        r_dest = 'A'
        r_op = 'A'
        r_op = m.group(2)
        self.instructions.append(self.inst_addr)
        self.push8(opcode)
        self.push8(self.REGISTERS[r_op])
        self.push8(0x00)
        self.inst_addr += 1
    def inst_HALT(self, m):
        # HALT: single opcode word, no operands.
        self.checkInCode()
        opcode = self.OPCODES['HALT']
        self.instructions.append(self.inst_addr)
        self.push24(opcode << 16)
        self.inst_addr += 1
    def inst_MR(self, m):
        # MR reg, <[reg]|addr|(label)> [offset]: memory read.
        self.checkInCode()
        opcode = self.OPCODES['MR']
        mode = ''
        r_dest = 'A'
        r_src = 'A'
        r_op = 'A'
        addr = ''
        offset = ''
        near = False
        relative = False  # NOTE(review): never used in this handler
        r_dest = m.group(2).upper()
        if re.match("\[" + self.REG + "\]", m.group(3), re.IGNORECASE):
            mode += "INDIRECT"
            # NOTE(review): this clobbers r_dest (set from group 2 above)
            # with the indirect register; inst_MW assigns r_dest here but
            # reads its source from a different group -- presumably this
            # should set r_src instead. Confirm against the ISA.
            r_dest = re.sub('[\[\]]', '',m.group(3).upper())
        elif re.match(self.HEX, m.group(3), re.IGNORECASE):
            mode += "ABSOLUTE"
            addr = m.group(3)
        elif re.match("\(" + self.ALPHANUMERIC + "\)", m.group(3)):
            mode = "ABSOLUTE"
            self.enqueuelabel(m.group(3).strip("()"), self.inst_addr+1)
            # NOTE(review): sets `offset`, whereas the analogous label
            # branch in inst_MW sets `addr` -- verify which is intended.
            offset = "0xdeadbe"
        else:
            self.printerror("Syntax error")
        if (m.group(4) != None): # There is offset
            # NOTE(review): the raw matched text is appended to `mode`
            # before the REG/NEAR/FAR suffix -- assumes MODES keys include
            # that text; confirm.
            mode += m.group(4)
            if re.match(self.REG, m.group(4), re.IGNORECASE):
                mode += "REG"
                r_op = m.group(4).upper()
            elif re.match(self.INT, m.group(5), re.IGNORECASE):
                # Offsets up to 15 fit in the r_op nibble ("NEAR");
                # larger ones get an extra 24-bit word ("FAR").
                if int(m.group(5), 0) <= 15:
                    mode += "NEAR"
                    near = True
                    r_op = m.group(5)
                elif int(m.group(5), 0) > self.MAX_INT:
                    self.printerror("Max. offset 16777215")
                else:
                    mode += "FAR"
                    offset = m.group(5)
            else:
                mode += "FAR"
                offset = m.group(5)
        self.instructions.append(self.inst_addr)
        self.push8(opcode)
        self.push8((self.MODES[mode] << 4) | (int(r_op) if near else self.REGISTERS[r_op]))
        self.push8((self.REGISTERS[r_src] << 4) | self.REGISTERS[r_dest])
        self.inst_addr += 1
        if addr != '':
            self.push24(int(addr, 0))
            self.inst_addr += 1
        if offset != '':
            self.push24(int(offset, 0))
            self.inst_addr += 1
    def inst_MW(self, m):
        # MW <[reg]|addr|(label)> [offset], reg: memory write.
        self.checkInCode()
        opcode = self.OPCODES['MW']
        mode = ''
        r_dest = 'A'
        r_src = 'A'
        r_op = 'A'
        addr = ''
        offset = ''
        near = False
        relative = False  # NOTE(review): never used in this handler
        r_src = m.group(5).upper()
        if re.match("\[" + self.REG + "\]", m.group(2), re.IGNORECASE):
            mode += "INDIRECT"
            r_dest = re.sub('[\[\]]', '', m.group(2).upper())
        elif re.match(self.HEX, m.group(2), re.IGNORECASE):
            mode += "ABSOLUTE"
            addr = m.group(2)
        elif re.match("\(" + self.ALPHANUMERIC + "\)", m.group(2)):
            mode = "ABSOLUTE"
            self.enqueuelabel(m.group(2).strip("()"), self.inst_addr+1)
            addr = "0xdeadbe"
        else:
            self.printerror("Syntax error")
        if m.group(3) != None: # There is offset
            # NOTE(review): raw matched text appended to `mode` -- see
            # the same pattern in inst_MR.
            mode += m.group(3)
            if re.match(self.REG, m.group(4), re.IGNORECASE):
                mode += "REG"
                r_op = m.group(4).upper()
            elif re.match(self.INT, m.group(4), re.IGNORECASE):
                if int(m.group(4), 0) <= 15:
                    mode += "NEAR"
                    near = True
                    r_op = m.group(4)
                elif int(m.group(4), 0) > self.MAX_INT:
                    self.printerror("Max. offset 16777215")
                else:
                    mode += "FAR"
                    offset = m.group(4)
            else:
                mode += "FAR"
                offset = m.group(4)
        self.instructions.append(self.inst_addr)
        self.push8(opcode)
        self.push8((self.MODES[mode] << 4) | (int(r_op) if near else self.REGISTERS[r_op]))
        self.push8((self.REGISTERS[r_src] << 4) | self.REGISTERS[r_dest])
        self.inst_addr += 1
        if addr != '':
            self.push24(int(addr, 0))
            self.inst_addr += 1
        if offset != '':
            self.push24(int(offset, 0))
            self.inst_addr += 1
    def inst_NOP(self, m):
        # NOP: single opcode word.
        self.checkInCode()
        opcode = self.OPCODES['NOP']
        self.instructions.append(self.inst_addr)
        self.push24(opcode << 16)
        self.inst_addr += 1
    def inst_POP(self, m):
        # POP reg: destination register in the low nibble of byte 3.
        self.checkInCode()
        opcode = self.OPCODES['POP']
        r_src = 'A'
        r_dest = 'A'
        r_op = 'A'
        r_dest = m.group(2)
        self.instructions.append(self.inst_addr)
        self.push8(opcode)
        self.push8(0x00)
        self.push8(self.REGISTERS[r_dest])
        self.inst_addr += 1
    def inst_PUSH(self, m):
        # PUSH reg.
        self.checkInCode()
        opcode = self.OPCODES['PUSH']
        r_src = 'A'
        r_dest = 'A'
        r_op = 'A'
        r_src = m.group(2)
        self.instructions.append(self.inst_addr)
        self.push8(opcode)
        self.push8(0x00)
        # NOTE(review): `<< 8` shifts the register index out of byte
        # range for push8; other encodings put the source register in the
        # high nibble via `<< 4` (cf. inst_POP). Presumably a bug -- confirm.
        self.push8(self.REGISTERS[r_src] << 8)
        self.inst_addr += 1
    def inst_RET(self, m):
        # RET: single opcode word.
        self.checkInCode()
        opcode = self.OPCODES['RET']
        self.instructions.append(self.inst_addr)
        self.push24(opcode << 16)
        self.inst_addr += 1
    def inst_VR(self, m):
        # VR reg, <[reg]|addr|(label)> [offset]: video/VRAM read.
        # Structurally identical to inst_MR, including the suspected
        # r_dest clobber and offset-vs-addr issues noted there.
        self.checkInCode()
        opcode = self.OPCODES['VR']
        mode = ''
        r_dest = 'A'
        r_src = 'A'
        r_op = 'A'
        addr = ''
        offset = ''
        near = False
        relative = False
        r_dest = m.group(2).upper()
        if re.match("\[" + self.REG + "\]", m.group(3), re.IGNORECASE):
            mode += "INDIRECT"
            r_dest = re.sub('[\[\]]', '',m.group(3).upper())
        elif re.match(self.HEX, m.group(3), re.IGNORECASE):
            mode += "ABSOLUTE"
            addr = m.group(3)
        elif re.match("\(" + self.ALPHANUMERIC + "\)", m.group(3)):
            mode = "ABSOLUTE"
            self.enqueuelabel(m.group(3).strip("()"), self.inst_addr+1)
            offset = "0xdeadbe"
        else:
            self.printerror("Syntax error")
        if (m.group(4) != None): # There is offset
            mode += m.group(4)
            if re.match(self.REG, m.group(4), re.IGNORECASE):
                mode += "REG"
                r_op = m.group(4).upper()
            elif re.match(self.INT, m.group(5), re.IGNORECASE):
                if int(m.group(5), 0) <= 15:
                    mode += "NEAR"
                    near = True
                    r_op = m.group(5)
                elif int(m.group(5), 0) > self.MAX_INT:
                    self.printerror("Max. offset 16777215")
                else:
                    mode += "FAR"
                    offset = m.group(5)
            else:
                mode += "FAR"
                offset = m.group(5)
        self.instructions.append(self.inst_addr)
        self.push8(opcode)
        self.push8((self.MODES[mode] << 4) | (int(r_op) if near else self.REGISTERS[r_op]))
        self.push8((self.REGISTERS[r_src] << 4) | self.REGISTERS[r_dest])
        self.inst_addr += 1
        if addr != '':
            self.push24(int(addr, 0))
            self.inst_addr += 1
        if offset != '':
            self.push24(int(offset, 0))
            self.inst_addr += 1
    def inst_VW(self, m):
        # VW <[reg]|addr|(label)> [offset], reg: video/VRAM write.
        # Structurally identical to inst_MW.
        self.checkInCode()
        opcode = self.OPCODES['VW']
        mode = ''
        r_dest = 'A'
        r_src = 'A'
        r_op = 'A'
        addr = ''
        offset = ''
        near = False
        relative = False
        r_src = m.group(5).upper()
        if re.match("\[" + self.REG + "\]", m.group(2), re.IGNORECASE):
            mode += "INDIRECT"
            r_dest = re.sub('[\[\]]', '', m.group(2).upper())
        elif re.match(self.HEX, m.group(2), re.IGNORECASE):
            mode += "ABSOLUTE"
            addr = m.group(2)
        elif re.match("\(" + self.ALPHANUMERIC + "\)", m.group(2)):
            mode = "ABSOLUTE"
            self.enqueuelabel(m.group(2).strip("()"), self.inst_addr+1)
            addr = "0xdeadbe"
        else:
            self.printerror("Syntax error")
        if m.group(3) != None: # There is offset
            mode += m.group(3)
            if re.match(self.REG, m.group(4), re.IGNORECASE):
                mode += "REG"
                r_op = m.group(4).upper()
            elif re.match(self.INT, m.group(4), re.IGNORECASE):
                if int(m.group(4), 0) <= 15:
                    mode += "NEAR"
                    near = True
                    r_op = m.group(4)
                elif int(m.group(4), 0) > self.MAX_INT:
                    self.printerror("Max. offset 16777215")
                else:
                    mode += "FAR"
                    offset = m.group(4)
            else:
                mode += "FAR"
                offset = m.group(4)
        self.instructions.append(self.inst_addr)
        self.push8(opcode)
        self.push8((self.MODES[mode] << 4) | (int(r_op) if near else self.REGISTERS[r_op]))
        self.push8((self.REGISTERS[r_src] << 4) | self.REGISTERS[r_dest])
        self.inst_addr += 1
        if addr != '':
            self.push24(int(addr, 0))
            self.inst_addr += 1
        if offset != '':
            self.push24(int(offset, 0))
            self.inst_addr += 1
|
|
# Copyright 2013 OpenStack Foundation
# Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
import migrate
from migrate import exceptions
from oslo_config import cfg
from oslo_db.sqlalchemy import migration
from oslo_utils import importutils
import six
import sqlalchemy
from keystone.common import sql
from keystone import contrib
from keystone import exception
from keystone.i18n import _
CONF = cfg.CONF
DEFAULT_EXTENSIONS = []
MIGRATED_EXTENSIONS = ['endpoint_policy',
'federation',
'oauth1',
'revoke',
'endpoint_filter'
]
# Different RDBMSs use different schemes for naming the Foreign Key
# Constraints. SQLAlchemy does not yet attempt to determine the name
# for the constraint, and instead attempts to deduce it from the column.
# This fails on MySQL.
def get_constraints_names(table, column_name):
    """Return the names of FK constraints on *table* involving *column_name*."""
    names = []
    for constraint in table.constraints:
        if not isinstance(constraint, sqlalchemy.ForeignKeyConstraint):
            continue
        if column_name in constraint.columns:
            names.append(constraint.name)
    return names
# remove_constraints and add_constraints both accept a list of dictionaries
# that contain:
# {'table': a sqlalchemy table. The constraint is added to dropped from
# this table.
# 'fk_column': the name of a column on the above table, The constraint
# is added to or dropped from this column
# 'ref_column':a sqlalchemy column object. This is the reference column
# for the constraint.
def remove_constraints(constraints):
    """Drop every FK constraint described by *constraints*.

    Each entry is a dict with 'table', 'fk_column' and 'ref_column' keys
    (see the comment block above).
    """
    for spec in constraints:
        table = spec['table']
        fk_column = spec['fk_column']
        for constraint_name in get_constraints_names(table, fk_column):
            fk = migrate.ForeignKeyConstraint(
                columns=[getattr(table.c, fk_column)],
                refcolumns=[spec['ref_column']],
                name=constraint_name)
            fk.drop()
def add_constraints(constraints):
    """Create the FK constraints described by *constraints*.

    Entries whose table or referenced table uses the MyISAM storage engine
    are skipped, since MyISAM does not support foreign key constraints.
    """
    for spec in constraints:
        table = spec['table']
        ref_column = spec['ref_column']
        engines = (table.kwargs.get('mysql_engine'),
                   ref_column.table.kwargs.get('mysql_engine'))
        if 'MyISAM' in engines:
            # Constraints are unsupported on MyISAM tables.
            continue
        migrate.ForeignKeyConstraint(
            columns=[getattr(table.c, spec['fk_column'])],
            refcolumns=[ref_column]).create()
def rename_tables_with_constraints(renames, constraints, engine):
    """Renames tables with foreign key constraints.

    Tables are renamed after first removing constraints. The constraints are
    replaced after the rename is complete.

    This works on databases that don't support renaming tables that have
    constraints on them (DB2).

    `renames` is a dict, mapping {'to_table_name': from_table, ...}
    """
    if engine.name != 'sqlite':
        # SQLite doesn't support constraints, so nothing to remove.
        remove_constraints(constraints)

    for to_table_name, from_table in renames.items():
        from_table.rename(to_table_name)

    # Bug fix: the original wrote ``engine != 'sqlite'``, comparing the
    # Engine object itself to a string (always True), so constraints were
    # re-added even on SQLite where they were never removed. Compare
    # ``engine.name`` as in the check above.
    if engine.name != 'sqlite':
        add_constraints(constraints)
def find_migrate_repo(package=None, repo_name='migrate_repo'):
    """Return the absolute path of *package*'s migration repository.

    Defaults to the keystone common SQL package. Raises
    MigrationNotProvided when the repository directory does not exist.
    """
    package = package or sql
    package_dir = os.path.dirname(package.__file__)
    path = os.path.abspath(os.path.join(package_dir, repo_name))
    if not os.path.isdir(path):
        raise exception.MigrationNotProvided(package.__name__, path)
    return path
def _sync_common_repo(version):
    """Migrate the main keystone schema to *version*."""
    repo_path = find_migrate_repo()
    initial = get_init_version()
    with sql.session_for_write() as session:
        bind = session.get_bind()
        # Refuse to move the schema backwards before syncing.
        _assert_not_schema_downgrade(version=version)
        migration.db_sync(bind, repo_path, version=version,
                          init_version=initial, sanity_check=False)
def get_init_version(abs_path=None):
    """Get the initial version of a migrate repository

    :param abs_path: Absolute path to migrate repository.
    :return: initial version number or None, if DB is empty.
    """
    repo_path = abs_path if abs_path is not None else find_migrate_repo()
    repo = migrate.versioning.repository.Repository(repo_path)
    # Repository exposes `latest` but no `oldest`; take min() over the
    # version numbers (VerNum objects) and convert to int.
    oldest = int(min(repo.versions.versions))
    # The initial version is one less than the oldest migration script.
    return oldest - 1 if oldest >= 1 else None
def _assert_not_schema_downgrade(extension=None, version=None):
    """Raise DbMigrationError when *version* is below the current version."""
    if version is None:
        return
    try:
        current_ver = int(six.text_type(get_db_version(extension)))
    except exceptions.DatabaseNotControlledError:  # nosec
        # NOTE(morganfainberg): The database is not controlled, this action
        # cannot be a downgrade.
        return
    if int(version) < current_ver:
        raise migration.exception.DbMigrationError(
            _("Unable to downgrade schema"))
def _sync_extension_repo(extension, version):
    # Migrate a single extension's schema to *version*. Order matters
    # throughout: the repo must be registered with the version-control API
    # before the downgrade check and the sync.
    if extension in MIGRATED_EXTENSIONS:
        # Extension tables were folded into the main repo; syncing them
        # separately is an error.
        raise exception.MigrationMovedFailure(extension=extension)
    with sql.session_for_write() as session:
        engine = session.get_bind()
        try:
            package_name = '.'.join((contrib.__name__, extension))
            package = importutils.import_module(package_name)
        except ImportError:
            raise ImportError(_("%s extension does not exist.")
                              % package_name)
        try:
            abs_path = find_migrate_repo(package)
            try:
                migration.db_version_control(engine, abs_path)
            # Register the repo with the version control API
            # If it already knows about the repo, it will throw
            # an exception that we can safely ignore
            except exceptions.DatabaseAlreadyControlledError:  # nosec
                pass
        except exception.MigrationNotProvided as e:
            print(e)
            sys.exit(1)
        _assert_not_schema_downgrade(extension=extension, version=version)
        init_version = get_init_version(abs_path=abs_path)
        migration.db_sync(engine, abs_path, version=version,
                          init_version=init_version, sanity_check=False)
def sync_database_to_version(extension=None, version=None):
    """Sync either the common repository or a single extension to *version*."""
    if extension:
        _sync_extension_repo(extension, version)
        return
    _sync_common_repo(version)
    if version is not None:
        # An explicit version applies to the common repository only;
        # extension repositories are left untouched.
        return
    for default_extension in DEFAULT_EXTENSIONS:
        _sync_extension_repo(default_extension, version)
def get_db_version(extension=None):
    """Return the current migration version of the common repo or *extension*."""
    if not extension:
        with sql.session_for_write() as session:
            return migration.db_version(session.get_bind(),
                                        find_migrate_repo(),
                                        get_init_version())
    package_name = '.'.join((contrib.__name__, extension))
    try:
        package = importutils.import_module(package_name)
    except ImportError:
        raise ImportError(_("%s extension does not exist.")
                          % package_name)
    with sql.session_for_write() as session:
        return migration.db_version(
            session.get_bind(), find_migrate_repo(package), 0)
def print_db_version(extension=None):
    """Print the current DB version; exit(1) when the repo is missing."""
    try:
        print(get_db_version(extension=extension))
    except exception.MigrationNotProvided as e:
        print(e)
        sys.exit(1)
|
|
"""
managers.py
.. moduleauthor:: Ulrich Felzmann <ulrich.felzmann@versi.edu.au>
"""
from datetime import datetime
from django.db import models
from django.db.models import Q
from django.core.exceptions import PermissionDenied
from django.contrib.auth.models import User, Group
from tardis.tardis_portal.auth.localdb_auth import django_user, django_group
class OracleSafeManager(models.Manager):
    """Manager that defers TextField retrieval when Oracle is the backend.

    'distinct' calls on querysets containing TextFields (mapped to NCLOB
    on Oracle) are known to fail, so those fields are deferred.
    """
    def get_query_set(self):
        from django.db import connection
        queryset = super(OracleSafeManager, self).get_query_set()
        if connection.settings_dict['ENGINE'] != 'django.db.backends.oracle':
            return queryset
        # Defer every NCLOB-backed field so .distinct() works on Oracle.
        deferred = [field.attname for field in self.model._meta.fields
                    if field.db_type(connection=connection) == 'NCLOB']
        return queryset.defer(*deferred)
class ExperimentManager(OracleSafeManager):
"""
Implements a custom manager for the Experiment model which checks
the authorisation rules for the requesting user first
To make this work, the request must be passed to all class
functions. The username and the group memberships are then
resolved via the request.groups and request.user objects.
The :py:mod:`tardis.tardis_portal.auth.AuthService` is responsible for
filling the request.groups object.
"""
def all(self, request): #@ReservedAssignment
"""
Returns all experiments a user - either authenticated or
anonymous - is allowed to see and search
:param request: a HTTP Request instance
:type request: :py:class:`django.http.HttpRequest`
"""
query = self._query_all_public() | self._query_owned_and_shared(request)
return super(ExperimentManager, self).get_query_set().filter(
query).distinct()
def owned_and_shared(self, request):
return super(ExperimentManager, self).get_query_set().filter(
self._query_owned_and_shared(request)).distinct()
def _query_owned_and_shared(self, request):
# if the user is not authenticated, nothing should be returned
if not request.user.is_authenticated():
return Q(id=None)
# for which experiments does the user have read access
# based on USER permissions?
query = Q(experimentacl__pluginId=django_user,
experimentacl__entityId=str(request.user.id),
experimentacl__canRead=True)\
& (Q(experimentacl__effectiveDate__lte=datetime.today())
| Q(experimentacl__effectiveDate__isnull=True))\
& (Q(experimentacl__expiryDate__gte=datetime.today())
| Q(experimentacl__expiryDate__isnull=True))
# for which does experiments does the user have read access
# based on GROUP permissions
for name, group in request.groups:
query |= Q(experimentacl__pluginId=name,
experimentacl__entityId=str(group),
experimentacl__canRead=True)\
& (Q(experimentacl__effectiveDate__lte=datetime.today())
| Q(experimentacl__effectiveDate__isnull=True))\
& (Q(experimentacl__expiryDate__gte=datetime.today())
| Q(experimentacl__expiryDate__isnull=True))
return query
def _query_all_public(self):
from tardis.tardis_portal.models import Experiment
return ~Q(public_access=Experiment.PUBLIC_ACCESS_NONE)
def get(self, request, experiment_id):
"""
Returns an experiment under the consideration of the ACL rules
Raises PermissionDenied if the user does not have access.
:param request: a HTTP Request instance
:type request: :py:class:`django.http.HttpRequest`
:param experiment_id: the ID of the experiment to be edited
:type experiment_id: string
"""
experiment = \
super(ExperimentManager, self).get(pk=experiment_id)
# if the experiment is public, return it right away
if experiment.public_access != experiment.PUBLIC_ACCESS_NONE:
return experiment
# if not, is the user logged in at all?
if not request.user.is_authenticated():
raise PermissionDenied
# check if there is a user based authorisation role
query = Q(experiment=experiment,
pluginId=django_user,
entityId=str(request.user.id),
canRead=True)\
& (Q(effectiveDate__lte=datetime.today())
| Q(effectiveDate__isnull=True))\
& (Q(expiryDate__gte=datetime.today())
| Q(expiryDate__isnull=True))
# and finally check all the group based authorisation roles
for name, group in request.groups:
query |= Q(pluginId=name,
entityId=str(group),
experiment=experiment,
canRead=True)\
& (Q(effectiveDate__lte=datetime.today())
| Q(effectiveDate__isnull=True))\
& (Q(expiryDate__gte=datetime.today())
| Q(expiryDate__isnull=True))
# is there at least one ACL rule which satisfies the rules?
from tardis.tardis_portal.models import ExperimentACL
acl = ExperimentACL.objects.filter(query)
if acl.count() == 0:
raise PermissionDenied
else:
return experiment
def owned(self, request):
"""
Return all experiments which are owned by a particular user
:param request: a HTTP Request instance
:type request: :py:class:`django.http.HttpRequest`
"""
# the user must be authenticated
if not request.user.is_authenticated():
return super(ExperimentManager, self).get_empty_query_set()
return self.owned_by_user_id(request.user.id)
def owned_by_user_id(self, userId):
"""
Return all experiments which are owned by a particular user id
:param userId: a User ID
:type userId: integer
"""
# build the query to filter the ACL table
query = Q(experimentacl__pluginId=django_user,
experimentacl__entityId=str(userId),
experimentacl__isOwner=True)\
& (Q(experimentacl__effectiveDate__lte=datetime.today())
| Q(experimentacl__effectiveDate__isnull=True))\
& (Q(experimentacl__expiryDate__gte=datetime.today())
| Q(experimentacl__expiryDate__isnull=True))
return super(ExperimentManager, self).get_query_set().filter(query)
def user_acls(self, request, experiment_id):
"""
Returns a list of ACL rules associated with this experiment.
:param request: a HTTP Request instance
:type request: :py:class:`django.http.HttpRequest`
:param experiment_id: the ID of the experiment
:type experiment_id: string
"""
from tardis.tardis_portal.models import ExperimentACL
return ExperimentACL.objects.filter(pluginId=django_user,
experiment__id=experiment_id,
aclOwnershipType=ExperimentACL.OWNER_OWNED)
def users(self, request, experiment_id):
"""
Returns a list of users who have ACL rules associated with this
experiment.
:param request: a HTTP Request instance
:type request: :py:class:`django.http.HttpRequest`
:param experiment_id: the ID of the experiment
:type experiment_id: string
"""
acl = self.user_acls(request, experiment_id)
return User.objects.filter(pk__in=[ int(a.entityId) for a in acl ])
def user_owned_groups(self, request, experiment_id):
"""
returns a list of user owned-groups which have ACL rules
associated with this experiment
:param request: a HTTP Request instance
:type request: :py:class:`django.http.HttpRequest`
:param experiment_id: the ID of the experiment to be edited
:type experiment_id: string
"""
from tardis.tardis_portal.models import ExperimentACL
acl = ExperimentACL.objects.filter(pluginId=django_group,
experiment__id=experiment_id,
aclOwnershipType=ExperimentACL.OWNER_OWNED)
return Group.objects.filter(pk__in=[ str(a.entityId) for a in acl ])
def group_acls_user_owned(self, request, experiment_id):
"""
Returns a list of ACL rules associated with this experiment.
:param request: a HTTP Request instance
:type request: :py:class:`django.http.HttpRequest`
:param experiment_id: the ID of the experiment
:type experiment_id: string
"""
from tardis.tardis_portal.models import ExperimentACL
return ExperimentACL.objects.filter(pluginId=django_group,
experiment__id=experiment_id,
aclOwnershipType=ExperimentACL.OWNER_OWNED)
def group_acls_system_owned(self, request, experiment_id):
"""
Returns a list of ACL rules associated with this experiment.
:param request: a HTTP Request instance
:type request: :py:class:`django.http.HttpRequest`
:param experiment_id: the ID of the experiment
:type experiment_id: string
"""
from tardis.tardis_portal.models import ExperimentACL
return ExperimentACL.objects.filter(pluginId=django_group,
experiment__id=experiment_id,
aclOwnershipType=ExperimentACL.SYSTEM_OWNED)
def system_owned_groups(self, request, experiment_id):
    """
    Return the system-owned groups which have ACL rules associated with this
    experiment.

    :param request: a HTTP Request instance
    :type request: :py:class:`django.http.HttpRequest`
    :param experiment_id: the ID of the experiment to be edited
    :type experiment_id: string
    """
    from tardis.tardis_portal.models import ExperimentACL
    acl = ExperimentACL.objects.filter(pluginId=django_group,
                                       experiment__id=experiment_id,
                                       aclOwnershipType=ExperimentACL.SYSTEM_OWNED)
    # entityId is stored as a string; cast to int for the pk lookup,
    # consistent with users() above.
    return Group.objects.filter(pk__in=[int(a.entityId) for a in acl])
def external_users(self, request, experiment_id):
    """
    Return a list of group descriptions which have external (non user,
    non group plugin) ACL rules for this experiment, or None when there
    are no such rules.

    :param request: a HTTP Request instance
    :type request: :py:class:`django.http.HttpRequest`
    :param experiment_id: the ID of the experiment to be edited
    :type experiment_id: string
    """
    from tardis.tardis_portal.models import ExperimentACL
    # External rules are any ACL entries not handled by the built-in
    # user/group auth plugins.
    acl = ExperimentACL.objects.exclude(pluginId=django_user)
    acl = acl.exclude(pluginId=django_group)
    acl = acl.filter(experiment__id=experiment_id)
    if not acl:
        # NOTE(review): returns None rather than an empty list; callers
        # must handle both cases.
        return None
    from tardis.tardis_portal.auth import AuthService
    authService = AuthService()
    result = []
    for a in acl:
        # Resolve each external ACL entry to group descriptions via the
        # matching auth plugin.
        group = authService.searchGroups(plugin=a.pluginId,
                                         name=a.entityId)
        if group:
            result += group
    return result
class ParameterNameManager(models.Manager):
    """Manager supporting natural-key lookup of parameter names."""

    def get_by_natural_key(self, namespace, name):
        """Return the ParameterName identified by (schema namespace, name)."""
        return self.get(schema__namespace=namespace, name=name)
class SchemaManager(models.Manager):
    """Manager supporting natural-key lookup of schemas."""

    def get_by_natural_key(self, namespace):
        """Return the Schema identified by its namespace."""
        return self.get(namespace=namespace)
|
|
import numpy as np
import galsim
import galsim.config.input as gsinput
import ngmix
import os
import copy
from numpy.lib.recfunctions import append_fields
# Balrog files
import mathutil as util
import grid
# TODO: Implement as needed
class BalObject(object):
    '''
    Placeholder base class for Balrog objects; not yet implemented.
    '''

    def __init__(self):
        # Nothing to initialize yet.
        pass
class BalInjectionCatalog(object):
    '''
    Base class for Balrog injection catalogs. Stores, per realization, the
    injection positions, input-catalog indices, and optional rotation angles
    of the objects injected into a given tile.
    '''

    def __init__(self, input_type, inj_type, sub_type, tile, needs_band=False, mixed=False):
        self.input_type = input_type
        self.inj_type = inj_type
        self.sub_type = sub_type
        self.needs_band = needs_band
        # BUG FIX: was hard-coded to False, silently ignoring the passed-in
        # `mixed` flag.
        self.mixed = mixed

        # Per-realization dicts, populated by generate_objects()
        self.pos = None
        self.indx = None
        self.nobjects = None
        self.truth_outfile = {}

        # Balrog catalogs are constructed for a given Tile and require
        # info for construction
        self.ramin, self.ramax = tile.ramin, tile.ramax
        self.decmin, self.decmax = tile.decmin, tile.decmax
        self.ra_boundary_cross = tile.ra_boundary_cross
        self.Npix_x, self.Npix_y = tile.Npix_x, tile.Npix_y
        self.pixel_scale = tile.pixel_scale
        self.wcs = tile.wcs
        self.tile_name = tile.tile_name

        # This is also set for a given Tile, as it may depend on its area
        self.objs_per_real = tile.objs_per_real[input_type]

        # Some tests will impose single-object injection - this is turned off
        # by default
        # NOTE: Only supported by a few input types for now
        self.single_obj_injection = False

        # NOTE: In the future, it may be useful to save the actual input
        # catalog objects for each realization. For now it is just wasted
        # memory
        # self.cat = None

        return

    def generate_objects(self, config, realization, mixed_grid=None):
        """
        Generate positions, input-catalog indices, and (optionally) rotation
        angles for every realization. Real work happens only on the first
        realization in `config.realizations`; later calls are no-ops.

        Returns the (possibly newly constructed) MixedGrid so it can be
        shared between otherwise-independent injection catalogs.
        """
        # Generate positions and indices if this is the first realization
        if realization == config.realizations[0]:
            # Can't guarantee count consistency per real, so use dicts
            self.pos = {}
            self.indx = {}
            self.rotate = {}
            self.nobjects = {}

            input_type = self.input_type
            input_nobjects = config.input_nobjects[input_type]
            bg = grid.BaseGrid()

            for real in config.realizations:
                inj_nobjs = self.objs_per_real
                self.nobjects[real] = inj_nobjs

                # Generate object coordinates
                ps = config.pos_sampling[input_type]
                pstype = ps['type']

                if pstype == 'uniform':
                    # NOTE(review): samples `config.objs_per_real` objects,
                    # while the grid branches use `self.objs_per_real` --
                    # confirm both attributes exist and agree.
                    ra = util.sample_uniform_ra(self.ramin, self.ramax, config.objs_per_real,
                                                boundary_cross=self.ra_boundary_cross)
                    dec = util.sample_uniform_dec(self.decmin, self.decmax, config.objs_per_real,
                                                  unit='deg')
                    self.pos[real] = np.column_stack((ra, dec))

                elif pstype in bg._valid_grid_types:
                    grid_kwargs = self._build_grid_kwargs(pstype, ps)
                    tile_grid = grid._build_grid(pstype, **grid_kwargs)
                    self.pos[real] = tile_grid.pos

                    # NOTE: We ignore the inputted nobjects and use the correct
                    # grid value instead (user was already warned)
                    inj_nobjs = np.shape(tile_grid.pos)[0]
                    self.nobjects[real] = inj_nobjs

                elif pstype in bg._valid_mixed_types:
                    if mixed_grid is None:
                        # First catalog to reach this point builds the shared
                        # grid from the union of all MixedGrid input options.
                        N_inj_types = 0
                        inj_frac = {}
                        gtypes = set()
                        gspacing = set()

                        for inpt, opts in config.pos_sampling.items():
                            for key, s in {'grid_type': gtypes, 'grid_spacing': gspacing}.items():
                                try:
                                    s.add(opts[key])
                                except KeyError:
                                    # Only has to be present for one input type
                                    pass
                            if opts['type'] == 'MixedGrid':
                                N_inj_types += 1
                                inj_frac[inpt] = opts['inj_frac']
                            else:
                                raise ValueError('The mixed sampling type {}'.format(opts['type']) +
                                                 ' is not yet implemented in `generate_objects()`.')

                        for key, s in {'grid_type': gtypes, 'grid_spacing': gspacing}.items():
                            if len(s) != 1:
                                raise ValueError('For now, only one `{}` is allowed '.format(key) +
                                                 'for a mixed grid!')

                        gtype = gtypes.pop()

                        mixed_grid = grid.MixedGrid(gtype, N_inj_types, inj_frac)

                        grid_kwargs = self._build_grid_kwargs(pstype, ps)
                        mixed_grid.build_grid(**grid_kwargs)

                        # Objects are assigned immediately since we set all
                        # injection fractions during construction. Otherwise
                        # would have to wait
                        self.pos[real] = mixed_grid.pos[input_type]

                        # NOTE: We ignore the inputted nobjects and use the
                        # correct grid value instead (user was already warned)
                        inj_nobjs = mixed_grid.nobjects[input_type]
                        self.nobjects[real] = inj_nobjs

                    else:
                        # Grid already built by another input type; objects
                        # were assigned during its construction.
                        self.pos[real] = mixed_grid.pos[input_type]
                        inj_nobjs = mixed_grid.nobjects[input_type]
                        self.nobjects[real] = inj_nobjs

                else:
                    # An error should have already occured, but just in case:
                    # (BUG FIX: previously formatted `gtype`, which is
                    # undefined on this path)
                    raise ValueError('Position sampling type {} is not valid!'.format(pstype))

                # Generate object indices (in input catalog). np.random.choice
                # accepts an int directly (replaces the py2-only `xrange`).
                indices = np.random.choice(input_nobjects, size=inj_nobjs)
                self.indx[real] = indices

                # Generate object rotation angles, if desired
                if config.rotate_objs is True:
                    rot = util.sample_uniform(0., 360., self.nobjects[real])
                    self.rotate[real] = np.array([str(r) + ' deg' for r in rot])
                else:
                    self.rotate[real] = None

                # NOTE: This is where we could initialize the injection cat if
                # needed
                # self.cat[real] = config.input_cats[inj_type][indices]

        # Have to pass around mixed_grid since the injection catalogs
        # are otherwise independent
        return mixed_grid

    def _build_grid_kwargs(self, pstype, ps):
        """
        Translate the position-sampling config `ps` into kwargs for grid
        construction, handling the optional `rotate`, `offset`, and
        `angle_unit` entries.
        """
        if pstype == 'MixedGrid':
            gtype = ps['grid_type']
        else:
            gtype = pstype

        gs = ps['grid_spacing']

        # Rotate grid if asked
        try:
            r = ps['rotate']
            if (isinstance(r, str)) and (r.lower() == 'random'):
                # Max random rotation is set by the grid's symmetry.
                # NOTE(review): other grid types fall through without setting
                # `grid_rot_angle` -- confirm only Rect/Hex reach this branch.
                if gtype == 'RectGrid':
                    self.grid_rot_angle = np.random.uniform(0., np.pi / 2.)
                elif gtype == 'HexGrid':
                    self.grid_rot_angle = np.random.uniform(0., np.pi / 3.)
            else:
                unit = ps['angle_unit']
                if unit == 'deg':
                    if (r >= 0.0) and (r < 360.0):
                        self.grid_rot_angle = float(r)
                    else:
                        raise ValueError('Grid rotation of {} '.format(r) +
                                         'deg is not valid!')
                else:
                    if (r >= 0.0) and (r < 2 * np.pi):
                        self.grid_rot_angle = float(r)
                    else:
                        raise ValueError('Grid rotation of {} '.format(r) +
                                         'rad is not valid!')
        except KeyError:
            self.grid_rot_angle = 0.0

        # Offset grid if asked
        try:
            o = ps['offset']
            if (isinstance(o, str)) and (o.lower() == 'random'):
                self.grid_offset = [np.random.uniform(-gs / 2., gs / 2.),
                                    np.random.uniform(-gs / 2., gs / 2.)]
            else:
                if isinstance(o, list):
                    self.grid_offset = list(o)
                else:
                    # BUG FIX: error message previously formatted `r`, which
                    # may be undefined here
                    raise ValueError('Grid offset of {} '.format(o) +
                                     'is not an array!')
        except KeyError:
            self.grid_offset = [0.0, 0.0]

        try:
            self.angle_unit = ps['angle_unit']
        except KeyError:
            # Default in radians
            self.angle_unit = 'rad'

        # Creates the grid given tile parameters and calculates the
        # image / world positions for each object
        grid_kwargs = dict(grid_spacing=gs,
                           wcs=self.wcs,
                           Npix_x=self.Npix_x,
                           Npix_y=self.Npix_y,
                           pixscale=self.pixel_scale,
                           rot_angle=self.grid_rot_angle,
                           angle_unit=self.angle_unit,
                           pos_offset=self.grid_offset)

        return grid_kwargs

    # TODO: Should really make this a static method instead
    def _check_for_single_obj_indx(self, config):
        """
        Return (proxy catalog, index) when the user pinned a single global
        object index in the 'gal' config section, else (None, None).
        """
        # NOTE: Nearly all runs will generate a random sample of indices.
        # However, for some testing it would be nice to use an identical
        # object for all injections. In this case, the user can set a single
        # index in the 'gal' section of the global config
        # NOTE: Only currently supported for sims with a single input type
        try:
            orig_indx = config.gs_config[0]['gal']['index']
            if type(orig_indx) is int:
                # Need to find original index of catalog
                gs_config = copy.deepcopy(config.gs_config[0])
                # Add dummy band index (band doesn't matter)
                gs_config['input'][self.input_type].update({'bands': 'griz'})
                galsim.config.ProcessInput(gs_config)
                cat_proxy = gs_config['input_objs'][self.input_type][0]
                cat = cat_proxy.getCatalog()
                # Specific catalog structures can then generate indices from
                # the proxy catalog
                return cat, orig_indx
            else:
                raise TypeError('Can only set a global object index in the '
                                'config if it is an integer!')
        except KeyError:
            return None, None

    def get_truth_outfile(self, base_outfile, real):
        """Build, cache, and return the truth-catalog path for `real`."""
        truth_fname = '{}_{}_balrog_truth_cat_{}.fits'.format(self.tile_name, real, self.inj_type)
        self.truth_outfile[real] = os.path.join(base_outfile, truth_fname)
        return self.truth_outfile[real]

    def update_truth_cols(self, config, truth_cat, real):
        """Apply all per-realization updates to the truth catalog."""
        self.write_new_positions(truth_cat, real)
        self.update_colnames(truth_cat, real)
        if self.rotate[real] is not None:
            self.update_truth_shapes(config, truth_cat, real)

    def update_truth_shapes(self, config, truth_cat, real):
        # Only here to be inherited by subclasses, but not bothering with
        # a full abstract class for now
        raise NotImplementedError('Need to implement `update_truth_shapes()` to apply rotations to ' +
                                  'custom BalObjects!')

    def update_colnames(self, truth_cat, real):
        # Subclasses override when truth columns need renaming/correction.
        pass

    def write_new_positions(self, truth_cat, real):
        """
        Overwrite the truth catalog's position columns with the injected
        positions for realization `real`.
        """
        pos = self.pos[real]
        # If nothing is set for a given custom input, try the obvious
        try:
            truth_cat[self.inj_type]['ra'] = pos[:, 0]
            truth_cat[self.inj_type]['dec'] = pos[:, 1]
        except KeyError:
            try:
                # BUG FIX: previously indexed with the undefined name `in_type`
                truth_cat[self.inj_type]['RA'] = pos[:, 0]
                truth_cat[self.inj_type]['DEC'] = pos[:, 1]
            except KeyError:
                # BUG FIX: previously raised a plain string, which is itself a
                # TypeError; raise a proper exception instead
                raise KeyError('Tried to write truth positions using column names of ra/dec; RA/DEC. '
                               'Either rename position columns or overload `write_new_positions()` '
                               'for {}'.format(self.input_type))
        return

    def setup_chip_config(self, config, bal_config, chip, chip_indx):
        # Many injection types will require nothing special in setup
        pass

    def build_single_chip_config(self, config, bal_config, chip, chip_indx):
        # Subclasses override for band-dependent (etc.) chip config.
        pass

    def build_multi_chip_config(self, config, bal_config, chip, chip_indx, input_indx):
        # Subclasses override for band-dependent (etc.) chip config.
        pass
class DESInjectionCatalog(BalInjectionCatalog):
    """
    Injection catalog for DES-style inputs, which always require band
    information.
    """

    def __init__(self, input_type, inj_type, sub_type, tile, needs_band, mixed=False):
        # All catalogs require band input
        assert needs_band is True
        super(DESInjectionCatalog, self).__init__(input_type, inj_type, sub_type,
                                                  tile, needs_band, mixed)
        return

    def setup_chip_config(self, config, bal_config, chip, chip_indx):
        # Only load into memory the needed band catalog information
        band_opts = {'bands': chip.band}
        bal_config[chip_indx]['input'].update({self.input_type: band_opts})
        return

    def build_single_chip_config(self, config, bal_config, chip, chip_indx):
        # Draw this chip's injections through its own band.
        bal_config[chip_indx]['gal'].update({'band': chip.band})

    def build_multi_chip_config(self, config, bal_config, chip, chip_indx, input_indx):
        # Same as the single-chip case, but targeted at one 'items' entry.
        bal_config[chip_indx]['gal']['items'][input_indx].update({'band': chip.band})
        return

    def update_colnames(self, truth_cat, inj_cat):
        # TODO: Fix all columns affected by reddening!
        pass
class NGMIXInjectionCatalog(DESInjectionCatalog):
    """
    Injection catalog for ngmix inputs; adds truth-shape rotation and
    optional single-object injection for testing.
    """

    def generate_objects(self, config, realization, mixed_grid=None):
        mixed_grid = super(NGMIXInjectionCatalog, self).generate_objects(config,
                                                                         realization,
                                                                         mixed_grid=mixed_grid)

        # NOTE: See `_check_for_single_obj_indx()` for why we sometimes do this for testing
        single_obj_cat, orig_indx = self._check_for_single_obj_indx(config)

        if single_obj_cat is not None:
            # Specific to ngmix_catalog structure: map the config's global id
            # to the catalog row, then repeat it for every injection.
            indx = int(np.where(single_obj_cat['id']==orig_indx)[0])
            # NOTE(review): dtype='int16' overflows for catalog rows beyond
            # 32767 -- confirm catalogs stay small enough, or widen the dtype.
            self.indx[realization] = indx * np.ones(self.nobjects[realization], dtype='int16')
            self.single_obj_injection = True

        return mixed_grid

    def update_truth_shapes(self, config, truth_cat, real):
        """Rotate the truth-catalog shears by the per-object rotation angles."""
        g_colname = self.sub_type + '_g'
        g1 = truth_cat[self.inj_type][g_colname][:,0]
        g2 = truth_cat[self.inj_type][g_colname][:,1]

        # Need to unpack array of '{rotation in deg} deg'
        deg2rad = np.pi / 180.
        theta = np.array([float(r.split()[0].strip()) for r in self.rotate[real]])
        g1_rot, g2_rot = ngmix.shape.rotate_shape(g1, g2, deg2rad*theta)

        # Update truth catalog shape information
        truth_cat[self.inj_type][g_colname][:,0] = g1_rot
        truth_cat[self.inj_type][g_colname][:,1] = g2_rot

        # These values are also stored in pars
        truth_cat[self.inj_type][self.sub_type+'_pars'][:,2] = g1_rot
        truth_cat[self.inj_type][self.sub_type+'_pars'][:,3] = g2_rot

        # Add rotation angles to truth catalog
        truth_cat[self.inj_type] = append_fields(truth_cat[self.inj_type],
                                                 'rotation',
                                                 theta,
                                                 usemask=False)
        return
class MEDSInjectionCatalog(DESInjectionCatalog):
    """
    Injection catalog for MEDS inputs; selects the per-band meds/psf files
    for each chip.
    """

    def generate_objects(self, config, realization, mixed_grid=None):
        mixed_grid = super(MEDSInjectionCatalog, self).generate_objects(config,
                                                                        realization,
                                                                        mixed_grid=mixed_grid)

        # NOTE: See `_check_for_single_obj_indx()` for why we sometimes do
        # this for testing
        single_obj_cat, orig_indx = self._check_for_single_obj_indx(config)

        if single_obj_cat is not None:
            # Specific to meds_catalog structure:
            b = single_obj_cat.keys()[0]  # ID's consistent between bands
            indx = int(np.where(single_obj_cat[b]['id'] == orig_indx)[0])
            self.indx[realization] = indx * np.ones(self.nobjects[realization], dtype='int16')
            self.single_obj_injection = True

        # BUG FIX: previously returned None, dropping the shared mixed_grid
        # that every sibling implementation returns
        return mixed_grid

    def build_single_chip_config(self, config, bal_config, chip, chip_indx):
        super(MEDSInjectionCatalog, self).build_single_chip_config(config, bal_config, chip,
                                                                   chip_indx)
        # Only use meds/psf files for needed band
        b = config.bindx[chip.band]
        meds_file = [bal_config[0]['input'][self.input_type]['meds_files'][b]]
        psf_file = [bal_config[0]['input'][self.input_type]['psf_files'][b]]
        bal_config[chip_indx]['input'][self.input_type].update({
            'meds_files': meds_file,
            'psf_files': psf_file
        })
        bal_config[chip_indx]['gal'].update({
            'band': chip.band
        })
        return

    def build_multi_chip_config(self, config, bal_config, chip, chip_indx, input_indx):
        # BUG FIX: `input_indx` was missing from this super() call, which
        # raised a TypeError at runtime
        super(MEDSInjectionCatalog, self).build_multi_chip_config(config, bal_config, chip,
                                                                  chip_indx, input_indx)
        # Only use meds/psf files for needed band
        b = config.bindx[chip.band]
        meds_file = [bal_config[0]['input']['items'][input_indx]['meds_files'][b]]
        psf_file = [bal_config[0]['input']['items'][input_indx]['psf_files'][b]]
        bal_config[chip_indx]['input']['items'][input_indx].update({
            'meds_files': meds_file,
            'psf_files': psf_file
        })
        bal_config[chip_indx]['gal']['items'][input_indx].update({
            'band': chip.band
        })
        return
class DESStarInjectionCatalog(DESInjectionCatalog):
    """
    Injection catalog for DES star inputs. Supports Sahar's pre-computed
    star positions in addition to the standard sampling schemes.
    """

    def __init__(self, input_type, inj_type, sub_type, tile, needs_band=False, mixed=False):
        super(DESStarInjectionCatalog, self).__init__(input_type, inj_type, sub_type,
                                                      tile, needs_band, mixed)
        # Might add something like this in the future, if we end up passing
        # configs during init...
        # if config.pos_sampling[self.input_type]['type'] == 'sahar':
        #     self.sahar_pos = True
        # else:
        #     self.sahar_pos = False
        return

    def generate_objects(self, config, realization, mixed_grid=None):
        if config.pos_sampling[self.input_type]['type'] == 'sahar':
            # Sahar has pre-computed positions for her catalogs.
            # BUG FIX: was called with no arguments even though the method
            # requires (config, realization)
            self._generate_sahar_coords(config, realization)
            self.sahar_pos = True
            return mixed_grid
        else:
            mixed_grid = super(DESStarInjectionCatalog, self).generate_objects(config,
                                                                               realization,
                                                                               mixed_grid=mixed_grid)
            self.sahar_pos = False
            return mixed_grid

    def setup_chip_config(self, config, bal_config, chip, chip_indx):
        super(DESStarInjectionCatalog, self).setup_chip_config(config, bal_config, chip, chip_indx)
        # NOTE(review): updates the global config entry (index 0) rather than
        # `chip_indx` -- presumably intentional since the tile name applies to
        # all chips; confirm.
        bal_config[0]['input'][self.input_type].update({'tile': self.tile_name})
        return

    def _generate_sahar_coords(self, config, realization):
        '''
        For now (Y3), the star catalogs (including positions) are pre-computed.
        So we just need to declare some variables for future use.
        '''
        # TODO: This has not been checked since the move to generalized
        # inputs, although not currently in use
        # NOTE(review): `self.data_version` is not set anywhere in this class
        # -- confirm it is provided elsewhere before relying on this path.
        inp_type = self.input_type

        if self.data_version == 'y3v02':
            # The first time, divide up catalog randomly between realizations
            if realization == config.realizations[0]:
                # Can't guarantee star count consistency, so use dicts
                self.indx = {}
                self.pos = {}
                self.nobjects = {}

                # Grab star positions
                # (BUG FIX: previously used the undefined name `input_type`)
                ra = config.input_cats[inp_type]['RA_new']
                dec = config.input_cats[inp_type]['DEC_new']
                assert len(ra) == len(dec)

                # Randomize star catalog order and split into approximately
                # equal parts.
                # NOTE: If n_realizations > len(realizations), then DO NOT
                # randomly shuffle stars, as they are being injected across
                # multiple jobs.
                # (BUG FIX: `indices` and `rand` were previously undefined)
                indices = np.arange(len(ra))
                Nr = config.n_realizations
                if Nr == len(config.realizations):
                    np.random.shuffle(indices)
                indices = [np.array(indices[i::Nr]) for i in range(Nr)]

                # Sahar's DES Y3 star catalogs are all pre-computed, so we can
                # set needed values for all realizations now.
                for real in config.realizations:
                    j = int(np.where(real == np.array(config.realizations))[0])
                    inds = indices[j]
                    r, d = ra[inds], dec[inds]
                    self.indx[real] = inds
                    self.pos[real] = np.column_stack((r, d))
                    self.nobjects[real] = len(inds)

        return

    def update_truth_shapes(self, config, truth_cat, real):
        # Stars are injected as delta functions, so no need to update shape
        # info
        pass

    def write_new_positions(self, truth_cat, realization):
        # Currently, all used DES star catalogs have Sahar's naming scheme
        # anyway, so no check of `self.sahar_pos` is needed here.
        truth_cat[self.inj_type]['RA_new'] = self.pos[realization][:, 0]
        truth_cat[self.inj_type]['DEC_new'] = self.pos[realization][:, 1]
        return

    def update_colnames(self, truth_cat, inj_cat):
        # TODO: Fix all columns affected by reddening! (probably different
        # than ngmix/MEDS)
        pass
class COSMOSInjectionCatalog(BalInjectionCatalog):
    """
    Injection catalog for chromatic COSMOS inputs.
    """

    def setup_chip_config(self, config, bal_config, chip, chip_indx):
        # Draw chromatic stamps through this chip's bandpass
        stamp_opts = {
            'type': 'COSMOSChromatic',
            'bandpass': config.filters[chip.band].band_config,
        }
        bal_config[chip_indx]['stamp'].update(stamp_opts)
        return
# TODO: This should be filled with relevant construction info from Alex DW's udg_catalog class
class UDGInjectionCatalog(BalInjectionCatalog):
    """Placeholder for ultra-diffuse galaxy inputs; inherits all base behaviour."""
    pass
# TODO: Is there a better place to put this class? It interfaces between
# `balobject.py` and `balinput.py`
class BalInjectionCatalogs(object):
    '''
    This class exists to store all of the individual injection catalogs in a
    single framework as some position sampling requires mixed sampling where
    each type needs to be aware of the others.
    '''

    # TODO: Current state; make sure that `mixed_grid` is reset for a new
    # realization!
    def __init__(self, config):
        self.pos_sampling = dict(config.pos_sampling)
        self._inj_cats = {}
        self.mixed_sampling = {}
        self.mixed_grid = None
        self.input_types = config.input_types
        self._determine_mixed_inputs()
        return

    def _determine_mixed_inputs(self):
        """Flag which input types use mixed position sampling."""
        # NOTE: For now, only 1 mixed type is allowed!
        bg = grid.BaseGrid()
        for input_type, inpt in self.input_types.items():
            is_mixed = self.pos_sampling[input_type]['type'] in bg._valid_mixed_types
            self.mixed_sampling[input_type] = is_mixed
        return

    def generate_catalogs(self, config, tile, realization):
        """
        Generate an injection catalog for each input type, threading the
        shared mixed grid between them.
        """
        for input_type, inpt in self.input_types.items():
            input_type = inpt.input_type
            mixed = self.mixed_sampling[input_type]
            inj_cat, mixed_grid = inpt.generate_inj_catalog(config,
                                                            tile,
                                                            realization,
                                                            mixed=mixed,
                                                            mixed_grid=self.mixed_grid)
            self._inj_cats[input_type] = inj_cat
            self.mixed_grid = mixed_grid
        return

    def __getitem__(self, name):
        if name in self._inj_cats:
            return self._inj_cats.get(name)
        else:
            # NOTE: kept as AttributeError for backwards compatibility, even
            # though KeyError would be more conventional here
            raise AttributeError("No such attribute: " + name)

    def __setitem__(self, name, value):
        self._inj_cats[name] = value

    # BUG FIX: there were two conflicting `__delattr__` definitions: the
    # first iterated `self` (unsupported -- no `__iter__`/`__contains__`) and
    # the surviving one called `pop()` with no key, raising a TypeError.
    # Keep a single, correct implementation.
    def __delattr__(self, name):
        if name in self._inj_cats:
            self._inj_cats.pop(name)
        else:
            raise AttributeError("No such attribute: " + name)
class Galaxy(object):
    '''
    # TODO: Do we need a galaxy class? (probably not)
    '''

    def __init__(self):
        # Intentionally empty placeholder.
        pass
class Star(object):
    '''
    # TODO: Do we need a star class? (probably not)
    '''

    def __init__(self):
        # Intentionally empty placeholder.
        pass
def build_bal_inject_cat(input_type, inj_type, sub_type, tile, needs_band, mixed=False):
    """
    Factory for injection catalogs: use a registered Balrog-specific class if
    one exists for `input_type`, otherwise fall back to the generic
    BalInjectionCatalog (valid for any registered GalSim input type).
    """
    if input_type in BALROG_INJECTION_TYPES:
        # User-defined injection catalog construction
        inject_cat = BALROG_INJECTION_TYPES[input_type](input_type,
                                                        inj_type,
                                                        sub_type,
                                                        tile,
                                                        needs_band,
                                                        mixed)
    else:
        # Generic injection catalog construction
        if input_type not in gsinput.valid_input_types:
            raise ValueError('{} is not a native GalSim input type '.format(input_type) +
                             'or a recognized Balrog input type. Make sure you have written '
                             'and registered a valid GalSim input type')
        # BUG FIX: `sub_type` was previously omitted from this call, shifting
        # `tile` and `needs_band` into the wrong parameter slots
        inject_cat = BalInjectionCatalog(input_type, inj_type, sub_type, tile,
                                         needs_band, mixed)

    return inject_cat
# Registry mapping Balrog input types to their specialized injection-catalog
# classes; anything not listed here falls back to the generic
# BalInjectionCatalog (see `build_bal_inject_cat`).
BALROG_INJECTION_TYPES = {
    'ngmix_catalog' : NGMIXInjectionCatalog,
    'meds_catalog' : MEDSInjectionCatalog,
    'udg_catalog' : UDGInjectionCatalog,
    'des_star_catalog' : DESStarInjectionCatalog,
    'cosmos_chromatic_catalog' : COSMOSInjectionCatalog
}
|
|
# -*- coding: utf-8 -*-
#
# wxcast: A Python API and cli to collect weather information.
#
# Copyright (c) 2021 Sean Marlow
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import click
from collections import OrderedDict
from wxcast import api
from wxcast import utils
def print_license(ctx, param, value):
    """
    Eager option to print license information and exit.
    """
    # Follow click's eager-option convention: do nothing during resilient
    # parsing or when the flag was not supplied.
    if not value or ctx.resilient_parsing:
        return

    license_text = (
        'wxcast Copyright (C) 2021 Sean Marlow. (MIT License)\n\n'
        'See LICENSE for more information.'
    )
    click.echo(license_text)
    ctx.exit()
# Top-level click group; subcommands are attached via main.add_command() at
# the bottom of this module.
@click.group()
@click.version_option()
@click.option(
    '--license',
    expose_value=False,
    is_eager=True,
    is_flag=True,
    callback=print_license,
    help='Display license information and exit.'
)
def main():
    """
    Retrieve the latest weather information in your terminal.
    Data provided by NWS and AVWX.
    NWS: https://forecast-v3.weather.gov/documentation \n
    AVWX: https://avwx.rest/
    """
    # Group body is intentionally empty; work happens in the subcommands.
    pass
@click.command()
@click.option(
    '--no-color',
    is_flag=True,
    help='Remove ANSI color and styling from output.'
)
@click.argument('location')
def forecast(no_color, location):
    """
    Retrieve current 7 day forecast for given location.
    Location can be a city, address or zip/postal code.
    Examples:
    wxcast forecast denver
    wxcast forecast "denver, co"
    :param location: Location string to get forecast for.
    :param no_color: If True do not style string output.
    """
    try:
        response = api.get_seven_day_forecast(location)
    except Exception as e:
        utils.echo_style(str(e), no_color, fg='red')
        return

    # Map each forecast period name to its detailed text, preserving order.
    data = OrderedDict()
    for period in response:
        data[period['name']] = period['detailedForecast']
    utils.echo_dict(data, no_color)
@click.command()
@click.option(
    '-d', '--decoded',
    is_flag=True,
    help='Decode raw metar to string format.'
)
@click.option(
    '--no-color',
    is_flag=True,
    help='Remove ANSI color and styling from output.'
)
@click.option(
    '-t',
    '--temp-unit',
    default='C',
    type=click.Choice(['C', 'F']),
    help='Unit of measurement for temperature values. '
         'Default: (C).'
)
@click.argument('icao')
def metar(decoded, no_color, temp_unit, icao):
    """
    Retrieve the latest METAR given an airport ICAO code.
    Example: wxcast metar -d KSLC
    :param decoded: Flag to decode the METAR output.
    :param no_color: If True do not style string output.
    :param temp_unit: Unit of measurement for temperature values (C or F).
    :param icao: The airport ICAO code to retrieve METAR for.
    """
    try:
        response = api.get_metar(icao, temp_unit, decoded)
    except Exception as e:
        utils.echo_style(str(e), no_color, fg='red')
    else:
        if decoded:
            # Header line: "At {time} the conditions are:"
            click.echo(
                ''.join([
                    utils.style_string(
                        'At ', no_color, fg='green'
                    ),
                    utils.style_string(
                        response['time'], no_color, fg='blue'
                    ),
                    utils.style_string(
                        ' the conditions are:', no_color, fg='green'
                    ),
                    '\n'
                ])
            )

            spaces = utils.get_max_key(response)

            try:
                # Try to convert elevation to ft and meters.
                # Best-effort: keep the raw value on any failure.
                response['elevation'] = '{}ft ({}m)'.format(
                    int(float(response['elevation']) * 3.28084),
                    response['elevation']
                )
            except Exception:
                # (was `except (KeyError, Exception)` -- the KeyError entry
                # was redundant since Exception already covers it)
                pass

            utils.echo_dict(response, no_color, spaces=spaces)
        else:
            utils.echo_style(response, no_color, fg='blue')
@click.command()
@click.option(
    '--no-color',
    is_flag=True,
    help='Remove ANSI color and styling from output.'
)
@click.option(
    '-t',
    '--temp-unit',
    default='C',
    type=click.Choice(['C', 'F']),
    help='Unit of measurement for temperature values. '
         'Default: (C).'
)
@click.argument('station_id')
def conditions(no_color, temp_unit, station_id):
    """
    Retrieve the latest conditions given a weather station id.
    Example: wxcast conditions KDTW
    :param no_color: If True do not style string output.
    :param temp_unit: Unit of measurement for temperature values (C or F).
    :param station_id: The weather station id to retrieve conditions for.
    """
    try:
        response = api.get_metar(station_id, temp_unit, decoded=True)
    except Exception as e:
        utils.echo_style(str(e), no_color, fg='red')
    else:
        # Strip METAR metadata that is not a "current condition".
        # (BUG FIX: 'station' was popped twice; the duplicate is removed.)
        response.pop('station', None)
        response.pop('type', None)
        response.pop('sea level pressure', None)
        response.pop('remarks', None)
        response.pop('elevation', None)
        utils.echo_dict(response, no_color)
@click.command()
@click.option(
    '--no-color',
    is_flag=True,
    help='Remove ANSI color and styling from output.'
)
def offices(no_color):
    """
    Retrieve the available weather forecast offices (WFO).
    Example: wxcast offices
    :param no_color: If True do not style string output.
    """
    try:
        response = api.get_wfo_list()
    except Exception as e:
        utils.echo_style(str(e), no_color, fg='red')
        return
    utils.echo_dict(response, no_color)
@click.command()
@click.option(
    '--no-color',
    is_flag=True,
    help='Remove ANSI color and styling from output.'
)
@click.argument('wfo')
def products(no_color, wfo):
    """
    Retrieve the available text products for a given wfo.
    Example: wxcast products slc
    :param no_color: If True do not style string output.
    :param wfo: The weather forecast office abbreviation (BOU).
    """
    try:
        response = api.get_wfo_products(wfo)
    except Exception as e:
        utils.echo_style(str(e), no_color, fg='red')
        return
    utils.echo_dict(response, no_color)
@click.command()
@click.option(
    '--no-color',
    is_flag=True,
    help='Remove ANSI color and styling from output.'
)
@click.argument('wfo')
@click.argument('product')
def text(no_color, wfo, product):
    """
    Retrieve the NWS text product.
    Example: wxcast text slc afd
    :param no_color: If True do not style string output.
    :param wfo: The weather forecast office abbreviation (BOU).
    :param product: The text product to retrieve.
    """
    try:
        response = api.get_nws_product(wfo, product)
    except Exception as e:
        utils.echo_style(str(e), no_color, fg='red')
        return
    # Products can be long; page the output.
    click.echo_via_pager(response)
@click.command()
@click.option(
    '--no-color',
    is_flag=True,
    help='Remove ANSI color and styling from output.'
)
@click.argument('wfo')
def office(no_color, wfo):
    """
    Retrieve information for a given wfo.
    Example: wxcast office slc
    :param no_color: If True do not style string output.
    :param wfo: The weather forecast office abbreviation (BOU).
    """
    # (Help-text fix: the example previously read `wxcast info slc`, which is
    # not the name of this command.)
    try:
        response = api.get_wfo_info(wfo)
    except Exception as e:
        utils.echo_style(str(e), no_color, fg='red')
    else:
        utils.echo_dict(response, no_color)
@click.command()
@click.option(
    '--no-color',
    is_flag=True,
    help='Remove ANSI color and styling from output.'
)
@click.argument('wfo')
def stations(no_color, wfo):
    """
    Retrieve a list of stations for a given wfo.
    Example: wxcast stations slc
    :param no_color: If True do not style string output.
    :param wfo: The weather forecast office abbreviation (BOU).
    """
    # (Help-text fix: the example previously read `wxcast info slc`, which is
    # not the name of this command.)
    try:
        response = api.get_stations_for_wfo(wfo)
    except Exception as e:
        utils.echo_style(str(e), no_color, fg='red')
    else:
        utils.echo_style('\n'.join(response), no_color)
@click.command()
@click.option(
    '--no-color',
    is_flag=True,
    help='Remove ANSI color and styling from output.'
)
@click.argument('station_id')
def station(no_color, station_id):
    """
    Retrieve info for a weather station.
    Example: wxcast station kbna
    :param no_color: If True do not style string output.
    :param station_id: The weather station id.
    """
    try:
        response = api.get_station_info(station_id)
    except Exception as e:
        utils.echo_style(str(e), no_color, fg='red')
        return

    try:
        # Try to convert elevation to ft and meters; keep raw value on failure.
        elevation_m = response['elevation']
        response['elevation'] = '{}ft ({}m)'.format(
            int(float(elevation_m) * 3.28084),
            elevation_m
        )
    except Exception:
        pass

    utils.echo_dict(response, no_color)
# Register all subcommands on the top-level `main` group.
main.add_command(metar)
main.add_command(text)
main.add_command(offices)
main.add_command(products)
main.add_command(forecast)
main.add_command(office)
main.add_command(stations)
main.add_command(station)
main.add_command(conditions)
|
|
import getpass
import sys
import shelve
import time
import logging
import os
from urlparse import urljoin
import json
import requests
import Tkinter as tk
import tkFileDialog as tfile
from watchdog.observers import Observer
from watchdog.events import LoggingEventHandler
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from library import Library, get_or_create
from importer import Importer
from uploader import Uploader
from settings import *
# TODO: Figure out how to create database first time and migrations
# Module-level SQLAlchemy engine backing the local tinsparrow database file.
engine = create_engine('sqlite:///tinsparrow.db', echo=False)
class TinWatch(object):
    """Main application object: owns the DB session and launches the GUI."""

    def __init__(self):
        # TODO: Convert to PySide GUI Application
        # Currently using tkinter because it's easy
        self.session = self.create_db_session()

    def create_db_session(self):
        """Create a new SQLAlchemy session bound to the module-level engine."""
        Session = sessionmaker()
        Session.configure(bind=engine)
        return Session()

    def main(self):
        """Start the tkinter main loop, beginning at the login window."""
        root = tk.Tk()
        app = LoginWindow(root)
        root.mainloop()
        # NOTE(review): the commented-out code below sketches the intended
        # library-scan / upload / watchdog flow; kept for reference.
        #
        # library = session.query(Library).filter_by(path=LIBRARY_PATH).first()
        # if not library:
        #     library = Library()
        #     library.path = LIBRARY_PATH
        #     session.add(library)
        #
        # session.commit()
        #
        # importer = Importer()
        # importer.find_media(session, library)
        #
        # session.commit()
        #
        # uploader = Uploader(token)
        # uploader.sync(session)
        #
        # session.commit()

        # logging.basicConfig(level=logging.INFO,
        #                     format='%(asctime)s - %(message)s',
        #                     datefmt='%Y-%m-%d $H:%M:%S')
        # path = sys.argv[1] if len(sys.argv) > 1 else '.'
        # event_handler = LoggingEventHandler()
        # observer = Observer()
        # observer.schedule(event_handler, path, recursive=True)
        # observer.start()
        # try:
        #     while True:
        #         time.sleep(1)
        # except KeyboardInterrupt:
        #     observer.stop()
        # observer.join()
class SettingsWindow(tk.Tk):
    # NOTE(review): subclasses tk.Tk but never calls tk.Tk.__init__, instead
    # wrapping the passed-in `root` -- consider subclassing object or
    # tk.Frame.

    def __init__(self, root):
        root.title('Settings')
        root.geometry('600x320')
        frame = tk.Frame(root, padx=10, pady=10)
        frame.pack(fill=tk.BOTH, expand=True)
        self.root = root
        self.parent = frame
        self.initialize()

    def initialize(self):
        """Build the settings widgets: directory picker, list, and save."""
        self.file_label = tk.Label(self.parent, text="Set a directory to scan.")
        self.file_label.pack()
        self.file_button = tk.Button(self.parent, text="Add Music", command=self.ask)
        self.file_button.pack()
        self.listbox = tk.Listbox(self.parent)
        self.listbox.pack()
        self.save_button = tk.Button(self.parent, text="Save", command=self.save)
        self.save_button.pack()
        self.refresh_listbox()

    def ask(self):
        """Prompt for a directory and register it as a music library."""
        music_path = tfile.askdirectory(parent=self.parent)
        self.add_music_directory(music_path)

    def add_music_directory(self, music_path=None):
        if not os.path.isdir(music_path):
            return
        # NOTE(review): `self.session` is never assigned on this class -- this
        # raises AttributeError at runtime; presumably the TinWatch session
        # should be passed in. TODO confirm.
        library = get_or_create(self.session, Library, path=music_path)
        self.session.commit()
        self.refresh_listbox()

    def refresh_listbox(self):
        """Repopulate the listbox from the Library table."""
        self.listbox.delete(0, tk.END)
        # NOTE(review): `tw` is not defined anywhere in this module -- likely
        # meant to be a module-level TinWatch instance. TODO confirm.
        self.libraries = tw.session.query(Library).all()
        for library in self.libraries:
            self.listbox.insert(tk.END, library.path)

    def save(self):
        # Tear down this frame and switch to the monitor UI.
        self.parent.destroy()
        MonitorWindow(self.root)
class MonitorWindow(tk.Tk):
    """Monitoring screen with a shortcut back to the settings window."""

    def __init__(self, root):
        root.title('Monitor')
        root.geometry('600x320')
        container = tk.Frame(root, padx=10, pady=10)
        container.pack(fill=tk.BOTH, expand=True)
        self.root = root
        self.parent = container
        self.initialize()

    def initialize(self):
        """Lay out the widgets for this window."""
        self.settings_button = tk.Button(
            self.parent, text="Settings", command=self.open_settings)
        self.settings_button.pack()

    def open_settings(self):
        """Destroy this frame and show the settings window instead."""
        self.parent.destroy()
        SettingsWindow(self.root)
class LoginWindow(tk.Tk):
    """Login screen: validates a cached token or asks for credentials."""

    def __init__(self, root):
        root.title('Login')
        root.geometry('300x160')
        frame = tk.Frame(root, padx=10, pady=10)
        frame.pack(fill=tk.BOTH, expand=True)
        self.parent = frame
        self.root = root
        # Tk variables backing the username/password entry widgets.
        self.user_string = tk.StringVar()
        self.password_string = tk.StringVar()
        # Persistent app config (shelve) holding the cached auth token.
        self.app_config = shelve.open(CONFIG)
        self.initialize()

    def initialize(self):
        """Skip login when the cached token is still valid, else build the form."""
        if self.check_token(self.app_config.get('token', False)):
            self.open_monitor()
        else:
            self.user_label = tk.Label(self.parent, text="Username: ")
            self.user_label.pack()
            self.user_entry = tk.Entry(self.parent, textvariable=self.user_string)
            self.user_entry.pack()
            self.password_label = tk.Label(self.parent, text="Password: ")
            self.password_label.pack()
            self.password_entry = tk.Entry(self.parent, show="*", textvariable=self.password_string)
            self.password_entry.pack()
            self.login_button = tk.Button(self.parent, borderwidth=4, text="Login", width=10, pady=8, command=self.act)
            self.login_button.pack(side=tk.BOTTOM)
            # Pressing Return in the password field submits the form.
            self.password_entry.bind('<Return>', self.enter)
            self.user_entry.focus_set()

    def act(self):
        """Attempt authentication; store the token and advance on success."""
        token = self.authenticate(self.user_string.get(), self.password_string.get())
        if token:
            self.app_config['token'] = token
            self.open_settings()
        else:
            self.root.title('Login Failure: Try again...')

    def open_settings(self):
        """Close the config shelf, drop this frame, show settings."""
        self.app_config.close()
        self.parent.destroy()
        SettingsWindow(self.root)

    def open_monitor(self):
        """Close the config shelf, drop this frame, show the monitor."""
        self.app_config.close()
        self.parent.destroy()
        MonitorWindow(self.root)

    def enter(self, event):
        # <Return> key handler; the Tk event object is unused.
        self.act()

    def authenticate(self, username, password):
        """POST credentials to the token-auth endpoint.

        Returns the token string on success, False on a non-200 response,
        and (implicitly) None when a 200 response carries no token.
        """
        url = urljoin(API_URL, 'token-auth/')
        auth_r = requests.post(url, data={'username': username, 'password': password})
        if auth_r.status_code == 200:
            token_json = json.loads(auth_r.content)
            token = token_json.get('token', None)
            if token:
                return token
        else:
            return False

    def check_token(self, token):
        """Return True when the API accepts *token* for a GET on API_URL."""
        # TODO: This can be used to check internet connectivity?
        url = API_URL
        headers = {'Authorization': 'Token {}'.format(token)}
        auth_r = requests.get(url, data={}, headers=headers)
        if auth_r.status_code == 200:
            return True
        else:
            return False
if __name__ == "__main__":
    # NOTE: the module-level name ``tw`` is also referenced directly by the
    # window classes above, so it must exist before the GUI starts.
    tw = TinWatch()
    tw.main()
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2011 - 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Shared code between AMQP based openstack.common.rpc implementations.
The code in this module is shared between the rpc implemenations based on AMQP.
Specifically, this includes impl_kombu and impl_qpid. impl_carrot also uses
AMQP, but is deprecated and predates this code.
"""
import collections
import inspect
import sys
import uuid
from eventlet import greenpool
from eventlet import pools
from eventlet import queue
from eventlet import semaphore
from oslo.config import cfg
from keystone.openstack.common import excutils
from keystone.openstack.common.gettextutils import _ # noqa
from keystone.openstack.common import local
from keystone.openstack.common import log as logging
from keystone.openstack.common.rpc import common as rpc_common
# Configuration options shared by the AMQP-based drivers (kombu/qpid).
amqp_opts = [
    cfg.BoolOpt('amqp_durable_queues',
                default=False,
                deprecated_name='rabbit_durable_queues',
                deprecated_group='DEFAULT',
                help='Use durable queues in amqp.'),
    cfg.BoolOpt('amqp_auto_delete',
                default=False,
                help='Auto-delete queues in amqp.'),
]

cfg.CONF.register_opts(amqp_opts)

# Message key stamped by _add_unique_id() and checked by _MsgIdCache.
UNIQUE_ID = '_unique_id'
LOG = logging.getLogger(__name__)
class Pool(pools.Pool):
    """Class that implements a Pool of Connections."""

    def __init__(self, conf, connection_cls, *args, **kwargs):
        self.connection_cls = connection_cls
        self.conf = conf
        kwargs.setdefault("max_size", self.conf.rpc_conn_pool_size)
        kwargs.setdefault("order_as_stack", True)
        super(Pool, self).__init__(*args, **kwargs)
        # Lazily created ReplyProxy shared by callers of multicall().
        self.reply_proxy = None

    # TODO(comstud): Timeout connections not used in a while
    def create(self):
        """Build a brand-new driver connection for the pool."""
        LOG.debug(_('Pool creating new connection'))
        return self.connection_cls(self.conf)

    def empty(self):
        """Close every cached connection and drop the shared class pool."""
        while self.free_items:
            connection = self.get()
            connection.close()
        # Force a new connection pool to be created.
        # Note that this was added due to failing unit test cases. The issue
        # is the above "while loop" gets all the cached connections from the
        # pool and closes them, but never returns them to the pool, a pool
        # leak. The unit tests hang waiting for an item to be returned to the
        # pool. The unit tests get here via the tearDown() method. In the run
        # time code, it gets here via cleanup() and only appears in service.py
        # just before doing a sys.exit(), so cleanup() only happens once and
        # the leakage is not a problem.
        self.connection_cls.pool = None
# Guards lazy, one-time creation of the per-driver connection pool.
_pool_create_sem = semaphore.Semaphore()


def get_connection_pool(conf, connection_cls):
    """Return the shared Pool for *connection_cls*, creating it on first use."""
    with _pool_create_sem:
        # Make sure only one thread tries to create the connection pool.
        if not connection_cls.pool:
            connection_cls.pool = Pool(conf, connection_cls)
    return connection_cls.pool
class ConnectionContext(rpc_common.Connection):
    """The class that is actually returned to the create_connection() caller.

    This is essentially a wrapper around Connection that supports 'with'.
    It can also return a new Connection, or one from a pool.

    The function will also catch when an instance of this class is to be
    deleted. With that we can return Connections to the pool on exceptions
    and so forth without making the caller be responsible for catching them.
    If possible the function makes sure to return a connection to the pool.
    """

    def __init__(self, conf, connection_pool, pooled=True, server_params=None):
        """Create a new connection, or get one from the pool."""
        self.connection = None
        self.conf = conf
        self.connection_pool = connection_pool
        if pooled:
            self.connection = connection_pool.get()
        else:
            # Dedicated (unpooled) connection, e.g. for *_to_server casts.
            self.connection = connection_pool.connection_cls(
                conf,
                server_params=server_params)
        self.pooled = pooled

    def __enter__(self):
        """When with ConnectionContext() is used, return self."""
        return self

    def _done(self):
        """If the connection came from a pool, clean it up and put it back.
        If it did not come from a pool, close it.
        """
        if self.connection:
            if self.pooled:
                # Reset the connection so it's ready for the next caller
                # to grab from the pool
                self.connection.reset()
                self.connection_pool.put(self.connection)
            else:
                try:
                    self.connection.close()
                except Exception:
                    # Best-effort close: the connection may already be dead.
                    pass
            self.connection = None

    def __exit__(self, exc_type, exc_value, tb):
        """End of 'with' statement. We're done here."""
        self._done()

    def __del__(self):
        """Caller is done with this connection. Make sure we cleaned up."""
        self._done()

    def close(self):
        """Caller is done with this connection."""
        self._done()

    def create_consumer(self, topic, proxy, fanout=False):
        # Delegate to the wrapped driver connection.
        self.connection.create_consumer(topic, proxy, fanout)

    def create_worker(self, topic, proxy, pool_name):
        self.connection.create_worker(topic, proxy, pool_name)

    def join_consumer_pool(self, callback, pool_name, topic, exchange_name,
                           ack_on_error=True):
        self.connection.join_consumer_pool(callback,
                                           pool_name,
                                           topic,
                                           exchange_name,
                                           ack_on_error)

    def consume_in_thread(self):
        self.connection.consume_in_thread()

    def __getattr__(self, key):
        """Proxy all other calls to the Connection instance."""
        if self.connection:
            return getattr(self.connection, key)
        else:
            # Connection already returned/closed; reusing it is a caller bug.
            raise rpc_common.InvalidRPCConnectionReuse()
class ReplyProxy(ConnectionContext):
    """Connection class for RPC replies / callbacks."""

    def __init__(self, conf, connection_pool):
        # Maps msg_id -> waiter object for in-flight calls.
        self._call_waiters = {}
        self._num_call_waiters = 0
        # Warn (and double) when this many waiters accumulate; a steadily
        # growing count suggests a MulticallProxyWaiter leak.
        self._num_call_waiters_wrn_threshhold = 10
        # Single process-wide direct queue on which all replies arrive.
        self._reply_q = 'reply_' + uuid.uuid4().hex
        super(ReplyProxy, self).__init__(conf, connection_pool, pooled=False)
        self.declare_direct_consumer(self._reply_q, self._process_data)
        self.consume_in_thread()

    def _process_data(self, message_data):
        """Route an incoming reply to the waiter registered for its msg_id."""
        msg_id = message_data.pop('_msg_id', None)
        waiter = self._call_waiters.get(msg_id)
        if not waiter:
            # Reply arrived after the caller timed out / went away.
            LOG.warn(_('No calling threads waiting for msg_id : %(msg_id)s'
                       ', message : %(data)s'), {'msg_id': msg_id,
                                                 'data': message_data})
            LOG.warn(_('_call_waiters: %s') % str(self._call_waiters))
        else:
            waiter.put(message_data)

    def add_call_waiter(self, waiter, msg_id):
        """Register *waiter* to receive replies for *msg_id*."""
        self._num_call_waiters += 1
        if self._num_call_waiters > self._num_call_waiters_wrn_threshhold:
            LOG.warn(_('Number of call waiters is greater than warning '
                       'threshhold: %d. There could be a MulticallProxyWaiter '
                       'leak.') % self._num_call_waiters_wrn_threshhold)
            self._num_call_waiters_wrn_threshhold *= 2
        self._call_waiters[msg_id] = waiter

    def del_call_waiter(self, msg_id):
        """Remove the waiter registered for *msg_id*."""
        self._num_call_waiters -= 1
        del self._call_waiters[msg_id]

    def get_reply_q(self):
        # Name of the direct reply queue to advertise in outgoing messages.
        return self._reply_q
def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None,
              failure=None, ending=False, log_failure=True):
    """Send a reply or an error on the channel signified by msg_id.

    Failure should be a sys.exc_info() tuple.
    """
    with ConnectionContext(conf, connection_pool) as conn:
        serialized_failure = failure
        if failure:
            serialized_failure = rpc_common.serialize_remote_exception(
                failure, log_failure)
        payload = {'result': reply, 'failure': serialized_failure}
        if ending:
            payload['ending'] = True
        _add_unique_id(payload)
        # If a reply_q exists, add the msg_id to the reply and pass the
        # reply_q to direct_send() to use it as the response queue.
        # Otherwise use the msg_id for backward compatibilty.
        if reply_q:
            payload['_msg_id'] = msg_id
            target = reply_q
        else:
            target = msg_id
        conn.direct_send(target, rpc_common.serialize_msg(payload))
class RpcContext(rpc_common.CommonRpcContext):
    """Context that supports replying to a rpc.call."""

    def __init__(self, **kwargs):
        # Pull out the transport-level fields before handing the rest on.
        self.msg_id = kwargs.pop('msg_id', None)
        self.reply_q = kwargs.pop('reply_q', None)
        self.conf = kwargs.pop('conf')
        super(RpcContext, self).__init__(**kwargs)

    def deepcopy(self):
        """Return a new RpcContext carrying the same state as this one."""
        values = self.to_dict()
        values.update({'conf': self.conf,
                       'msg_id': self.msg_id,
                       'reply_q': self.reply_q})
        return self.__class__(**values)

    def reply(self, reply=None, failure=None, ending=False,
              connection_pool=None, log_failure=True):
        """Send *reply* (or *failure*) back to the caller, if one is waiting."""
        if not self.msg_id:
            return
        msg_reply(self.conf, self.msg_id, self.reply_q, connection_pool,
                  reply, failure, ending, log_failure)
        if ending:
            # The conversation is over; further replies would be dropped.
            self.msg_id = None
def unpack_context(conf, msg):
    """Unpack context from msg.

    Pops every '_context_*' key (plus '_msg_id'/'_reply_q') out of *msg*
    and builds an RpcContext from them.
    """
    prefix = '_context_'
    context_dict = {}
    for key in list(msg.keys()):
        # NOTE(vish): Some versions of python don't like unicode keys
        #             in kwargs.
        key = str(key)
        if key.startswith(prefix):
            context_dict[key[len(prefix):]] = msg.pop(key)
    context_dict['msg_id'] = msg.pop('_msg_id', None)
    context_dict['reply_q'] = msg.pop('_reply_q', None)
    context_dict['conf'] = conf
    ctx = RpcContext.from_dict(context_dict)
    rpc_common._safe_log(LOG.debug, _('unpacked context: %s'), ctx.to_dict())
    return ctx
def pack_context(msg, context):
    """Pack context into msg.

    Values for message keys need to be less than 255 chars, so we pull
    context out into a bunch of separate keys. If we want to support
    more arguments in rabbit messages, we may want to do the same
    for args at some point.

    :param msg: message dict, updated in place with '_context_*' keys.
    :param context: either a plain dict or an object with a to_dict() method.
    """
    # Use .items() rather than the Python-2-only .iteritems() so this
    # helper behaves identically under Python 2 and Python 3.
    if isinstance(context, dict):
        items = context.items()
    else:
        items = context.to_dict().items()
    context_d = dict(('_context_%s' % key, value)
                     for (key, value) in items)
    msg.update(context_d)
class _MsgIdCache(object):
    """Remembers recently seen message unique ids to reject redeliveries."""

    # NOTE: This value is considered can be a configuration item, but
    #       it is not necessary to change its value in most cases,
    #       so let this value as static for now.
    DUP_MSG_CHECK_SIZE = 16

    def __init__(self, **kwargs):
        # Bounded FIFO of recently seen ids; old entries fall off the end.
        self.prev_msgids = collections.deque([],
                                             maxlen=self.DUP_MSG_CHECK_SIZE)

    def check_duplicate_message(self, message_data):
        """AMQP consumers may read same message twice when exceptions occur
        before ack is returned. This method prevents doing it.
        """
        if UNIQUE_ID not in message_data:
            return
        msg_id = message_data[UNIQUE_ID]
        if msg_id in self.prev_msgids:
            raise rpc_common.DuplicateMessageError(msg_id=msg_id)
        self.prev_msgids.append(msg_id)
def _add_unique_id(msg):
    """Add unique_id for checking duplicate messages."""
    # The stamped id is later verified by _MsgIdCache.check_duplicate_message.
    unique_id = uuid.uuid4().hex
    msg.update({UNIQUE_ID: unique_id})
    LOG.debug(_('UNIQUE_ID is %s.') % (unique_id))
class _ThreadPoolWithWait(object):
    """Base class for a delayed invocation manager.

    Used by the Connection class to start up green threads
    to handle incoming messages.
    """

    def __init__(self, conf, connection_pool):
        self.conf = conf
        self.connection_pool = connection_pool
        # Green thread pool sized by configuration.
        self.pool = greenpool.GreenPool(conf.rpc_thread_pool_size)

    def wait(self):
        """Wait for all callback threads to exit."""
        self.pool.waitall()
class CallbackWrapper(_ThreadPoolWithWait):
    """Wraps a straight callback.

    Allows it to be invoked in a green thread.
    """

    def __init__(self, conf, callback, connection_pool,
                 wait_for_consumers=False):
        """Initiates CallbackWrapper object.

        :param conf: cfg.CONF instance
        :param callback: a callable (probably a function)
        :param connection_pool: connection pool as returned by
                                get_connection_pool()
        :param wait_for_consumers: wait for all green threads to
                                   complete and raise the last
                                   caught exception, if any.
        """
        super(CallbackWrapper, self).__init__(
            conf=conf,
            connection_pool=connection_pool,
        )
        self.callback = callback
        self.wait_for_consumers = wait_for_consumers
        # Last sys.exc_info() captured inside a callback green thread.
        self.exc_info = None

    def _wrap(self, message_data, **kwargs):
        """Wrap the callback invocation to catch exceptions.
        """
        try:
            self.callback(message_data, **kwargs)
        except Exception:
            # Stash the failure; it is re-raised from __call__ when
            # wait_for_consumers is set.
            self.exc_info = sys.exc_info()

    def __call__(self, message_data):
        self.exc_info = None
        self.pool.spawn_n(self._wrap, message_data)
        if self.wait_for_consumers:
            self.pool.waitall()
            if self.exc_info:
                # NOTE: Python 2 three-argument raise preserving the
                # original traceback.
                raise self.exc_info[1], None, self.exc_info[2]
class ProxyCallback(_ThreadPoolWithWait):
    """Calls methods on a proxy object based on method and args."""

    def __init__(self, conf, proxy, connection_pool):
        super(ProxyCallback, self).__init__(
            conf=conf,
            connection_pool=connection_pool,
        )
        self.proxy = proxy
        # Rejects redelivered messages (see _MsgIdCache).
        self.msg_id_cache = _MsgIdCache()

    def __call__(self, message_data):
        """Consumer callback to call a method on a proxy object.

        Parses the message for validity and fires off a thread to call the
        proxy object method.

        Message data should be a dictionary with two keys:
            method: string representing the method to call
            args: dictionary of arg: value

        Example: {'method': 'echo', 'args': {'value': 42}}
        """
        # It is important to clear the context here, because at this point
        # the previous context is stored in local.store.context
        if hasattr(local.store, 'context'):
            del local.store.context
        rpc_common._safe_log(LOG.debug, _('received %s'), message_data)
        self.msg_id_cache.check_duplicate_message(message_data)
        ctxt = unpack_context(self.conf, message_data)
        method = message_data.get('method')
        args = message_data.get('args', {})
        version = message_data.get('version')
        namespace = message_data.get('namespace')
        if not method:
            LOG.warn(_('no method for message: %s') % message_data)
            ctxt.reply(_('No method for message: %s') % message_data,
                       connection_pool=self.connection_pool)
            return
        # Dispatch in a green thread so the consumer loop keeps running.
        self.pool.spawn_n(self._process_data, ctxt, version, method,
                          namespace, args)

    def _process_data(self, ctxt, version, method, namespace, args):
        """Process a message in a new thread.

        If the proxy object we have has a dispatch method
        (see rpc.dispatcher.RpcDispatcher), pass it the version,
        method, and args and let it dispatch as appropriate.  If not, use
        the old behavior of magically calling the specified method on the
        proxy we have here.
        """
        ctxt.update_store()
        try:
            rval = self.proxy.dispatch(ctxt, version, method, namespace,
                                       **args)
            # Check if the result was a generator
            if inspect.isgenerator(rval):
                for x in rval:
                    ctxt.reply(x, None, connection_pool=self.connection_pool)
            else:
                ctxt.reply(rval, None, connection_pool=self.connection_pool)
            # This final None tells multicall that it is done.
            ctxt.reply(ending=True, connection_pool=self.connection_pool)
        except rpc_common.ClientException as e:
            # Expected application-level failure: reply without logging a
            # full traceback.
            LOG.debug(_('Expected exception during message handling (%s)') %
                      e._exc_info[1])
            ctxt.reply(None, e._exc_info,
                       connection_pool=self.connection_pool,
                       log_failure=False)
        except Exception:
            # sys.exc_info() is deleted by LOG.exception().
            exc_info = sys.exc_info()
            LOG.error(_('Exception during message handling'),
                      exc_info=exc_info)
            ctxt.reply(None, exc_info, connection_pool=self.connection_pool)
class MulticallProxyWaiter(object):
    """Iterates over the replies streamed back for a single multicall."""

    def __init__(self, conf, msg_id, timeout, connection_pool):
        self._msg_id = msg_id
        self._timeout = timeout or conf.rpc_response_timeout
        self._reply_proxy = connection_pool.reply_proxy
        self._done = False
        self._got_ending = False
        self._conf = conf
        # Replies routed here by ReplyProxy._process_data().
        self._dataqueue = queue.LightQueue()
        # Add this caller to the reply proxy's call_waiters
        self._reply_proxy.add_call_waiter(self, self._msg_id)
        self.msg_id_cache = _MsgIdCache()

    def put(self, data):
        # Called by the ReplyProxy with each reply message.
        self._dataqueue.put(data)

    def done(self):
        """Deregister from the reply proxy; idempotent."""
        if self._done:
            return
        self._done = True
        # Remove this caller from reply proxy's call_waiters
        self._reply_proxy.del_call_waiter(self._msg_id)

    def _process_data(self, data):
        """Turn one raw reply into a result value, exception, or ending flag."""
        result = None
        self.msg_id_cache.check_duplicate_message(data)
        if data['failure']:
            failure = data['failure']
            result = rpc_common.deserialize_remote_exception(self._conf,
                                                             failure)
        elif data.get('ending', False):
            self._got_ending = True
        else:
            result = data['result']
        return result

    def __iter__(self):
        """Return a result until we get a reply with an 'ending' flag."""
        # NOTE: raising StopIteration inside a generator is Python-2 style;
        # PEP 479 (py3.7+) would turn this into a RuntimeError.
        if self._done:
            raise StopIteration
        while True:
            try:
                data = self._dataqueue.get(timeout=self._timeout)
                result = self._process_data(data)
            except queue.Empty:
                # No reply within the timeout window.
                self.done()
                raise rpc_common.Timeout()
            except Exception:
                with excutils.save_and_reraise_exception():
                    self.done()
            if self._got_ending:
                self.done()
                raise StopIteration
            if isinstance(result, Exception):
                # Remote side raised; surface it to the caller.
                self.done()
                raise result
            yield result
def create_connection(conf, new, connection_pool):
    """Create a connection.

    A pooled connection is returned unless *new* asks for a fresh one.
    """
    pooled = not new
    return ConnectionContext(conf, connection_pool, pooled=pooled)
# Guards lazy, one-time creation of the shared ReplyProxy below.
_reply_proxy_create_sem = semaphore.Semaphore()


def multicall(conf, context, topic, msg, timeout, connection_pool):
    """Make a call that returns multiple times."""
    LOG.debug(_('Making synchronous call on %s ...'), topic)
    msg_id = uuid.uuid4().hex
    msg.update({'_msg_id': msg_id})
    LOG.debug(_('MSG_ID is %s') % (msg_id))
    _add_unique_id(msg)
    pack_context(msg, context)

    with _reply_proxy_create_sem:
        # Only one thread may create the process-wide reply proxy.
        if not connection_pool.reply_proxy:
            connection_pool.reply_proxy = ReplyProxy(conf, connection_pool)
    msg.update({'_reply_q': connection_pool.reply_proxy.get_reply_q()})
    # Register the waiter before sending so no reply can be missed.
    wait_msg = MulticallProxyWaiter(conf, msg_id, timeout, connection_pool)
    with ConnectionContext(conf, connection_pool) as conn:
        conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout)
    return wait_msg
def call(conf, context, topic, msg, timeout, connection_pool):
    """Sends a message on a topic and wait for a response."""
    responses = list(multicall(conf, context, topic, msg, timeout,
                               connection_pool))
    # NOTE(vish): a plain call only cares about the final reply.
    if responses:
        return responses[-1]
def cast(conf, context, topic, msg, connection_pool):
    """Sends a message on a topic without waiting for a response."""
    LOG.debug(_('Making asynchronous cast on %s...'), topic)
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool) as connection:
        payload = rpc_common.serialize_msg(msg)
        connection.topic_send(topic, payload)
def fanout_cast(conf, context, topic, msg, connection_pool):
    """Sends a message on a fanout exchange without waiting for a response."""
    LOG.debug(_('Making asynchronous fanout cast...'))
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool) as connection:
        payload = rpc_common.serialize_msg(msg)
        connection.fanout_send(topic, payload)
def cast_to_server(conf, context, server_params, topic, msg, connection_pool):
    """Sends a message on a topic to a specific server."""
    _add_unique_id(msg)
    pack_context(msg, context)
    # A dedicated (unpooled) connection is required to target one server.
    with ConnectionContext(conf, connection_pool, pooled=False,
                           server_params=server_params) as connection:
        connection.topic_send(topic, rpc_common.serialize_msg(msg))
def fanout_cast_to_server(conf, context, server_params, topic, msg,
                          connection_pool):
    """Sends a message on a fanout exchange to a specific server."""
    _add_unique_id(msg)
    pack_context(msg, context)
    # A dedicated (unpooled) connection is required to target one server.
    with ConnectionContext(conf, connection_pool, pooled=False,
                           server_params=server_params) as connection:
        connection.fanout_send(topic, rpc_common.serialize_msg(msg))
def notify(conf, context, topic, msg, connection_pool, envelope):
    """Sends a notification event on a topic."""
    LOG.debug(_('Sending %(event_type)s on %(topic)s'),
              dict(event_type=msg.get('event_type'),
                   topic=topic))
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool) as connection:
        # Only wrap the payload in the rpc envelope when asked to.
        payload = rpc_common.serialize_msg(msg) if envelope else msg
        connection.notify_send(topic, payload)
def cleanup(connection_pool):
    """Drain and close every pooled connection; no-op for a falsy pool."""
    if not connection_pool:
        return
    connection_pool.empty()
def get_control_exchange(conf):
    """Return the configured control exchange name from *conf*."""
    exchange = conf.control_exchange
    return exchange
|
|
# Copyright 2015 Jarrod N. Bakker
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Ryu and OpenFlow modules
from ryu.lib import hub
# Modules
import datetime as dt
import logging
__author__ = "Jarrod N. Bakker"
__status__ = "Development"
class FlowScheduler:
    """Class responsible for scheduling ACL rules to be deployed to
    switches.
    """

    _TIME_PAUSE = 1  # Pause time for rescheduling in seconds

    def __init__(self, logging_config, api, flow_man):
        """Initialise the FlowScheduler object.

        :param logging_config: Logging configuration dict.
        :param api: ACLSwitch API object.
        :param flow_man: FlowManager object.
        """
        self._logging = logging.getLogger(__name__)
        self._logging.setLevel(logging_config["min_lvl"])
        self._logging.propagate = logging_config["propagate"]
        self._logging.addHandler(logging_config["handler"])
        self._logging.info("Initialising FlowScheduler...")
        self._api = api
        self._flow_man = flow_man
        # The rule time queue is a list of times in 24 hour format to
        # a list of rule IDs.
        self._rule_time_queue = []
        # Green thread running _rule_deploy_alarm(); respawned whenever
        # the queue head changes.
        self._rule_deploy_gthread = None

    def sched_add_rule(self, rule_id, time_enforce):
        """Insert a rule into the time queue for scheduling.

        :param rule_id: ID of the rule to schedule.
        :param time_enforce: A tuple of deployment time (24hr format)
            and enforcement duration (in seconds). Assumes that a rule
            can only be scheduled for one deployment time. Multiple
            times will be supported later.
        :return: True if successful, False otherwise.
        """
        if len(self._rule_time_queue) < 1:
            # Queue is empty so just insert the rule at the head
            self._rule_time_queue.append([rule_id])
            # Start a green thread to distribute time-based rules
            self._rule_deploy_gthread = hub.spawn(
                self._rule_deploy_alarm)
            return True
        # The queue is not empty so proceed...
        queue_head_id = self._rule_time_queue[0][0]
        queue_head_rule = self._api.acl_get_rule(queue_head_id)
        queue_head_time = dt.datetime.strptime(
            queue_head_rule.time_enforce[0], "%H:%M")
        rule_time = dt.datetime.strptime(time_enforce[0], "%H:%M")
        # Get the current time and normalise it
        cur_time = dt.datetime.strptime(
            dt.datetime.now().strftime("%H:%M"),
            "%H:%M")
        # Check if the queue head needs to be pre-empted
        if ((cur_time < queue_head_time and cur_time < rule_time <
                queue_head_time) or (queue_head_time < cur_time <
                rule_time) or (rule_time < queue_head_time < cur_time
                and rule_time < cur_time)):
            self._rule_time_queue.insert(0, [rule_id])
            # New head: restart the alarm thread so it waits on it.
            hub.kill(self._rule_deploy_gthread)
            self._rule_deploy_gthread = hub.spawn(
                self._rule_deploy_alarm)
            return True
        # The rule needs to be inserted elsewhere in the queue
        len_queue = len(self._rule_time_queue)
        new_rule_time_store = rule_time
        for i in range(len_queue):
            # Reset any changes made by timedelta
            rule_time = new_rule_time_store
            rule_i = self._api.acl_get_rule(self._rule_time_queue[i][0])
            rule_i_time = dt.datetime.strptime(rule_i.time_enforce[0],
                                               "%H:%M")
            if rule_time == rule_i_time:
                # Same deployment time: share the existing slot.
                self._rule_time_queue[i].append(rule_id)
                break
            if i == (len_queue - 1):
                # We have reached the end of the queue
                self._rule_time_queue.append([rule_id])
                break
            if rule_time < cur_time and rule_i_time > rule_time:
                # The new rule has a 'smaller' time value than the
                # current time but its time for scheduling has already
                # passed. This means that the rule should be scheduled
                # for tomorrow. To correct the comparisons we'll add a
                # day onto the datetime value.
                rule_time = rule_time + dt.timedelta(1)
            if i == 0 and rule_time < rule_i_time:
                self._rule_time_queue.insert(0, [rule_id])
                break
            rule_j = self._api.acl_get_rule(self._rule_time_queue[
                i+1][0])
            rule_j_time = dt.datetime.strptime(rule_j.time_enforce[0],
                                               "%H:%M")
            if rule_j_time < rule_i_time:
                # rule_j_time may be smaller than rule_i_time but it
                # may be scheduled for tomorrow.
                rule_j_time = rule_j_time + dt.timedelta(1)
            if rule_i_time < rule_time < rule_j_time:
                # The new rule slots between its two neighbours.
                self._rule_time_queue.insert(i + 1, [rule_id])
                break
        return True

    def sched_remove_rule(self, rule_id):
        """Remove a rule from the time queue.

        :param rule_id: ID of the rule to remove from the queue.
        :return: True if successful, False otherwise.
            NOTE(review): when rule_id is not present the loop falls
            through and implicitly returns None rather than False —
            confirm callers treat both as falsy.
        """
        # The first iteration is through elements in head of the queue.
        queue_head = True
        for time_period in self._rule_time_queue:
            for item in time_period:
                if item == rule_id:
                    time_period.remove(rule_id)
                    # time_period should be removed if rule_id was the
                    # only one scheduled at the time.
                    if len(time_period) < 1:
                        self._rule_time_queue.remove(time_period)
                        if queue_head:
                            # If the rule was at the head of the queue
                            # then we need to respawn the green thread.
                            hub.kill(self._rule_deploy_gthread)
                            self._rule_deploy_gthread = hub.spawn(
                                self._rule_deploy_alarm)
                    return True
            queue_head = False

    def get_time_queue(self):
        """Return the queue of scheduled rules.

        :return: The time queue as a list of lists; each inner list is
            the deployment time followed by the rule IDs at that time.
        """
        queue_formatted = []
        for time_period in self._rule_time_queue:
            time_formatted = []
            time = self._api.acl_get_rule(time_period[0]).time_enforce[0]
            time_formatted.append(time)
            time_formatted.extend(time_period)
            queue_formatted.append(time_formatted)
        return queue_formatted

    def _rule_deploy_alarm(self):
        """Distribute rules to switches when their time arises.

        An alarm is scheduled using green threads from Ryu's hub
        module. The green thread is used to trigger this function to
        distribute rules when needed.

        The next alarm is scheduled once all other necessary operations
        have been done.
        """
        while True:
            # Check that the queue is not empty
            if len(self._rule_time_queue) < 1:
                break
            rule_id = self._rule_time_queue[0][0]
            rule = self._api.acl_get_rule(rule_id)
            time_start = rule.time_enforce[0]
            # Normalise next_time
            next_scheduled = dt.datetime.strptime(time_start, "%H:%M")
            # The current time has to be normalised with the time in a
            # rule (i.e. the date of each datetime object is the same)
            # before a comparison can be made.
            current_time = dt.datetime.now().strftime("%H:%M:%S")
            normalised_current = dt.datetime.strptime(current_time,
                                                      "%H:%M:%S")
            # Compare the two times relative to the current time
            time_diff = (next_scheduled - normalised_current).seconds
            # Schedule the alarm to wait time_diff seconds
            self._logging.debug("Rule scheduler alarm waiting %s "
                                "seconds. Nxt_sch: %s\tnorm_cur: %s",
                                time_diff, next_scheduled,
                                normalised_current)
            hub.sleep(time_diff)
            # Check that the queue is not empty again
            if len(self._rule_time_queue) < 1:
                break
            # Pop the list of rules to distribute from the head of the
            # head of the queue and reinsert it at the tail.
            to_dist = self._rule_time_queue.pop(0)
            self._rule_time_queue.append(to_dist)
            # Check that the current time matches the time of a rule at
            # the top of the queue, if not then reschedule the alarm.
            rule = self._api.acl_get_rule(to_dist[0])
            time_start = rule.time_enforce[0]
            if time_start != dt.datetime.now().strftime("%H:%M"):
                continue
            # Distribute the rules that need to be distributed now
            for rule_id in to_dist:
                rule = self._api.acl_get_rule(rule_id)
                switches = self._api.policy_get_connected_switches(
                    rule.policy)
                self._flow_man.flow_deploy_single_rule(rule, switches)
            # Pause for moment to avoid flooding the switch with flow
            # mod messages. This happens because time_diff will be
            # evaluated again in the loop and it will be equal to 0
            # until a second passes.
            hub.sleep(self._TIME_PAUSE)
|
|
"""The tests for the heat control thermostat."""
import unittest
from blumate.bootstrap import _setup_component
from blumate.const import (
ATTR_UNIT_OF_MEASUREMENT,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
STATE_OFF,
TEMP_CELSIUS,
)
from blumate.components import thermostat
from tests.common import get_test_home_assistant
# Entity ids shared by the thermostat tests.
ENTITY = 'thermostat.test'
ENT_SENSOR = 'sensor.test'
ENT_SWITCH = 'switch.test'

# Custom thermostat parameters exercised by the custom-config test.
MIN_TEMP = 3.0
MAX_TEMP = 65.0
TARGET_TEMP = 42.0
class TestSetupThermostatHeatControl(unittest.TestCase):
    """Test the Heat Control thermostat with custom config."""

    def setUp(self):  # pylint: disable=invalid-name
        """Setup things to be run when tests are started."""
        self.hass = get_test_home_assistant()

    def tearDown(self):  # pylint: disable=invalid-name
        """Stop down everything that was started."""
        self.hass.stop()

    def test_setup_missing_conf(self):
        """Test set up heat_control with missing config values."""
        # 'platform' and 'heater' are absent, so setup must fail.
        config = {
            'name': 'test',
            'target_sensor': ENT_SENSOR
        }
        self.assertFalse(_setup_component(self.hass, 'thermostat', {
            'thermostat': config}))

    def test_valid_conf(self):
        """Test set up heat_control with valid config values."""
        self.assertTrue(_setup_component(self.hass, 'thermostat',
                                         {'thermostat': {
                                             'platform': 'heat_control',
                                             'name': 'test',
                                             'heater': ENT_SWITCH,
                                             'target_sensor': ENT_SENSOR}}))

    def test_setup_with_sensor(self):
        """Test set up heat_control with sensor to trigger update at init."""
        # Seed the sensor state first so setup picks up the reading.
        self.hass.states.set(ENT_SENSOR, 22.0, {
            ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS
        })
        thermostat.setup(self.hass, {'thermostat': {
            'platform': 'heat_control',
            'name': 'test',
            'heater': ENT_SWITCH,
            'target_sensor': ENT_SENSOR
        }})
        state = self.hass.states.get(ENTITY)
        self.assertEqual(
            TEMP_CELSIUS, state.attributes.get('unit_of_measurement'))
        self.assertEqual(22.0, state.attributes.get('current_temperature'))
class TestThermostatHeatControl(unittest.TestCase):
    """Test the Heat Control thermostat."""
    def setUp(self):  # pylint: disable=invalid-name
        """Set up a default heat_control thermostat for every test.

        The thermostat is wired to the shared test switch and test sensor
        entity ids.
        """
        self.hass = get_test_home_assistant()
        self.hass.config.temperature_unit = TEMP_CELSIUS
        thermostat.setup(self.hass, {'thermostat': {
            'platform': 'heat_control',
            'name': 'test',
            'heater': ENT_SWITCH,
            'target_sensor': ENT_SENSOR
        }})
    def tearDown(self):  # pylint: disable=invalid-name
        """Stop down everything that was started."""
        self.hass.stop()
    def test_setup_defaults_to_unknown(self):
        """Test the setting of defaults to unknown."""
        self.assertEqual('unknown', self.hass.states.get(ENTITY).state)
    def test_default_setup_params(self):
        """Test the setup with default parameters."""
        # 7/35 appear to be the platform's built-in min/max defaults —
        # TODO confirm against the heat_control component.
        state = self.hass.states.get(ENTITY)
        self.assertEqual(7, state.attributes.get('min_temp'))
        self.assertEqual(35, state.attributes.get('max_temp'))
        self.assertEqual(None, state.attributes.get('temperature'))
    def test_custom_setup_params(self):
        """Test the setup with custom parameters."""
        thermostat.setup(self.hass, {'thermostat': {
            'platform': 'heat_control',
            'name': 'test',
            'heater': ENT_SWITCH,
            'target_sensor': ENT_SENSOR,
            'min_temp': MIN_TEMP,
            'max_temp': MAX_TEMP,
            'target_temp': TARGET_TEMP
        }})
        state = self.hass.states.get(ENTITY)
        self.assertEqual(MIN_TEMP, state.attributes.get('min_temp'))
        self.assertEqual(MAX_TEMP, state.attributes.get('max_temp'))
        self.assertEqual(TARGET_TEMP, state.attributes.get('temperature'))
        self.assertEqual(str(TARGET_TEMP), self.hass.states.get(ENTITY).state)
    def test_set_target_temp(self):
        """Test the setting of the target temperature."""
        thermostat.set_temperature(self.hass, 30)
        self.hass.pool.block_till_done()
        self.assertEqual('30.0', self.hass.states.get(ENTITY).state)
    def test_sensor_bad_unit(self):
        """Test sensor that have bad unit."""
        # A unit the thermostat cannot interpret must be ignored entirely.
        self._setup_sensor(22.0, unit='bad_unit')
        self.hass.pool.block_till_done()
        state = self.hass.states.get(ENTITY)
        self.assertEqual(None, state.attributes.get('unit_of_measurement'))
        self.assertEqual(None, state.attributes.get('current_temperature'))
    def test_sensor_bad_value(self):
        """Test sensor that have None as state."""
        self._setup_sensor(None)
        self.hass.pool.block_till_done()
        state = self.hass.states.get(ENTITY)
        self.assertEqual(None, state.attributes.get('unit_of_measurement'))
        self.assertEqual(None, state.attributes.get('current_temperature'))
    def test_set_target_temp_heater_on(self):
        """Test if target temperature turn heater on."""
        # Current temp 25 < target 30: exactly one turn_on call expected.
        self._setup_switch(False)
        self._setup_sensor(25)
        self.hass.pool.block_till_done()
        thermostat.set_temperature(self.hass, 30)
        self.hass.pool.block_till_done()
        self.assertEqual(1, len(self.calls))
        call = self.calls[0]
        self.assertEqual('switch', call.domain)
        self.assertEqual(SERVICE_TURN_ON, call.service)
        self.assertEqual(ENT_SWITCH, call.data['entity_id'])
    def test_set_target_temp_heater_off(self):
        """Test if target temperature turn heater off."""
        # Current temp 30 > target 25: exactly one turn_off call expected.
        self._setup_switch(True)
        self._setup_sensor(30)
        self.hass.pool.block_till_done()
        thermostat.set_temperature(self.hass, 25)
        self.hass.pool.block_till_done()
        self.assertEqual(1, len(self.calls))
        call = self.calls[0]
        self.assertEqual('switch', call.domain)
        self.assertEqual(SERVICE_TURN_OFF, call.service)
        self.assertEqual(ENT_SWITCH, call.data['entity_id'])
    def test_set_temp_change_heater_on(self):
        """Test if temperature change turn heater on."""
        self._setup_switch(False)
        thermostat.set_temperature(self.hass, 30)
        self.hass.pool.block_till_done()
        self._setup_sensor(25)
        self.hass.pool.block_till_done()
        self.assertEqual(1, len(self.calls))
        call = self.calls[0]
        self.assertEqual('switch', call.domain)
        self.assertEqual(SERVICE_TURN_ON, call.service)
        self.assertEqual(ENT_SWITCH, call.data['entity_id'])
    def test_temp_change_heater_off(self):
        """Test if temperature change turn heater off."""
        self._setup_switch(True)
        thermostat.set_temperature(self.hass, 25)
        self.hass.pool.block_till_done()
        self._setup_sensor(30)
        self.hass.pool.block_till_done()
        self.assertEqual(1, len(self.calls))
        call = self.calls[0]
        self.assertEqual('switch', call.domain)
        self.assertEqual(SERVICE_TURN_OFF, call.service)
        self.assertEqual(ENT_SWITCH, call.data['entity_id'])
    def _setup_sensor(self, temp, unit=TEMP_CELSIUS):
        """Set the test sensor state to the given temperature and unit."""
        self.hass.states.set(ENT_SENSOR, temp, {
            ATTR_UNIT_OF_MEASUREMENT: unit
        })
    def _setup_switch(self, is_on):
        """Set the test switch state and record turn_on/turn_off calls.

        Resets self.calls on every invocation; tests inspect it afterwards.
        """
        self.hass.states.set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF)
        self.calls = []
        def log_call(call):
            """Log service calls."""
            self.calls.append(call)
        self.hass.services.register('switch', SERVICE_TURN_ON, log_call)
        self.hass.services.register('switch', SERVICE_TURN_OFF, log_call)
|
|
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Implementations of interoperability test methods."""
from __future__ import print_function
import enum
import json
import os
import threading
import time
from oauth2client import client as oauth2client_client
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import face
from tests.interop import empty_pb2
from tests.interop import messages_pb2
from tests.interop import test_pb2
# Per-call deadline (seconds) applied to every RPC issued by these tests.
_TIMEOUT = 7
class TestService(test_pb2.BetaTestServiceServicer):
    """Server-side reference implementation of the interop TestService."""

    def EmptyCall(self, request, context):
        """Return an empty message regardless of the request."""
        return empty_pb2.Empty()

    def UnaryCall(self, request, context):
        """Return a zero-filled payload of the requested size."""
        body = b'\x00' * request.response_size
        payload = messages_pb2.Payload(
            type=messages_pb2.COMPRESSABLE, body=body)
        return messages_pb2.SimpleResponse(payload=payload)

    def StreamingOutputCall(self, request, context):
        """Yield one zero-filled response per requested response parameter."""
        for parameters in request.response_parameters:
            payload = messages_pb2.Payload(
                type=request.response_type, body=b'\x00' * parameters.size)
            yield messages_pb2.StreamingOutputCallResponse(payload=payload)

    def StreamingInputCall(self, request_iterator, context):
        """Report the total payload size received on the request stream."""
        total = sum(
            len(request.payload.body)
            for request in request_iterator
            if request.payload and request.payload.body)
        return messages_pb2.StreamingInputCallResponse(
            aggregated_payload_size=total)

    def FullDuplexCall(self, request_iterator, context):
        """Echo a zero-filled response for each request as it arrives."""
        for request in request_iterator:
            size = request.response_parameters[0].size
            payload = messages_pb2.Payload(
                type=request.payload.type, body=b'\x00' * size)
            yield messages_pb2.StreamingOutputCallResponse(payload=payload)

    # NOTE(nathaniel): Apparently this is the same as the full-duplex call?
    # NOTE(atash): It isn't even called in the interop spec (Oct 22 2015)...
    def HalfDuplexCall(self, request_iterator, context):
        """Delegate to FullDuplexCall; the spec treats them identically."""
        return self.FullDuplexCall(request_iterator, context)
def _large_unary_common_behavior(stub, fill_username, fill_oauth_scope):
    """Issue one large unary call and validate the COMPRESSABLE response.

    Returns the response so credential tests can inspect username/scope.
    """
    with stub:
        simple_request = messages_pb2.SimpleRequest(
            response_type=messages_pb2.COMPRESSABLE, response_size=314159,
            payload=messages_pb2.Payload(body=b'\x00' * 271828),
            fill_username=fill_username, fill_oauth_scope=fill_oauth_scope)
        response = stub.UnaryCall.future(simple_request, _TIMEOUT).result()
        if response.payload.type is not messages_pb2.COMPRESSABLE:
            raise ValueError(
                'response payload type is "%s"!' % type(response.payload.type))
        if len(response.payload.body) != 314159:
            raise ValueError(
                'response body of incorrect size %d!' % len(response.payload.body))
        return response
def _empty_unary(stub):
    """Run the empty_unary interop test: Empty request in, Empty response out.

    :param stub: client stub usable as a context manager.
    :raises TypeError: if the response is not an ``empty_pb2.Empty``.
    """
    with stub:
        response = stub.EmptyCall(empty_pb2.Empty(), _TIMEOUT)
        if not isinstance(response, empty_pb2.Empty):
            # Bug fix: the original passed the type as a second TypeError
            # argument instead of %-interpolating it, so the "%s" placeholder
            # was never filled in.
            raise TypeError(
                'response is of type "%s", not empty_pb2.Empty!'
                % type(response))
def _large_unary(stub):
    """Run the large_unary interop test without credential checks."""
    _large_unary_common_behavior(
        stub, fill_username=False, fill_oauth_scope=False)
def _client_streaming(stub):
    """Run the client_streaming interop test and check the aggregate size."""
    with stub:
        body_sizes = (27182, 8, 1828, 45904)
        requests = (
            messages_pb2.StreamingInputCallRequest(
                payload=messages_pb2.Payload(body=b'\x00' * size))
            for size in body_sizes)
        response = stub.StreamingInputCall(requests, _TIMEOUT)
        # 74922 == sum of the four payload sizes above.
        if response.aggregated_payload_size != 74922:
            raise ValueError(
                'incorrect size %d!' % response.aggregated_payload_size)
def _server_streaming(stub):
    """Run the server_streaming interop test, validating each response."""
    expected_sizes = (31415, 9, 2653, 58979)
    with stub:
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=tuple(
                messages_pb2.ResponseParameters(size=size)
                for size in expected_sizes))
        for index, response in enumerate(
                stub.StreamingOutputCall(request, _TIMEOUT)):
            if response.payload.type != messages_pb2.COMPRESSABLE:
                raise ValueError(
                    'response body of invalid type %s!' % response.payload.type)
            if len(response.payload.body) != expected_sizes[index]:
                raise ValueError(
                    'response body of invalid size %d!' % len(response.payload.body))
def _cancel_after_begin(stub):
    """Run cancel_after_begin: start a client stream, cancel immediately."""
    with stub:
        payload_sizes = (27182, 8, 1828, 45904)
        requests = [
            messages_pb2.StreamingInputCallRequest(
                payload=messages_pb2.Payload(body=b'\x00' * size))
            for size in payload_sizes]
        response_future = stub.StreamingInputCall.future(requests, _TIMEOUT)
        response_future.cancel()
        if not response_future.cancelled():
            raise ValueError('expected call to be cancelled')
class _Pipe(object):
def __init__(self):
self._condition = threading.Condition()
self._values = []
self._open = True
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
with self._condition:
while not self._values and self._open:
self._condition.wait()
if self._values:
return self._values.pop(0)
else:
raise StopIteration()
def add(self, value):
with self._condition:
self._values.append(value)
self._condition.notify()
def close(self):
with self._condition:
self._open = False
self._condition.notify()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def _ping_pong(stub):
    """Run ping_pong: alternate one request with one validated response."""
    response_sizes = (31415, 9, 2653, 58979)
    payload_sizes = (27182, 8, 1828, 45904)
    with stub, _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe, _TIMEOUT)
        print('Starting ping-pong with response iterator %s' % response_iterator)
        for response_size, payload_size in zip(response_sizes, payload_sizes):
            pipe.add(messages_pb2.StreamingOutputCallRequest(
                response_type=messages_pb2.COMPRESSABLE,
                response_parameters=(
                    messages_pb2.ResponseParameters(size=response_size),),
                payload=messages_pb2.Payload(body=b'\x00' * payload_size)))
            response = next(response_iterator)
            if response.payload.type != messages_pb2.COMPRESSABLE:
                raise ValueError(
                    'response body of invalid type %s!' % response.payload.type)
            if len(response.payload.body) != response_size:
                raise ValueError(
                    'response body of invalid size %d!' % len(response.payload.body))
def _cancel_after_first_response(stub):
    """Run cancel_after_first_response: cancel after one full exchange."""
    response_sizes = (31415, 9, 2653, 58979)
    payload_sizes = (27182, 8, 1828, 45904)
    with stub, _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe, _TIMEOUT)
        first_request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(
                messages_pb2.ResponseParameters(size=response_sizes[0]),),
            payload=messages_pb2.Payload(body=b'\x00' * payload_sizes[0]))
        pipe.add(first_request)
        # The response contents are validated by the ping_pong test; here we
        # only need the exchange to complete before cancelling.
        next(response_iterator)
        response_iterator.cancel()
        try:
            next(response_iterator)
        except Exception:
            pass
        else:
            raise ValueError('expected call to be cancelled')
def _timeout_on_sleeping_server(stub):
    """Run timeout_on_sleeping_server: a tiny deadline must expire."""
    payload_size = 27182
    with stub, _Pipe() as pipe:
        # 1ms deadline, then sleep well past it before consuming.
        response_iterator = stub.FullDuplexCall(pipe, 0.001)
        pipe.add(messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            payload=messages_pb2.Payload(body=b'\x00' * payload_size)))
        time.sleep(0.1)
        try:
            next(response_iterator)
        except face.ExpirationError:
            pass
        else:
            raise ValueError('expected call to exceed deadline')
def _empty_stream(stub):
    """Run empty_stream: closing the request stream must end the responses."""
    with stub, _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe, _TIMEOUT)
        pipe.close()
        try:
            next(response_iterator)
        except StopIteration:
            pass
        else:
            raise ValueError('expected exactly 0 responses')
def _compute_engine_creds(stub, args):
    """Run compute_engine_creds: username must match the service account."""
    response = _large_unary_common_behavior(stub, True, True)
    if response.username != args.default_service_account:
        raise ValueError(
            'expected username %s, got %s' % (args.default_service_account,
                                              response.username))
def _oauth2_auth_token(stub, args):
    """Run oauth2_auth_token: check the response's username and OAuth scope.

    The expected client email is read from the JSON key file named by the
    GOOGLE_APPLICATION_CREDENTIALS environment variable.

    :raises ValueError: if the username or scope does not match expectations.
    """
    json_key_filename = os.environ[
        oauth2client_client.GOOGLE_APPLICATION_CREDENTIALS]
    # Use a context manager so the key file is always closed; the original
    # leaked the file handle returned by open().
    with open(json_key_filename, 'rb') as json_key_file:
        wanted_email = json.load(json_key_file)['client_email']
    response = _large_unary_common_behavior(stub, True, True)
    if wanted_email != response.username:
        raise ValueError(
            'expected username %s, got %s' % (wanted_email, response.username))
    # Received scope must appear within the expected scope string.
    if response.oauth_scope not in args.oauth_scope:
        raise ValueError(
            'expected to find oauth scope "%s" in received "%s"' %
            (response.oauth_scope, args.oauth_scope))
@enum.unique
class TestCase(enum.Enum):
    """Enumeration of the interop test cases implemented in this module."""

    EMPTY_UNARY = 'empty_unary'
    LARGE_UNARY = 'large_unary'
    SERVER_STREAMING = 'server_streaming'
    CLIENT_STREAMING = 'client_streaming'
    PING_PONG = 'ping_pong'
    CANCEL_AFTER_BEGIN = 'cancel_after_begin'
    CANCEL_AFTER_FIRST_RESPONSE = 'cancel_after_first_response'
    EMPTY_STREAM = 'empty_stream'
    COMPUTE_ENGINE_CREDS = 'compute_engine_creds'
    OAUTH2_AUTH_TOKEN = 'oauth2_auth_token'
    TIMEOUT_ON_SLEEPING_SERVER = 'timeout_on_sleeping_server'

    def test_interoperability(self, stub, args):
        """Dispatch to the implementation function for this test case."""
        # Cases that only need the stub.
        stub_only = {
            TestCase.EMPTY_UNARY: _empty_unary,
            TestCase.LARGE_UNARY: _large_unary,
            TestCase.SERVER_STREAMING: _server_streaming,
            TestCase.CLIENT_STREAMING: _client_streaming,
            TestCase.PING_PONG: _ping_pong,
            TestCase.CANCEL_AFTER_BEGIN: _cancel_after_begin,
            TestCase.CANCEL_AFTER_FIRST_RESPONSE: _cancel_after_first_response,
            TestCase.TIMEOUT_ON_SLEEPING_SERVER: _timeout_on_sleeping_server,
            TestCase.EMPTY_STREAM: _empty_stream,
        }
        # Credential cases additionally need the parsed CLI args.
        stub_and_args = {
            TestCase.COMPUTE_ENGINE_CREDS: _compute_engine_creds,
            TestCase.OAUTH2_AUTH_TOKEN: _oauth2_auth_token,
        }
        if self in stub_only:
            stub_only[self](stub)
        elif self in stub_and_args:
            stub_and_args[self](stub, args)
        else:
            raise NotImplementedError('Test case "%s" not implemented!' % self.name)
|
|
import timeit
import numpy as np
from ..image_util import gaussian_blur_gray_image_nz
from .prob_abs_r import ProbAbsoluteReflectance
from .prob_abs_s import ProbAbsoluteShading
class IntrinsicEnergy(object):
    """Unary and pairwise label costs for intrinsic image decomposition.

    Labels correspond to candidate reflectance intensities (optionally with
    chromaticities); costs are consumed by an MRF solver — TODO confirm the
    exact solver against the calling code.
    """
    def __init__(self, input, params):
        """Store the decomposition input and algorithm parameters."""
        self.input = input
        self.params = params
        # self.prob_abs_r = ProbAbsoluteReflectance(params)
        self.prob_abs_s = ProbAbsoluteShading(params)
    def compute_unary_costs(self, decomposition, prev_decomposition):
        """ Returns unary costs: nnz x nlabels matrix """
        if self.params.logging:
            t0 = timeit.default_timer()
            print("compute_unary_costs...")
        intensities = decomposition.intensities
        chromaticities = decomposition.chromaticities
        nlabels = intensities.shape[0]
        unary_costs = np.zeros(
            (self.input.mask_nnz, nlabels),
            dtype=np.float32)
        # Blur sigma shrinks as iter_num grows, so the shading target
        # sharpens over successive iterations.
        sigma_spatial = (
            self.params.shading_blur_sigma *
            self.input.diag / (
                1.0 + decomposition.iter_num **
                self.params.shading_blur_iteration_pow
            )
        )
        if self.params.logging:
            print('blur sigma: %s pixels (image diagonal: %s pixels)' %
                  (sigma_spatial, self.input.diag))
        # obtain previous shading layer, or use a method to create a proxy
        if prev_decomposition:
            prev_r_nz, prev_s_nz = prev_decomposition.get_r_s_nz()
        elif self.params.shading_blur_init_method == "constant":
            prev_s_nz = 0.5 * np.ones_like(self.input.image_gray_nz)
        elif self.params.shading_blur_init_method == "image":
            prev_s_nz = self.input.image_gray_nz
        elif self.params.shading_blur_init_method == "none":
            prev_s_nz = None
        else:
            raise ValueError("Unknown shading_blur_init_method: %s" %
                             self.params.shading_blur_init_method)
        if prev_s_nz is not None:
            if self.params.shading_blur_log:
                # blur in log space
                blur_input = np.log(prev_s_nz)
            else:
                # blur in linear space, then convert to log
                blur_input = prev_s_nz
            blur_output = gaussian_blur_gray_image_nz(
                image_nz=blur_input,
                image_shape=self.input.shape,
                mask_nz=self.input.mask_nz,
                sigma=sigma_spatial,
            )
            if self.params.shading_blur_log:
                log_s_target_nz = blur_output
            else:
                log_s_target_nz = np.log(blur_output)
        else:
            log_s_target_nz = None
        # (used below)
        if self.params.shading_target_chromaticity:
            labels_rgb = np.clip(
                decomposition.get_reflectances_rgb(), 1e-5, np.inf)
        # shading and chromaticity terms
        for i in range(nlabels):
            # Shading implied by assigning label i: image = reflectance *
            # shading, so s = gray image / label intensity.
            s_nz = self.input.image_gray_nz / intensities[i]
            r_nz = (self.input.image_rgb_nz /
                    np.clip(s_nz, 1e-4, 1e5)[:, np.newaxis])
            # absolute reflectance and shading
            unary_costs[:, i] += (
                self.prob_abs_s.cost(s_nz)
                # self.prob_abs_r.cost(r_nz)
            )
            # print('abs ref term: ',self.prob_abs_r.cost(r_nz))
            # chromaticity: encourage reflectance intensities to be assigned to
            # pixels that share the same chromaticity as the original kmeans
            # cluster from which the reflectance intensity was obtained.
            if self.params.chromaticity_weight:
                if self.params.chromaticity_norm == "L1":
                    f = np.abs
                elif self.params.chromaticity_norm == "L2":
                    f = np.square
                else:
                    raise ValueError(
                        "Invalid value of chromaticity_norm: %s" %
                        self.params.chromaticity_norm)
                unary_costs[:, i] += self.params.chromaticity_weight * (
                    np.sum(
                        f(self.input.image_irg_nz[:, 1:3] -
                          chromaticities[i, :]),
                        axis=1
                    )
                )
            # shading smoothness: discourage shading discontinuities
            if self.params.shading_target_weight and log_s_target_nz is not None:
                if self.params.shading_target_norm == "L2":
                    f = np.square
                elif self.params.shading_target_norm == "L1":
                    f = np.abs
                else:
                    raise ValueError("Invalid value of shading_target_norm: %s" %
                                     self.params.shading_target_norm)
                if self.params.shading_target_chromaticity:
                    # interpret labels as RGB (intensity with chromaticity),
                    # thereby penalizing deviations from grayscale in the
                    # shading channel (though the final answer is always
                    # grayscale anyway)
                    label_rgb = labels_rgb[i, :]
                    s_rgb_nz = self.input.image_rgb_nz / label_rgb[np.newaxis, :]
                    log_s_rgb_nz = np.log(np.clip(s_rgb_nz, 1e-5, np.inf))
                    unary_costs[:, i] += (
                        self.params.shading_target_weight *
                        np.sum(f(log_s_rgb_nz - log_s_target_nz[:, np.newaxis]), axis=-1)
                    )
                else:
                    # interpret labels as intensities
                    log_s_nz = np.log(s_nz)
                    unary_costs[:, i] += (
                        self.params.shading_target_weight *
                        f(log_s_nz - log_s_target_nz)
                    )
        if self.params.logging:
            t1 = timeit.default_timer()
            print("compute_unary_costs: done (%s s)" % (t1 - t0))
        return unary_costs
    def compute_pairwise_costs(self, decomposition):
        """ Returns the pairwise cost matrix: nlabels x nlabels matrix.
        Entry ij is ``abs(intensity[i] - intensity[j])`` """
        if self.params.pairwise_intensity_chromaticity:
            # interpret labels as RGB (intensity with chromaticity)
            nlabels = decomposition.intensities.shape[0]
            R = decomposition.get_reflectances_rgb()
            if self.params.pairwise_intensity_log:
                R = np.log(np.clip(R, 1e-5, np.inf))
            # Symmetric matrix: fill the lower triangle and mirror it.
            binary_costs = np.zeros((nlabels, nlabels), dtype=np.float32)
            for i in range(nlabels):
                for j in range(i):
                    cost = np.sum(np.abs(R[i, :] - R[j, :]))
                    binary_costs[i, j] = cost
                    binary_costs[j, i] = cost
        else:
            # interpret labels as intensities
            R = decomposition.intensities
            if self.params.pairwise_intensity_log:
                R = np.log(np.clip(R, 1e-5, np.inf))
            binary_costs = np.abs(R[:, np.newaxis] - R[np.newaxis, :])
        return binary_costs
    def get_features(self):
        """ Return an nnz x nfeatures matrix containing the features """
        # Lazily computed once per instance, then cached read-only.
        if not hasattr(self, '_features'):
            mask_nz = self.input.mask_nz
            mask_nnz = self.input.mask_nnz
            features = np.zeros((mask_nnz, 5), dtype=np.float32)
            # image intensity
            features[:, 0] = (
                self.input.image_irg[mask_nz[0], mask_nz[1], 0] /
                self.params.theta_l)
            # image chromaticity
            features[:, 1] = (
                self.input.image_irg[mask_nz[0], mask_nz[1], 1] /
                self.params.theta_c)
            features[:, 2] = (
                self.input.image_irg[mask_nz[0], mask_nz[1], 2] /
                self.params.theta_c)
            # pixel location
            features[:, 3] = (
                mask_nz[0] / (self.params.theta_p * self.input.diag))
            features[:, 4] = (
                mask_nz[1] / (self.params.theta_p * self.input.diag))
            self._features = features
            # Freeze the cached array so callers cannot mutate the cache.
            self._features.setflags(write=False)
        return self._features
|
|
"""Classes to help gather user submissions."""
import logging
from typing import Dict, Any, Callable, Hashable, List, Optional # noqa pylint: disable=unused-import
import uuid
import voluptuous as vol
from .core import callback, HomeAssistant
from .exceptions import HomeAssistantError
_LOGGER = logging.getLogger(__name__)

# Result types a flow step may return (checked in _async_handle_step).
RESULT_TYPE_FORM = 'form'
RESULT_TYPE_CREATE_ENTRY = 'create_entry'
RESULT_TYPE_ABORT = 'abort'
RESULT_TYPE_EXTERNAL_STEP = 'external'
RESULT_TYPE_EXTERNAL_STEP_DONE = 'external_done'

# Event that is fired when a flow is progressed via external source.
EVENT_DATA_ENTRY_FLOW_PROGRESSED = 'data_entry_flow_progressed'
class FlowError(HomeAssistantError):
    """Error while configuring an account.

    Base class for the data entry flow errors defined below.
    """
# NOTE(review): not raised in this module; presumably raised by the
# async_create_flow callback supplied to FlowManager — confirm with callers.
class UnknownHandler(FlowError):
    """Unknown handler specified."""
# Raised by FlowManager.async_configure/async_abort for unknown flow ids.
class UnknownFlow(FlowError):
    """Unknown flow specified."""
# Raised by FlowManager._async_handle_step when a handler lacks the step.
class UnknownStep(FlowError):
    """Unknown step specified."""
class FlowManager:
    """Manage all the flows that are in progress."""
    def __init__(self, hass: HomeAssistant, async_create_flow: Callable,
                 async_finish_flow: Callable) -> None:
        """Initialize the flow manager.

        async_create_flow instantiates a flow handler for a given handler
        key; async_finish_flow is awaited with the final result when a flow
        completes.
        """
        self.hass = hass
        self._progress = {}  # type: Dict[str, Any]
        self._async_create_flow = async_create_flow
        self._async_finish_flow = async_finish_flow
    @callback
    def async_progress(self) -> List[Dict]:
        """Return the flows in progress."""
        return [{
            'flow_id': flow.flow_id,
            'handler': flow.handler,
            'context': flow.context,
        } for flow in self._progress.values()]
    async def async_init(self, handler: Hashable, *,
                         context: Optional[Dict] = None,
                         data: Any = None) -> Any:
        """Start a configuration flow and run its first step."""
        if context is None:
            context = {}
        flow = await self._async_create_flow(
            handler, context=context, data=data)
        flow.hass = self.hass
        flow.handler = handler
        flow.flow_id = uuid.uuid4().hex
        flow.context = context
        self._progress[flow.flow_id] = flow
        return await self._async_handle_step(flow, flow.init_step, data)
    async def async_configure(
            self, flow_id: str, user_input: Optional[Dict] = None) -> Any:
        """Continue a configuration flow with the given user input."""
        flow = self._progress.get(flow_id)
        if flow is None:
            raise UnknownFlow
        cur_step = flow.cur_step
        # Validate user input against the current form's schema, if any.
        if cur_step.get('data_schema') is not None and user_input is not None:
            user_input = cur_step['data_schema'](user_input)
        result = await self._async_handle_step(
            flow, cur_step['step_id'], user_input)
        if cur_step['type'] == RESULT_TYPE_EXTERNAL_STEP:
            if result['type'] not in (RESULT_TYPE_EXTERNAL_STEP,
                                      RESULT_TYPE_EXTERNAL_STEP_DONE):
                raise ValueError("External step can only transition to "
                                 "external step or external step done.")
            # If the result has changed from last result, fire event to update
            # the frontend.
            if cur_step['step_id'] != result.get('step_id'):
                # Tell frontend to reload the flow state.
                self.hass.bus.async_fire(EVENT_DATA_ENTRY_FLOW_PROGRESSED, {
                    'handler': flow.handler,
                    'flow_id': flow_id,
                    'refresh': True
                })
        return result
    @callback
    def async_abort(self, flow_id: str) -> None:
        """Abort a flow."""
        if self._progress.pop(flow_id, None) is None:
            raise UnknownFlow
    async def _async_handle_step(self, flow: Any, step_id: str,
                                 user_input: Optional[Dict]) -> Dict:
        """Handle a step of a flow."""
        method = "async_step_{}".format(step_id)
        if not hasattr(flow, method):
            # Drop the flow so it does not linger in progress.
            self._progress.pop(flow.flow_id)
            raise UnknownStep("Handler {} doesn't support step {}".format(
                flow.__class__.__name__, step_id))
        result = await getattr(flow, method)(user_input)  # type: Dict
        if result['type'] not in (RESULT_TYPE_FORM, RESULT_TYPE_EXTERNAL_STEP,
                                  RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_ABORT,
                                  RESULT_TYPE_EXTERNAL_STEP_DONE):
            raise ValueError(
                'Handler returned incorrect type: {}'.format(result['type']))
        if result['type'] in (RESULT_TYPE_FORM, RESULT_TYPE_EXTERNAL_STEP,
                              RESULT_TYPE_EXTERNAL_STEP_DONE):
            # Flow is still in progress; remember the current step result.
            flow.cur_step = result
            return result
        # We pass a copy of the result because we're mutating our version
        result = await self._async_finish_flow(flow, dict(result))
        # _async_finish_flow may change result type, check it again
        if result['type'] == RESULT_TYPE_FORM:
            flow.cur_step = result
            return result
        # Abort and Success results both finish the flow
        self._progress.pop(flow.flow_id)
        return result
class FlowHandler:
    """Handle the configuration flow of a component."""

    # Set by flow manager
    flow_id = None
    hass = None
    handler = None
    cur_step = None
    context = None  # type: Optional[Dict]

    # Set by _async_create_flow callback
    init_step = 'init'

    # Set by developer
    VERSION = 1

    @callback
    def async_show_form(self, *, step_id: str, data_schema: vol.Schema = None,
                        errors: Optional[Dict] = None,
                        description_placeholders: Optional[Dict] = None) \
            -> Dict:
        """Return a form result that asks the user for input."""
        return dict(
            type=RESULT_TYPE_FORM,
            flow_id=self.flow_id,
            handler=self.handler,
            step_id=step_id,
            data_schema=data_schema,
            errors=errors,
            description_placeholders=description_placeholders,
        )

    @callback
    def async_create_entry(self, *, title: str, data: Dict,
                           description: Optional[str] = None,
                           description_placeholders: Optional[Dict] = None) \
            -> Dict:
        """Return a result that finishes the flow and creates an entry."""
        return dict(
            version=self.VERSION,
            type=RESULT_TYPE_CREATE_ENTRY,
            flow_id=self.flow_id,
            handler=self.handler,
            title=title,
            data=data,
            description=description,
            description_placeholders=description_placeholders,
        )

    @callback
    def async_abort(self, *, reason: str,
                    description_placeholders: Optional[Dict] = None) -> Dict:
        """Return a result that aborts the flow."""
        return dict(
            type=RESULT_TYPE_ABORT,
            flow_id=self.flow_id,
            handler=self.handler,
            reason=reason,
            description_placeholders=description_placeholders,
        )

    @callback
    def async_external_step(self, *, step_id: str, url: str,
                            description_placeholders: Optional[Dict] = None) \
            -> Dict:
        """Return a result pointing the user to an external URL."""
        return dict(
            type=RESULT_TYPE_EXTERNAL_STEP,
            flow_id=self.flow_id,
            handler=self.handler,
            step_id=step_id,
            url=url,
            description_placeholders=description_placeholders,
        )

    @callback
    def async_external_step_done(self, *, next_step_id: str) -> Dict:
        """Return a result marking the external step as completed."""
        return dict(
            type=RESULT_TYPE_EXTERNAL_STEP_DONE,
            flow_id=self.flow_id,
            handler=self.handler,
            step_id=next_step_id,
        )
|
|
# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import threading
import time
import traceback
import jsonschema
import six
from rally.common.i18n import _
from rally.common import logging
from rally.common import objects
from rally import consts
from rally import exceptions
from rally import osclients
from rally.plugins.openstack.context.keystone import existing_users
from rally.plugins.openstack.context.keystone import users as users_ctx
from rally.task import context
from rally.task import runner
from rally.task import scenario
from rally.task import sla
LOG = logging.getLogger(__name__)
class ResultConsumer(object):
    """ResultConsumer class stores results from ScenarioRunner, checks SLA."""
    def __init__(self, key, task, runner, abort_on_sla_failure):
        """ResultConsumer constructor.

        :param key: Scenario identifier
        :param task: Instance of Task, task to run
        :param runner: ScenarioRunner instance that produces results to be
                       consumed
        :param abort_on_sla_failure: True if the execution should be stopped
                                     when some SLA check fails
        """
        self.key = key
        self.task = task
        self.runner = runner
        self.sla_checker = sla.SLAChecker(key["kw"])
        self.abort_on_sla_failure = abort_on_sla_failure
        self.is_done = threading.Event()
        self.unexpected_failure = {}
        self.results = []
        # Two worker threads: one drains runner.result_queue, the other
        # watches the task status for an external abort request.
        self.thread = threading.Thread(
            target=self._consume_results
        )
        self.aborting_checker = threading.Thread(target=self.wait_and_abort)
    def __enter__(self):
        # Start both workers and record wall-clock start for full_duration.
        self.thread.start()
        self.aborting_checker.start()
        self.start = time.time()
        return self
    def _consume_results(self):
        # Poll the runner's queue; exit only once is_done is set AND the
        # queue has been fully drained.
        while True:
            if self.runner.result_queue:
                results = self.runner.result_queue.popleft()
                self.results.extend(results)
                for r in results:
                    success = self.sla_checker.add_iteration(r)
                    if self.abort_on_sla_failure and not success:
                        self.sla_checker.set_aborted_on_sla()
                        self.runner.abort()
            elif self.is_done.isSet():
                break
            else:
                time.sleep(0.1)
    def __exit__(self, exc_type, exc_value, exc_traceback):
        # Stop both workers, then record results and SLA verdict on the task.
        self.finish = time.time()
        self.is_done.set()
        self.aborting_checker.join()
        self.thread.join()
        if exc_type:
            self.sla_checker.set_unexpected_failure(exc_value)
        if objects.Task.get_status(
                self.task["uuid"]) == consts.TaskStatus.ABORTED:
            self.sla_checker.set_aborted_manually()
        # NOTE(boris-42): Sort in order of starting instead of order of ending
        self.results.sort(key=lambda x: x["timestamp"])
        self.task.append_results(self.key, {
            "raw": self.results,
            "load_duration": self.runner.run_duration,
            "full_duration": self.finish - self.start,
            "sla": self.sla_checker.results()})
    @staticmethod
    def is_task_in_aborting_status(task_uuid, check_soft=True):
        """Checks task is in abort stages

        :param task_uuid: UUID of task to check status
        :type task_uuid: str
        :param check_soft: check or not SOFT_ABORTING status
        :type check_soft: bool
        """
        stages = [consts.TaskStatus.ABORTING, consts.TaskStatus.ABORTED]
        if check_soft:
            stages.append(consts.TaskStatus.SOFT_ABORTING)
        return objects.Task.get_status(task_uuid) in stages
    def wait_and_abort(self):
        """Waits until abort signal is received and aborts runner in this case.

        Has to be run from different thread simultaneously with the
        runner.run method.
        """
        while not self.is_done.isSet():
            if self.is_task_in_aborting_status(self.task["uuid"],
                                               check_soft=False):
                self.runner.abort()
                self.task.update_status(consts.TaskStatus.ABORTED)
                break
            time.sleep(2.0)
class TaskEngine(object):
    """The Task engine class is used to execute benchmark scenarios.

    An instance of this class is initialized by the API with the task
    configuration and then is used to validate and execute all specified
    in config subtasks.

    .. note::

        Typical usage:
            ...
            admin = ....  # contains dict representations of objects.Credential
                          # with OpenStack admin credentials
            users = ....  # contains a list of dicts of representations of
                          # objects.Credential with OpenStack users credentials
            engine = TaskEngine(config, task, admin=admin, users=users)
            engine.validate()  # to test config
            engine.run()       # to run config
    """

    def __init__(self, config, task, admin=None, users=None,
                 abort_on_sla_failure=False):
        """TaskEngine constructor.

        :param config: Dict with configuration of specified benchmark scenarios
        :param task: Instance of Task,
                     the current task which is being performed
        :param admin: Dict with admin credentials
        :param users: List of dicts with user credentials
        :param abort_on_sla_failure: True if the execution should be stopped
                                     when some SLA check fails
        :raises InvalidTaskException: if the config cannot be parsed
        """
        try:
            self.config = TaskConfig(config)
        except Exception as e:
            # Record why parsing failed on the task before re-raising.
            log = [str(type(e)), str(e), json.dumps(traceback.format_exc())]
            task.set_failed(log=log)
            raise exceptions.InvalidTaskException(str(e))
        self.task = task
        # Wrap the admin dict into a Credential object (or keep None).
        self.admin = admin and objects.Credential(**admin) or None
        self.existing_users = users or []
        self.abort_on_sla_failure = abort_on_sla_failure

    @logging.log_task_wrapper(LOG.info, _("Task validation check cloud."))
    def _check_cloud(self):
        # Fail fast if the admin credentials cannot reach keystone.
        clients = osclients.Clients(self.admin)
        clients.verified_keystone()

    @logging.log_task_wrapper(LOG.info,
                              _("Task validation of scenarios names."))
    def _validate_config_scenarios_name(self, config):
        """Ensure every workload references a registered scenario plugin."""
        available = set(s.get_name() for s in scenario.Scenario.get_all())
        specified = set()
        for subtask in config.subtasks:
            for s in subtask.workloads:
                specified.add(s.name)
        if not specified.issubset(available):
            names = ", ".join(specified - available)
            raise exceptions.NotFoundScenarios(names=names)

    @logging.log_task_wrapper(LOG.info, _("Task validation of syntax."))
    def _validate_config_syntax(self, config):
        """Validate runner, context and SLA sections of every workload."""
        for subtask in config.subtasks:
            for pos, workload in enumerate(subtask.workloads):
                try:
                    runner.ScenarioRunner.validate(workload.runner)
                    context.ContextManager.validate(
                        workload.context, non_hidden=True)
                    sla.SLA.validate(workload.sla)
                except (exceptions.RallyException,
                        jsonschema.ValidationError) as e:
                    kw = workload.make_exception_args(
                        pos, six.text_type(e))
                    raise exceptions.InvalidTaskConfig(**kw)

    def _validate_config_semantic_helper(self, admin, user, workload, pos,
                                         deployment):
        """Run scenario-level validation for one workload, re-raising
        InvalidScenarioArgument as InvalidTaskConfig."""
        try:
            scenario.Scenario.validate(
                workload.name, workload.to_dict(),
                admin=admin, users=[user], deployment=deployment)
        except exceptions.InvalidScenarioArgument as e:
            kw = workload.make_exception_args(pos, six.text_type(e))
            raise exceptions.InvalidTaskConfig(**kw)

    def _get_user_ctx_for_validation(self, ctx):
        # Reuse pre-existing users when provided; otherwise generate
        # temporary ones for the duration of validation.
        if self.existing_users:
            ctx["config"] = {"existing_users": self.existing_users}
            user_context = existing_users.ExistingUsers(ctx)
        else:
            user_context = users_ctx.UserGenerator(ctx)
        return user_context

    @logging.log_task_wrapper(LOG.info, _("Task validation of semantic."))
    def _validate_config_semantic(self, config):
        """Validate workloads against a live cloud (admin + one user)."""
        self._check_cloud()
        ctx_conf = {"task": self.task, "admin": {"credential": self.admin}}
        deployment = objects.Deployment.get(self.task["deployment_uuid"])
        # TODO(boris-42): It's quite hard at the moment to validate case
        #                 when both user context and existing_users are
        #                 specified. So after switching to plugin base
        #                 and refactoring validation mechanism this place
        #                 will be replaced
        with self._get_user_ctx_for_validation(ctx_conf) as ctx:
            ctx.setup()
            admin = osclients.Clients(self.admin)
            # NOTE(review): `user` is assigned from users[0] and then
            # reassigned on every loop iteration, so validation below
            # effectively runs with the last user only -- confirm whether
            # per-user validation was intended.
            user = osclients.Clients(ctx_conf["users"][0]["credential"])
            for u in ctx_conf["users"]:
                user = osclients.Clients(u["credential"])
            for subtask in config.subtasks:
                for pos, workload in enumerate(subtask.workloads):
                    self._validate_config_semantic_helper(
                        admin, user, workload,
                        pos, deployment)

    @logging.log_task_wrapper(LOG.info, _("Task validation."))
    def validate(self):
        """Perform full task configuration validation."""
        self.task.update_status(consts.TaskStatus.VERIFYING)
        try:
            self._validate_config_scenarios_name(self.config)
            self._validate_config_syntax(self.config)
            self._validate_config_semantic(self.config)
        except Exception as e:
            log = [str(type(e)), str(e), json.dumps(traceback.format_exc())]
            self.task.set_failed(log=log)
            raise exceptions.InvalidTaskException(str(e))

    def _get_runner(self, config):
        # Default to the serial runner when no runner section is given.
        config = config or {"type": "serial"}
        return runner.ScenarioRunner.get(config["type"])(self.task, config)

    def _prepare_context(self, ctx, name, credential):
        """Merge the scenario's default context with the workload context.

        :param ctx: context section from the workload config
        :param name: scenario plugin name
        :param credential: admin credential to embed in the context
        :returns: dict suitable for context.ContextManager
        """
        scenario_context = copy.deepcopy(
            scenario.Scenario.get(name)._meta_get("default_context"))
        if self.existing_users and "users" not in ctx:
            scenario_context.setdefault("existing_users", self.existing_users)
        elif "users" not in ctx:
            scenario_context.setdefault("users", {})
        scenario_context.update(ctx)
        context_obj = {
            "task": self.task,
            "admin": {"credential": credential},
            "scenario_name": name,
            "config": scenario_context
        }
        return context_obj

    @logging.log_task_wrapper(LOG.info, _("Benchmarking."))
    def run(self):
        """Run the benchmark according to the test configuration.

        Test configuration is specified on engine initialization.

        :returns: List of dicts, each dict containing the results of all the
                  corresponding benchmark test launches
        """
        self.task.update_status(consts.TaskStatus.RUNNING)
        for subtask in self.config.subtasks:
            for pos, workload in enumerate(subtask.workloads):
                if ResultConsumer.is_task_in_aborting_status(
                        self.task["uuid"]):
                    LOG.info("Received aborting signal.")
                    self.task.update_status(consts.TaskStatus.ABORTED)
                    return
                key = workload.make_key(pos)
                LOG.info("Running benchmark with key: \n%s"
                         % json.dumps(key, indent=2))
                runner_obj = self._get_runner(workload.runner)
                context_obj = self._prepare_context(
                    workload.context, workload.name, self.admin)
                try:
                    # ResultConsumer collects results/SLA; ContextManager
                    # sets up and tears down the scenario context.
                    with ResultConsumer(key, self.task, runner_obj,
                                        self.abort_on_sla_failure):
                        with context.ContextManager(context_obj):
                            runner_obj.run(workload.name, context_obj,
                                           workload.args)
                except Exception as e:
                    # A failed workload must not prevent the next ones.
                    LOG.exception(e)
        if objects.Task.get_status(
                self.task["uuid"]) != consts.TaskStatus.ABORTED:
            self.task.update_status(consts.TaskStatus.FINISHED)
class TaskConfig(object):
    """Version-aware wrapper around task.

    Accepts both the legacy v1 format ({scenario_name: [workload, ...]})
    and the explicit v2 format (title/subtasks/workloads); v1 configs are
    upconverted to SubTask objects on construction.
    """

    # v1: mapping of scenario name to a list of workload dicts.
    CONFIG_SCHEMA_V1 = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "patternProperties": {
            ".*": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "args": {"type": "object"},
                        "runner": {
                            "type": "object",
                            "properties": {"type": {"type": "string"}},
                            "required": ["type"]
                        },
                        "context": {"type": "object"},
                        "sla": {"type": "object"},
                    },
                    "additionalProperties": False
                }
            }
        }
    }

    # v2: explicit structure with metadata; note each subtask currently
    # allows exactly one workload (maxItems: 1).
    CONFIG_SCHEMA_V2 = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "version": {"type": "number"},
            "title": {"type": "string"},
            "description": {"type": "string"},
            "tags": {
                "type": "array",
                "items": {"type": "string"}
            },
            "subtasks": {
                "type": "array",
                "minItems": 1,
                "items": {
                    "type": "object",
                    "properties": {
                        "title": {"type": "string"},
                        "group": {"type": "string"},
                        "description": {"type": "string"},
                        "tags": {
                            "type": "array",
                            "items": {"type": "string"}
                        },
                        "run_in_parallel": {"type": "boolean"},
                        "workloads": {
                            "type": "array",
                            "minItems": 1,
                            "maxItems": 1,
                            "items": {
                                "type": "object",
                                "properties": {
                                    "name": {"type": "string"},
                                    "args": {"type": "object"},
                                    "runner": {
                                        "type": "object",
                                        "properties": {
                                            "type": {"type": "string"}
                                        },
                                        "required": ["type"]
                                    },
                                    "sla": {"type": "object"},
                                    "context": {"type": "object"}
                                },
                                "additionalProperties": False,
                                "required": ["name", "runner"]
                            }
                        }
                    },
                    "additionalProperties": False,
                    "required": ["title", "workloads"]
                }
            }
        },
        "additionalProperties": False,
        "required": ["title", "subtasks"]
    }

    CONFIG_SCHEMAS = {1: CONFIG_SCHEMA_V1, 2: CONFIG_SCHEMA_V2}

    def __init__(self, config):
        """TaskConfig constructor.

        :param config: Dict with configuration of specified task
        :raises InvalidTaskException: on unsupported version or schema error
        """
        self.version = self._get_version(config)
        self._validate_version()
        self._validate_json(config)
        self.title = config.get("title", "Task")
        self.tags = config.get("tags", [])
        self.description = config.get("description")
        self.subtasks = self._make_subtasks(config)
        # if self.version == 1:
        # TODO(ikhudoshyn): Warn user about deprecated format

    @staticmethod
    def _get_version(config):
        # v1 configs predate the "version" key, so its absence means 1.
        return config.get("version", 1)

    def _validate_version(self):
        """Raise InvalidTaskException for unsupported config versions."""
        if self.version not in self.CONFIG_SCHEMAS:
            allowed = ", ".join([str(k) for k in self.CONFIG_SCHEMAS])
            msg = (_("Task configuration version {0} is not supported. "
                     "Supported versions: {1}")).format(self.version, allowed)
            raise exceptions.InvalidTaskException(msg)

    def _validate_json(self, config):
        """Validate the config against the JSON schema for its version."""
        try:
            jsonschema.validate(config, self.CONFIG_SCHEMAS[self.version])
        except Exception as e:
            raise exceptions.InvalidTaskException(str(e))

    def _make_subtasks(self, config):
        """Build SubTask objects; v1 configs are upconverted to v2 shape."""
        if self.version == 2:
            return [SubTask(s) for s in config["subtasks"]]
        elif self.version == 1:
            subtasks = []
            for name, v1_workloads in six.iteritems(config):
                for v1_workload in v1_workloads:
                    # One v1 workload becomes one single-workload subtask.
                    v2_workload = copy.deepcopy(v1_workload)
                    v2_workload["name"] = name
                    subtasks.append(
                        SubTask({"title": name, "workloads": [v2_workload]}))
            return subtasks
class SubTask(object):
    """Subtask -- unit of execution in Task."""

    def __init__(self, config):
        """Subtask constructor.

        :param config: Dict with configuration of specified subtask;
                       must contain "title" and "workloads".
        """
        self.title = config["title"]
        self.group = config.get("group")
        self.description = config.get("description")
        self.tags = config.get("tags", [])
        self.context = config.get("context", {})
        self.workloads = [Workload(wconf) for wconf in config["workloads"]]
class Workload(object):
    """Workload -- workload configuration in SubTask.

    Holds one scenario run description: the scenario name plus optional
    "runner", "sla", "context" and "args" sections (each defaults to {}).
    """

    def __init__(self, config):
        self.name = config["name"]
        for section in ("runner", "sla", "context", "args"):
            setattr(self, section, config.get(section, {}))

    def to_dict(self):
        """Return the workload as a dict; empty optional sections omitted."""
        rendered = {"runner": self.runner}
        rendered.update({prop: getattr(self, prop)
                         for prop in ("sla", "args", "context")
                         if getattr(self, prop)})
        return rendered

    def to_task(self):
        """Make task configuration for the workload.

        This method returns a dict representing full configuration
        of the task containing a single subtask with this single
        workload.

        :return: dict containing full task configuration
        """
        # NOTE(ikhudoshyn): Result of this method will be used
        # to store full task configuration in DB so that
        # subtask configuration in reports would be given
        # in the same format as it was provided by user.
        # Temporarily it returns to_dict() in order not
        # to break existing reports. It should be
        # properly implemented in a patch that will update reports.
        # return {self.name: [self.to_dict()]}
        return self.to_dict()

    def make_key(self, pos):
        """Identifying key for this workload at position `pos`."""
        return {"name": self.name,
                "pos": pos,
                "kw": self.to_task()}

    def make_exception_args(self, pos, reason):
        """Keyword args for raising InvalidTaskConfig about this workload."""
        return {"name": self.name,
                "pos": pos,
                "config": self.to_dict(),
                "reason": reason}
|
|
from __future__ import division
from pygame.rect import Rect
from thorpy.elements._sliderutils._dragger import DraggerX, DraggerY
from thorpy.elements.ghost import Ghost
from thorpy.elements.element import Element
from thorpy.elements._sliderutils._shifters import Plus, Minus
from thorpy.miscgui.reaction import Reaction, ConstantReaction
from thorpy.miscgui import constants, functions, parameters, style, painterstyle
class _Slider(object):
"""Not to be instanciated, not an element."""
def __init__(self, length, limvals=None):
limvals = parameters.LIMVALS if limvals is None else limvals
self._length = length
self._limvals = limvals
self._shift = self.val_to_pix(1, 0)
def get_width_val(self):
return self._limvals[1] - self._limvals[0]
def pix_to_val(self, pix, x0):
fraction = float((pix - x0)) / self._length
return self._limvals[0] + fraction * self.get_width_val()
def val_to_pix(self, val, x0):
fraction = float(val - self._limvals[0]) / self.get_width_val()
return int(round(fraction * self._length + x0))
def _refresh_shift(self):
self._shift = self.val_to_pix(1, 0)
class _GraphicalSlider(_Slider, Element):
    """Base class for on-screen sliders: value logic plus graphical element."""

    def __init__(self, length, limvals=None, text="", elements=None,
                 normal_params=None):
        """:param length: slide length in pixels
        :param limvals: (min, max) value range; defaults to parameters.LIMVALS
        :param text: element text
        :param elements: child elements
        :param normal_params: params forwarded to Element
        """
        limvals = parameters.LIMVALS if limvals is None else limvals
        self._plus = None
        self._minus = None
        Element.__init__(self, text, elements, normal_params)
        _Slider.__init__(self, length, limvals)
        self.current_state.autoghost = False
        self._setup()
        self.active_wheel = False

    def get_storer_rect(self):
        return self.get_family_rect(constants.STATE_NORMAL)

    def move(self, shift):
        # Keep the dragger at the same *value* after the slider moves.
        value_before = self.get_value()
        Element.move(self, shift)
        if self.get_value() != value_before:
            self._drag_element.place_at(value_before)

    def _setup(self):
        # Hook for subclasses; builds painters and places the dragger.
        pass

    def _press_plus(self):
        # change state, blit and update
        functions.keypress(self._plus, constants.STATE_PRESSED)
        self._drag_element.shift()

    def _press_minus(self):
        # change state, blit and update
        functions.keypress(self._minus, constants.STATE_PRESSED)
        self._drag_element.shift(-1)

    def _add_buttons(self, size=None):
        """Create the +/- shift buttons and wire their reactions."""
        size = style.SMALL_SIZE if size is None else size
        # _plus
        self._plus = Plus(text="+")
        self._plus.set_painter(painterstyle.DEF_PAINTER(size=size))
        self._plus.finish()
        self._plus.drag = self._drag_element
        reac_plus = ConstantReaction(constants.THORPY_EVENT,
                                     self._plus._reaction_time,
                                     {"id": constants.EVENT_TIME},
                                     reac_name=constants.REAC_MOUSE_REPEAT)
        self.add_reaction(reac_plus)
        # _minus
        self._minus = Minus(text="-")
        self._minus.set_painter(painterstyle.DEF_PAINTER(size=size))
        self._minus.finish()
        self._minus.drag = self._drag_element
        # NOTE(review): this reuses self._plus._reaction_time; perhaps
        # self._minus._reaction_time was intended -- confirm.
        reac_minus = ConstantReaction(constants.THORPY_EVENT,
                                      self._plus._reaction_time,
                                      {"id": constants.EVENT_TIME},
                                      reac_name=constants.REAC_MOUSE_REPEAT+0.1)
        self.add_reaction(reac_minus)
        self.add_elements([self._plus, self._minus])
        # reactions to mouse _press (!= reactions to key _press):
        # BUGFIX: these previously referenced self.plus / self.minus, which
        # are never set (only self._plus / self._minus exist) and raised
        # AttributeError.
        self._plus.reactions[constants.REAC_PRESSED + 0.1] = ConstantReaction(
            constants.THORPY_EVENT,
            self._drag_element.shift,
            {"id": constants.EVENT_PRESS, "el": self._plus})
        self._minus.reactions[constants.REAC_PRESSED + 0.1] = ConstantReaction(
            constants.THORPY_EVENT,
            self._drag_element.shift,
            {"id": constants.EVENT_PRESS, "el": self._minus},
            {"sign": -1})
        # NOTE(review): both assignments below use the same key, so the
        # second overwrites the first; also `_reactions` here vs
        # `reactions` above looks inconsistent -- confirm against
        # Element's API before changing.
        self._reactions[constants.REAC_PRESSED + 0.1] = ConstantReaction(
            constants.THORPY_EVENT,
            self._drag_element.shift,
            {"id": constants.EVENT_PRESS, "el": self._plus})
        self._reactions[constants.REAC_PRESSED + 0.1] = ConstantReaction(
            constants.THORPY_EVENT,
            self._drag_element.shift,
            {"id": constants.EVENT_PRESS, "el": self._minus},
            {"sign": -1})

    def get_dragger(self):
        return self._drag_element

    def set_value(self, value):
        """Place the dragger at `value`."""
        self.get_dragger().place_at(value)
class SliderX(_GraphicalSlider):
    """Horizontal slider element."""

    def __init__(self,
                 length,
                 limvals=None,
                 text="",
                 elements=None,
                 normal_params=None,
                 initial_value=None):
        """:param length: slide length in pixels
        :param limvals: (min, max) value range; defaults to parameters.LIMVALS
        :param initial_value: starting value; clamped to limvals[0] (with a
            debug message) when outside the range, defaults to 0.
        """
        if limvals is None:
            # BUGFIX: limvals[0] below crashed (TypeError) when the default
            # None was used; resolve the default the same way _Slider does.
            limvals = parameters.LIMVALS
        # Remember whether the caller actually supplied a value: the old
        # `initial_value is not None` test ran after defaulting to 0 and
        # was therefore always true.
        value_supplied = initial_value is not None
        if initial_value is None:
            initial_value = 0
        if limvals[0] <= initial_value <= limvals[1]:
            self.initial_value = initial_value
        else:
            if value_supplied:
                functions.debug_msg("Initial value for slider was not in the"
                                    " limvals range. Auto set to limvals[0].")
            self.initial_value = limvals[0]
        self._drag_element = DraggerX(self)
        # BUGFIX: normal_params was accepted but silently dropped from the
        # parent call (SliderY passes it through).
        super(SliderX, self).__init__(length, limvals, text, elements,
                                      normal_params)
        self._drag_element.finish()
        self.add_elements(list([self._drag_element]))

    def finish(self):
        Element.finish(self)
        self._drag_element.set_center((None, self.get_fus_center()[1]))
        for state in self._states:
            self._states[state].refresh_ghost_rect()
        self._setup()
        self._drag_element.place_at(self.initial_value)

    def _setup(self, height=None, dragsize=None):
        """Build painters for the slider body and its dragger."""
        height = style.SLIDER_THICK if height is None else height
        dragsize = style.SLIDERX_DRAG_SIZE if dragsize is None else dragsize
        self._height = height
        size = (self._length + dragsize[0] + style.SLIDER_MARGINS[0], height)
        painter = functions.obtain_valid_painter(
            painterstyle.DEF_PAINTER,
            pressed=True,
            color=style.DEF_COLOR2,
            size=size)
        self.set_painter(painter)
        dp = functions.obtain_valid_painter(
            painterstyle.DEF_PAINTER,
            pressed=False,
            size=dragsize)
        try:
            # Normal path: element already has finished (fus) geometry.
            drag_x = self.val_to_pix(self.initial_value,
                                     self.get_fus_topleft()[0]) + 1
            self._drag_element.change_painter(dp, autopress=False)
            self._drag_element.set_center((drag_x, self.get_fus_center()[1]))
        except AttributeError:
            # Before finish(): fall back to the ghost geometry.
            drag_x = self.val_to_pix(self.initial_value,
                                     self.get_ghost_topleft()[0]) + 1
            self._drag_element.set_painter(dp, autopress=False)
            self._drag_element.set_center((drag_x, self.get_ghost_center()[1]))
        self._drag_element.set_free(y=False)
        Ghost.fit_children(self)

    def _get_slide_rect(self):
        """Rect of the active sliding zone, centered on the element."""
        slide_rect = Rect((0, 0), (self._length, self._height))
        try:
            slide_rect.center = self.get_fus_center()
        except AttributeError:
            slide_rect.center = self.get_ghost_center()
        return slide_rect

    def get_value(self):
        """Current value, clamped to the limvals range."""
        x0 = self._get_slide_rect().x
        val = self.pix_to_val(self._drag_element.get_fus_center()[0], x0)
        if val < self._limvals[0]:
            return self._limvals[0]
        elif val > self._limvals[1]:
            return self._limvals[1]
        else:
            return val

    def _add_buttons(self, size=None):
        # BUGFIX: resolve the default locally -- `size` is indexed below and
        # the parent's internal defaulting cannot update this local name
        # (previously size=None crashed on size[0]).
        size = style.SMALL_SIZE if size is None else size
        _GraphicalSlider._add_buttons(self, size)
        rect = self.get_fus_rect()
        self._minus.set_center((-2 + rect.left - size[0] // 2, rect.centery))
        self._plus.set_center((2 + rect.right + size[0] // 2, rect.centery))
        Ghost.fit_children(self)
        self._add_buttons_reactions()

    def _add_buttons_reactions(self):
        """Add reactions to keyboard _press and unpress"""
        pass

    def get_size(self, state=None):
        """Special get_size method for sliders.

        Could be named get_family_size().
        """
        return self.get_family_rect(state).size
class _SliderXSetter(SliderX):  # TODO: belongs on the father, not SliderX
    """SliderX variant that coerces values to the father's value type."""

    def pix_to_val(self, pix, x0):
        """Convert pixels to a value rounded per self.father._value_type.

        Returns None (implicitly) for any other value type -- preserved
        behavior of the original implementation.
        """
        raw = SliderX.pix_to_val(self, pix, x0)
        value_type = self.father._value_type
        if value_type is float:
            return round(raw, self.father._round_decimals)
        if value_type is int:
            return int(round(raw))
class SliderY(_GraphicalSlider):
    """Vertical slider element."""

    def __init__(self,
                 length,
                 limvals=None,
                 text="",
                 elements=None,
                 normal_params=None):
        self._height = None
        self._drag_element = DraggerY(self)
        super(SliderY, self).__init__(length, limvals, text, elements,
                                      normal_params)
        self._drag_element.finish()
        self.add_elements(list([self._drag_element]))
        Ghost.fit_children(self)

    def finish(self):
        Element.finish(self)
        # Center the dragger horizontally on the slider.
        self._drag_element.set_center((self.get_fus_center()[0], None))
        self.misc_refresh()

    def misc_refresh(self):
        self._refresh_shift()

    def _get_slide_rect(self):
        """Rect of the active sliding zone, centered on the element."""
        slide_rect = Rect((0, 0), (self._height, self._length))
        slide_rect.center = self.get_fus_rect().center
        return slide_rect

    def get_value(self):
        """Current value, clamped to the limvals range."""
        y0 = self._get_slide_rect().y
        val = self.pix_to_val(self._drag_element.get_fus_center()[1], y0)
        if val < self._limvals[0]:
            return self._limvals[0]
        elif val > self._limvals[1]:
            return self._limvals[1]
        else:
            return val

    def get_factor(self):
        # NOTE(review): equals value/limvals[1]; ignores limvals[0] and
        # divides by limvals[1] (ZeroDivisionError when the max is 0) --
        # presumably only meant for ranges starting at 0; confirm.
        value = self.get_value()
        return 1. - (self._limvals[1] - value) / self._limvals[1]

    def _add_buttons(self, size=None):
        # BUGFIX: resolve the default locally -- `size` is indexed below and
        # the parent's internal defaulting cannot update this local name
        # (previously size=None crashed on size[1]).
        size = style.SMALL_SIZE if size is None else size
        _GraphicalSlider._add_buttons(self, size)
        rect = self.get_fus_rect()
        pos = (rect.centerx, rect.bottom + style.SLIDER_MARGINS[1] + size[1]/2)
        self._minus.set_center(pos)
        pos = (rect.centerx, rect.top - style.SLIDER_MARGINS[1] - size[1]/2)
        self._plus.set_center(pos)
        Ghost.fit_children(self)
        self._add_buttons_reactions()

    def _add_buttons_reactions(self):
        pass

    def _get_theo_size(self, buttonsize, dragsize, length, margins=None,
                       surplus=False):
        """Returns the theoretical future total size of self. The reason for
        this method to exist is that it provides a way to guess the size before
        the graphical parts are created by calling self.finish().

        <surplus> : get only the size surplus due to buttons.
        """
        margins = style.SLIDER_MARGINS[0] if margins is None else margins
        w = max(buttonsize[0], dragsize[0])
        actual_length = length + dragsize[1] + 2 * margins
        if buttonsize[0] != 0:  # (0, 0) button size means no buttons
            buttons_growth = 2 * buttonsize[1] + 2 * margins
        else:
            buttons_growth = 0
        h = actual_length + buttons_growth
        if surplus:
            h -= length
        return (w, h)
|
|
# -*- test-case-name: sine.test -*-
from xshtoom.sdp import SDP
from xshtoom.rtp.protocol import RTPProtocol
from xshtoom.audio.converters import Codecker, PT_PCMU
from xshtoom.rtp.formats import PT_NTE
from xshtoom.audio.aufile import WavReader, GSMReader, WavWriter
from sine.sip import responseFromRequest, parseAddress, formatAddress
from sine.sip import Response, Request, URL, T1, T2, SIPError, Via, debug
from sine.sip import ClientTransaction, ServerTransaction, SIPLookupError
from sine.sip import ITransactionUser, SIPResolverMixin, ServerInviteTransaction
from sine.sip import ClientInviteTransaction, computeBranch
from twisted.internet import reactor, defer, task, stdio
from twisted.application.service import Service
from twisted.cred.error import UnauthorizedLogin
from twisted.python import log
from axiom import batch
from axiom.errors import NoSuchUser
import random, wave, hashlib
from zope.interface import Interface, implements
from epsilon import juice
class Hangup(Exception):
    """
    Raise this in ITransactionUser.receivedAudio or .receivedDTMF to
    end the call.
    """
class StartRecording(juice.Command):
    # Juice command: tell the RTP subprocess to record the call identified
    # by `cookie` into `filename` using `format`.
    commandName = 'Start-Recording'
    arguments = [('cookie', juice.String()), ('filename', juice.String()), ("format", juice.String())]
class StopRecording(juice.Command):
    # Juice command: stop recording the call identified by `cookie`.
    commandName = "Stop-Recording"
    arguments = [('cookie', juice.String())]
    response = []
class PlayFile(juice.Command):
    # Juice command: play an audio file into the call; `done` tells whether
    # playback ran to completion (True) or was interrupted (False).
    commandName = "Play-File"
    arguments = [('cookie', juice.String()), ("filename", juice.String()), ("format", juice.String(optional=True))]
    response = [("done", juice.Boolean())]
class StopPlaying(juice.Command):
    # Juice command: interrupt any in-progress playout for `cookie`.
    commandName = "Stop-Playing"
    arguments = [('cookie', juice.String())]
class CreateRTPSocket(juice.Command):
    # Juice command: allocate an RTP socket bound to `host`; returns the
    # opaque `cookie` that identifies the socket in later commands.
    commandName= "Create-RTPSocket"
    arguments = [("host", juice.String())]
    response = [('cookie', juice.String())]
class GetSDP(juice.Command):
    # Juice command: get the local SDP for the socket, optionally shaped by
    # the remote party's SDP (`othersdp`, serialized; empty string if none).
    commandName = "Get-SDP"
    arguments = [('cookie', juice.String()), ("othersdp", juice.String())]
    response = [('sdp', juice.String())]
class RTPStop(juice.Command):
    # Juice command: tear down the RTP socket identified by `cookie`.
    commandName = "Stop"
    arguments = [('cookie', juice.String())]
class RTPStart(juice.Command):
    # Juice command: begin sending RTP to the given remote host/port.
    commandName = "Start"
    arguments = [('cookie', juice.String()), ("targethost", juice.String()), ("targetport", juice.Integer())]
class DTMF(juice.Command):
    # Juice command (subprocess -> main process): a DTMF key press was
    # detected on the call identified by `cookie`.
    commandName = "DTMF"
    arguments = [('cookie', juice.String()), ("key", juice.Integer())]
class LocalControlProtocol(juice.Juice):
    """Main-process side of the control channel to the RTP subprocess.

    Maps the subprocess's opaque RTP cookies to Dialog instances (and
    back), and forwards incoming DTMF events to each dialog's call
    controller.
    """
    # runs in main process

    def __init__(self, *args, **kwargs):
        juice.Juice.__init__(self, *args, **kwargs)
        self.dialogs = {}   # cookie -> Dialog
        self.cookies = {}   # Dialog -> cookie

    def command_DTMF(self, cookie, key):
        """Dispatch a DTMF key press to the owning dialog's controller."""
        dialog = self.dialogs[cookie]
        dialog.callController.receivedDTMF(dialog, key)
        return {}
    command_DTMF.command = DTMF

    def createRTPSocket(self, dialog, host):
        """Ask the subprocess for an RTP socket on `host`.

        Returns a Deferred firing with the new cookie, after recording the
        dialog<->cookie association on both maps.
        NOTE(review): entries are never removed from these maps here --
        presumably cleaned up elsewhere; confirm to avoid a leak.
        """
        d = CreateRTPSocket(host=str(host)).do(self)

        def remember(response):
            cookie = response['cookie']
            self.dialogs[cookie] = dialog
            self.cookies[dialog] = cookie
            return cookie
        return d.addCallback(remember)

    def getSDP(self, dialog, othersdp):
        """Fetch the local SDP for `dialog`, parsed into an SDP object.

        `othersdp` may be an SDP object (serialized for the wire) or None.
        """
        if othersdp:
            othersdp = othersdp.show()
        else:
            othersdp = ""
        return GetSDP(cookie=self.cookies[dialog], othersdp=othersdp).do(self).addCallback(lambda r: SDP(r["sdp"]))
class MediaServerControlProtocol(batch.JuiceChild):
    """Subprocess side of the control channel: owns the RTP sockets.

    Each RTP socket is identified by a string cookie; per cookie we track
    the RTPProtocol plus the writer currently recording inbound audio
    (currentRecordings) and any in-progress playout (currentPlayouts).
    """
    # runs in RTP subprocess
    file = None

    def __init__(self, _):
        # NOTE(review): the constructor arg is ignored and this calls
        # juice.Juice.__init__ directly rather than batch.JuiceChild's --
        # confirm that is intentional.
        juice.Juice.__init__(self, False)
        self.codec = Codecker(PT_PCMU)
        self.currentRecordings = {}  # cookie -> (RTPProtocol, writer or None)
        self.currentPlayouts = {}    # cookie -> (LoopingCall, Deferred)
        self.cookies = 0             # counter used to mint cookie strings

    def dropCall(self, cookie):
        # Part of the RTPProtocol callback interface; nothing to do here.
        pass

    def incomingRTP(self, cookie, packet):
        """Handle one inbound RTP packet: dispatch DTMF, record audio."""
        rtp, fObj = self.currentRecordings[cookie]
        if packet.header.ct is PT_NTE:
            # Telephone-event (RFC 2833): byte 0 is the key, high bit of
            # byte 1 marks tone start. (py2-era: indexing bytes via ord().)
            data = packet.data
            key = ord(data[0])
            start = (ord(data[1]) & 128) and True or False
            if start:
                DTMF(cookie=cookie, key=key).do(self)
            else:
                # End of an inbound DTMF tone: nothing to do.
                return
        else:
            if fObj is not None:
                fObj.write(self.codec.decode(packet))

    def command_START_RECORDING(self, cookie, filename, format="raw"):
        """Begin recording inbound audio for `cookie` into `filename`."""
        rtp, currentFile = self.currentRecordings[cookie]
        formats = {"wav": WavWriter,
                   "raw": (lambda f: f)}
        if format not in formats:
            raise ValueError("no support for writing format %r" % (format,))
        if currentFile is not None:
            # Already recording: close the old file before switching.
            log.msg("Uh oh we were already recording in %s" % (currentFile))
            currentFile.close()
        log.msg("FORMAT IS! %s" % (formats[format],))
        self.currentRecordings[cookie] = (rtp, formats[format](open(filename, 'wb')))
        return {}
    command_START_RECORDING.command = StartRecording

    def command_STOP_RECORDING(self, cookie):
        """Stop recording for `cookie`, closing the writer if any."""
        #XXX what should this return
        if cookie in self.currentRecordings:
            rtp, f = self.currentRecordings[cookie]
            self.currentRecordings[cookie] = (rtp, None)
            if f:
                f.close()
        return {}
    command_STOP_RECORDING.command = StopRecording

    def command_PLAY_FILE(self, cookie, filename, format="raw"):
        """
        Play a shtoom-format sound file. (Raw unsigned linear, 16 bit
        8000Hz audio. sox options: -u -w -r 8000)

        Returns a Deferred that fires {"done": True} when the file played
        to completion, or {"done": False} if the playout was interrupted.
        """
        rtp, _ = self.currentRecordings[cookie]
        formats = {"wav": (WavReader, 160),
                   "raw": ((lambda f: f), 320),
                   "gsm": (GSMReader, 320)}
        if format not in formats:
            raise ValueError("no support for format %r" % (format,))
        codec, samplesize = formats[format]
        # NOTE(review): text-mode open for audio data -- py2-era code;
        # presumably needs 'rb' on py3. Confirm.
        f = codec(open(filename))
        d = defer.Deferred()

        def playSample():
            data = f.read(samplesize)
            if data == '':
                # EOF: report a completed playout.
                self.stopPlaying(cookie, True)
            else:
                sample = self.codec.handle_audio(data)
                rtp.handle_media_sample(sample)
        if cookie in self.currentPlayouts:
            # Interrupt whatever was already playing on this call.
            self.stopPlaying(cookie, False)
        LC = task.LoopingCall(playSample)
        LC.start(0.020)  # one audio sample every 20 ms
        self.currentPlayouts[cookie] = LC, d
        return d
    command_PLAY_FILE.command = PlayFile

    def command_CREATE_RTPSOCKET(self, host):
        """Allocate an RTP socket on `host` and return its new cookie."""
        c = self.makeCookie()
        rtp = RTPProtocol(self, c)
        self.currentRecordings[c] = (rtp, None)
        rtp.createRTPSocket(host, False)
        return {"cookie": c}
    command_CREATE_RTPSOCKET.command = CreateRTPSocket

    def command_GET_SDP(self, cookie, othersdp=None):
        """Return our SDP, optionally shaped by the remote party's SDP."""
        rtp, _ = self.currentRecordings[cookie]
        if othersdp:
            sdptxt = SDP(othersdp)
        else:
            sdptxt = None
        return {"sdp": rtp.getSDP(sdptxt).show()}
    command_GET_SDP.command = GetSDP

    def command_STOP(self, cookie):
        """Tear down the RTP socket: stop playout, recording and timers."""
        rtp, _ = self.currentRecordings[cookie]
        self.stopPlaying(cookie, False)
        self.command_STOP_RECORDING(cookie)
        del self.currentRecordings[cookie]
        rtp.stopSendingAndReceiving()
        rtp.timeouterLoop.stop()
        return {}
    command_STOP.command = RTPStop

    def command_START(self, cookie, targethost, targetport):
        """Start sending RTP to the given remote endpoint."""
        rtp, _ = self.currentRecordings[cookie]
        rtp.start((targethost, targetport))
        return {}
    command_START.command = RTPStart

    def command_STOP_PLAYING(self, cookie):
        self.stopPlaying(cookie, False)
        return {}
    command_STOP_PLAYING.command = StopPlaying

    def stopPlaying(self, cookie, finishedPlaying):
        """Stop any playout for `cookie` and fire its Deferred.

        :param finishedPlaying: True if the file played to completion,
            False if it was interrupted.
        """
        if cookie not in self.currentPlayouts:
            return
        LC, d = self.currentPlayouts[cookie]
        if LC:
            LC.stop()
            LC = None
        d.callback({"done": finishedPlaying})
        del self.currentPlayouts[cookie]

    def makeCookie(self):
        """Mint a new unique cookie string for an RTP socket."""
        self.cookies += 1
        return "cookie%s" % (self.cookies,)
class MediaServer(Service):
    """Service run inside the RTP subprocess: wires the media control
    protocol to the subprocess's stdin/stdout."""

    def startService(self):
        j = MediaServerControlProtocol(False)
        stdio.StandardIO(j)
class Dialog:
"""
I represent the state of a SIP call, and am responsible for
providing appropriate information for generating requests or
responses in that call.
Note that RFC 3261 distinguishes between dialogs and sessions,
because under certain circumstances you can have multiple dialogs
in a single session, for instance if the request forks and
receives multiple 2xx responses. The right thing to do in that
situation isn't clear, so I don't deal with that specially.
"""
callController = None
    def forServer(cls, tu, contactURI, msg):
        """
        Create a dialog from a received INVITE.

        tu: the transaction user handling this dialog.
        contactURI: the contact for the other party.
        msg: the initial INVITE that establishes this dialog.

        Returns a Deferred firing with the new Dialog once its RTP socket
        has been created in the media subprocess.
        """
        #RFC 3261 12.1.1
        self = cls(tu, contactURI)
        self.msg = msg
        toAddress, fromAddress = self._finishInit()
        # As UAS: "local" is the To side, "remote" the From side.
        self.localAddress = toAddress
        self.remoteAddress = fromAddress
        self.localAddress[2]['tag'] = self.genTag()
        self.direction = "server"
        self.routeSet = [parseAddress(route) for route in self.msg.headers.get('record-route', [])]
        self.clientState = "confirmed"

        def gotRTP(rtp):
            self.rtp = rtp
            return self.rtp.createRTPSocket(self, contactURI.host)

        def gotCookie(c):
            self.cookie = c
            return self
        return self.tu.mediaController.getProcess().addCallback(gotRTP).addCallback(gotCookie)
    forServer = classmethod(forServer)
    def forClient(cls, tu, contactURI, targetURI, controller, noSDP=False, fromName=""):
        """
        Create a dialog with a remote party by sending an INVITE.

        tu: the transaction user handling this dialog.
        contactURI: the _local_ contact URI (port and host media will be received on)
        targetURI: URI of the remote party.
        controller: an ICallController instance that will handle media for this call.

        Returns a Deferred firing with the new Dialog once the media
        process is up and the INVITE has been generated.
        """
        #XXX Need to distinguish between contact and "logical" address
        #Contact usually includes the IP this element is actually listening on,
        #rather than some address that may proxy to here
        #this code assumes that they are identical
        #and specifically, that you can RTP-listen on contactURI.host

        # RFC 3261 12.1.2
        self = cls(tu, contactURI)
        self.callController = controller
        self.direction = "client"

        def startProcess(rtp):
            self.rtp = rtp
        return self.tu.mediaController.getProcess().addCallback(startProcess).addCallback(lambda _: self._generateInvite(contactURI, fromName, targetURI, noSDP))
    forClient = classmethod(forClient)
    # Class default: no route set until one is learned from Record-Route.
    routeSet = None

    def __init__(self, tu, contactURI):
        self.tu = tu
        self.contactURI = contactURI
        # NOTE(review): randint with float bounds (1E4, 1E5) only works on
        # Python 2 -- confirm/intify before running on py3.
        self.localCSeq = random.randint(1E4,1E5)
        self.LC = None
        #UAC bits
        self.clientState = "early"
        self.sessionDescription = None
        # (retry DelayedCall, attempt count) for 2xx retransmission.
        self.ackTimer = [None, 0]
        self.acked = False
        self.ended = False
    def _finishInit(self):
        """Record the Call-ID from self.msg and return its parsed
        (To, From) addresses."""
        self.callID = self.msg.headers['call-id'][0]
        toAddress = parseAddress(self.msg.headers['to'][0])
        fromAddress = parseAddress(self.msg.headers['from'][0])
        return toAddress, fromAddress
    def ackTimerRetry(self, msg):
        """Retransmit a 2xx response until the ACK arrives.

        Exponential backoff capped at T2; after more than 10 tries
        the other end is presumed gone and we send BYE instead.
        """
        timer, tries = self.ackTimer
        if tries > 10:
            # more than 64*T1 seconds since we've heard from the other end
            # so say bye and give up
            self.sendBye()
            return
        if tries > 0:
            # First call only schedules; later calls actually retransmit.
            self.tu.transport.sendResponse(msg)
        self.ackTimer = (reactor.callLater(min((2**tries)*T1, T2),
                                           self.ackTimerRetry, msg),
                         tries+1)
    def getDialogID(self):
        """Return the (call-id, local tag, remote tag) triple that
        identifies this dialog (RFC 3261 section 12)."""
        return (self.callID,
                self.localAddress[2].get('tag',''),
                self.remoteAddress[2].get('tag',''))
def genTag(self):
tag = ('%04x'%(random.randint(0, 2**10)))[:4]
tag += ('%04x'%(random.randint(0, 2**10)))[:4]
return tag
    def responseFromRequest(self, code, msg, body, bodyType="application/sdp"):
        """Build a response to `msg` within this dialog.

        Copies the transaction headers from the request, orients To/From
        according to the dialog direction, and attaches `body` (plus a
        Contact header) only for a 200 to an INVITE; other responses get
        an empty body.
        """
        response = Response(code)
        for name in ("via", "call-id", "record-route", "cseq"):
            # Copy (slice) so later mutation of the request can't leak in.
            response.headers[name] = msg.headers.get(name, [])[:]
        if self.direction == 'server':
            response.addHeader('to', formatAddress(self.localAddress))
            response.addHeader('from', formatAddress(self.remoteAddress))
        elif self.direction == 'client':
            response.addHeader('from', formatAddress(self.localAddress))
            response.addHeader('to', formatAddress(self.remoteAddress))
        response.addHeader('user-agent', "Divmod Sine")
        if msg.method == 'INVITE' and code == 200:
            response.addHeader('contact', formatAddress(self.contactURI))
            response.addHeader('content-length', len(body))
            response.addHeader('content-type', bodyType)
            response.bodyDataReceived(body)
        else:
            response.addHeader('content-length', 0)
        response.creationFinished()
        return response
    def _generateInvite(self, contacturi, fromName, uri, noSDP):
        """Create the initial INVITE for this dialog and store it as self.msg.

        An RTP socket is allocated first (its cookie kept on self.cookie);
        unless C{noSDP}, the body is then filled with our SDP offer.
        Returns a Deferred that fires with this dialog once the message is
        complete.
        """
        #RFC 3261 8.1.1
        #RFC 3261 13.2.1
        invite = Request("INVITE", uri)
        invite.addHeader("to", formatAddress(uri))
        invite.addHeader("from", formatAddress((fromName, URL(contacturi.host, contacturi.username), {'tag': self.genTag()})))
        # random Call-ID scoped to our host
        invite.addHeader("call-id",
                         "%s@%s" % (hashlib.md5(str(random.random())).hexdigest(),
                                    contacturi.host))
        invite.addHeader("cseq", "%s INVITE" % self.localCSeq)
        invite.addHeader("user-agent", "Divmod Sine")
        if noSDP:
            invite.headers["content-length"] = ["0"]
        else:
            invite.addHeader("content-type", "application/sdp")
        #XXX maybe rip off IP discovered in SDP phase?
        invite.addHeader("contact", formatAddress(contacturi))
        def fillSDP(_):
            # ask the media layer for our offer, then finalize the message
            def consultSDP(sdpobj):
                sdp = sdpobj.show()
                self.sessionDescription = sdp
                invite.body = sdp
                invite.headers['content-length'] = [str(len(sdp))]
                invite.creationFinished()
            return defer.maybeDeferred(self.rtp.getSDP, self, None).addCallback(consultSDP)
        def finish(_):
            # record the message and derive local/remote addresses from it
            self.msg = invite
            toAddress,fromAddress = self._finishInit()
            self.localAddress = fromAddress
            self.remoteAddress = toAddress
            return self
        d = self.rtp.createRTPSocket(self, contacturi.host)
        def gotCookie(c):
            self.cookie = c
        d.addCallback(gotCookie)
        if noSDP:
            invite.creationFinished()
            return d.addCallback(finish)
        else:
            return d.addCallback(fillSDP).addCallback(finish)
    def reinvite(self, newContact, newSDP):
        """
        Send a new INVITE to the remote address for this dialog.
        @param newContact: a new local address for this dialog. if None, the existing one is used.
        @param newSDP: An L{xshtoom.sdp.SDP} instance describing the new session.
        """
        newSDP = upgradeSDP(self.sessionDescription, newSDP)
        msg = self.generateRequest('INVITE')
        if newContact:
            msg.headers['contact'] = [formatAddress(newContact)]
        msg.body = newSDP.show()
        msg.headers['content-length'] = [str(len(newSDP.show()))]
        msg.addHeader("content-type", "application/sdp")
        self.reinviteMsg = msg
        self.reinviteSDP = newSDP
        # Only one INVITE transaction at a time: if one of our client
        # INVITEs is still in flight, hold the reinvite.
        for ct in self.tu.cts:
            if (isinstance(ct, ClientInviteTransaction) and
                ct.mode not in ('completed', 'terminated')):
                self.clientState = "reinvite-waiting"
                return
        # likewise while we are still answering a server transaction
        for st in self.tu.transport.serverTransactions.values():
            if (st.tu == self.tu and
                st.mode not in ('confirmed', 'terminated')):
                self.clientState = "reinvite-waiting"
                #XXX gotta do something to trigger the reinvite once the
                #offending ST finishes
                return
        return self._sendReinvite(msg)
def _sendReinvite(self, msg):
if self.clientState == "byeSent":
#it's over, never mind
return
dest = self._findDest(msg)
ct = ClientInviteTransaction(self.tu.transport, self.tu, msg, (dest.host, dest.port))
self.clientState = "reinviteSent"
self.tu.cts[ct] = self
    def generateRequest(self, method):
        """Build an in-dialog request of the given method.

        Applies the stored route set (falling back to strict routing when
        the first route lacks the 'lr' parameter) and consumes one local
        CSeq number.
        """
        #RFC 3261 12.2.1.1
        r = Request(method, self.remoteAddress[1])
        if self.routeSet:
            r.headers['route'] = [formatAddress(route) for route in self.routeSet]
            if 'lr' not in self.routeSet[0][1].other:
                # strict router first in the set: append the request URI to
                # the route set and retarget the request at the first route
                r.headers['route'].append(formatAddress(("", r.uri, {})))
                r.uri = parseAddress(r.headers['route'].pop())[1]
        r.addHeader('to', formatAddress(self.remoteAddress))
        r.addHeader('from', formatAddress(self.localAddress))
        r.addHeader('cseq', "%s %s" % (self.localCSeq, method))
        self.localCSeq += 1
        r.addHeader('call-id', self.msg.headers['call-id'][0])
        r.addHeader('contact', formatAddress(self.contactURI))
        r.addHeader('content-length', 0)
        return r
def _findDest(self, msg):
rs = msg.headers.get('route', None)
if rs:
dest = parseAddress(rs[0])[1]
else:
dest = self.remoteAddress[1]
return dest
def sendBye(self):
"Send a BYE and stop media."
msg = self.generateRequest('BYE')
self.clientState = "byeSent"
dest = self._findDest(msg)
ct = ClientTransaction(self.tu.transport, self.tu, msg, (dest.host, dest.port))
self.tu.cts[ct] = self #is this bad?
self.end()
    def sendAck(self, body=""):
        """Send an ACK for the current INVITE.

        The CSeq number is copied from self.msg since an ACK shares its
        INVITE's CSeq; a non-empty C{body} carries our SDP answer.  The Via
        header and branch are inserted here because the ACK is sent
        directly on the transport rather than through a client transaction.
        """
        msg = self.generateRequest('ACK')
        msg.headers['cseq'] = ["%s ACK" % self.msg.headers['cseq'][0].split(' ')[0]]
        msg.body = body
        msg.headers['content-length'] = [str( len(body))]
        if body:
            msg.addHeader("content-type", "application/sdp")
        dest = self._findDest(msg)
        msg.headers.setdefault('via', []).insert(0, Via(self.tu.transport.host, self.tu.transport.port,
                                                        rport=True,
                                                        branch=computeBranch(msg)).toString())
        self.tu.transport.sendRequest(msg, (dest.host, dest.port))
def playFile(self, filename, codec=None):
def check(r):
if not r['done']:
return defer.fail(RuntimeError("cancelled"))
if codec is None:
return PlayFile(cookie=self.cookie, filename=str(filename)).do(self.rtp).addCallback(check)
else:
return PlayFile(cookie=self.cookie, filename=str(filename), format=codec).do(self.rtp).addCallback(check)
    def stopPlaying(self):
        """Ask the media controller to stop any file playback on this
        dialog's RTP session."""
        return StopPlaying(cookie=self.cookie).do(self.rtp)
    def end(self):
        """Stop RTP for this dialog and notify the call controller.

        Idempotent: calls after the first are no-ops.
        """
        if self.ended:
            return
        self.ended = True
        RTPStop(cookie=self.cookie).do(self.rtp)
        self.callController.callEnded(self)
    def startRecording(self, filename, format):
        """Begin recording this dialog's incoming audio to C{filename}
        in the given format."""
        return StartRecording(
            cookie=self.cookie,
            format=format,
            filename=str(filename)).do(self.rtp)
    def endRecording(self):
        """Stop a recording previously begun with startRecording."""
        return StopRecording(
            cookie=self.cookie).do(self.rtp)
def startAudio(self, sdp):
md = sdp.getMediaDescription('audio')
addr = md.ipaddr or sdp.ipaddr
def go(ipaddr):
remoteAddr = (ipaddr, md.port)
return RTPStart(cookie=self.cookie, targethost=remoteAddr[0], targetport=remoteAddr[1]).do(self.rtp)
reactor.resolve(addr).addCallback(go)
def upgradeSDP(currentSession, newSDP):
    """Merge a reinvite's SDP into the current session description.

    The current session's media descriptions are replaced wholesale by the
    new SDP's; the connection line is replaced only when the new SDP
    supplies an address.  The o= version is bumped by one, as required for
    a modified session description.
    """
    merged = SDP(currentSession)
    if newSDP.ipaddr:
        merged.nettype = newSDP.nettype
        merged.addrfamily = newSDP.addrfamily
        merged.ipaddr = newSDP.ipaddr
    merged.mediaDescriptions = newSDP.mediaDescriptions
    merged._o_version = str(int(merged._o_version) + 1)
    return merged
class ICallControllerFactory(Interface):
    def buildCallController(self, dialog):
        "Return an ICallController to handle calls on the given dialog."
class ICallController(Interface):
    """
    The order of calls received is:
    - acceptCall
    - callBegan
    - zero or more receivedAudio and/or receivedDTMF (if zero, there is probably a network problem)
    - callEnded
    """
    def acceptCall(dialog):
        """
        Decide if this call will be accepted or not: raise a
        SIPError(code) if the call should be rejected, where 'code' is
        the SIP error code desired.
        """
    def callBegan(dialog):
        """
        Called after the INVITE response has been ACKed and audio started.
        """
    def callFailed(dialog, message):
        """
        Called when an incoming call is canceled or an outgoing call
        receives a failure response.
        """
    def callEnded(dialog):
        """
        Called after BYE received.
        """
    def receivedAudio(dialog, packet):
        """
        Called with a chunk of audio data, decode into shtoom's
        preferred format (signed linear 16bit 8000Hz).
        """
    def receivedDTMF(dialog, key):
        """
        Called with the numeric value of the pressed key. * and # are
        10 and 11.
        """
def _matchToDialog(msg, origin, dest, dialogs):
    """Look up the dialog identified by C{msg}'s Call-ID plus the tags of
    the C{origin} and C{dest} headers; return None when unknown."""
    callID = msg.headers['call-id'][0]
    originTag = parseAddress(msg.headers[origin][0])[2].get('tag', '')
    destTag = parseAddress(msg.headers[dest][0])[2].get('tag', '')
    return dialogs.get((callID, originTag, destTag))
def matchResponseToDialog(msg, dialogs):
    # In a response to a request we sent, From carries our (local) tag and
    # To carries the remote tag.
    return _matchToDialog(msg, 'from', 'to', dialogs)
def matchRequestToDialog(msg, dialogs):
    # In a request sent to us, To carries our (local) tag and From carries
    # the remote tag -- the mirror image of a response.
    return _matchToDialog(msg, 'to', 'from', dialogs)
class UserAgent(SIPResolverMixin):
    """
    I listen on a sine.sip.SIPTransport and create or accept SIP calls
    """
    implements(ITransactionUser)
    def server(cls, voicesystem, localHost, mediaController, dialogs=None):
        """
        I listen for incoming SIP calls and connect them to
        ICallController instances, looked up via an IVoiceSystem.
        """
        self = cls(localHost, mediaController, dialogs)
        self.voicesystem = voicesystem
        return self
    server = classmethod(server)
    def client(cls, controller, localpart, localHost, mediaController, dialogs=None):
        """
        I create calls to SIP URIs and connect them to my ICallController instance.
        """
        self = cls(localHost, mediaController, dialogs)
        self.controller = controller
        self.user = localpart
        return self
    client = classmethod(client)
    def __init__(self, localHost, mc, dialogs):
        # mc: the media controller; dialogs: optional shared dialog map
        # keyed by (call-id, local tag, remote tag).
        self.mediaController = mc
        if dialogs is None:
            self.dialogs = {}
        else:
            self.dialogs = dialogs
        # cts maps in-flight client transactions to their dialogs
        self.cts = {}
        self.host = localHost
        self.shutdownDeferred = None
    def start(self, transport):
        """Called with the SIPTransport once we are connected to it."""
        self.transport = transport
    def stopTransactionUser(self, hard=False):
        """Tear down all dialogs; if C{hard}, end media immediately instead
        of sending BYEs.  Returns a Deferred fired when shutdown finishes."""
        for d in self.dialogs.values():
            if hard:
                d.end()
            else:
                d.sendBye()
        self.shutdownDeferred = defer.Deferred()
        return self.shutdownDeferred
    def maybeStartAudio(self, dialog, sdp):
        """
        Start audio on the dialog. This method is designed to be
        overridden by user-agents that provide facilities like
        third-party call control. See L{sine.tpcc} for details.
        """
        debug("START AUDIO (maybe)")
        dialog.startAudio(sdp)
    def requestReceived(self, msg, addr):
        """Dispatch an incoming request to the matching process_* handler,
        wrapped in a server transaction."""
        #RFC 3261 12.2.2
        if msg.method == "INVITE":
            st = ServerInviteTransaction(self.transport, self, msg, addr)
        else:
            st = ServerTransaction(self.transport, self, msg, addr)
        #dialog checking
        dialog = matchRequestToDialog(msg, self.dialogs)
        #untagged requests must be checked against ongoing transactions
        # see 8.2.2.2
        if not dialog and parseAddress(msg.headers['to'][0])[2].get('tag',None):
            #uh oh, there was an expectation of a dialog
            #but we can't remember it (maybe we crashed?)
            st.messageReceivedFromTU(responseFromRequest(481, msg))
            return defer.succeed(st)
        #authentication
        #check for Require
        m = getattr(self, "process_" + msg.method, None)
        if not m:
            st.messageReceivedFromTU(responseFromRequest(405, msg))
            return defer.succeed(st)
        else:
            return defer.maybeDeferred(m, st, msg, addr, dialog).addCallback(
                lambda x: st)
    def process_INVITE(self, st, msg, addr, dialog):
        """Handle an INVITE: renegotiate an existing dialog (re-INVITE) or
        set up a new one via the voicesystem."""
        #RFC 3261 13.3.1
        if dialog:
            #it's a reinvite
            if msg.body:
                #new SDP ahoy
                sdp = SDP(msg.body)
            else:
                sdp = None
            d = defer.maybeDeferred(dialog.rtp.getSDP, dialog, sdp)
            def gotSDP(mysdp):
                if not mysdp.hasMediaDescriptions():
                    st.messageReceivedFromTU(responseFromRequest(488, msg))
                    return st
                dialog.sessionDescription = mysdp
                if dialog.clientState == "reinviteSent":
                    # glare: we also have a reinvite in flight (RFC 3261 14.2)
                    st.messageReceivedFromTU(dialog.responseFromRequest(491, msg))
                else:
                    if sdp:
                        self.maybeStartAudio(dialog, sdp)
                    dialog.msg = msg
                    dialog.reinviteMsg = True
                    dialog.remoteAddress = parseAddress(msg.headers['from'][0])
                    response = dialog.responseFromRequest(200, msg, mysdp.show())
                    st.messageReceivedFromTU(response)
                    dialog.ackTimerRetry(response)
                return st
            return d.addCallback(gotSDP)
        #otherwise, time to start a new dialog
        d = Dialog.forServer(self, URL(self.host,
                                       parseAddress(msg.headers['to'][0])[1].username),
                             msg)
        def lookupElement(dialog):
            avatar = self.voicesystem.localElementByName(parseAddress(msg.headers['to'][0])[1].username)
            dialog.callController = avatar.buildCallController(dialog)
            if msg.body:
                sdp = SDP(msg.body)
            else:
                sdp = None
            d = defer.maybeDeferred(dialog.rtp.getSDP, dialog, sdp)
            def gotSDP(mysdp):
                dialog.sessionDescription = mysdp
                if not mysdp.hasMediaDescriptions():
                    st.messageReceivedFromTU(responseFromRequest(406, msg))
                    return st
                if sdp:
                    self.maybeStartAudio(dialog, sdp)
                self.dialogs[dialog.getDialogID()] = dialog
                response = dialog.responseFromRequest(200, msg, mysdp.show())
                st.messageReceivedFromTU(response)
                dialog.ackTimerRetry(response)
            d.addCallback(gotSDP)
            return d
        def failedLookup(err):
            err.trap(NoSuchUser, UnauthorizedLogin)
            raise SIPLookupError(604)
        return d.addCallback(lookupElement).addErrback(failedLookup)
    def process_ACK(self, st, msg, addr, dialog):
        """Handle the ACK for a 2xx we sent: cancel retransmission, start
        the call (initial INVITE only) and absorb a late SDP answer."""
        #woooo it is an ack for a 200, it is call setup time
        timer = dialog.ackTimer[0]
        if timer.active():
            timer.cancel()
        if not getattr(dialog, 'reinviteMsg', None):
            #only do this for the initial INVITE
            if not dialog.acked:
                dialog.callController.callBegan(dialog)
                dialog.acked = True
        else: debug("reinvite ACKed")
        if msg.body:
            #must've gotten an invite with no SDP
            #so this is the answer
            sdp = SDP(msg.body)
            self.maybeStartAudio(dialog, sdp)
    def process_BYE(self, st, msg, addr, dialog):
        """Handle a BYE: stop media, answer 200 and forget the dialog."""
        if not dialog:
            raise SIPError(481)
        #stop RTP stuff
        dialog.end()
        response = dialog.responseFromRequest(200, msg, None)
        st.messageReceivedFromTU(response)
        del self.dialogs[dialog.getDialogID()]
    def responseReceived(self, response, ct=None):
        #OK this function is a bit hairy because I don't want to track
        #any call state in this class and responses to various things
        #need to be handled differently. The main event is 2xx
        #responses to the INVITE -- that changes the early dialog
        #(created when the INVITE was sent) to an confirmed dialog.
        #Error responses result in dialog teardown, as do responses to BYEs.
        #RFC 3261 12.2.1.2
        dialog = self.cts.get(ct, None)
        if dialog is None:
            dialog = matchResponseToDialog(response, self.dialogs)
        if 'INVITE' in response.headers['cseq'][0] and 200 <= response.code < 300:
            #possibly this line doesn't belong here? earlyResponseReceived
            #does it too but IIRC it can't come before sending the ack
            dialog.remoteAddress = parseAddress(response.headers['to'][0])
            self.acknowledgeInvite(dialog, response)
        if dialog.clientState == "early":
            self.earlyResponseReceived(dialog, response, ct)
        if dialog.clientState == "byeSent":
            self.byeResponseReceived(dialog, response, ct)
        elif dialog.clientState == "reinviteSent":
            self.reinviteResponseReceived(dialog, response, ct)
    def acknowledgeInvite(self, dialog, response):
        """ACK a 2xx to our INVITE, supplying our SDP answer in the ACK
        body when the 200 carried the offer."""
        #RFC 3261, 13.2.2.4
        if dialog.sessionDescription:
            #the INVITE contained the offer, no body in the ACK
            dialog.sendAck()
        else:
            #the 200 contained the offer, answer in the ACK
            sdp = SDP(response.body)
            d = defer.maybeDeferred(dialog.rtp.getSDP, dialog, sdp)
            def gotSDP(mysdpobj):
                mysdp = mysdpobj.show()
                dialog.sendAck(mysdp)
                dialog.sessionDescription = mysdp
            d.addCallback(gotSDP)
            return d
    def reinviteResponseReceived(self, dialog, response, ct):
        """Handle the response to a reinvite we sent; 491 (glare) retries
        after the role-dependent delays from RFC 3261 14.1."""
        if response.code == 491:
            if dialog.direction == "client":
                reactor.callLater(random.randint(210,400)/100.0,
                                  dialog._sendReinvite, dialog.reinviteMsg)
            else:
                reactor.callLater(random.randint(0, 200)/100.0,
                                  dialog._sendReinvite, dialog.reinviteMsg)
        elif 200 <= response.code < 300:
            dialog.clientState = "confirmed"
            dialog.msg = dialog.reinviteMsg
            dialog.contactURI = parseAddress(dialog.msg.headers['contact'][0])[1]
            dialog.sessionDescription = dialog.reinviteSDP
        else:
            dialog.clientState = "confirmed"
    def byeResponseReceived(self, dialog, response, ct):
        """A BYE we sent was answered; forget the transaction and dialog."""
        del self.cts[ct]
        del self.dialogs[dialog.getDialogID()]
    def earlyResponseReceived(self, dialog, response, ct):
        """Handle the final response that resolves an early client dialog."""
        if 200 <= response.code < 300:
            #RFC 3261 12.1.2
            dialog.clientState = "confirmed"
            dialog.remoteAddress = parseAddress(response.headers['to'][0])
            dialog.routeSet = [parseAddress(route) for route in response.headers.get('record-route', [])[::-1]]
            self.dialogs[dialog.getDialogID()] = dialog
            sdp = SDP(response.body)
            self.maybeStartAudio(dialog, sdp)
            if self.controller:
                self.controller.callBegan(dialog)
        elif 300 <= response.code < 400:
            # BUG FIX: `raise NotImplemented, "..."` raised a TypeError
            # (NotImplemented is a constant, not an exception class) and is
            # invalid syntax on Python 3.
            raise NotImplementedError("Dunno about redirects yet")
        elif 400 <= response.code < 700:
            if dialog.getDialogID() in self.dialogs:
                del self.dialogs[dialog.getDialogID()]
            del self.cts[ct]
            self.controller.callFailed(dialog, response)
    def call(self, uri):
        """
        Call the specified URI and notify our controller when it is set up.
        """
        return self._doCall(uri)
    def _doCall(self, uri, noSDP=False,fromName=""):
        dlgD = Dialog.forClient(self, URL(self.host, self.user), uri, self.controller, noSDP, fromName)
        def _cb(dlg):
            targetsD = self._lookupURI(uri)
            def _send(targets):
                #'targets' is a list of (host, port) obtained from SRV lookup
                #ideally, if there's a 503 response to this message, we can
                #resend through another target.
                #For now we'll just send to the first and hope for the best.
                ct = ClientInviteTransaction(self.transport, self, dlg.msg, targets[0])
                self.cts[ct] = dlg
            targetsD.addCallback(_send)
            return dlg
        return dlgD.addCallback(_cb)
    def clientTransactionTerminated(self, ct):
        """Clean up a finished client transaction; deliver its final
        response for unconfirmed dialogs and complete shutdown if this was
        the last outstanding transaction."""
        if ct not in self.cts:
            return
        dialog = self.cts.pop(ct)
        if dialog.clientState != "confirmed":
            self.responseReceived(ct.response)
        if self.shutdownDeferred and not self.cts:
            self.shutdownDeferred.callback(True)
    def dropCall(self, dialog):
        "For shtoom compatibility."
        dialog.sendBye()
class SimpleCallRecipient:
    """
    An example SIP application: upon receipt of a call, a greeting is
    played, then audio is recorded until hangup or # is pressed.
    """
    implements(ICallController)
    file = None
    def acceptCall(self, dialog):
        # accept every call
        pass
    def callBegan(self, dialog):
        # play the greeting, then start recording the caller
        import os
        fn = os.path.join(os.path.split(__file__)[0], 'test_audio.raw')
        dialog.playFile(fn).addCallback(lambda _: self.beginRecording())
    def callFailed(self, dialog, message):
        # Required by ICallController; nothing was started yet, so there is
        # nothing to clean up.
        pass
    def receivedDTMF(self, dialog, key):
        # BUG FIX: ICallController declares receivedDTMF(dialog, key); the
        # old (self, key) signature raised TypeError when invoked.
        if key == 11:  # '#' ends the recording
            self.endRecording()
    def callEnded(self, dialog):
        self.endRecording()
    def beginRecording(self):
        self.file = wave.open('recording.wav', 'wb')
        self.file.setparams((1,2,8000,0,'NONE','NONE'))
    def receivedAudio(self, dialog, bytes):
        if self.file:
            self.file.writeframes(bytes)
    def endRecording(self):
        # BUG FIX: also reset self.file so a later receivedAudio or second
        # endRecording doesn't operate on a closed wave object.
        if self.file:
            self.file.close()
            self.file = None
|
|
#!/usr/bin/env python -tt
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import sys
import os
import itertools
# this calls the __init__ of generate, and populates the registry with all the
# generator_factories.
# Then we grab the single instance of generator_registry
from thrift_compiler.generate.t_generator \
import registry as generator_registry
from thrift_compiler import frontend
class ArgumentError(Exception):
    """Raised when the command line is malformed or names unknown inputs."""
def init_parser():
    """Build the OptionParser for the thrift compiler front-end.

    The epilog enumerates every registered generator and its options, so
    it is rendered verbatim rather than re-wrapped by optparse.
    """
    from optparse import OptionParser, OptionGroup
    from optparse import TitledHelpFormatter, IndentedHelpFormatter
    class MyParser(OptionParser):
        # return the epilog untouched to preserve the listing's layout
        def format_epilog(self, formatter):
            return self.epilog
    # construct the epilogue
    tmp = []
    for lang, opts in generator_registry.reference.iteritems():
        tmp.append(' {0} ({1[long]}):'.format(lang, opts))
        for key, help in opts.get('options', {}).items():
            tmp.append(''.join((' ', '{0}:'.format(key).ljust(20), help)))
    epilogue = '\n'.join([
        '',
        'Available generators (and options):',
    ] + tmp +
    [''] * 2)
    # construct the parser
    parser = MyParser(usage="%prog [options] file", description="", version="",
        epilog=epilogue, formatter=IndentedHelpFormatter())
    # defaults
    parser.set_defaults(strict=127, warn=1, debug=False)
    # callback for --strict
    def strict_cob(option, opt_str, value, parser):
        if opt_str == '--strict':
            parser.values.strict = 255
            parser.values.warn = 2
        else:
            return NotImplemented
    # parser rules: (option strings, add_option keyword arguments)
    rules = [
        (['-o', '--install_dir'], dict(metavar='dir',
            dest="outputDir", default='.',
            help='Set the output directory for gen-* packages (default:'
            'current directory)')),
        (['--out'], {}),
        (['-I'], dict(metavar='dir', dest="includeDirs", action="append",
            help='Add a directory to the list of directories searched for'
            'include directives')),
        (['--nowarn'], dict(action='store_const', const=0, dest='warn',
            help='Suppress all compiler warnings (BAD!)')),
        (['--strict'], dict(action='callback', callback=strict_cob,
            help='Strict compiler warnings on')),
        (['-v', '--verbose'], dict(action='store_true', default=False,
            help='Verbose mode')),
        (['-r', '--recurse'], dict(action='store_true', default=False,
            help='Also generate included files')),
        (['--debug'], dict(action='store_true',
            help='Parse debug trace to stdout')),
        (['--allow-neg-keys'], dict(action='store_true', default=False,
            help='Allow negative field keys (Used to preserve '
            'protocol compatibility with older .thrift files')),
        (['--allow-neg-enum-vals'], dict(action='store_true', default=False,
            help='Allow negative enum vals')),
        (['--allow-64bit-consts'], dict(action='store_true', default=False,
            help='Do not print warnings about using 64-bit constants')),
        (['--gen'], dict(metavar='STR', dest='generate', default=[],
            action='append', help='Generate code with a dynamically-'
            'registered generator. STR has the form language[:key1=val1[,'
            'key2,[key3=val3]]]. Keys and values are options passed to the '
            'generator. Many options will not require values.')),
        (['--fbcode_dir'], {}),
        (['--record-genfiles'], {}),
    ]
    for i, j in rules:
        parser.add_option(*i, **j)
    return parser
def toDict(string):
    """Turn a string of the form ``a[,c[=d]]...`` into a dict.

    Items without an ``=`` map to the empty string; only the first ``=``
    splits key from value, so values may themselves contain ``=``.  Empty
    segments (from leading, trailing or doubled commas) are skipped.

    The previous implementation relied on the Python-2-only
    ``itertools.ifilter``; a plain loop is equivalent and portable.
    """
    d = {}
    for item in string.split(','):
        if not item:
            # skip empty segments produced by stray commas
            continue
        key, _, value = item.partition('=')
        d[key] = value
    return d
def parseParameters(parser, args):
    """Parse argv into a dict with keys to_generate, options, thrift_file.

    Raises ArgumentError for any invalid usage: wrong argument count,
    unknown generator language, or unusable input/output paths.
    """
    opts, positional = parser.parse_args(args)
    if len(positional) != 1:
        raise ArgumentError('Must provide exactly one thrift definition file.')
    thrift_file = positional[0]
    # parse the --gen specs into {language: {option: value}}
    to_generate = {}
    if not opts.generate:
        raise ArgumentError('Please specify at least one language to '
            'generate.')
    for spec in opts.generate:
        pieces = spec.split(':')
        if not (0 < len(pieces) <= 2):
            raise ArgumentError('Incorrect language description.'
                'Syntax: language[:key1=val1[,key2,[key3=val3]]].')
        # no switches given: treat as an empty option string
        if len(pieces) == 1:
            pieces.append('')
        lang, switches = pieces
        if lang not in generator_registry.generator_factory_map:
            raise ArgumentError('Language {0} not defined.'.format(lang))
        to_generate[lang] = toDict(switches)
    # sanity checks
    if not os.path.isfile(thrift_file):
        raise ArgumentError('Thrift file not found.')
    if not os.access(thrift_file, os.R_OK):
        raise ArgumentError('Cannot read thrift file.')
    if not os.path.isdir(opts.outputDir):
        raise ArgumentError('Output directory is not a directory.')
    if not os.access(opts.outputDir, os.W_OK | os.X_OK):
        raise ArgumentError('Output directory is not writeable.')
    return dict(
        to_generate=to_generate,
        options=opts,
        thrift_file=thrift_file,
    )
class Configuration(object):
    """Holds compilation flags and drives code generation.

    Wraps the optparse options object; unknown attribute lookups fall
    through to it via __getattr__.
    """
    def __init__(self, opts):
        self._opts = opts
        if not opts.verbose:
            # kill the verbose function
            self.pverbose = self.pverbose_dummy
        # set to True to debug docstring parsing
        self.dump_docs = False
    # Nice little wrapper to the opts for easy access
    def __getattr__(self, key):
        return getattr(self._opts, key)
    def pverbose_dummy(self, msg):
        """No-op stand-in for pverbose when not in verbose mode."""
        pass
    def pverbose(self, msg):
        """Write a progress message to stderr (verbose mode only)."""
        sys.stderr.write(msg)
    def pwarning(self, level, msg):
        """Emit msg (plus newline) to stderr unless this warning level is
        suppressed by the --nowarn/--strict settings."""
        if self.warn < level:
            return
        # equivalent to the Python-2-only `print >>sys.stderr, msg`,
        # but valid on both Python 2 and 3
        sys.stderr.write(str(msg) + '\n')
    def generate(self, program, languages):
        'Oooohh, recursively generate program, hot!!'
        from thrift_compiler.frontend import t_program
        assert isinstance(program, t_program)
        assert isinstance(languages, dict)
        if self.recurse:
            for inc in program.includes:
                # Propagate output path from parent to child programs
                inc.out_path = program.out_path
                self.generate(inc, languages)
        # Generate code
        self.pverbose("Program: {0}\n".format(program.path))
        if self.dump_docs:
            frontend.dump_docstrings(program)
        # .items() works on both Python 2 and 3 (iteritems was py2-only)
        for language, flags in languages.items():
            for flag in flags:
                if flag not in generator_registry.generator_factory_map[
                        language].supported_flags:
                    self.pwarning(1, "Language {0} doesn't recognize flag {1}"
                                  .format(language, flag))
            g = generator_registry.get_generator(program, language, flags)
            if g is None:
                self.pwarning(1, "Unable to get a generator for "
                              "{0}:{1}".format(language, ','.join(flags)))
                # BUG FIX: previously fell through and called
                # generate_program() on None, raising AttributeError.
                continue
            # do it!
            g.generate_program()
    def process(self, params):
        """Hand params to the frontend, with self.generate as the callback."""
        def generate_wrapper(*args):
            self.generate(*args)
        frontend.process(params, generate_wrapper)
def main():
    """Entry point: parse the command line, then compile the thrift file."""
    parser = init_parser()
    try:
        params = parseParameters(parser, sys.argv[1:])
    except ArgumentError as e:
        print('Argument Error:', e)
        # print usage
        # parser.print_help()
        return
    # instantiate a Configuration that will hold the compilation flags
    conf = Configuration(params['options'])
    conf.process(params)
if __name__ == '__main__':
    main()
|
|
# session.py -- Session, SessionCallbacks classes
#
# Copyright (C) 2003 Manish Jethani (manish_jethani AT yahoo.com)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import select
import md5
from string import split, join
from binascii import hexlify, unhexlify
from time import time
from protocol import States, Lists, PrivacyModes
from error import Error, HttpError
from friend import Group, Friend, FriendList
from net import Connection, HttpProxyConnection
from command import Command, Msg, Png, Qry
from codec import url_codec
import protocol
import chat
class _Session: # common base for Session and Chat
    """Shared plumbing for MSNP connections: transaction ids, the
    outgoing command queue, and synchronous/asynchronous command helpers."""
    def __init__(self, callbacks):
        self.callbacks = callbacks
        self.transaction_id = 0
        self.http_proxy = None
        self.conn = None
        self.send_queue = []
    def _connect(self, server):
        # Tunnel through the HTTP proxy when one is configured.
        if self.http_proxy:
            conn = HttpProxyConnection(server, self.http_proxy)
        else:
            conn = Connection(server)
        conn.establish()
        return conn
    def _increment_transaction_id(self):
        self.transaction_id += 1
        return self.transaction_id
    def _send_cmd(self, cmd, conn):
        conn.send_data_line(str(cmd))
        self._increment_transaction_id()
    def _receive_cmd(self, conn):
        line = conn.receive_data_line()
        if line is None: # connection closed
            raise Error(1, 'Connection closed.')
        parsed = Command()
        parsed.parse(line)
        return parsed
    def _sync_command(self, cmd, conn):
        # synchronous command: send, then block for the server's reply
        self._send_cmd(cmd, conn)
        return self._receive_cmd(conn)
    def _async_command(self, cmd):
        # queue for later transmission by the main processing loop
        self.send_queue.append(cmd)
        self._increment_transaction_id()
class SessionCallbacks: # callback interface
    """Callback interface for MSN instant messaging session

    Clients provide an instance of this class (or a compatible object)
    and override whichever notifications they care about; every default
    implementation does nothing.
    """
    def ping(self):
        """Ping received from server."""
    def state_changed(self, state):
        """The user's presence state changed to `state`
        (an msnp.States member)."""
    def friend_online(self, state, passport_id, display_name):
        """Friend `passport_id` is online as `display_name`,
        in presence state `state` (an msnp.States member)."""
    def friend_offline(self, passport_id):
        """Friend `passport_id` went offline."""
    def friend_list_updated(self, friend_list):
        """The friend list (same object as msnp.Session.friend_list)
        has been updated."""
    def logged_out(self):
        """The user has been logged out."""
    def group_added(self, id, name):
        """Group `id`, named `name`, has been added."""
    def group_removed(self, id):
        """Group `id` has been removed."""
    def group_renamed(self, id, name):
        """Group `id` has been renamed to `name`."""
    def friend_added(self, list_, passport_id, display_name, group_id = -1):
        """Friend `passport_id` (`display_name`) was added to list `list_`,
        optionally inside group `group_id`.

        When list_ is msnp.Lists.REVERSE, the user was added to someone
        else's list; passport_id/display_name describe that someone.
        """
    def friend_removed(self, list_, passport_id, group_id = -1):
        """Friend `passport_id` was removed from list `list_`
        (and from group `group_id`, when given)."""
    def display_name_changed(self, display_name):
        """The user's own display name changed to `display_name`."""
    def display_name_received(self, passport_id, display_name):
        """Received the display name of friend `passport_id`."""
    def chat_started(self, chat):
        """A chat started; `chat` is the new msnp.Chat instance."""
class Session(_Session):
"""MSN instant messaging session
To get into an instant messaging session, an instance of msnp.Session must
be created. The session can be started by calling the login method. After
logging in, the process method must be called periodically to process the
server's commands.
"""
    class __ChatRequest:
        # Private record of an outgoing chat invitation: remembers which
        # friend was invited (entries live in self.chat_requests).
        def __init__(self, invitee):
            self.invitee = invitee
def __init__(self, callbacks = None, dispatch_server = None):
"""Constructor for msnp.Session
Keyword arguments:
callbacks -- callback interface
dispatch_server -- dispatch server host, port
"""
if callbacks == None:
callbacks = SessionCallbacks()
_Session.__init__(self, callbacks)
if dispatch_server == None:
self.dispatch_server = ('messenger.hotmail.com', 1863)
self.logged_in = 0
self.passport_id = None
self.display_name = None
self.chat_requests = {}
self.friend_list = FriendList()
self.active_chats = {}
    def __get_twn_ticket(self, twn_string, username, password):
        """Perform Passport ("Tweener"/TWN) authentication.

        twn_string is the challenge received from the notification server;
        returns the 'from-PP' ticket used to complete the USR login.
        """
        from net import HTTPSConnection
        from urllib import urlencode
        debuglevel = 0
        # step 1: get address of login server
        con = HTTPSConnection('nexus.passport.com',
            http_proxy = self.http_proxy)
        con.set_debuglevel(debuglevel)
        con.request('GET', '/rdr/pprdr.asp')
        res = con.getresponse()
        con.close()
        if res.status != 200:
            raise HttpError(0, 'Bad response from passport nexus server.',
                res.status, res.reason)
        hdr = res.getheader('PassportURLs')
        # header is a comma-separated list of key=value URLs
        url = {}
        for u in hdr.split(','):
            k, v = u.split('=')
            url[k] = v
        dalogin = url['DALogin'].split('/', 1)
        # step 2: get "ticket" to notification server
        while True:
            con = HTTPSConnection(dalogin[0], http_proxy = self.http_proxy)
            con.set_debuglevel(debuglevel)
            auth = 'Passport1.4 OrgVerb=GET,%s,%s,%s,%s' \
                % (urlencode({'OrgURL': 'http://messenger.msn.com'}),
                urlencode({'sign-in': username}),
                urlencode({'pwd': password}),
                twn_string)
            con.request('GET', '/%s' % (dalogin[1]), '',
                {'Authorization': auth})
            res = con.getresponse()
            con.close()
            # 3xx (integer division on Python 2): follow the redirect host
            if res.status / 100 == 3:
                dalogin[0] = res.getheader('Location').split('/', 3)[2]
            elif res.status != 200:
                raise HttpError(0, 'Bad response from login server.',
                    res.status, res.reason) # XXX handle redirection?
            else:
                break
        hdr = res.getheader('Authentication-Info') or \
            res.getheader('WWW-Authenticate')
        hdr = hdr[len('Passport1.4 '):]
        auth = {}
        for u in hdr.split(','):
            k, v = u.split('=', 1)
            # strip surrounding single quotes from quoted values
            if v[0] == '\'' and v[-1] == '\'':
                v = v[1:-1]
            auth[k] = v
        ticket = auth['from-PP']
        return ticket
# TODO code cleanup
def __handshake(self, server, username, password):
    """Run the MSNP8 VER/CVR/USR login sequence against *server*.

    Returns a (host, port) tuple when the server redirects us (XFR) to a
    notification server; otherwise completes TWN authentication and marks
    the session as logged in.
    Raises Error on any unexpected protocol response.
    """
    conn = self._connect(server)
    try:
        ver = Command('VER', self.transaction_id, ('MSNP8', 'CVR0'))
        resp = self._sync_command(ver, conn)
        if resp.cmd != 'VER' or resp.args[0] == '0':
            raise Error(0, 'Bad response for VER command.')
        cvr = Command('CVR', self.transaction_id,
                      ('0x0409', 'win', '4.10', 'i386', 'MSNMSGR', '6.0.0602',
                       'MSMSGS ', username))
        resp = self._sync_command(cvr, conn)
        if resp.cmd != 'CVR':
            raise Error(0, 'Bad response for CVR command.')
        usr = Command('USR', self.transaction_id, ('TWN', 'I', username))
        resp = self._sync_command(usr, conn)
        if resp.cmd != 'USR' and resp.cmd != 'XFR':
            raise Error(0, 'Bad response for USR command.')
        # for dispatch server, response is ver, cvr, xfr; for notification
        # server, it is ver, cvr, usr (or same as dispatch server, in some
        # cases)
        if resp.cmd == 'XFR':
            # BUG FIX: convert the port to int, consistent with
            # __process_xfr/__process_rng, so the caller can reconnect.
            host, port = resp.args[1].split(':', 1)
            return (host, int(port))
        elif resp.cmd == 'USR':
            twn_string = resp.args[2]
            ticket = self.__get_twn_ticket(twn_string, username, password)
            usr = Command('USR', self.transaction_id, ('TWN', 'S', ticket))
            resp = self._sync_command(usr, conn)
            if resp.cmd != 'USR':
                raise Error(int(resp.cmd), protocol.errors[resp.cmd])
            elif resp.args[0] != 'OK':
                raise Error(0, 'Bad response for USR command.')
            self.passport_id = resp.args[1]
            self.display_name = url_codec.decode(resp.args[2])
            self.logged_in = 1
    finally:
        # keep the connection only on successful login
        if not self.logged_in:
            conn.break_()
        else:
            self.conn = conn
def process(self, chats = False):
    """Process events

    Keyword arguments:
    chats -- whether or not to call msnp.Chat.process for all active
        chat sessions

    This method must be called periodically, preferably in the client
    application's main loop.
    """
    # One unit of work per iteration: prefer inbound data, otherwise
    # flush a single queued outbound command, otherwise stop.
    while self.logged_in:
        fd = self.conn.socket.fileno()
        # non-blocking readiness probe (timeout 0)
        r = select.select([fd], [], [], 0)
        if len(r[0]) > 0:
            buf = self.conn.receive_data_line()
            self.__process_command_buf(buf)
        elif len(self.send_queue) > 0:
            cmd = self.send_queue.pop(0)
            cmd.send(self.conn)
        else:
            break
    if chats:
        self.__process_active_chats()
def __process_active_chats(self):
    """Give every active chat session a chance to process its events."""
    # Plain loop instead of a throwaway list comprehension: process() is
    # called purely for its side effects.
    for chat_ in self.active_chats.values():
        chat_.process()
def __process_command_buf(self, buf):
    """Dispatch one raw command line from the notification server.

    MSG/QNG/OUT/RNG handlers consume the raw buffer themselves; all
    other commands are parsed into a Command first. Unknown commands
    (and the informational '218' reply) are silently ignored.
    """
    raw_handlers = {
        'MSG': self.__process_msg,
        'QNG': self.__process_qng,
        'OUT': self.__process_out,
        'RNG': self.__process_rng,
    }
    verb = buf[:3]
    raw_handler = raw_handlers.get(verb)
    if raw_handler is not None:
        raw_handler(buf)
        return
    parsed = Command()
    parsed.parse(buf)
    parsed_handlers = {
        'CHG': self.__process_chg,
        'ILN': self.__process_iln,
        'NLN': self.__process_nln,
        'FLN': self.__process_fln,
        'CHL': self.__process_chl,
        'LSG': self.__process_lsg,
        'LST': self.__process_lst,
        'SYN': self.__process_syn,
        'XFR': self.__process_xfr,
        'BLP': self.__process_blp,
        'GTC': self.__process_gtc,
        'ADG': self.__process_adg,
        'RMG': self.__process_rmg,
        'REG': self.__process_reg,
        'ADD': self.__process_add,
        'REM': self.__process_rem,
        'REA': self.__process_rea,
    }
    handler = parsed_handlers.get(parsed.cmd)
    if handler is not None:
        handler(parsed)
    # '218' and unrecognised commands fall through unhandled
    # TODO error handling
def __process_msg(self, buf):
    """Consume an inbound MSG payload from the connection.

    Notification-server messages are currently read and discarded.
    """
    message = Msg()
    message.parse(buf)
    message.receive(self.conn)
def __process_qng(self, buf):
    # QNG is the server's reply to our PNG keep-alive; notify the client.
    self.callbacks.ping()
def __process_out(self, buf):
    # Server terminated the session: drop the connection, clear the
    # logged-in flag, then tell the client.
    self.conn.break_()
    self.conn = None
    self.logged_in = 0
    self.callbacks.logged_out()
def __process_rng(self, buf):
    """Handle RNG: another user is inviting us to a switchboard chat.

    Connects to the offered switchboard and reports the new chat via
    the chat_started callback. A chat whose connection is already
    closed (Error code 1) is silently dropped.
    """
    fields = split(buf)
    session_id = fields[1]
    sb = split(fields[2], ':')
    server = (sb[0], int(sb[1]))
    sb_hash = fields[4]
    passport_id = fields[5]
    display_name = url_codec.decode(fields[6])
    try:
        chat_ = chat.Chat(self, server, sb_hash, passport_id,
                          display_name, session_id)
    except Error as e:
        if e.code == 1:  # connection closed
            return
        raise e
    self.active_chats[chat_.session_id] = chat_
    self.callbacks.chat_started(chat_)
def __process_chg(self, command):
    # Server acknowledged our CHG; relay the confirmed state.
    self.callbacks.state_changed(command.args[0])
def __process_iln(self, command):
    """Handle ILN: a friend's initial presence, sent right after login."""
    state = command.args[0]
    passport_id = command.args[1]
    display_name = url_codec.decode(command.args[2])
    known = self.friend_list.get_friend(passport_id)
    if known is None:
        # Friend list not populated yet (usual immediately after login):
        # stash the state so __process_lst can apply it later.
        self.friend_list.temp_iln[passport_id] = state
    else:
        known.state = state
        known.display_name = display_name
        self.__friend_list_updated()
    self.callbacks.friend_online(state, passport_id, display_name)
def __process_nln(self, command):
    """Handle NLN: a friend changed state while we are online."""
    state = command.args[0]
    passport_id = command.args[1]
    display_name = url_codec.decode(command.args[2])
    known = self.friend_list.get_friend(passport_id)
    if known is not None:
        known.display_name = display_name
        known.state = state
        self.__friend_list_updated()
    self.callbacks.friend_online(state, passport_id, display_name)
def __process_fln(self, command):
    """Handle FLN: a friend went offline."""
    passport_id = command.args[0]
    known = self.friend_list.get_friend(passport_id)
    if known is not None:
        known.state = States.OFFLINE
        self.__friend_list_updated()
    self.callbacks.friend_offline(passport_id)
def __process_chl(self, command):
    # Answer the server's CHL challenge with a QRY response so the
    # session is not dropped.
    qry = Qry(self.transaction_id, command.args[0])
    self._async_command(qry)
def __process_lsg(self, command):
    """Handle LSG: one group definition from the server's group list."""
    group_id = int(command.args[0])
    group_name = url_codec.decode(command.args[1])
    self.friend_list.groups[group_id] = Group(group_id, group_name)
    self.__friend_list_updated()
def __process_lst(self, command):
    """Handle LST: one friend entry of a SYN friend-list dump.

    The entry may belong to several lists at once (args[2] is a bitmask
    of list flags); forward-list entries also carry the comma-separated
    ids of the groups they belong to in args[3].
    """
    from protocol import list_flags
    passport_id = command.args[0]
    display_name = url_codec.decode(command.args[1])
    list_ = int(command.args[2])
    group_id = []
    if list_ & list_flags[Lists.FORWARD]:
        # str.split instead of the Python-2-only string.split function
        group_id = [int(i) for i in command.args[3].split(',')]
    groups = None
    if group_id:
        groups = [self.friend_list.groups[g_id] for g_id in group_id]
    friend = Friend(passport_id, display_name, groups = groups)
    for f in list_flags:
        if list_ & list_flags[f]:
            self.friend_list.lists[f][passport_id] = friend
    # If presence (ILN) for this friend arrived before the list entry,
    # apply the stashed state now. ("in" replaces Python-2-only has_key.)
    if passport_id in self.friend_list.temp_iln:
        friend.state = self.friend_list.temp_iln[passport_id]
    self.__friend_list_updated()
def __process_syn(self, command):
    """Handle SYN: record the server's friend-list version number."""
    self.friend_list.ver = int(command.args[0])
    self.__friend_list_updated()
def __process_xfr(self, command):
    """Handle XFR SB: the server granted us a switchboard for a chat we
    requested; connect to it and invite the originally requested friend."""
    parts = command.args[1].split(':')
    server = (parts[0], int(parts[1]))
    # the pending request is keyed by the transaction id of our XFR
    request = self.chat_requests[command.trn]
    chat_ = chat.Chat(self, server, command.args[3], self.passport_id,
                      self.display_name, None, request.invitee)
    self.active_chats[chat_.session_id] = chat_
    self.callbacks.chat_started(chat_)
def __process_blp(self, command):
    """Handle BLP: record the account's privacy mode."""
    self.friend_list.privacy_mode = command.args[0]
    self.__friend_list_updated()
def __process_gtc(self, command):
    """Handle GTC: record whether we want to be notified on being added."""
    self.friend_list.notify_on_add_ = (command.args[0] == 'A')
    self.__friend_list_updated()
def __process_adg(self, command):
    """Handle ADG: a group was added; update version, store, notify."""
    list_ver = int(command.args[0])
    group_name = url_codec.decode(command.args[1])
    group_id = int(command.args[2])
    self.friend_list.ver = list_ver
    self.friend_list.groups[group_id] = Group(group_id, group_name)
    self.__friend_list_updated()
    self.callbacks.group_added(group_id, group_name)
def __process_rmg(self, command):
    """Handle RMG: a group was removed; drop it locally and notify.

    The membership check guards against a stale local friend list.
    """
    self.friend_list.ver = int(command.args[0])
    group_id = int(command.args[1])  # renamed: don't shadow builtin id
    # "in" replaces dict.has_key, which no longer exists in Python 3
    if group_id in self.friend_list.groups:
        del self.friend_list.groups[group_id]
        self.__friend_list_updated()
    self.callbacks.group_removed(group_id)
def __process_reg(self, command):
    """Handle REG: a group was renamed; update it locally and notify.

    The membership check guards against a stale local friend list.
    """
    self.friend_list.ver = int(command.args[0])
    group_id = int(command.args[1])  # renamed: don't shadow builtin id
    group_name = url_codec.decode(command.args[2])
    # "in" replaces dict.has_key, which no longer exists in Python 3
    if group_id in self.friend_list.groups:
        self.friend_list.groups[group_id].name = group_name
        self.__friend_list_updated()
    self.callbacks.group_renamed(group_id, group_name)
def __process_add(self, command):
    """Handle ADD: a friend was added to one of our lists.

    For the forward list the command carries the target group id; the
    friend is associated with that group (added to it if already known,
    created inside it otherwise).
    """
    list_ = command.args[0]
    ver = int(command.args[1])
    passport_id = command.args[2]
    display_name = url_codec.decode(command.args[3])
    group = None
    if list_ == Lists.FORWARD:
        group = self.friend_list.groups[int(command.args[4])]
    self.friend_list.ver = ver
    friend = self.friend_list.get_friend(passport_id, list_)
    if friend != None and group != None:
        friend.add_to_group(group)
    else:
        if group != None:
            # BUG FIX: "(group)" is just a parenthesised expression, not
            # a tuple -- Friend expects a sequence of groups, so pass a
            # real one-element tuple.
            friend = Friend(passport_id, passport_id, (group,))
        else:
            friend = Friend(passport_id, passport_id)
        self.friend_list.lists[list_][passport_id] = friend
    self.__friend_list_updated()
    if group != None:
        self.callbacks.friend_added(list_, passport_id, display_name,
                                    group.get_id())
    else:
        self.callbacks.friend_added(list_, passport_id, display_name)
def __process_rem(self, command):
    # Handle REM: a friend was removed from one of our lists. For the
    # forward list args[3] names the group the friend was removed from;
    # a friend left with no groups is dropped from the list entirely.
    list_ = command.args[0]
    ver = int(command.args[1])
    passport_id = command.args[2]
    group = None
    if list_ == Lists.FORWARD:
        group = self.friend_list.groups[int(command.args[3])]
    self.friend_list.ver = ver
    friend = self.friend_list.get_friend(passport_id, list_)
    if friend != None: # this shouldn't be None, unless friend_list stale
        if group != None:
            friend.remove_from_group(group)
        if len(friend.get_groups()) == 0:
            del self.friend_list.lists[list_][passport_id]
        self.__friend_list_updated()
    if group != None:
        self.callbacks.friend_removed(list_, passport_id, group.get_id())
    else:
        self.callbacks.friend_removed(list_, passport_id)
def __process_rea(self, command):
    """Handle REA: a display name changed -- ours or a friend's."""
    ver = int(command.args[0])  # list version; parsed but unused here
    passport_id = command.args[1]
    display_name = url_codec.decode(command.args[2])
    if passport_id == self.passport_id:
        # our own name changed
        self.display_name = display_name
        self.callbacks.display_name_changed(display_name)
    else:
        self.callbacks.display_name_received(passport_id, display_name)
def __friend_list_updated(self):
    # Stamp the list with the current time and push it to the client.
    self.friend_list.updated = time()
    self.callbacks.friend_list_updated(self.friend_list)
def login(self, username, password, initial_state = States.ONLINE):
    """Login to MSN server

    Keyword arguments:
    username -- username
    password -- password
    initial_state -- initial state (default msnp.States.ONLINE)
    """
    if self.logged_in:
        return
    server = self.dispatch_server
    # The dispatch server normally redirects us (XFR) to a notification
    # server; keep handshaking with whatever server we are pointed at
    # until one of them completes the login.
    while not self.logged_in:
        server = self.__handshake(server, username, password)
    self.change_state(initial_state)
def ping(self):
    """Send a PNG keep-alive to the server (no-op when logged out)."""
    if self.logged_in:
        self._async_command(Png())
        self.process()
def logout(self):
    """Logout from server, leaving every active chat first."""
    if not self.logged_in:
        return
    # Plain loop instead of a throwaway list comprehension: leave() is
    # called purely for its side effects.
    for chat_ in self.active_chats.values():
        chat_.leave()
    self.process()
    self.conn.break_()
    self.conn = None
    self.logged_in = 0
def change_state(self, state):
    """Change user's state

    Keyword arguments:
    state -- new state (see msnp.States)
    """
    if self.logged_in:
        request = Command('CHG', self.transaction_id, (state,))
        self._async_command(request)
        self.process()
def sync_friend_list(self, ver = -1):
    """Synchronise the friend list with a fresh copy from the server.

    The list is updated asynchronously:
    msnp.SessionCallbacks.friend_list_updated will fire repeatedly after
    this call. Alternatively, poll msnp.FriendList.last_updated on a
    timer.

    Keyword arguments:
    ver -- friend list version (-1 means "use the cached version")
    """
    if not self.logged_in:
        return
    self.friend_list.dirty = False
    wanted = self.friend_list.ver if ver == -1 else ver
    request = Command('SYN', self.transaction_id, (str(wanted),))
    self._async_command(request)
    self.process()
def request_list(self, list_ = Lists.FORWARD):
    """Request a list from the server

    Keyword arguments:
    list_ -- type of list to request (see msnp.Lists)
    """
    if self.logged_in:
        self._async_command(Command('LST', self.transaction_id, (list_,)))
        self.process()
def request_groups(self):
    """Request the list of groups from the server."""
    if self.logged_in:
        self._async_command(Command('LSG', self.transaction_id, ()))
        self.process()
def change_privacy_mode(self, privacy_mode):
    """Change privacy mode

    Keyword arguments:
    privacy_mode -- new privacy mode (see msnp.PrivacyModes)
    """
    if self.logged_in:
        request = Command('BLP', self.transaction_id, (privacy_mode,))
        self._async_command(request)
        self.process()
def notify_on_add(self, notify):
    """Set whether the server should notify us when someone adds us."""
    if not self.logged_in:
        return
    # GTC takes 'A' (notify) or 'N' (don't notify)
    flag = 'A' if notify else 'N'
    self._async_command(Command('GTC', self.transaction_id, (flag,)))
    self.process()
def add_group(self, name):
    """Add a group

    Keyword arguments:
    name -- name of new group
    """
    if not self.logged_in:
        return
    request = Command('ADG', self.transaction_id,
                      (url_codec.encode(name), '0'))
    self._async_command(request)
    self.process()
def remove_group(self, id):
    """Remove a group

    Keyword arguments:
    id -- group ID
    """
    if self.logged_in:
        self._async_command(Command('RMG', self.transaction_id, (str(id),)))
        self.process()
def rename_group(self, id, name):
    """Rename a group

    Keyword arguments:
    id -- group ID
    name -- new name of group
    """
    if not self.logged_in:
        return
    request = Command('REG', self.transaction_id,
                      (str(id), url_codec.encode(name), '0'))
    self._async_command(request)
    self.process()
def add_friend(self, list_, passport_id, group_id = 0):
    """Add a friend

    Keyword arguments:
    list_ -- type of list (allow, block, etc.)
    passport_id -- string representing friend's passport ID
    group_id -- group ID of group to which friend is being added
    """
    # Guard against use before login, consistent with every other public
    # command method (add_group, change_state, ...).
    if not self.logged_in:
        return
    # only forward-list additions carry a group id
    if list_ == Lists.FORWARD:
        add = Command('ADD', self.transaction_id,
                      (list_, passport_id, passport_id, str(group_id)))
    else:
        add = Command('ADD', self.transaction_id,
                      (list_, passport_id, passport_id))
    self._async_command(add)
    self.process()
def remove_friend(self, list_, passport_id, group_id = 0):
    """Remove a friend

    Keyword arguments:
    list_ -- type of list (allow, block, etc.)
    passport_id -- string representing friend's passport ID
    group_id -- group ID of group from which friend is being removed
    """
    # Guard against use before login, consistent with every other public
    # command method (remove_group, change_state, ...).
    if not self.logged_in:
        return
    # only forward-list removals carry a group id
    if list_ == Lists.FORWARD:
        rem = Command('REM', self.transaction_id,
                      (list_, passport_id, str(group_id)))
    else:
        rem = Command('REM', self.transaction_id,
                      (list_, passport_id))
    self._async_command(rem)
    self.process()
def change_display_name(self, display_name):
    """Change user's display name

    Keyword arguments:
    display_name -- user's new display name
    """
    if not self.logged_in:
        return
    request = Command('REA', self.transaction_id,
                      (self.passport_id, url_codec.encode(display_name)))
    self._async_command(request)
    self.process()
def request_display_name(self, passport_id):
    """Request display name of a friend

    Keyword arguments:
    passport_id -- string representing friend's passport ID
    """
    if not self.logged_in:
        return
    # a REA for another passport id echoes back that user's name; the
    # placeholder name is ignored by the server
    request = Command('REA', self.transaction_id,
                      (passport_id, url_codec.encode('MJ++')))
    self._async_command(request)
    self.process()
def start_chat(self, invitee):
    """Start a chat

    Keyword arguments:
    invitee -- friend invited for chat
    """
    if not self.logged_in:
        return
    # Ask the server for a switchboard (XFR SB) and remember who we
    # wanted to invite, keyed by transaction id, so __process_xfr can
    # complete the chat setup when the switchboard is granted.
    xfr = Command('XFR', self.transaction_id, ('SB',))
    self._async_command(xfr)
    self.chat_requests[xfr.trn] = Session.__ChatRequest(invitee)
    self.process()
# vim: set ts=4 sw=4 et tw=79 :
|
|
import sys
sys.path.insert(0,".")
import unittest
import neuroml
import neuroml.writers as writers
import PyOpenWorm
from PyOpenWorm import *
import networkx
import rdflib
import rdflib as R
import pint as Q
import os
import subprocess as SP
import subprocess
import tempfile
import doctest
from glob import glob
USE_BINARY_DB = False
BINARY_DB = "OpenWormData/worm.db"
TEST_CONFIG = "tests/default_test.conf"
try:
import bsddb
has_bsddb = True
except ImportError:
has_bsddb = False
try:
import numpy
has_numpy = True
except ImportError:
has_numpy = False
namespaces = { "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }
def clear_graph(graph):
    """Drop every triple from *graph* via a SPARQL ``CLEAR ALL`` update."""
    graph.update("CLEAR ALL")
def make_graph(size=100):
    """Build an rdflib graph containing *size* distinct (s, p, o) triples."""
    g = R.Graph()
    base = "http://somehost.com/"
    for n in range(size):
        triple = (
            rdflib.URIRef(base + "s" + str(n)),
            rdflib.URIRef(base + "p" + str(n)),
            rdflib.URIRef(base + "o" + str(n)),
        )
        g.add(triple)
    return g
def delete_zodb_data_store(path):
    """Remove a ZODB FileStorage and its companion files.

    Deletes *path* itself plus the .index/.tmp/.lock files ZODB keeps
    alongside it, in that order.
    """
    for suffix in ('', '.index', '.tmp', '.lock'):
        os.unlink(path + suffix)
class DataIntegrityTest(unittest.TestCase):
    """ Integration tests that read from the database and ensure that basic
    queries have expected answers, as a way to keep data quality high.
    """

    @classmethod
    def setUpClass(cls):
        """Connect to the worm database and load the canonical neuron names."""
        import csv
        cls.neurons = []  # array that holds the names of the 302 neurons at class-level scope
        if not USE_BINARY_DB:
            PyOpenWorm.connect(conf=Data())  # Connect for integrity tests that use PyOpenWorm functions
            cls.g = PyOpenWorm.config('rdf.graph')  # declare class-level scope for the database
            cls.g.parse("OpenWormData/WormData.n3", format="n3")  # load in the database
        else:
            conf = Configure(**{ "rdf.source" : "ZODB", "rdf.store_conf" : BINARY_DB })
            PyOpenWorm.connect(conf=conf)
            cls.g = PyOpenWorm.config('rdf.graph')
        # grab the list of the names of the 302 neurons; the context manager
        # closes the file handle, which the previous version leaked
        with open('OpenWormData/aux_data/neurons.csv', 'r') as csvfile:
            reader = csv.reader(csvfile, delimiter=';', quotechar='|')
            for row in reader:
                if row and row[0]:  # Only saves valid neuron names
                    cls.neurons.append(row[0])

    @classmethod
    def tearDownClass(cls):
        PyOpenWorm.disconnect()

    def test_correct_neuron_number(self):
        """
        This test verifies that the worm model has exactly 302 neurons.
        """
        net = PyOpenWorm.Worm().get_neuron_network()
        self.assertEqual(302, len(set(net.neurons())))

    def test_TH_neuropeptide_neuron_list(self):
        """
        This test verifies that the set of neurons which contain the
        neuropeptide TH is correct (the list is given below).
        """
        neuronlist = PyOpenWorm.Neuron()
        neuronlist.neuropeptide("TH")
        thlist = set(x.name() for x in neuronlist.load())
        self.assertEqual(set(['CEPDR', 'PDER', 'CEPDL', 'PDEL', 'CEPVR', 'CEPVL']), thlist)

    def testUniqueNeuronNode(self):
        """
        There should one and only one unique RDF node for every neuron. If more than one is present for a given cell name,
        then our data is inconsistent. If there is not at least one present, then we are missing neurons.
        """
        results = {}
        for n in self.neurons:
            # Create a SPARQL query per neuron that looks for all RDF nodes that have text matching the name of the neuron
            qres = self.g.query('SELECT distinct ?n WHERE {?n ?t ?s . ?s ?p \"' + n + '\" } LIMIT 5')
            results[n] = (len(qres.result), [x[0] for x in qres.result])
        # If there is not only one result back, then there is more than one RDF node.
        more_than_one = [(x, results[x]) for x in results if results[x][0] > 1]
        less_than_one = [(x, results[x]) for x in results if results[x][0] < 1]
        self.assertEqual(0, len(more_than_one), "Some neurons have more than 1 node: " + "\n".join(str(x) for x in more_than_one))
        self.assertEqual(0, len(less_than_one), "Some neurons have no node: " + "\n".join(str(x) for x in less_than_one))

    def testNeuronsHaveTypes(self):
        """
        Every Neuron should have a non-blank type
        """
        results = set()
        for n in self.neurons:
            qres = self.g.query('SELECT ?v WHERE { ?s <http://openworm.org/entities/SimpleProperty/value> \"' + n + '\". '  # per node ?s that has the name of a neuron associated
                                + '?k <http://openworm.org/entities/Cell/name> ?s .'
                                + '?k <http://openworm.org/entities/Neuron/type> ?o .'  # look up its listed type ?o
                                + '?o <http://openworm.org/entities/SimpleProperty/value> ?v } '  # for that type ?o, get its property ?tp and its value ?v
                                )
            for x in qres:
                v = x[0]
                if isinstance(v, R.Literal):
                    results.add(n)
        # NOTE: Neurons ALNL, CANL, CANR, ALNR have unknown function and type
        self.assertEqual(len(results), len(self.neurons) - 4, "Some neurons are missing a type: {}".format(set(self.neurons) - results))

    def test_neuron_GJ_degree(self):
        """ Get the number of gap junctions from a networkx representation """
        self.assertEqual(PyOpenWorm.Neuron(name='AVAL').GJ_degree(), 40)

    def test_neuron_Syn_degree(self):
        """ Get the number of chemical synapses from a networkx representation """
        self.assertEqual(PyOpenWorm.Neuron(name='AVAL').Syn_degree(), 90)

    @unittest.skip("have not yet defined asserts")
    def testWhatNodesGetTypeInfo(self):
        qres = self.g.query('SELECT ?o ?p ?s WHERE {'
                            + '?o <http://openworm.org/entities/SimpleProperty/value> "motor". '
                            '?o ?p ?s} '  # for that type ?o, get its value ?v
                            + 'LIMIT 10')
        for row in qres.result:
            # print() function form: the old "print row" statement is a
            # SyntaxError under Python 3
            print(row)

    def test_compare_to_xls(self):
        """ Compare the PyOpenWorm connections to the data in the spreadsheet """
        SAMPLE_CELL = 'AVAL'
        xls_conns = []
        pow_conns = []

        # QUERY TO GET ALL CONNECTIONS WHERE SAMPLE_CELL IS ON THE PRE SIDE
        qres = self.g.query("""SELECT ?post_name ?type (STR(?num) AS ?numval) WHERE {
            #############################################################
            # Find connections that have the ?pre_name as our passed in value
            #############################################################
            ?pre_namenode <http://openworm.org/entities/SimpleProperty/value> \'"""
            + SAMPLE_CELL +
            """\'.
            ?pre_cell <http://openworm.org/entities/Cell/name> ?pre_namenode.
            ?pre <http://openworm.org/entities/SimpleProperty/value> ?pre_cell.
            ?conn <http://openworm.org/entities/Connection/pre_cell> ?pre.
            #############################################################
            # Find all the cells that are on the post side of those
            #  connections and bind their names to ?post_name
            #############################################################
            ?conn <http://openworm.org/entities/Connection/post_cell> ?post.
            ?post <http://openworm.org/entities/SimpleProperty/value> ?post_cell.
            ?post_cell <http://openworm.org/entities/Cell/name> ?post_namenode.
            ?post_namenode <http://openworm.org/entities/SimpleProperty/value> ?post_name.
            ############################################################
            # Go find the type of the connection and bind to ?type
            #############################################################
            ?conn <http://openworm.org/entities/Connection/syntype> ?syntype_node.
            ?syntype_node <http://openworm.org/entities/SimpleProperty/value> ?type.
            ############################################################
            # Go find the number of the connection and bind to ?num
            ############################################################
            ?conn <http://openworm.org/entities/Connection/number> ?number_node.
            ?number_node <http://openworm.org/entities/SimpleProperty/value> ?num.
            ############################################################
            # Filter out any ?pre_names or ?post_names that aren't literals
            ############################################################
            FILTER(isLiteral(?post_name))}""")

        def ff(x):
            # rdflib terms -> plain strings for comparison
            return str(x.value)

        for line in qres.result:
            t = list(map(ff, line))
            t.insert(0, SAMPLE_CELL)  # Insert sample cell name into the result set after the fact
            pow_conns.append(t)

        # QUERY TO GET ALL CONNECTIONS WHERE SAMPLE_CELL IS ON THE *POST* SIDE
        qres = self.g.query("""SELECT ?pre_name ?type (STR(?num) AS ?numval) WHERE {
            #############################################################
            # Find connections that have the ?post_name as our passed in value
            #############################################################
            ?post_namenode <http://openworm.org/entities/SimpleProperty/value> \'"""
            + SAMPLE_CELL +
            """\'.
            ?post_cell <http://openworm.org/entities/Cell/name> ?post_namenode.
            ?post <http://openworm.org/entities/SimpleProperty/value> ?post_cell.
            ?conn <http://openworm.org/entities/Connection/post_cell> ?post.
            #############################################################
            # Find all the cells that are on the pre side of those
            #  connections and bind their names to ?pre_name
            #############################################################
            ?conn <http://openworm.org/entities/Connection/pre_cell> ?pre.
            ?pre <http://openworm.org/entities/SimpleProperty/value> ?pre_cell.
            ?pre_cell <http://openworm.org/entities/Cell/name> ?pre_namenode.
            ?pre_namenode <http://openworm.org/entities/SimpleProperty/value> ?pre_name.
            ############################################################
            # Go find the type of the connection and bind to ?type
            #############################################################
            ?conn <http://openworm.org/entities/Connection/syntype> ?syntype_node.
            ?syntype_node <http://openworm.org/entities/SimpleProperty/value> ?type.
            ############################################################
            # Go find the number of the connection and bind to ?num
            ############################################################
            ?conn <http://openworm.org/entities/Connection/number> ?number_node.
            ?number_node <http://openworm.org/entities/SimpleProperty/value> ?num.
            ############################################################
            # Filter out any ?pre_names or ?post_names that aren't literals
            ############################################################
            FILTER(isLiteral(?pre_name))}""")

        for line in qres.result:
            t = list(map(ff, line))
            t.insert(1, SAMPLE_CELL)  # Insert sample cell name into the result set after the fact
            pow_conns.append(t)

        # get connections from the sheet
        import re
        search_string = re.compile(r'\w+[0]+[1-9]+')
        replace_string = re.compile(r'[0]+')

        def normalize(name):
            # normalize neuron names to match those used at other points
            # see #137 for elaboration
            # if there are zeroes in the middle of a name, remove them
            if re.match(search_string, name):
                name = replace_string.sub('', name)
            return name

        import xlrd
        combining_dict = {}
        # 's' is the workbook sheet
        s = xlrd.open_workbook('OpenWormData/aux_data/NeuronConnect.xls').sheets()[0]
        for row in range(1, s.nrows):
            if s.cell(row, 2).value in ('S', 'Sp', 'EJ') and SAMPLE_CELL in [s.cell(row, 0).value, s.cell(row, 1).value]:
                # we're not going to include 'receives' ('r', 'rp') since they're just the inverse of 'sends'
                # also omitting 'nmj' for the time being (no model in db)
                pre = normalize(s.cell(row, 0).value)
                post = normalize(s.cell(row, 1).value)
                num = int(s.cell(row, 3).value)
                if s.cell(row, 2).value == 'EJ':
                    syntype = 'gapJunction'
                elif s.cell(row, 2).value in ('S', 'Sp'):
                    syntype = 'send'
                # add them to a dict to make sure sends ('s') and send-polys ('sp') are summed.
                # keying by connection pairs as a string (e.g. 'sdql,aval,send').
                # values are lists of the form [pre, post, syntype, number].
                string_key = '{},{},{}'.format(pre, post, syntype)
                if string_key in combining_dict:
                    # if key already there, add to number
                    num += int(combining_dict[string_key][3])
                combining_dict[string_key] = [str(pre), str(post), str(syntype), str(int(num))]

        xls_conns = combining_dict.values()
        # assert that these two sorted lists are the same
        # using sorted lists because Set() removes multiples
        self.maxDiff = None
        self.assertEqual(sorted(pow_conns), sorted(xls_conns))
|
|
##
# Copyright (c) 2010-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##
from caldavclientlibrary.protocol.caldav.definitions import caldavxml
from caldavclientlibrary.protocol.caldav.definitions import csxml
from caldavclientlibrary.protocol.url import URL
from caldavclientlibrary.protocol.webdav.definitions import davxml
from contrib.performance.httpclient import MemoryConsumer, StringProducer
from contrib.performance.loadtest.ical import XMPPPush, Event, Calendar, OS_X_10_11, NotificationCollection
from contrib.performance.loadtest.sim import _DirectoryRecord
from pycalendar.datetime import DateTime
from pycalendar.timezone import Timezone
from twisted.internet.defer import Deferred, inlineCallbacks, returnValue, succeed
from twisted.internet.protocol import ProtocolToConsumerAdapter
from twisted.python.failure import Failure
from twisted.trial.unittest import TestCase
from twisted.web.client import ResponseDone
from twisted.web.http import OK, NO_CONTENT, CREATED, MULTI_STATUS, NOT_FOUND, FORBIDDEN
from twisted.web.http_headers import Headers
from twistedcaldav.ical import Component
from twistedcaldav.timezones import TimezoneCache
import json
import os
EVENT_UID = 'D94F247D-7433-43AF-B84B-ADD684D023B0'
EVENT = """\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Apple Inc.//iCal 4.0.3//EN
CALSCALE:GREGORIAN
BEGIN:VEVENT
CREATED:20101018T155454Z
UID:%(UID)s
DTEND;TZID=America/New_York:20101028T130000
ATTENDEE;CN="User 03";CUTYPE=INDIVIDUAL;EMAIL="user03@example.com";PARTS
TAT=NEEDS-ACTION;ROLE=REQ-PARTICIPANT;RSVP=TRUE:mailto:user03@example.co
m
ATTENDEE;CN="User 01";CUTYPE=INDIVIDUAL;PARTSTAT=ACCEPTED:mailto:user01@
example.com
TRANSP:OPAQUE
SUMMARY:Attended Event
DTSTART;TZID=America/New_York:20101028T120000
DTSTAMP:20101018T155513Z
ORGANIZER;CN="User 01":mailto:user01@example.com
SEQUENCE:3
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n") % {'UID': EVENT_UID}
EVENT_INVITE = """\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Apple Inc.//iCal 4.0.3//EN
CALSCALE:GREGORIAN
BEGIN:VTIMEZONE
TZID:America/New_York
X-LIC-LOCATION:America/New_York
BEGIN:STANDARD
DTSTART:18831118T120358
RDATE:18831118T120358
TZNAME:EST
TZOFFSETFROM:-045602
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19180331T020000
RRULE:FREQ=YEARLY;UNTIL=19190330T070000Z;BYDAY=-1SU;BYMONTH=3
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:19181027T020000
RRULE:FREQ=YEARLY;UNTIL=19191026T060000Z;BYDAY=-1SU;BYMONTH=10
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:STANDARD
DTSTART:19200101T000000
RDATE:19200101T000000
RDATE:19420101T000000
RDATE:19460101T000000
RDATE:19670101T000000
TZNAME:EST
TZOFFSETFROM:-0500
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19200328T020000
RDATE:19200328T020000
RDATE:19740106T020000
RDATE:19750223T020000
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:19201031T020000
RDATE:19201031T020000
RDATE:19450930T020000
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19210424T020000
RRULE:FREQ=YEARLY;UNTIL=19410427T070000Z;BYDAY=-1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:19210925T020000
RRULE:FREQ=YEARLY;UNTIL=19410928T060000Z;BYDAY=-1SU;BYMONTH=9
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19420209T020000
RDATE:19420209T020000
TZNAME:EWT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:DAYLIGHT
DTSTART:19450814T190000
RDATE:19450814T190000
TZNAME:EPT
TZOFFSETFROM:-0400
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:DAYLIGHT
DTSTART:19460428T020000
RRULE:FREQ=YEARLY;UNTIL=19660424T070000Z;BYDAY=-1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:19460929T020000
RRULE:FREQ=YEARLY;UNTIL=19540926T060000Z;BYDAY=-1SU;BYMONTH=9
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:STANDARD
DTSTART:19551030T020000
RRULE:FREQ=YEARLY;UNTIL=19661030T060000Z;BYDAY=-1SU;BYMONTH=10
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19670430T020000
RRULE:FREQ=YEARLY;UNTIL=19730429T070000Z;BYDAY=-1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:19671029T020000
RRULE:FREQ=YEARLY;UNTIL=20061029T060000Z;BYDAY=-1SU;BYMONTH=10
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19760425T020000
RRULE:FREQ=YEARLY;UNTIL=19860427T070000Z;BYDAY=-1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:DAYLIGHT
DTSTART:19870405T020000
RRULE:FREQ=YEARLY;UNTIL=20060402T070000Z;BYDAY=1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:DAYLIGHT
DTSTART:20070311T020000
RRULE:FREQ=YEARLY;BYDAY=2SU;BYMONTH=3
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:20071104T020000
RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=11
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20101018T155454Z
UID:%(UID)s
DTEND;TZID=America/New_York:20101028T130000
ATTENDEE;CN="User 02";CUTYPE=INDIVIDUAL;EMAIL="user02@example.com";PARTS
TAT=NEEDS-ACTION;ROLE=REQ-PARTICIPANT;RSVP=TRUE:mailto:user02@example.co
m
ATTENDEE;CN="User 03";CUTYPE=INDIVIDUAL;EMAIL="user03@example.com";PARTS
TAT=NEEDS-ACTION;ROLE=REQ-PARTICIPANT;RSVP=TRUE:mailto:user03@example.co
m
ATTENDEE;CN="User 01";CUTYPE=INDIVIDUAL;PARTSTAT=ACCEPTED:urn:uuid:user01
TRANSP:OPAQUE
SUMMARY:Attended Event
DTSTART;TZID=America/New_York:20101028T120000
DTSTAMP:20101018T155513Z
ORGANIZER;CN="User 01":urn:uuid:user01
SEQUENCE:3
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n") % {'UID': EVENT_UID}
EVENT_AND_TIMEZONE = """\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Apple Inc.//iCal 4.0.3//EN
CALSCALE:GREGORIAN
BEGIN:VTIMEZONE
TZID:America/New_York
X-LIC-LOCATION:America/New_York
BEGIN:STANDARD
DTSTART:18831118T120358
RDATE:18831118T120358
TZNAME:EST
TZOFFSETFROM:-045602
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19180331T020000
RRULE:FREQ=YEARLY;UNTIL=19190330T070000Z;BYDAY=-1SU;BYMONTH=3
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:19181027T020000
RRULE:FREQ=YEARLY;UNTIL=19191026T060000Z;BYDAY=-1SU;BYMONTH=10
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:STANDARD
DTSTART:19200101T000000
RDATE:19200101T000000
RDATE:19420101T000000
RDATE:19460101T000000
RDATE:19670101T000000
TZNAME:EST
TZOFFSETFROM:-0500
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19200328T020000
RDATE:19200328T020000
RDATE:19740106T020000
RDATE:19750223T020000
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:19201031T020000
RDATE:19201031T020000
RDATE:19450930T020000
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19210424T020000
RRULE:FREQ=YEARLY;UNTIL=19410427T070000Z;BYDAY=-1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:19210925T020000
RRULE:FREQ=YEARLY;UNTIL=19410928T060000Z;BYDAY=-1SU;BYMONTH=9
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19420209T020000
RDATE:19420209T020000
TZNAME:EWT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:DAYLIGHT
DTSTART:19450814T190000
RDATE:19450814T190000
TZNAME:EPT
TZOFFSETFROM:-0400
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:DAYLIGHT
DTSTART:19460428T020000
RRULE:FREQ=YEARLY;UNTIL=19660424T070000Z;BYDAY=-1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:19460929T020000
RRULE:FREQ=YEARLY;UNTIL=19540926T060000Z;BYDAY=-1SU;BYMONTH=9
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:STANDARD
DTSTART:19551030T020000
RRULE:FREQ=YEARLY;UNTIL=19661030T060000Z;BYDAY=-1SU;BYMONTH=10
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19670430T020000
RRULE:FREQ=YEARLY;UNTIL=19730429T070000Z;BYDAY=-1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:19671029T020000
RRULE:FREQ=YEARLY;UNTIL=20061029T060000Z;BYDAY=-1SU;BYMONTH=10
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19760425T020000
RRULE:FREQ=YEARLY;UNTIL=19860427T070000Z;BYDAY=-1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:DAYLIGHT
DTSTART:19870405T020000
RRULE:FREQ=YEARLY;UNTIL=20060402T070000Z;BYDAY=1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:DAYLIGHT
DTSTART:20070311T020000
RRULE:FREQ=YEARLY;BYDAY=2SU;BYMONTH=3
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:20071104T020000
RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=11
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20101018T155454Z
UID:%(UID)s
DTEND;TZID=America/New_York:20101028T130000
ATTENDEE;CN="User 03";CUTYPE=INDIVIDUAL;EMAIL="user03@example.com";PARTS
TAT=NEEDS-ACTION;ROLE=REQ-PARTICIPANT;RSVP=TRUE:mailto:user03@example.co
m
ATTENDEE;CN="User 01";CUTYPE=INDIVIDUAL;PARTSTAT=ACCEPTED:mailto:user01@
example.com
TRANSP:OPAQUE
SUMMARY:Attended Event
DTSTART;TZID=America/New_York:20101028T120000
DTSTAMP:20101018T155513Z
ORGANIZER;CN="User 01":mailto:user01@example.com
SEQUENCE:3
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n") % {'UID': EVENT_UID}
class EventTests(TestCase):
    """
    Unit tests covering L{Event.getUID}.
    """

    def test_uid(self):
        """
        An L{Event} constructed with a parsed C{vevent} component reports
        that component's UID via L{Event.getUID}.
        """
        component = Component.fromString(EVENT)
        event = Event(None, u'/foo/bar', u'etag', component)
        self.assertEqual(EVENT_UID, event.getUID())

    def test_withoutUID(self):
        """
        An L{Event} created without a C{vevent} component reports C{None}
        from L{Event.getUID}.
        """
        event = Event(None, u'/bar/baz', u'etag')
        self.assertIdentical(None, event.getUID())
# Canned XML body of a PROPFIND response for a principal resource
# (/principals/__uids__/user01/), as a CalDAV server would return it.
# The first propstat carries the found properties (HTTP 200); the second
# lists properties the server does not have (HTTP 404).  Used below to
# exercise multi-status parsing.
PRINCIPAL_PROPFIND_RESPONSE = """\
<?xml version='1.0' encoding='UTF-8'?>
<multistatus xmlns='DAV:'>
<response>
<href>/principals/__uids__/user01/</href>
<propstat>
<prop>
<principal-collection-set>
<href>/principals/</href>
</principal-collection-set>
<calendar-home-set xmlns='urn:ietf:params:xml:ns:caldav'>
<href xmlns='DAV:'>/calendars/__uids__/user01</href>
</calendar-home-set>
<calendar-user-address-set xmlns='urn:ietf:params:xml:ns:caldav'>
<href xmlns='DAV:'>/principals/__uids__/user01/</href>
<href xmlns='DAV:'>/principals/users/user01/</href>
</calendar-user-address-set>
<schedule-inbox-URL xmlns='urn:ietf:params:xml:ns:caldav'>
<href xmlns='DAV:'>/calendars/__uids__/user01/inbox/</href>
</schedule-inbox-URL>
<schedule-outbox-URL xmlns='urn:ietf:params:xml:ns:caldav'>
<href xmlns='DAV:'>/calendars/__uids__/user01/outbox/</href>
</schedule-outbox-URL>
<dropbox-home-URL xmlns='http://calendarserver.org/ns/'>
<href xmlns='DAV:'>/calendars/__uids__/user01/dropbox/</href>
</dropbox-home-URL>
<notification-URL xmlns='http://calendarserver.org/ns/'>
<href xmlns='DAV:'>/calendars/__uids__/user01/notification/</href>
</notification-URL>
<displayname>User 01</displayname>
<principal-URL>
<href>/principals/__uids__/user01/</href>
</principal-URL>
<supported-report-set>
<supported-report>
<report>
<acl-principal-prop-set/>
</report>
</supported-report>
<supported-report>
<report>
<principal-match/>
</report>
</supported-report>
<supported-report>
<report>
<principal-property-search/>
</report>
</supported-report>
<supported-report>
<report>
<expand-property/>
</report>
</supported-report>
</supported-report-set>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
<propstat>
<prop>
<xmpp-uri xmlns='http://calendarserver.org/ns/'/>
</prop>
<status>HTTP/1.1 404 Not Found</status>
</propstat>
</response>
</multistatus>
"""
_CALENDAR_HOME_PROPFIND_RESPONSE_TEMPLATE = """\
<?xml version='1.0' encoding='UTF-8'?>
<multistatus xmlns='DAV:'>
<response>
<href>/calendars/__uids__/user01/</href>
<propstat>
<prop>
%(xmpp)s
<displayname>User 01</displayname>
<resourcetype>
<collection/>
</resourcetype>
<owner>
<href>/principals/__uids__/user01/</href>
</owner>
<quota-available-bytes>104855434</quota-available-bytes>
<quota-used-bytes>2166</quota-used-bytes>
<current-user-privilege-set>
<privilege>
<all/>
</privilege>
<privilege>
<read/>
</privilege>
<privilege>
<read-free-busy xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
<privilege>
<write/>
</privilege>
<privilege>
<write-properties/>
</privilege>
<privilege>
<write-content/>
</privilege>
<privilege>
<bind/>
</privilege>
<privilege>
<unbind/>
</privilege>
<privilege>
<unlock/>
</privilege>
<privilege>
<read-acl/>
</privilege>
<privilege>
<write-acl/>
</privilege>
<privilege>
<read-current-user-privilege-set/>
</privilege>
</current-user-privilege-set>
<push-transports xmlns='http://calendarserver.org/ns/'/>
<pushkey xmlns='http://calendarserver.org/ns/'>/Some/Unique/Value</pushkey>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
<propstat>
<prop>
<getctag xmlns='http://calendarserver.org/ns/'/>
<calendar-description xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-color xmlns='http://apple.com/ns/ical/'/>
<calendar-order xmlns='http://apple.com/ns/ical/'/>
<supported-calendar-component-set xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-free-busy-set xmlns='urn:ietf:params:xml:ns:caldav'/>
<schedule-calendar-transp xmlns='urn:ietf:params:xml:ns:caldav'/>
<schedule-default-calendar-URL xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-timezone xmlns='urn:ietf:params:xml:ns:caldav'/>
<source xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-alarms xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-attachments xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-todos xmlns='http://calendarserver.org/ns/'/>
<refreshrate xmlns='http://apple.com/ns/ical/'/>
<publish-url xmlns='http://calendarserver.org/ns/'/>
</prop>
<status>HTTP/1.1 404 Not Found</status>
</propstat>
</response>
<response>
<href>/calendars/__uids__/user01/notification/</href>
<propstat>
<prop>
<sync-token xmlns='DAV:'>SYNCTOKEN3</sync-token>
<displayname>notification</displayname>
<resourcetype>
<collection/>
<notification xmlns='http://calendarserver.org/ns/'/>
</resourcetype>
<owner>
<href>/principals/__uids__/user01/</href>
</owner>
<quota-available-bytes>104855434</quota-available-bytes>
<quota-used-bytes>2166</quota-used-bytes>
<current-user-privilege-set>
<privilege>
<all/>
</privilege>
<privilege>
<read/>
</privilege>
<privilege>
<read-free-busy xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
<privilege>
<write/>
</privilege>
<privilege>
<write-properties/>
</privilege>
<privilege>
<write-content/>
</privilege>
<privilege>
<bind/>
</privilege>
<privilege>
<unbind/>
</privilege>
<privilege>
<unlock/>
</privilege>
<privilege>
<read-acl/>
</privilege>
<privilege>
<write-acl/>
</privilege>
<privilege>
<read-current-user-privilege-set/>
</privilege>
</current-user-privilege-set>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
<propstat>
<prop>
<xmpp-server xmlns='http://calendarserver.org/ns/'/>
<xmpp-uri xmlns='http://calendarserver.org/ns/'/>
<getctag xmlns='http://calendarserver.org/ns/'/>
<calendar-description xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-color xmlns='http://apple.com/ns/ical/'/>
<calendar-order xmlns='http://apple.com/ns/ical/'/>
<supported-calendar-component-set xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-free-busy-set xmlns='urn:ietf:params:xml:ns:caldav'/>
<schedule-calendar-transp xmlns='urn:ietf:params:xml:ns:caldav'/>
<schedule-default-calendar-URL xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-timezone xmlns='urn:ietf:params:xml:ns:caldav'/>
<source xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-alarms xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-attachments xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-todos xmlns='http://calendarserver.org/ns/'/>
<refreshrate xmlns='http://apple.com/ns/ical/'/>
<push-transports xmlns='http://calendarserver.org/ns/'/>
<pushkey xmlns='http://calendarserver.org/ns/'/>
<publish-url xmlns='http://calendarserver.org/ns/'/>
</prop>
<status>HTTP/1.1 404 Not Found</status>
</propstat>
</response>
<response>
<href>/calendars/__uids__/user01/dropbox/</href>
<propstat>
<prop>
<resourcetype>
<collection/>
<dropbox-home xmlns='http://calendarserver.org/ns/'/>
</resourcetype>
<owner>
<href>/principals/__uids__/user01/</href>
</owner>
<quota-available-bytes>104855434</quota-available-bytes>
<quota-used-bytes>2166</quota-used-bytes>
<current-user-privilege-set>
<privilege>
<all/>
</privilege>
<privilege>
<read/>
</privilege>
<privilege>
<read-free-busy xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
<privilege>
<write/>
</privilege>
<privilege>
<write-properties/>
</privilege>
<privilege>
<write-content/>
</privilege>
<privilege>
<bind/>
</privilege>
<privilege>
<unbind/>
</privilege>
<privilege>
<unlock/>
</privilege>
<privilege>
<read-acl/>
</privilege>
<privilege>
<write-acl/>
</privilege>
<privilege>
<read-current-user-privilege-set/>
</privilege>
</current-user-privilege-set>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
<propstat>
<prop>
<xmpp-server xmlns='http://calendarserver.org/ns/'/>
<xmpp-uri xmlns='http://calendarserver.org/ns/'/>
<getctag xmlns='http://calendarserver.org/ns/'/>
<displayname/>
<calendar-description xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-color xmlns='http://apple.com/ns/ical/'/>
<calendar-order xmlns='http://apple.com/ns/ical/'/>
<supported-calendar-component-set xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-free-busy-set xmlns='urn:ietf:params:xml:ns:caldav'/>
<schedule-calendar-transp xmlns='urn:ietf:params:xml:ns:caldav'/>
<schedule-default-calendar-URL xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-timezone xmlns='urn:ietf:params:xml:ns:caldav'/>
<source xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-alarms xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-attachments xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-todos xmlns='http://calendarserver.org/ns/'/>
<refreshrate xmlns='http://apple.com/ns/ical/'/>
<push-transports xmlns='http://calendarserver.org/ns/'/>
<pushkey xmlns='http://calendarserver.org/ns/'/>
<publish-url xmlns='http://calendarserver.org/ns/'/>
</prop>
<status>HTTP/1.1 404 Not Found</status>
</propstat>
</response>
<response>
<href>/calendars/__uids__/user01/calendar/</href>
<propstat>
<prop>
<getctag xmlns='http://calendarserver.org/ns/'>c2696540-4c4c-4a31-adaf-c99630776828#3</getctag>
<sync-token xmlns='DAV:'>SYNCTOKEN1</sync-token>
<displayname>calendar</displayname>
<calendar-color xmlns='http://apple.com/ns/ical/'>#0252D4FF</calendar-color>
<calendar-order xmlns='http://apple.com/ns/ical/'>1</calendar-order>
<supported-calendar-component-set xmlns='urn:ietf:params:xml:ns:caldav'>
<comp name='VEVENT'/>
<comp name='VTODO'/>
<comp name='VTIMEZONE'/>
<comp name='VFREEBUSY'/>
</supported-calendar-component-set>
<resourcetype>
<collection/>
<calendar xmlns='urn:ietf:params:xml:ns:caldav'/>
</resourcetype>
<owner>
<href>/principals/__uids__/user01/</href>
</owner>
<schedule-calendar-transp xmlns='urn:ietf:params:xml:ns:caldav'>
<opaque/>
</schedule-calendar-transp>
<quota-available-bytes>104855434</quota-available-bytes>
<quota-used-bytes>2166</quota-used-bytes>
<calendar-timezone xmlns='urn:ietf:params:xml:ns:caldav'><![CDATA[BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Apple Inc.//iCal 4.0.3//EN
CALSCALE:GREGORIAN
BEGIN:VTIMEZONE
TZID:America/New_York
BEGIN:DAYLIGHT
TZOFFSETFROM:-0500
RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=2SU
DTSTART:20070311T020000
TZNAME:EDT
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0400
RRULE:FREQ=YEARLY;BYMONTH=11;BYDAY=1SU
DTSTART:20071104T020000
TZNAME:EST
TZOFFSETTO:-0500
END:STANDARD
END:VTIMEZONE
END:VCALENDAR
]]></calendar-timezone>
<current-user-privilege-set>
<privilege>
<all/>
</privilege>
<privilege>
<read/>
</privilege>
<privilege>
<read-free-busy xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
<privilege>
<write/>
</privilege>
<privilege>
<write-properties/>
</privilege>
<privilege>
<write-content/>
</privilege>
<privilege>
<bind/>
</privilege>
<privilege>
<unbind/>
</privilege>
<privilege>
<unlock/>
</privilege>
<privilege>
<read-acl/>
</privilege>
<privilege>
<write-acl/>
</privilege>
<privilege>
<read-current-user-privilege-set/>
</privilege>
</current-user-privilege-set>
<pushkey xmlns='http://calendarserver.org/ns/'/>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
<propstat>
<prop>
<xmpp-server xmlns='http://calendarserver.org/ns/'/>
<xmpp-uri xmlns='http://calendarserver.org/ns/'/>
<calendar-description xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-free-busy-set xmlns='urn:ietf:params:xml:ns:caldav'/>
<schedule-default-calendar-URL xmlns='urn:ietf:params:xml:ns:caldav'/>
<source xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-alarms xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-attachments xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-todos xmlns='http://calendarserver.org/ns/'/>
<refreshrate xmlns='http://apple.com/ns/ical/'/>
<push-transports xmlns='http://calendarserver.org/ns/'/>
<publish-url xmlns='http://calendarserver.org/ns/'/>
</prop>
<status>HTTP/1.1 404 Not Found</status>
</propstat>
</response>
<response>
<href>/calendars/__uids__/user01/outbox/</href>
<propstat>
<prop>
<supported-calendar-component-set xmlns='urn:ietf:params:xml:ns:caldav'>
<comp name='VEVENT'/>
<comp name='VTODO'/>
<comp name='VTIMEZONE'/>
<comp name='VFREEBUSY'/>
</supported-calendar-component-set>
<resourcetype>
<collection/>
<schedule-outbox xmlns='urn:ietf:params:xml:ns:caldav'/>
</resourcetype>
<owner>
<href>/principals/__uids__/user01/</href>
</owner>
<quota-available-bytes>104855434</quota-available-bytes>
<quota-used-bytes>2166</quota-used-bytes>
<current-user-privilege-set>
<privilege>
<all/>
</privilege>
<privilege>
<read/>
</privilege>
<privilege>
<write/>
</privilege>
<privilege>
<write-properties/>
</privilege>
<privilege>
<write-content/>
</privilege>
<privilege>
<bind/>
</privilege>
<privilege>
<unbind/>
</privilege>
<privilege>
<unlock/>
</privilege>
<privilege>
<read-acl/>
</privilege>
<privilege>
<write-acl/>
</privilege>
<privilege>
<read-current-user-privilege-set/>
</privilege>
<privilege>
<schedule-send xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
<privilege>
<schedule xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
<privilege>
<read-free-busy xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
</current-user-privilege-set>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
<propstat>
<prop>
<xmpp-server xmlns='http://calendarserver.org/ns/'/>
<xmpp-uri xmlns='http://calendarserver.org/ns/'/>
<getctag xmlns='http://calendarserver.org/ns/'/>
<displayname/>
<calendar-description xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-color xmlns='http://apple.com/ns/ical/'/>
<calendar-order xmlns='http://apple.com/ns/ical/'/>
<calendar-free-busy-set xmlns='urn:ietf:params:xml:ns:caldav'/>
<schedule-calendar-transp xmlns='urn:ietf:params:xml:ns:caldav'/>
<schedule-default-calendar-URL xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-timezone xmlns='urn:ietf:params:xml:ns:caldav'/>
<source xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-alarms xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-attachments xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-todos xmlns='http://calendarserver.org/ns/'/>
<refreshrate xmlns='http://apple.com/ns/ical/'/>
<push-transports xmlns='http://calendarserver.org/ns/'/>
<pushkey xmlns='http://calendarserver.org/ns/'/>
<publish-url xmlns='http://calendarserver.org/ns/'/>
</prop>
<status>HTTP/1.1 404 Not Found</status>
</propstat>
</response>
<response>
<href>/calendars/__uids__/user01/freebusy</href>
<propstat>
<prop>
<resourcetype>
<free-busy-url xmlns='http://calendarserver.org/ns/'/>
</resourcetype>
<owner>
<href>/principals/__uids__/user01/</href>
</owner>
<quota-available-bytes>104855434</quota-available-bytes>
<quota-used-bytes>2166</quota-used-bytes>
<current-user-privilege-set>
<privilege>
<read/>
</privilege>
<privilege>
<schedule-deliver xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
<privilege>
<schedule xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
<privilege>
<all/>
</privilege>
<privilege>
<write/>
</privilege>
<privilege>
<write-properties/>
</privilege>
<privilege>
<write-content/>
</privilege>
<privilege>
<bind/>
</privilege>
<privilege>
<unbind/>
</privilege>
<privilege>
<unlock/>
</privilege>
<privilege>
<read-acl/>
</privilege>
<privilege>
<write-acl/>
</privilege>
<privilege>
<read-current-user-privilege-set/>
</privilege>
<privilege>
<read-free-busy xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
</current-user-privilege-set>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
<propstat>
<prop>
<xmpp-server xmlns='http://calendarserver.org/ns/'/>
<xmpp-uri xmlns='http://calendarserver.org/ns/'/>
<getctag xmlns='http://calendarserver.org/ns/'/>
<displayname/>
<calendar-description xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-color xmlns='http://apple.com/ns/ical/'/>
<calendar-order xmlns='http://apple.com/ns/ical/'/>
<supported-calendar-component-set xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-free-busy-set xmlns='urn:ietf:params:xml:ns:caldav'/>
<schedule-calendar-transp xmlns='urn:ietf:params:xml:ns:caldav'/>
<schedule-default-calendar-URL xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-timezone xmlns='urn:ietf:params:xml:ns:caldav'/>
<source xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-alarms xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-attachments xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-todos xmlns='http://calendarserver.org/ns/'/>
<refreshrate xmlns='http://apple.com/ns/ical/'/>
<push-transports xmlns='http://calendarserver.org/ns/'/>
<pushkey xmlns='http://calendarserver.org/ns/'/>
<publish-url xmlns='http://calendarserver.org/ns/'/>
</prop>
<status>HTTP/1.1 404 Not Found</status>
</propstat>
</response>
<response>
<href>/calendars/__uids__/user01/inbox/</href>
<propstat>
<prop>
<getctag xmlns='http://calendarserver.org/ns/'>a483dab3-1391-445b-b1c3-5ae9dfc81c2f#0</getctag>
<sync-token xmlns='DAV:'>SYNCTOKEN2</sync-token>
<displayname>inbox</displayname>
<supported-calendar-component-set xmlns='urn:ietf:params:xml:ns:caldav'>
<comp name='VEVENT'/>
<comp name='VTODO'/>
<comp name='VTIMEZONE'/>
<comp name='VFREEBUSY'/>
</supported-calendar-component-set>
<resourcetype>
<collection/>
<schedule-inbox xmlns='urn:ietf:params:xml:ns:caldav'/>
</resourcetype>
<owner>
<href>/principals/__uids__/user01/</href>
</owner>
<calendar-free-busy-set xmlns='urn:ietf:params:xml:ns:caldav'>
<href xmlns='DAV:'>/calendars/__uids__/user01/calendar</href>
</calendar-free-busy-set>
<schedule-default-calendar-URL xmlns='urn:ietf:params:xml:ns:caldav'>
<href xmlns='DAV:'>/calendars/__uids__/user01/calendar</href>
</schedule-default-calendar-URL>
<quota-available-bytes>104855434</quota-available-bytes>
<quota-used-bytes>2166</quota-used-bytes>
<current-user-privilege-set>
<privilege>
<schedule-deliver xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
<privilege>
<schedule xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
<privilege>
<all/>
</privilege>
<privilege>
<read/>
</privilege>
<privilege>
<write/>
</privilege>
<privilege>
<write-properties/>
</privilege>
<privilege>
<write-content/>
</privilege>
<privilege>
<bind/>
</privilege>
<privilege>
<unbind/>
</privilege>
<privilege>
<unlock/>
</privilege>
<privilege>
<read-acl/>
</privilege>
<privilege>
<write-acl/>
</privilege>
<privilege>
<read-current-user-privilege-set/>
</privilege>
<privilege>
<read-free-busy xmlns='urn:ietf:params:xml:ns:caldav'/>
</privilege>
</current-user-privilege-set>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
<propstat>
<prop>
<xmpp-server xmlns='http://calendarserver.org/ns/'/>
<xmpp-uri xmlns='http://calendarserver.org/ns/'/>
<calendar-description xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-color xmlns='http://apple.com/ns/ical/'/>
<calendar-order xmlns='http://apple.com/ns/ical/'/>
<schedule-calendar-transp xmlns='urn:ietf:params:xml:ns:caldav'/>
<calendar-timezone xmlns='urn:ietf:params:xml:ns:caldav'/>
<source xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-alarms xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-attachments xmlns='http://calendarserver.org/ns/'/>
<subscribed-strip-todos xmlns='http://calendarserver.org/ns/'/>
<refreshrate xmlns='http://apple.com/ns/ical/'/>
<push-transports xmlns='http://calendarserver.org/ns/'/>
<pushkey xmlns='http://calendarserver.org/ns/'/>
<publish-url xmlns='http://calendarserver.org/ns/'/>
</prop>
<status>HTTP/1.1 404 Not Found</status>
</propstat>
</response>
</multistatus>
"""
# Concrete calendar-home PROPFIND bodies derived from the template above;
# the three variants differ only in the XMPP push properties advertised
# for the calendar home.

# Empty-valued XMPP elements: the properties are present but carry no
# push configuration.
CALENDAR_HOME_PROPFIND_RESPONSE = _CALENDAR_HOME_PROPFIND_RESPONSE_TEMPLATE % {
    "xmpp": """\
<xmpp-server xmlns='http://calendarserver.org/ns/'/>
<xmpp-uri xmlns='http://calendarserver.org/ns/'/>""",
}

# Fully populated XMPP push configuration (server host:port and pubsub URI).
CALENDAR_HOME_PROPFIND_RESPONSE_WITH_XMPP = _CALENDAR_HOME_PROPFIND_RESPONSE_TEMPLATE % {
    "xmpp": """\
<xmpp-server xmlns='http://calendarserver.org/ns/'>xmpp.example.invalid:1952</xmpp-server>
<xmpp-uri xmlns='http://calendarserver.org/ns/'>xmpp:pubsub.xmpp.example.invalid?pubsub;node=/CalDAV/another.example.invalid/user01/</xmpp-uri>""",
}

# No XMPP properties at all.
CALENDAR_HOME_PROPFIND_RESPONSE_XMPP_MISSING = _CALENDAR_HOME_PROPFIND_RESPONSE_TEMPLATE % {"xmpp": ""}
class MemoryResponse(object):
    """
    An in-memory stand-in for a twisted.web client response, backed by a
    body producer supplied at construction time.
    """

    def __init__(self, version, code, phrase, headers, bodyProducer):
        self._bodyProducer = bodyProducer
        self.version = version
        self.code = code
        self.phrase = phrase
        self.headers = headers
        # Mirror the producer's advertised body length, as a real
        # response object would.
        self.length = bodyProducer.length

    def deliverBody(self, protocol):
        """
        Feed the stored body into C{protocol}, then signal completion by
        dropping the connection with a C{ResponseDone} failure.
        """
        producer = self._bodyProducer
        protocol.makeConnection(producer)
        consumer = ProtocolToConsumerAdapter(protocol)
        d = producer.startProducing(consumer)
        d.addCallback(
            lambda ignored: protocol.connectionLost(Failure(ResponseDone())))
class OS_X_10_11Mixin:
    """
    Mixin providing a configured L{OS_X_10_11} client for L{TestCase}s.
    """

    def setUp(self):
        """
        Build an L{OS_X_10_11} client for a synthetic directory record,
        serializing its state into a fresh temporary directory.
        """
        TimezoneCache.create()
        self.record = _DirectoryRecord(
            u"user91", u"user91", u"User 91", u"user91@example.org", u"user91",
        )
        statePath = self.mktemp()
        os.mkdir(statePath)
        self.client = OS_X_10_11(
            None,
            {"uri": "http://127.0.0.1"},
            "/principals/users/%s/",
            statePath,
            self.record,
            None,
            1
        )

    def interceptRequests(self):
        """
        Replace the client's C{_request} with a stub that records each
        call alongside an unfired Deferred, and return the recording list
        so tests can inspect and answer requests one by one.
        """
        captured = []

        def fakeRequest(*args, **kwargs):
            pending = Deferred()
            captured.append((pending, args))
            return pending

        self.client._request = fakeRequest
        return captured
class OS_X_10_11Tests(OS_X_10_11Mixin, TestCase):
"""
Tests for L{OS_X_10_11}.
"""
def test_parsePrincipalPROPFINDResponse(self):
"""
L{Principal._parsePROPFINDResponse} accepts an XML document
like the one in the response to a I{PROPFIND} request for
I{/principals/__uids__/<uid>/} and returns a C{PropFindResult}
representing the data from it.
"""
principals = self.client._parseMultiStatus(PRINCIPAL_PROPFIND_RESPONSE)
principal = principals['/principals/__uids__/user01/']
self.assertEquals(
principal.getHrefProperties(),
{
davxml.principal_collection_set: URL(path='/principals/'),
caldavxml.calendar_home_set: URL(path='/calendars/__uids__/user01'),
caldavxml.calendar_user_address_set: (
URL(path='/principals/__uids__/user01/'),
URL(path='/principals/users/user01/'),
),
caldavxml.schedule_inbox_URL: URL(path='/calendars/__uids__/user01/inbox/'),
caldavxml.schedule_outbox_URL: URL(path='/calendars/__uids__/user01/outbox/'),
csxml.dropbox_home_URL: URL(path='/calendars/__uids__/user01/dropbox/'),
csxml.notification_URL: URL(path='/calendars/__uids__/user01/notification/'),
davxml.principal_URL: URL(path='/principals/__uids__/user01/'),
}
)
self.assertEquals(
principal.getTextProperties(),
{davxml.displayname: 'User 01'})
# self.assertEquals(
# principal.getSomething(),
# {SUPPORTED_REPORT_SET: (
# '{DAV:}acl-principal-prop-set',
# '{DAV:}principal-match',
# '{DAV:}principal-property-search',
# '{DAV:}expand-property',
# )})
def test_extractCalendars(self):
"""
L{OS_X_10_11._extractCalendars} accepts a calendar home
PROPFIND response body and returns a list of calendar objects
constructed from the data extracted from the response.
"""
home = "/calendars/__uids__/user01/"
calendars, notificationCollection, _ignore_homeToken = self.client._extractCalendars(
self.client._parseMultiStatus(CALENDAR_HOME_PROPFIND_RESPONSE), home)
calendars.sort(key=lambda cal: cal.resourceType)
calendar, inbox = calendars
self.assertEquals(calendar.resourceType, caldavxml.calendar)
self.assertEquals(calendar.name, "calendar")
self.assertEquals(calendar.url, "/calendars/__uids__/user01/calendar/")
self.assertEquals(calendar.changeToken, "SYNCTOKEN1")
self.assertEquals(inbox.resourceType, caldavxml.schedule_inbox)
self.assertEquals(inbox.name, "inbox")
self.assertEquals(inbox.url, "/calendars/__uids__/user01/inbox/")
self.assertEquals(inbox.changeToken, "SYNCTOKEN2")
self.assertEquals(notificationCollection.changeToken, "SYNCTOKEN3")
self.assertEqual({}, self.client.xmpp)
def test_extractCalendarsXMPP(self):
"""
If there is XMPP push information in a calendar home PROPFIND response,
L{OS_X_10_11._extractCalendars} finds it and records it.
"""
home = "/calendars/__uids__/user01/"
self.client._extractCalendars(
self.client._parseMultiStatus(CALENDAR_HOME_PROPFIND_RESPONSE_WITH_XMPP),
home
)
self.assertEqual({
home: XMPPPush(
"xmpp.example.invalid:1952",
"xmpp:pubsub.xmpp.example.invalid?pubsub;node=/CalDAV/another.example.invalid/user01/",
"/Some/Unique/Value"
)},
self.client.xmpp
)
def test_handleMissingXMPP(self):
home = "/calendars/__uids__/user01/"
self.client._extractCalendars(
self.client._parseMultiStatus(CALENDAR_HOME_PROPFIND_RESPONSE_XMPP_MISSING), home)
self.assertEqual({}, self.client.xmpp)
@inlineCallbacks
def test_changeEventAttendee(self):
"""
OS_X_10_11.changeEventAttendee removes one attendee from an
existing event and appends another.
"""
requests = self.interceptRequests()
vevent = Component.fromString(EVENT)
attendees = tuple(vevent.mainComponent().properties("ATTENDEE"))
old = attendees[0]
new = old.duplicate()
new.setParameter('CN', 'Some Other Guy')
event = Event(self.client.serializeLocation(), u'/some/calendar/1234.ics', None, vevent)
self.client._events[event.url] = event
self.client.changeEventAttendee(event.url, old, new)
_ignore_result, req = requests.pop(0)
# iCal PUTs the new VCALENDAR object.
_ignore_expectedResponseCode, method, url, headers, body = req
self.assertEquals(method, 'PUT')
self.assertEquals(url, 'http://127.0.0.1' + event.url)
self.assertIsInstance(url, str)
self.assertEquals(headers.getRawHeaders('content-type'), ['text/calendar'])
consumer = MemoryConsumer()
yield body.startProducing(consumer)
vevent = Component.fromString(consumer.value())
attendees = tuple(vevent.mainComponent().properties("ATTENDEE"))
self.assertEquals(len(attendees), 2)
self.assertEquals(attendees[0].parameterValue('CN'), 'User 01')
self.assertEquals(attendees[1].parameterValue('CN'), 'Some Other Guy')
def test_addEvent(self):
"""
L{OS_X_10_11.addEvent} PUTs the event passed to it to the
server and updates local state to reflect its existence.
"""
requests = self.interceptRequests()
calendar = Calendar(caldavxml.calendar, set(('VEVENT',)), u'calendar', u'/mumble/', None)
self.client._calendars[calendar.url] = calendar
vcalendar = Component.fromString(EVENT)
d = self.client.addEvent(u'/mumble/frotz.ics', vcalendar)
result, req = requests.pop(0)
# iCal PUTs the new VCALENDAR object.
expectedResponseCode, method, url, headers, body = req
self.assertEqual(expectedResponseCode, CREATED)
self.assertEqual(method, 'PUT')
self.assertEqual(url, 'http://127.0.0.1/mumble/frotz.ics')
self.assertIsInstance(url, str)
self.assertEqual(headers.getRawHeaders('content-type'), ['text/calendar'])
consumer = MemoryConsumer()
finished = body.startProducing(consumer)
def cbFinished(ignored):
self.assertEqual(
Component.fromString(consumer.value()),
Component.fromString(EVENT_AND_TIMEZONE))
finished.addCallback(cbFinished)
def requested(ignored):
response = MemoryResponse(
('HTTP', '1', '1'), CREATED, "Created", Headers({"etag": ["foo"]}),
StringProducer(""))
result.callback((response, ""))
finished.addCallback(requested)
return d
    @inlineCallbacks
    def test_addInvite(self):
        """
        L{OS_X_10_11.addInvite} PUTs the event passed to it to the
        server and updates local state to reflect its existence, but
        it also does attendee auto-complete and free-busy checks before
        the PUT.
        """
        calendar = Calendar(caldavxml.calendar, set(('VEVENT',)), u'calendar', u'/mumble/', None)
        self.client._calendars[calendar.url] = calendar
        vcalendar = Component.fromString(EVENT_INVITE)
        # Identity details the client embeds in its scheduling requests.
        self.client.uuid = u'urn:uuid:user01'
        self.client.email = u'mailto:user01@example.com'
        self.client.principalCollection = "/principals/"
        self.client.outbox = "/calendars/__uids__/user01/outbox/"

        # Stub for the attendee auto-complete REPORT against the principal
        # collection; validates the request and returns an empty multistatus.
        @inlineCallbacks
        def _testReport(*args, **kwargs):
            expectedResponseCode, method, url, headers, body = args
            self.assertEqual(expectedResponseCode, (MULTI_STATUS,))
            self.assertEqual(method, 'REPORT')
            self.assertEqual(url, 'http://127.0.0.1/principals/')
            self.assertIsInstance(url, str)
            self.assertEqual(headers.getRawHeaders('content-type'), ['text/xml'])

            consumer = MemoryConsumer()
            yield body.startProducing(consumer)

            response = MemoryResponse(
                ('HTTP', '1', '1'), MULTI_STATUS, "MultiStatus", Headers({}),
                StringProducer("<?xml version='1.0' encoding='UTF-8'?><multistatus xmlns='DAV:' />"))
            returnValue((response, "<?xml version='1.0' encoding='UTF-8'?><multistatus xmlns='DAV:' />"))

        # Stub for the free-busy POST to the outbox; checks that the
        # expected attendee appears in the request body.
        @inlineCallbacks
        def _testPost(*args, **kwargs):
            expectedResponseCode, method, url, headers, body = args
            self.assertEqual(expectedResponseCode, OK)
            self.assertEqual(method, 'POST')
            self.assertEqual(url, 'http://127.0.0.1/calendars/__uids__/user01/outbox/')
            self.assertIsInstance(url, str)
            self.assertEqual(headers.getRawHeaders('content-type'), ['text/calendar'])

            consumer = MemoryConsumer()
            yield body.startProducing(consumer)
            self.assertNotEqual(consumer.value().find(kwargs["attendee"]), -1)

            response = MemoryResponse(
                ('HTTP', '1', '1'), OK, "OK", Headers({}),
                StringProducer(""))
            returnValue((response, ""))

        # One free-busy check is expected per invited attendee.
        def _testPost02(*args, **kwargs):
            return _testPost(*args, attendee="ATTENDEE:mailto:user02@example.com", **kwargs)

        def _testPost03(*args, **kwargs):
            return _testPost(*args, attendee="ATTENDEE:mailto:user03@example.com", **kwargs)

        # Stub for the final PUT of the invite itself; the uploaded body
        # must parse back to the original invite component.
        @inlineCallbacks
        def _testPut(*args, **kwargs):
            expectedResponseCode, method, url, headers, body = args
            self.assertEqual(expectedResponseCode, CREATED)
            self.assertEqual(method, 'PUT')
            self.assertEqual(url, 'http://127.0.0.1/mumble/frotz.ics')
            self.assertIsInstance(url, str)
            self.assertEqual(headers.getRawHeaders('content-type'), ['text/calendar'])

            consumer = MemoryConsumer()
            yield body.startProducing(consumer)
            self.assertEqual(
                Component.fromString(consumer.value()),
                Component.fromString(EVENT_INVITE))

            response = MemoryResponse(
                ('HTTP', '1', '1'), CREATED, "Created", Headers({}),
                StringProducer(""))
            returnValue((response, ""))

        # Stub for the GET that re-fetches the stored event afterwards.
        def _testGet(*args, **kwargs):
            expectedResponseCode, method, url = args
            self.assertEqual(expectedResponseCode, OK)
            self.assertEqual(method, 'GET')
            self.assertEqual(url, 'http://127.0.0.1/mumble/frotz.ics')
            self.assertIsInstance(url, str)

            response = MemoryResponse(
                ('HTTP', '1', '1'), OK, "OK", Headers({"etag": ["foo"]}),
                StringProducer(EVENT_INVITE))
            return succeed((response, EVENT_INVITE))

        # The exact request sequence addInvite is expected to make:
        # auto-complete + free-busy per attendee, then PUT, then GET.
        requests = [_testReport, _testPost02, _testReport, _testPost03, _testPut, _testGet]

        def _requestHandler(*args, **kwargs):
            handler = requests.pop(0)
            return handler(*args, **kwargs)
        self.client._request = _requestHandler

        yield self.client.addInvite('/mumble/frotz.ics', vcalendar)
def test_deleteEvent(self):
"""
L{OS_X_10_11.deleteEvent} DELETEs the event at the relative
URL passed to it and updates local state to reflect its
removal.
"""
requests = self.interceptRequests()
calendar = Calendar(caldavxml.calendar, set(('VEVENT',)), u'calendar', u'/foo/', None)
event = Event(None, calendar.url + u'bar.ics', None)
self.client._calendars[calendar.url] = calendar
self.client._setEvent(event.url, event)
d = self.client.deleteEvent(event.url)
result, req = requests.pop()
expectedResponseCode, method, url = req
self.assertEqual(expectedResponseCode, (NO_CONTENT, NOT_FOUND))
self.assertEqual(method, 'DELETE')
self.assertEqual(url, 'http://127.0.0.1' + event.url)
self.assertIsInstance(url, str)
self.assertNotIn(event.url, self.client._events)
self.assertNotIn(u'bar.ics', calendar.events)
response = MemoryResponse(
('HTTP', '1', '1'), NO_CONTENT, "No Content", None,
StringProducer(""))
result.callback((response, ""))
return d
    def test_serialization(self):
        """
        L{OS_X_10_11.serialize} properly generates a JSON document.
        """
        # serialize() is expected to create the per-client directory and
        # its index.json; neither may exist beforehand.
        clientPath = os.path.join(self.client.serializePath, "user91-OS_X_10.11")
        self.assertFalse(os.path.exists(clientPath))
        indexPath = os.path.join(clientPath, "index.json")
        self.assertFalse(os.path.exists(indexPath))

        # A plain calendar event...
        cal1 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//Apple Inc.//iCal 4.0.3//EN
BEGIN:VEVENT
UID:004f8e41-b071-4b30-bb3b-6aada4adcc10
DTSTART:20120817T113000
DTEND:20120817T114500
DTSTAMP:20120815T154420Z
SEQUENCE:2
SUMMARY:Simple event
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")

        # ...and a scheduling message (METHOD:REQUEST) for the inbox.
        cal2 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
METHOD:REQUEST
PRODID:-//Apple Inc.//iCal 4.0.3//EN
BEGIN:VEVENT
UID:00a79cad-857b-418e-a54a-340b5686d747
DTSTART:20120817T113000
DTEND:20120817T114500
DTSTAMP:20120815T154420Z
SEQUENCE:2
SUMMARY:Simple event
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")

        # Populate client state: two events, three calendars, a
        # notification collection, the attachments URL and home token.
        events = (
            Event(self.client.serializeLocation(), u'/home/calendar/1.ics', u'123.123', Component.fromString(cal1)),
            Event(self.client.serializeLocation(), u'/home/inbox/i1.ics', u'123.123', Component.fromString(cal2)),
        )
        self.client._events.update(dict([[event.url, event] for event in events]))

        calendars = (
            Calendar(str(caldavxml.calendar), set(('VEVENT',)), u'calendar', u'/home/calendar/', "123", invitees=["a", "b", "c"]),
            Calendar(str(caldavxml.calendar), set(('VTODO',)), u'tasks', u'/home/tasks/', "456"),
            Calendar(str(caldavxml.schedule_inbox), set(('VEVENT', "VTODO",)), u'calendar', u'/home/inbox/', "789"),
        )
        self.client._calendars.update(dict([[calendar.url, calendar] for calendar in calendars]))
        self.client._calendars["/home/calendar/"].events["1.ics"] = events[0]
        self.client._calendars["/home/inbox/"].events["i1.ics"] = events[1]
        self.client._notificationCollection = NotificationCollection("/home/notification", "123")
        self.client._managed_attachments_server_url = "attachmentsurl"
        self.client.calendarHomeToken = "hometoken"

        self.client.serialize()
        self.assertTrue(os.path.exists(clientPath))
        self.assertTrue(os.path.exists(indexPath))

        def _normDict(d):
            # Sort any list values so the JSON comparison below does not
            # depend on serialization order.
            return dict([
                (
                    k,
                    sorted(
                        v,
                        key=lambda x:
                        x["changeToken" if k == "calendars" else "url"]
                    ) if isinstance(v, list) else v,
                )
                for k, v in d.items()
            ])

        # The index must describe exactly the state installed above.
        with open(indexPath) as f:
            jdata = f.read()
        self.assertEqual(_normDict(json.loads(jdata)), _normDict(json.loads("""{
"notificationCollection": {
"url": "/home/notification",
"notifications": [],
"changeToken": "123"
},
"calendars": [
{
"changeToken": "123",
"name": "calendar",
"shared": false,
"sharedByMe": false,
"resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
"componentTypes": [
"VEVENT"
],
"invitees": [
"a",
"b",
"c"
],
"url": "/home/calendar/",
"events": [
"1.ics"
]
},
{
"changeToken": "789",
"name": "calendar",
"shared": false,
"sharedByMe": false,
"resourceType": "{urn:ietf:params:xml:ns:caldav}schedule-inbox",
"componentTypes": [
"VEVENT",
"VTODO"
],
"invitees": [],
"url": "/home/inbox/",
"events": [
"i1.ics"
]
},
{
"changeToken": "456",
"name": "tasks",
"shared": false,
"sharedByMe": false,
"resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
"componentTypes": [
"VTODO"
],
"invitees": [],
"url": "/home/tasks/",
"events": []
}
],
"principalURL": null,
"homeToken": "hometoken",
"attachmentsUrl": "attachmentsurl",
"events": [
{
"url": "/home/calendar/1.ics",
"scheduleTag": null,
"etag": "123.123",
"uid": "004f8e41-b071-4b30-bb3b-6aada4adcc10"
},
{
"url": "/home/inbox/i1.ics",
"scheduleTag": null,
"etag": "123.123",
"uid": "00a79cad-857b-418e-a54a-340b5686d747"
}
],
"attachments": {}
}""")))

        # The event bodies must round-trip byte-for-byte on disk.
        event1Path = os.path.join(clientPath, "calendar", "1.ics")
        self.assertTrue(os.path.exists(event1Path))
        with open(event1Path) as f:
            data = f.read()
        self.assertEqual(data, cal1)

        event2Path = os.path.join(clientPath, "inbox", "i1.ics")
        self.assertTrue(os.path.exists(event2Path))
        with open(event2Path) as f:
            data = f.read()
        self.assertEqual(data, cal2)
    def test_deserialization(self):
        """
        L{OS_X_10_11.deserialize} properly parses a JSON document.
        """
        # iCalendar data uses CRLF line endings, hence the replace().
        # cal1 is a plain event destined for the regular calendar collection.
        cal1 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//Apple Inc.//iCal 4.0.3//EN
BEGIN:VEVENT
UID:004f8e41-b071-4b30-bb3b-6aada4adcc10
DTSTART:20120817T113000
DTEND:20120817T114500
DTSTAMP:20120815T154420Z
SEQUENCE:2
SUMMARY:Simple event
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
        # cal2 carries METHOD:REQUEST and is destined for the schedule inbox.
        cal2 = """BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
METHOD:REQUEST
PRODID:-//Apple Inc.//iCal 4.0.3//EN
BEGIN:VEVENT
UID:00a79cad-857b-418e-a54a-340b5686d747
DTSTART:20120817T113000
DTEND:20120817T114500
DTSTAMP:20120815T154420Z
SEQUENCE:2
SUMMARY:Simple event
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
        # Build the serialized client state on disk by hand: a per-client
        # directory holding index.json plus one sub-directory per calendar
        # containing the event data files.
        clientPath = os.path.join(self.client.serializePath, "user91-OS_X_10.11")
        os.mkdir(clientPath)
        indexPath = os.path.join(clientPath, "index.json")
        with open(indexPath, "w") as f:
            f.write("""{
"calendars": [
{
"changeToken": "321",
"attachmentsUrl": "https://example.com/attachments/",
"name": "calendar",
"shared": false,
"sharedByMe": false,
"resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
"componentTypes": [
"VEVENT"
],
"invitees": [
"a",
"b",
"c"
],
"url": "/home/calendar/",
"events": [
"2.ics"
]
},
{
"changeToken": "987",
"name": "calendar",
"shared": false,
"sharedByMe": false,
"resourceType": "{urn:ietf:params:xml:ns:caldav}schedule-inbox",
"componentTypes": [
"VEVENT",
"VTODO"
],
"invitees": [
"a",
"b",
"c"
],
"url": "/home/inbox/",
"events": [
"i2.ics"
]
},
{
"changeToken": "654",
"name": "tasks",
"shared": false,
"sharedByMe": false,
"resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
"componentTypes": [
"VTODO"
],
"invitees": [
"a",
"b",
"c"
],
"url": "/home/tasks/",
"events": []
}
],
"principalURL": null,
"homeToken": "hometoken",
"attachmentsUrl": "attachmentsurl",
"events": [
{
"url": "/home/calendar/2.ics",
"scheduleTag": null,
"etag": "321.321",
"uid": "004f8e41-b071-4b30-bb3b-6aada4adcc10"
},
{
"url": "/home/inbox/i2.ics",
"scheduleTag": null,
"etag": "987.987",
"uid": "00a79cad-857b-418e-a54a-340b5686d747"
}
]
}""")
        os.mkdir(os.path.join(clientPath, "calendar"))
        event1Path = os.path.join(clientPath, "calendar", "2.ics")
        with open(event1Path, "w") as f:
            f.write(cal1)
        os.mkdir(os.path.join(clientPath, "inbox"))
        event1Path = os.path.join(clientPath, "inbox", "i2.ics")
        with open(event1Path, "w") as f:
            f.write(cal2)

        # Parse the hand-built on-disk state back into the client.
        self.client.deserialize()

        # Calendars must round-trip, including change tokens, names, resource
        # types, component types and invitees.
        self.assertEqual(len(self.client._calendars), 3)
        self.assertTrue("/home/calendar/" in self.client._calendars)
        self.assertEqual(self.client._calendars["/home/calendar/"].changeToken, "321")
        self.assertEqual(self.client._calendars["/home/calendar/"].name, "calendar")
        self.assertEqual(self.client._calendars["/home/calendar/"].resourceType, "{urn:ietf:params:xml:ns:caldav}calendar")
        self.assertEqual(self.client._calendars["/home/calendar/"].componentTypes, set(("VEVENT",)))
        self.assertEqual(self.client._calendars["/home/calendar/"].invitees, ["a", "b", "c"])
        self.assertTrue("/home/tasks/" in self.client._calendars)
        self.assertTrue("/home/inbox/" in self.client._calendars)
        self.assertEqual(self.client._calendars["/home/inbox/"].componentTypes, set(("VEVENT", "VTODO",)))
        # Events must round-trip, including etags, UIDs and full component
        # data read back from the per-calendar .ics files.
        self.assertEqual(len(self.client._events), 2)
        self.assertTrue("/home/calendar/2.ics" in self.client._events)
        self.assertEqual(self.client._events["/home/calendar/2.ics"].scheduleTag, None)
        self.assertEqual(self.client._events["/home/calendar/2.ics"].etag, "321.321")
        self.assertEqual(self.client._events["/home/calendar/2.ics"].getUID(), "004f8e41-b071-4b30-bb3b-6aada4adcc10")
        self.assertEqual(str(self.client._events["/home/calendar/2.ics"].component), cal1)
        self.assertTrue("/home/inbox/i2.ics" in self.client._events)
        self.assertEqual(self.client._events["/home/inbox/i2.ics"].scheduleTag, None)
        self.assertEqual(self.client._events["/home/inbox/i2.ics"].etag, "987.987")
        self.assertEqual(self.client._events["/home/inbox/i2.ics"].getUID(), "00a79cad-857b-418e-a54a-340b5686d747")
        self.assertEqual(str(self.client._events["/home/inbox/i2.ics"].component), cal2)
        # Home-level tokens must round-trip as well.
        self.assertEqual(self.client.calendarHomeToken, "hometoken")
        self.assertEqual(self.client._managed_attachments_server_url, "attachmentsurl")
class UpdateCalendarTests(OS_X_10_11Mixin, TestCase):
    """
    Tests for L{OS_X_10_11._updateCalendar}.
    """

    # Canned sync REPORT/PROPFIND response for the calendar collection: two
    # child resources, /something/anotherthing.ics and /something/else.ics.
    _CALENDAR_PROPFIND_RESPONSE_BODY = """\
<?xml version='1.0' encoding='UTF-8'?>
<multistatus xmlns='DAV:'>
<response>
<href>/something/anotherthing.ics</href>
<propstat>
<prop>
<resourcetype>
<collection/>
</resourcetype>
<getetag>"None"</getetag>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
<propstat>
<prop>
</prop>
<status>HTTP/1.1 404 Not Found</status>
</propstat>
</response>
<response>
<href>/something/else.ics</href>
<propstat>
<prop>
<resourcetype>
<collection/>
</resourcetype>
<getetag>"None"</getetag>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
</response>
</multistatus>
"""

    # Multiget REPORT response where anotherthing.ics has disappeared (404)
    # and else.ics is returned with etag and calendar data.
    _CALENDAR_REPORT_RESPONSE_BODY = """\
<?xml version='1.0' encoding='UTF-8'?>
<multistatus xmlns='DAV:'>
<response>
<href>/something/anotherthing.ics</href>
<status>HTTP/1.1 404 Not Found</status>
</response>
<response>
<href>/something/else.ics</href>
<propstat>
<prop>
<getetag>"ef70beb4cb7da4b2e2950350b09e9a01"</getetag>
<calendar-data xmlns='urn:ietf:params:xml:ns:caldav'><![CDATA[BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//Apple Inc.//iCal 4.0.3//EN
BEGIN:VEVENT
UID:CD54161A13AA8A4649D3781E@caldav.corp.apple.com
DTSTART:20110715T140000Z
DURATION:PT1H
DTSTAMP:20110715T144217Z
SUMMARY:Test2
END:VEVENT
END:VCALENDAR
]]></calendar-data>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
</response>
</multistatus>
"""

    # First batch response for the multiget batching test: only
    # anotherthing.ics.
    _CALENDAR_REPORT_RESPONSE_BODY_1 = """\
<?xml version='1.0' encoding='UTF-8'?>
<multistatus xmlns='DAV:'>
<response>
<href>/something/anotherthing.ics</href>
<propstat>
<prop>
<getetag>"ef70beb4cb7da4b2e2950350b09e9a01"</getetag>
<calendar-data xmlns='urn:ietf:params:xml:ns:caldav'><![CDATA[BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//Apple Inc.//iCal 4.0.3//EN
BEGIN:VEVENT
UID:anotherthing@caldav.corp.apple.com
DTSTART:20110715T140000Z
DURATION:PT1H
DTSTAMP:20110715T144217Z
SUMMARY:Test1
END:VEVENT
END:VCALENDAR
]]></calendar-data>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
</response>
</multistatus>
"""

    # Second batch response for the multiget batching test: only else.ics.
    _CALENDAR_REPORT_RESPONSE_BODY_2 = """\
<?xml version='1.0' encoding='UTF-8'?>
<multistatus xmlns='DAV:'>
<response>
<href>/something/else.ics</href>
<propstat>
<prop>
<getetag>"ef70beb4cb7da4b2e2950350b09e9a01"</getetag>
<calendar-data xmlns='urn:ietf:params:xml:ns:caldav'><![CDATA[BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//Apple Inc.//iCal 4.0.3//EN
BEGIN:VEVENT
UID:else@caldav.corp.apple.com
DTSTART:20110715T140000Z
DURATION:PT1H
DTSTAMP:20110715T144217Z
SUMMARY:Test2
END:VEVENT
END:VCALENDAR
]]></calendar-data>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
</response>
</multistatus>
"""

    def test_eventMissing(self):
        """
        If an event included in the calendar sync REPORT response no longer exists
        by the time a REPORT is issued for that event, the 404 is handled and
        the rest of the normal update logic for that event is skipped.
        """
        requests = self.interceptRequests()

        calendar = Calendar(None, set(('VEVENT',)), 'calendar', '/something/', None)
        self.client._calendars[calendar.url] = calendar
        self.client._updateCalendar(calendar, "1234")

        # First intercepted request: the sync REPORT listing resources.
        result, req = requests.pop(0)
        expectedResponseCode, method, url, _ignore_headers, _ignore_body = req
        self.assertEqual('REPORT', method)
        self.assertEqual('http://127.0.0.1/something/', url)
        self.assertEqual((MULTI_STATUS, FORBIDDEN), expectedResponseCode)
        result.callback(
            (
                MemoryResponse(
                    ('HTTP', '1', '1'), MULTI_STATUS, "Multi-status", None,
                    StringProducer(self._CALENDAR_PROPFIND_RESPONSE_BODY)),
                self._CALENDAR_PROPFIND_RESPONSE_BODY
            )
        )

        # Second intercepted request: the multiget REPORT for event bodies.
        result, req = requests.pop(0)
        expectedResponseCode, method, url, _ignore_headers, _ignore_body = req
        self.assertEqual('REPORT', method)
        self.assertEqual('http://127.0.0.1/something/', url)
        self.assertEqual((MULTI_STATUS,), expectedResponseCode)

        # Someone else comes along and gets rid of the event
        del self.client._events["/something/anotherthing.ics"]

        # Answer with a response in which the removed event is a 404.
        result.callback(
            (
                MemoryResponse(
                    ('HTTP', '1', '1'), MULTI_STATUS, "Multi-status", None,
                    StringProducer(self._CALENDAR_REPORT_RESPONSE_BODY)),
                self._CALENDAR_REPORT_RESPONSE_BODY
            )
        )

        # Verify that processing proceeded to the response after the one with a
        # 404 status.
        self.assertIn('/something/else.ics', self.client._events)

    def test_multigetBatch(self):
        """
        When the number of events to fetch exceeds MULTIGET_BATCH_SIZE,
        L{OS_X_10_11._updateCalendar} issues multiple multiget REPORT
        requests, each limited to the batch size, until event data has been
        retrieved for every resource.
        """
        requests = self.interceptRequests()

        # Force one event per multiget request.
        self.patch(self.client, "MULTIGET_BATCH_SIZE", 1)

        calendar = Calendar(None, set(('VEVENT',)), 'calendar', '/something/', None)
        self.client._calendars[calendar.url] = calendar
        self.client._updateCalendar(calendar, "1234")

        # First intercepted request: the sync REPORT listing two resources.
        result, req = requests.pop(0)
        expectedResponseCode, method, url, _ignore_headers, _ignore_body = req
        self.assertEqual('REPORT', method)
        self.assertEqual('http://127.0.0.1/something/', url)
        self.assertEqual((MULTI_STATUS, FORBIDDEN), expectedResponseCode)
        result.callback(
            (
                MemoryResponse(
                    ('HTTP', '1', '1'), MULTI_STATUS, "Multi-status", None,
                    StringProducer(self._CALENDAR_PROPFIND_RESPONSE_BODY)),
                self._CALENDAR_PROPFIND_RESPONSE_BODY
            )
        )

        # First multiget batch: only anotherthing.ics comes back.
        result, req = requests.pop(0)
        expectedResponseCode, method, url, _ignore_headers, _ignore_body = req
        self.assertEqual('REPORT', method)
        self.assertEqual('http://127.0.0.1/something/', url)
        self.assertEqual((MULTI_STATUS,), expectedResponseCode)
        result.callback(
            (
                MemoryResponse(
                    ('HTTP', '1', '1'), MULTI_STATUS, "Multi-status", None,
                    StringProducer(self._CALENDAR_REPORT_RESPONSE_BODY_1)),
                self._CALENDAR_REPORT_RESPONSE_BODY_1
            )
        )

        # After the first batch only the first event has an etag.
        self.assertTrue(self.client._events['/something/anotherthing.ics'].etag is not None)
        self.assertTrue(self.client._events['/something/else.ics'].etag is None)

        # Second multiget batch: else.ics comes back.
        result, req = requests.pop(0)
        expectedResponseCode, method, url, _ignore_headers, _ignore_body = req
        self.assertEqual('REPORT', method)
        self.assertEqual('http://127.0.0.1/something/', url)
        self.assertEqual((MULTI_STATUS,), expectedResponseCode)
        result.callback(
            (
                MemoryResponse(
                    ('HTTP', '1', '1'), MULTI_STATUS, "Multi-status", None,
                    StringProducer(self._CALENDAR_REPORT_RESPONSE_BODY_2)),
                self._CALENDAR_REPORT_RESPONSE_BODY_2
            )
        )

        # Now both events have been populated.
        self.assertTrue(self.client._events['/something/anotherthing.ics'].etag is not None)
        self.assertTrue(self.client._events['/something/else.ics'].etag is not None)
class VFreeBusyTests(OS_X_10_11Mixin, TestCase):
    """
    Tests for L{OS_X_10_11.requestAvailability}.
    """

    def test_requestAvailability(self):
        """
        L{OS_X_10_11.requestAvailability} accepts a date range and a set of
        account uuids and issues a VFREEBUSY request.  It returns a Deferred
        which fires with a dict mapping account uuids to availability range
        information.
        """
        # Configure the simulated client's scheduling identity and outbox.
        self.client.uuid = u'urn:uuid:user01'
        self.client.email = u'mailto:user01@example.com'
        self.client.outbox = "/calendars/__uids__/%s/outbox/" % (self.record.uid,)
        requests = self.interceptRequests()

        start = DateTime(2011, 6, 10, 10, 45, 0, tzid=Timezone.UTCTimezone)
        end = DateTime(2011, 6, 10, 11, 15, 0, tzid=Timezone.UTCTimezone)
        d = self.client.requestAvailability(
            start, end, [u"urn:uuid:user05", u"urn:uuid:user10"])

        # The request must be a POST to the outbox carrying the scheduling
        # Originator/Recipient headers and an iCalendar body.
        result, req = requests.pop(0)
        expectedResponseCode, method, url, headers, body = req
        self.assertEqual(OK, expectedResponseCode)
        self.assertEqual('POST', method)
        self.assertEqual(
            'http://127.0.0.1/calendars/__uids__/%s/outbox/' % (self.record.uid,),
            url)
        self.assertEqual(headers.getRawHeaders('originator'), ['mailto:user01@example.com'])
        self.assertEqual(headers.getRawHeaders('recipient'), ['urn:uuid:user05, urn:uuid:user10'])
        self.assertEqual(headers.getRawHeaders('content-type'), ['text/calendar'])

        # Drain the request body into a consumer so it can be compared.
        consumer = MemoryConsumer()
        finished = body.startProducing(consumer)

        def cbFinished(ignored):
            # UID and DTSTAMP vary per run, so extract them from the produced
            # component and substitute them into the expected text.
            vevent = Component.fromString(consumer.value())
            uid = vevent.resourceUID()
            dtstamp = vevent.mainComponent().propertyValue("DTSTAMP")
            dtstamp = dtstamp.getText()
            self.assertEqual("""BEGIN:VCALENDAR
CALSCALE:GREGORIAN
VERSION:2.0
METHOD:REQUEST
PRODID:-//Apple Inc.//iCal 4.0.3//EN
BEGIN:VFREEBUSY
UID:%(uid)s
DTEND:20110611T000000Z
ATTENDEE:urn:uuid:user05
ATTENDEE:urn:uuid:user10
DTSTART:20110610T000000Z
DTSTAMP:%(dtstamp)s
ORGANIZER:mailto:user01@example.com
SUMMARY:Availability for urn:uuid:user05, urn:uuid:user10
END:VFREEBUSY
END:VCALENDAR
""".replace('\n', '\r\n') % {'uid': uid, 'dtstamp': dtstamp}, consumer.value())
        finished.addCallback(cbFinished)

        def requested(ignored):
            # Fire the intercepted request with an empty 200 response so the
            # Deferred returned by requestAvailability can complete.
            response = MemoryResponse(
                ('HTTP', '1', '1'), OK, "Ok", Headers({}),
                StringProducer(""))
            result.callback((response, ""))
        finished.addCallback(requested)

        return d
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for the Redis storage."""
import unittest
import fakeredis
import redis
from plaso.containers import events
from plaso.containers import sessions
from plaso.containers import tasks
from plaso.storage.redis import redis_store
from tests.containers import test_lib as containers_test_lib
from tests.storage import test_lib
class RedisStoreTest(test_lib.StorageTestCase):
  """Tests for the Redis storage object."""

  # pylint: disable=protected-access

  # Redis server used when one is listening on localhost.
  _REDIS_URL = 'redis://127.0.0.1/0'

  def _CreateRedisClient(self):
    """Creates a Redis client for testing.

    This method will attempt to use a Redis server listening on localhost and
    fallback to a fake Redis client if no server is available or the connection
    timed out.

    Returns:
      Redis: a Redis client.
    """
    try:
      redis_client = redis.from_url(self._REDIS_URL, socket_timeout=60)
      redis_client.ping()
    except redis.exceptions.ConnectionError:
      redis_client = fakeredis.FakeStrictRedis()

    return redis_client

  def _CreateTaskStore(self):
    """Creates and opens a task store for testing.

    Every test exercises the store through the same session/task setup;
    this helper centralizes that boilerplate.

    Returns:
      tuple[Redis, Session, Task, RedisStore]: an open Redis client, the
          session and task the store belongs to, and the opened store.
    """
    redis_client = self._CreateRedisClient()
    session = sessions.Session()
    task = tasks.Task(session_identifier=session.identifier)

    test_store = redis_store.RedisStore()
    test_store.Open(
        redis_client=redis_client, session_identifier=task.session_identifier,
        task_identifier=task.identifier)

    return redis_client, session, task, test_store

  def _RemoveSessionData(self, redis_client, session_identifier):
    """Removes the session data after testing.

    Args:
      redis_client (Redis): an open Redis client.
      session_identifier (str): the identifier of the session the tasks are
          part of.
    """
    # Redis hash names are prefixed with the session identifier, so a pattern
    # match suffices to find everything the test created.
    redis_hash_pattern = '{0:s}-*'.format(session_identifier)
    for redis_hash_name in redis_client.keys(redis_hash_pattern):
      redis_client.delete(redis_hash_name)

  def testGetRedisHashName(self):
    """Tests the _GetRedisHashName function."""
    redis_client, session, task, test_store = self._CreateTaskStore()

    event_data_stream = events.EventDataStream()

    try:
      redis_hash_name = test_store._GetRedisHashName(
          event_data_stream.CONTAINER_TYPE)

      expected_redis_hash_name = '{0:s}-{1:s}-{2:s}'.format(
          task.session_identifier, task.identifier,
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(redis_hash_name, expected_redis_hash_name)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)

  # TODO: add tests for _GetFinalizationKey
  # TODO: add tests for _RaiseIfNotReadable
  # TODO: add tests for _RaiseIfNotWritable
  # TODO: add tests for _SetClientName

  def testWriteExistingAttributeContainer(self):
    """Tests the _WriteExistingAttributeContainer function."""
    redis_client, session, _, test_store = self._CreateTaskStore()

    try:
      event_data_stream = events.EventDataStream()

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 0)

      # Updating a container that was never written must fail.
      with self.assertRaises(IOError):
        test_store._WriteExistingAttributeContainer(event_data_stream)

      test_store._WriteNewAttributeContainer(event_data_stream)

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 1)

      # Rewriting an existing container must not add a new one.
      test_store._WriteExistingAttributeContainer(event_data_stream)

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 1)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)

  def testWriteNewAttributeContainer(self):
    """Tests the _WriteNewAttributeContainer method."""
    redis_client, session, _, test_store = self._CreateTaskStore()

    try:
      event_data_stream = events.EventDataStream()

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 0)

      test_store._WriteNewAttributeContainer(event_data_stream)

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 1)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)

  def testAddAttributeContainer(self):
    """Tests the AddAttributeContainer method."""
    redis_client, session, _, test_store = self._CreateTaskStore()

    try:
      event_data_stream = events.EventDataStream()

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 0)

      test_store.AddAttributeContainer(event_data_stream)

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 1)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)

    # Adding a container to a closed store must fail.
    with self.assertRaises(IOError):
      test_store.AddAttributeContainer(event_data_stream)

  # TODO: add tests for _WriteStorageMetadata

  def testGetAttributeContainerByIdentifier(self):
    """Tests the GetAttributeContainerByIdentifier method."""
    redis_client, session, _, test_store = self._CreateTaskStore()

    try:
      event_data_stream = events.EventDataStream()
      test_store.AddAttributeContainer(event_data_stream)
      identifier = event_data_stream.GetIdentifier()

      container = test_store.GetAttributeContainerByIdentifier(
          event_data_stream.CONTAINER_TYPE, identifier)
      self.assertIsNotNone(container)

      # A non-existing sequence number must yield no container.
      identifier.sequence_number = 99

      container = test_store.GetAttributeContainerByIdentifier(
          event_data_stream.CONTAINER_TYPE, identifier)
      self.assertIsNone(container)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)

  def testGetAttributeContainerByIndex(self):
    """Tests the GetAttributeContainerByIndex function."""
    redis_client, session, _, test_store = self._CreateTaskStore()

    try:
      event_data_stream = events.EventDataStream()

      container = test_store.GetAttributeContainerByIndex(
          event_data_stream.CONTAINER_TYPE, 0)
      self.assertIsNone(container)

      test_store.AddAttributeContainer(event_data_stream)

      container = test_store.GetAttributeContainerByIndex(
          event_data_stream.CONTAINER_TYPE, 0)
      self.assertIsNotNone(container)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)

  def testGetAttributeContainers(self):
    """Tests the GetAttributeContainers method."""
    redis_client, session, _, test_store = self._CreateTaskStore()

    try:
      event_data_stream = events.EventDataStream()
      event_data_stream.md5_hash = '8f0bf95a7959baad9666b21a7feed79d'

      containers = list(test_store.GetAttributeContainers(
          event_data_stream.CONTAINER_TYPE))
      self.assertEqual(len(containers), 0)

      test_store.AddAttributeContainer(event_data_stream)

      containers = list(test_store.GetAttributeContainers(
          event_data_stream.CONTAINER_TYPE))
      self.assertEqual(len(containers), 1)

      # A matching filter expression must return the container.
      filter_expression = 'md5_hash == "8f0bf95a7959baad9666b21a7feed79d"'
      containers = list(test_store.GetAttributeContainers(
          event_data_stream.CONTAINER_TYPE,
          filter_expression=filter_expression))
      self.assertEqual(len(containers), 1)

      # A non-matching filter expression must return nothing.
      filter_expression = 'md5_hash != "8f0bf95a7959baad9666b21a7feed79d"'
      containers = list(test_store.GetAttributeContainers(
          event_data_stream.CONTAINER_TYPE,
          filter_expression=filter_expression))
      self.assertEqual(len(containers), 0)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)

  def testGetNumberOfAttributeContainers(self):
    """Tests the GetNumberOfAttributeContainers function."""
    redis_client, session, _, test_store = self._CreateTaskStore()

    try:
      event_data_stream = events.EventDataStream()

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 0)

      test_store.AddAttributeContainer(event_data_stream)

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 1)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)

  def testGetSerializedAttributeContainers(self):
    """Tests the GetSerializedAttributeContainers method."""
    redis_client, session, _, test_store = self._CreateTaskStore()

    try:
      for _, event_data, _ in containers_test_lib.CreateEventsFromValues(
          self._TEST_EVENTS):
        test_store.AddAttributeContainer(event_data)

      cursor, serialized_containers = (
          test_store.GetSerializedAttributeContainers('event_data', 0, 0))
      self.assertEqual(len(serialized_containers), 4)
      for serialized_container in serialized_containers:
        self.assertIsInstance(serialized_container, bytes)
      self.assertIsInstance(cursor, int)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)

  def testGetSortedEvents(self):
    """Tests the GetSortedEvents method."""
    redis_client, session, _, test_store = self._CreateTaskStore()

    try:
      for event, _, _ in containers_test_lib.CreateEventsFromValues(
          self._TEST_EVENTS):
        test_store.AddAttributeContainer(event)

      retrieved_events = list(test_store.GetSortedEvents())
      self.assertEqual(len(retrieved_events), 4)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)

  def testHasAttributeContainers(self):
    """Tests the HasAttributeContainers method."""
    redis_client, session, _, test_store = self._CreateTaskStore()

    try:
      event_data_stream = events.EventDataStream()

      result = test_store.HasAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertFalse(result)

      test_store.AddAttributeContainer(event_data_stream)

      result = test_store.HasAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertTrue(result)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)

  # TODO: add tests for Open and Close

  def testUpdateAttributeContainer(self):
    """Tests the UpdateAttributeContainer function."""
    redis_client, session, _, test_store = self._CreateTaskStore()

    try:
      event_data_stream = events.EventDataStream()

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 0)

      # Updating a container that was never added must fail.
      with self.assertRaises(IOError):
        test_store.UpdateAttributeContainer(event_data_stream)

      test_store.AddAttributeContainer(event_data_stream)

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 1)

      # Updating an existing container must not add a new one.
      test_store.UpdateAttributeContainer(event_data_stream)

      number_of_containers = test_store.GetNumberOfAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertEqual(number_of_containers, 1)

    finally:
      test_store.Close()
      self._RemoveSessionData(redis_client, session.identifier)
# Allow running this module's tests directly from the command line.
if __name__ == '__main__':
  unittest.main()
|
|
"""
Testing the file wrapping utilities.
"""
import os
import unittest
from numpy import array, isnan, isinf
from openmdao.util.filewrap import InputFileGenerator, FileParser
class TestCase(unittest.TestCase):
""" Test namelist writer functions. """
def setUp(self):
self.templatename = 'template.dat'
self.filename = 'filename.dat'
def tearDown(self):
if os.path.exists(self.filename):
os.remove(self.filename)
if os.path.exists(self.templatename):
os.remove(self.templatename)
def test_templated_input(self):
template = "Junk\n" + \
"Anchor\n" + \
" A 1, 2 34, Test 1e65\n" + \
" B 4 Stuff\n" + \
"Anchor\n" + \
" C 77 False Inf 333.444\n"
outfile = open(self.templatename, 'w')
outfile.write(template)
outfile.close()
gen = InputFileGenerator()
gen.set_template_file(self.templatename)
gen.set_generated_file(self.filename)
gen.set_delimiters(', ')
gen.mark_anchor('Anchor')
gen.transfer_var('CC', 2, 0)
gen.transfer_var(3.0, 1, 3)
gen.reset_anchor()
gen.mark_anchor('Anchor', 2)
gen.transfer_var('NaN', 1, 4)
gen.reset_anchor()
gen.transfer_var('55', 3, 2)
gen.mark_anchor('C 77')
gen.transfer_var(1.3e-37, -3, 6)
gen.clearline(-5)
gen.mark_anchor('Anchor', -1)
gen.transfer_var('8.7', 1, 5)
gen.generate()
infile = open(self.filename, 'r')
result = infile.read()
infile.close()
answer = "\n" + \
"Anchor\n" + \
" A 1, 3.0 34, Test 1.3e-37\n" + \
" B 55 Stuff\n" + \
"Anchor\n" + \
" C 77 False NaN 8.7\n"
self.assertEqual(answer, result)
# Test some errors
try:
gen.mark_anchor('C 77', 3.14)
except ValueError, err:
msg = "The value for occurrence must be an integer"
self.assertEqual(str(err), msg)
else:
self.fail('ValueError expected')
try:
gen.mark_anchor('C 77', 0)
except ValueError, err:
msg = "0 is not valid for an anchor occurrence."
self.assertEqual(str(err), msg)
else:
self.fail('ValueError expected')
try:
gen.mark_anchor('ZZZ')
except RuntimeError, err:
msg = "Could not find pattern ZZZ in template file template.dat"
self.assertEqual(str(err), msg)
else:
self.fail('RuntimeError expected')
def test_templated_input_same_anchors(self):
template = "CQUAD4 1 3.456\n" + \
"CQUAD4 2 4.123\n" + \
"CQUAD4 3 7.222\n" + \
"CQUAD4 4\n"
outfile = open(self.templatename, 'w')
outfile.write(template)
outfile.close()
gen = InputFileGenerator()
gen.set_template_file(self.templatename)
gen.set_generated_file(self.filename)
gen.set_delimiters(', ')
gen.mark_anchor('CQUAD4')
gen.transfer_var('x', 0, 2)
gen.mark_anchor('CQUAD4')
gen.transfer_var('y', 0, 3)
gen.mark_anchor('CQUAD4', 2)
gen.transfer_var('z', 0, 2)
gen.generate()
infile = open(self.filename, 'r')
result = infile.read()
infile.close()
answer = "CQUAD4 x 3.456\n" + \
"CQUAD4 2 y\n" + \
"CQUAD4 3 7.222\n" + \
"CQUAD4 z\n"
self.assertEqual(answer, result)
print result
def test_templated_input_arrays(self):
template = "Anchor\n" + \
"0 0 0 0 0\n"
outfile = open(self.templatename, 'w')
outfile.write(template)
outfile.close()
gen = InputFileGenerator()
gen.set_template_file(self.templatename)
gen.set_generated_file(self.filename)
gen.mark_anchor('Anchor')
gen.transfer_array(array([1, 2, 3, 4.75, 5.0]), 1, 3, 5, sep=' ')
gen.generate()
infile = open(self.filename, 'r')
result = infile.read()
infile.close()
answer = "Anchor\n" + \
"0 0 1.0 2.0 3.0 4.75 5.0\n"
self.assertEqual(answer, result)
def test_templated_input_2Darrays(self):
template = "Anchor\n" + \
"0 0 0 0 0\n" + \
"0 0 0 0 0\n"
outfile = open(self.templatename, 'w')
outfile.write(template)
outfile.close()
gen = InputFileGenerator()
gen.set_template_file(self.templatename)
gen.set_generated_file(self.filename)
gen.mark_anchor('Anchor')
var = array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]])
gen.transfer_2Darray(var, 1, 2, 1, 5)
gen.generate()
infile = open(self.filename, 'r')
result = infile.read()
infile.close()
answer = "Anchor\n" + \
"1 2 3 4 5\n" + \
"6 7 8 9 10\n"
self.assertEqual(answer, result)
def test_output_parse(self):
    """Parse scalars, special floats (NaN/Inf) and validation errors.

    Fix: replaced the Python-2-only ``except E, err`` form with
    ``except E as err`` (valid from 2.6, required on Python 3) and used
    context managers for file handling.
    """
    data = "Junk\n" + \
           "Anchor\n" + \
           " A 1, 2 34, Test 1e65\n" + \
           " B 4 Stuff\n" + \
           "Anchor\n" + \
           " C 77 False NaN 333.444\n" + \
           " 1,2,3,4,5\n" + \
           " Inf 1.#QNAN -1.#IND\n"
    with open(self.filename, 'w') as outfile:
        outfile.write(data)

    gen = FileParser()
    gen.set_file(self.filename)
    gen.set_delimiters(' ')
    gen.mark_anchor('Anchor')
    val = gen.transfer_var(1, 1)
    self.assertEqual(val, 'A')
    gen.reset_anchor()
    val = gen.transfer_var(3, 2)
    self.assertEqual(val, 4)
    self.assertEqual(type(val), int)
    gen.mark_anchor('Anchor', 2)
    # NaN/Inf spellings (including the Windows 1.#QNAN / -1.#IND forms)
    # must be recognized as special floats.
    val = gen.transfer_var(1, 4)
    self.assertEqual(isnan(val), True)
    val = gen.transfer_var(3, 1)
    self.assertEqual(isinf(val), True)
    val = gen.transfer_var(3, 2)
    self.assertEqual(isnan(val), True)
    val = gen.transfer_var(3, 3)
    self.assertEqual(isnan(val), True)
    val = gen.transfer_line(-1)
    self.assertEqual(val, ' B 4 Stuff')

    # Now, let's try column delimiters
    gen.set_delimiters('columns')
    gen.mark_anchor('Anchor', -1)
    val = gen.transfer_var(1, 8, 10)
    self.assertEqual(val, 'als')
    val = gen.transfer_var(1, 17)
    self.assertEqual(val, 333.444)

    # Test some errors
    try:
        gen.mark_anchor('C 77', 3.14)
    except ValueError as err:
        msg = "The value for occurrence must be an integer"
        self.assertEqual(str(err), msg)
    else:
        self.fail('ValueError expected')

    try:
        gen.mark_anchor('C 77', 0)
    except ValueError as err:
        msg = "0 is not valid for an anchor occurrence."
        self.assertEqual(str(err), msg)
    else:
        self.fail('ValueError expected')

    try:
        gen.mark_anchor('ZZZ')
    except RuntimeError as err:
        msg = "Could not find pattern ZZZ in output file filename.dat"
        self.assertEqual(str(err), msg)
    else:
        self.fail('RuntimeError expected')
def test_output_parse_same_anchors(self):
    """Repeated anchors: forward, backward and relative occurrence moves."""
    content = "CQUAD4 1 3.456\n" + \
              "CQUAD4 2 4.123\n" + \
              "CQUAD4 3 7.222\n" + \
              "CQUAD4 4\n"
    with open(self.filename, 'w') as handle:
        handle.write(content)

    parser = FileParser()
    parser.set_file(self.filename)
    parser.set_delimiters(' ')

    # Re-marking the same pattern advances to the next occurrence.
    parser.mark_anchor('CQUAD4')
    self.assertEqual(parser.transfer_var(0, 3), 3.456)
    parser.mark_anchor('CQUAD4')
    self.assertEqual(parser.transfer_var(0, 3), 4.123)
    parser.mark_anchor('CQUAD4', 2)
    self.assertEqual(parser.transfer_var(0, 2), 4)

    # Negative occurrences search backwards from the end of the file.
    parser.reset_anchor()
    parser.mark_anchor('CQUAD4', -1)
    self.assertEqual(parser.transfer_var(0, 2), 4)
    parser.mark_anchor('CQUAD4', -1)
    self.assertEqual(parser.transfer_var(0, 3), 7.222)
    parser.mark_anchor('CQUAD4', -2)
    self.assertEqual(parser.transfer_var(0, 3), 4.123)
def test_output_parse_keyvar(self):
    """transfer_keyvar: occurrence handling and validation errors.

    Fix: replaced the Python-2-only ``except E, err`` form with
    ``except E as err`` and used context managers for file handling.
    """
    data = "Anchor\n" + \
           " Key1 1 2 3.7 Test 1e65\n" + \
           " Key1 3 4 3.2 ibg 0.0003\n" + \
           " Key1 5 6 6.7 Tst xxx\n"
    with open(self.filename, 'w') as outfile:
        outfile.write(data)

    gen = FileParser()
    gen.set_file(self.filename)
    gen.set_delimiters(' ')
    gen.mark_anchor('Anchor')
    val = gen.transfer_keyvar('Key1', 3)
    self.assertEqual(val, 3.7)
    # Negative occurrence counts from the last 'Key1' line backwards.
    val = gen.transfer_keyvar('Key1', 4, -2)
    self.assertEqual(val, 'ibg')
    val = gen.transfer_keyvar('Key1', 4, -2, -1)
    self.assertEqual(val, 'Test')

    # Occurrence must be a nonzero integer.
    try:
        gen.transfer_keyvar('Key1', 4, 0)
    except ValueError as err:
        msg = "The value for occurrence must be a nonzero integer"
        self.assertEqual(str(err), msg)
    else:
        self.fail('ValueError expected')

    try:
        gen.transfer_keyvar('Key1', 4, -3.4)
    except ValueError as err:
        msg = "The value for occurrence must be a nonzero integer"
        self.assertEqual(str(err), msg)
    else:
        self.fail('ValueError expected')
def test_output_parse_array(self):
    """Read 1D slices with field delimiters and with column positions.

    Fix: replaced the Python-2-only ``except E, err`` form with
    ``except E as err`` and used a context manager for file handling.
    """
    data = "Anchor\n" + \
           "10 20 30 40 50 60 70 80\n" + \
           "11 21 31 41 51 61 71 81\n" + \
           "Key a b c d e\n"
    with open(self.filename, 'w') as outfile:
        outfile.write(data)

    gen = FileParser()
    gen.set_file(self.filename)
    gen.set_delimiters(' ')
    gen.mark_anchor('Anchor')
    val = gen.transfer_array(1, 1, 1, 8)
    self.assertEqual(val[0], 10)
    self.assertEqual(val[7], 80)
    # A slice can span multiple rows (row 1 field 5 .. row 2 field 6).
    val = gen.transfer_array(1, 5, 2, 6)
    self.assertEqual(val[0], 50)
    self.assertEqual(val[9], 61)
    gen.mark_anchor('Key')
    val = gen.transfer_array(0, 2, 0, 6)
    self.assertEqual(val[4], 'e')
    val = gen.transfer_array(0, 2, fieldend=6)
    self.assertEqual(val[4], 'e')

    # Now, let's try column delimiters
    gen.reset_anchor()
    gen.mark_anchor('Anchor')
    gen.set_delimiters('columns')
    val = gen.transfer_array(1, 7, 1, 15)
    self.assertEqual(val[0], 30)
    self.assertEqual(val[2], 50)
    val = gen.transfer_array(1, 10, 2, 18)
    self.assertEqual(val[0], 40)
    self.assertEqual(val[5], 61)
    val = gen.transfer_array(3, 5, 3, 10)
    self.assertEqual(val[0], 'a b c')

    # fieldend is mandatory in columns mode.
    try:
        gen.transfer_array(1, 7, 1)
    except ValueError as err:
        msg = "fieldend is missing, currently required"
        self.assertEqual(str(err), msg)
    else:
        self.fail('ValueError expected')
def test_output_parse_2Darray(self):
    """Read 2D blocks with whitespace and column delimiters.

    Fix: replaced the Python-2-only ``except E, err`` form with
    ``except E as err`` and used a context manager for file handling.
    The data literal is reproduced byte-for-byte.
    """
    data = '''
Anchor
FREQ DELTA -8.5 -8.5 -8.5 -8.5 -8.5 -8.5 -8.5 -8.5 -8.5 -8.5 -8.5 -8.5 -8.5 -8.5 -8.5 -8.5 -8.5
Hz
50. 1.0 30.0 34.8 36.3 36.1 34.6 32.0 28.4 23.9 18.5 12.2 5.0 -3.1 -12.3 -22.5 -34.0 -47.2 -63.7
63. 1.0 36.5 41.3 42.8 42.6 41.1 38.5 34.9 30.4 25.0 18.7 11.5 3.4 -5.8 -16.0 -27.5 -40.7 -57.2
80. 1.0 42.8 47.6 49.1 48.9 47.4 44.8 41.2 36.7 31.3 25.0 17.8 9.7 0.5 -9.7 -21.2 -34.4 -50.9
100. 1.0 48.4 53.1 54.7 54.5 53.0 50.4 46.8 42.3 36.9 30.6 23.3 15.2 6.1 -4.2 -15.7 -28.9 -45.4
125. 1.0 53.6 58.3 59.9 59.6 58.1 55.5 52.0 47.5 42.0 35.7 28.5 20.4 11.2 1.0 -10.5 -23.7 -40.2
160. 1.0 58.9 63.7 65.2 65.0 63.5 60.9 57.3 52.8 47.4 41.0 33.8 25.7 16.5 6.3 -5.2 -18.4 -34.9
200. 1.0 63.4 68.1 69.6 69.4 67.9 65.3 61.7 57.2 51.8 45.5 38.3 30.1 21.0 10.7 -0.8 -14.0 -30.5
250. 1.0 67.5 72.2 73.7 73.5 72.0 69.4 65.8 61.3 55.9 49.5 42.3 34.2 25.0 14.8 3.3 -10.0 -26.5
315. 1.0 71.3 76.1 77.6 77.4 75.8 73.2 69.7 65.1 59.7 53.4 46.1 38.0 28.8 18.6 7.1 -6.2 -22.7
400. 1.0 74.9 79.7 81.2 81.0 79.4 76.8 73.2 68.7 63.2 56.9 49.7 41.5 32.4 22.1 10.6 -2.7 -19.2
500. 1.0 77.9 82.7 84.2 83.9 82.4 79.8 76.2 71.6 66.2 59.8 52.6 44.4 35.3 25.0 13.5 0.2 -16.3
630. 1.0 80.7 85.4 86.9 86.6 85.1 82.4 78.8 74.3 68.8 62.5 55.2 47.0 37.9 27.6 16.1 2.8 -13.7
800. 1.0 83.1 87.8 89.2 89.0 87.4 84.8 81.2 76.6 71.1 64.8 57.5 49.3 40.1 29.9 18.3 5.0 -11.5
1000. 1.0 84.9 89.6 91.1 90.8 89.2 86.6 82.9 78.4 72.9 66.5 59.2 51.0 41.8 31.5 20.0 6.6 -9.9
1250. 1.0 86.4 91.1 92.5 92.2 90.7 88.0 84.3 79.7 74.2 67.8 60.5 52.3 43.1 32.8 21.2 7.9 -8.7
1600. 1.0 87.6 92.3 93.7 93.4 91.8 89.1 85.4 80.8 75.2 68.8 61.5 53.3 44.0 33.7 22.1 8.7 -7.9
2000. 1.0 88.4 93.0 94.4 94.0 92.4 89.6 85.9 81.3 75.7 69.3 61.9 53.7 44.4 34.0 22.4 9.0 -7.6
2500. 1.0 88.7 93.3 94.6 94.2 92.6 89.8 86.1 81.4 75.8 69.3 61.9 53.6 44.3 33.9 22.2 8.8 -7.9
3150. 1.0 88.7 93.2 94.5 94.1 92.4 89.5 85.7 81.0 75.4 68.8 61.4 53.0 43.7 33.3 21.5 8.1 -8.6
4000. 1.0 88.3 92.7 94.0 93.5 91.7 88.8 85.0 80.2 74.5 67.9 60.4 52.0 42.5 32.0 20.2 6.7 -10.0
5000. 1.0 87.5 91.9 93.1 92.5 90.7 87.7 83.8 78.9 73.2 66.5 58.9 50.4 40.9 30.4 18.5 4.9 -11.9
6300. 1.0 86.5 90.8 91.9 91.2 89.3 86.2 82.2 77.3 71.4 64.6 57.0 48.4 38.8 28.1 16.2 2.5 -14.5
8000. 1.0 85.3 89.5 90.4 89.6 87.6 84.4 80.2 75.2 69.2 62.3 54.5 45.8 36.1 25.3 13.2 -0.6 -17.7
10000. 1.0 84.2 88.2 89.0 88.1 85.9 82.5 78.3 73.0 66.9 59.9 51.9 43.1 33.2 22.3 10.1 -3.9 -21.1
'''
    with open(self.filename, 'w') as outfile:
        outfile.write(data)

    gen = FileParser()
    gen.set_file(self.filename)

    # whitespace delim; with end field
    gen.set_delimiters(' \t')
    gen.mark_anchor('Anchor')
    val = gen.transfer_2Darray(3, 2, 26, 19)
    self.assertEqual(val[0, 1], 30.0)
    self.assertEqual(val[0, 17], -63.7)
    self.assertEqual(val[1, 17], -57.2)
    self.assertEqual(val[23, 17], -21.1)
    self.assertEqual(val.shape[0], 24)
    self.assertEqual(val.shape[1], 18)

    # whitespace delim; no end field
    val = gen.transfer_2Darray(3, 2, 26)
    self.assertEqual(val[0, 1], 30.0)
    self.assertEqual(val[23, 17], -21.1)
    self.assertEqual(val.shape[0], 24)
    self.assertEqual(val.shape[1], 18)

    # column delim; with end field
    gen.set_delimiters('columns')
    val = gen.transfer_2Darray(3, 19, 26, 125)
    self.assertEqual(val[0, 1], 30.0)
    self.assertEqual(val[0, 17], -63.7)
    self.assertEqual(val[1, 17], -57.2)
    self.assertEqual(val[23, 17], -21.1)
    self.assertEqual(val.shape[0], 24)
    self.assertEqual(val.shape[1], 18)

    # column delim; no end field
    val = gen.transfer_2Darray(3, 19, 26)
    self.assertEqual(val[0, 1], 30.0)
    self.assertEqual(val[0, 17], -63.7)
    self.assertEqual(val[1, 17], -57.2)
    self.assertEqual(val[23, 17], -21.1)
    self.assertEqual(val.shape[0], 24)
    self.assertEqual(val.shape[1], 18)

    # make sure single line works
    gen.set_delimiters(' \t')
    val = gen.transfer_2Darray(5, 3, 5, 5)
    self.assertEqual(val[0, 2], 49.1)

    # Small block read
    val = gen.transfer_2Darray(7, 3, 9, 6)
    self.assertEqual(val[0, 0], 53.6)
    self.assertEqual(val[2, 0], 63.4)

    # Error messages for bad values
    try:
        gen.transfer_2Darray(7, 3, 9, 1)
    except ValueError as err:
        msg = "fieldend must be greater than fieldstart"
        self.assertEqual(str(err), msg)
    else:
        self.fail('ValueError expected')

    try:
        gen.transfer_2Darray(9, 2, 8, 4)
    except ValueError as err:
        msg = "rowend must be greater than rowstart"
        self.assertEqual(str(err), msg)
    else:
        self.fail('ValueError expected')
def test_comment_char(self):
    """Full-line and end-of-line comment characters are skipped."""
    data = "Junk\n" + \
           "CAnchor\n" + \
           " Z 11, 22 344, Test 1e65\n" + \
           " B 4 Stuff\n" + \
           " $ Anchor\n" + \
           " Q 1, 2 34, Test 1e65\n" + \
           " B 4 Stuff\n" + \
           "Anchor\n" + \
           " A 1, 2 34, Test 1e65\n" + \
           " B 4 Stuff\n" + \
           "Anchor\n" + \
           " C 77 False NaN 333.444\n" + \
           " 1,2,3,4,5\n" + \
           " Inf 1.#QNAN -1.#IND\n"
    with open(self.filename, 'w') as handle:
        handle.write(data)

    # With "C" as the full-line comment char the "CAnchor" line is
    # ignored, so the first real anchor is the bare "Anchor" line.
    parser = FileParser(full_line_comment_char="C")
    parser.set_file(self.filename)
    parser.set_delimiters(' ')
    parser.mark_anchor('Anchor')
    self.assertEqual(parser.transfer_var(1, 1), 'A')

    # Adding "$" as end-of-line comment char must give the same result.
    parser = FileParser(full_line_comment_char="C", end_of_line_comment_char="$")
    parser.set_file(self.filename)
    parser.set_delimiters(' ')
    parser.mark_anchor('Anchor')
    self.assertEqual(parser.transfer_var(1, 1), 'A')
def test_more_delims(self):
    """Custom delimiter sets, including '=' and '^'.

    Fix: removed the unused local ``olddelims`` (assigned, never read)
    and used a context manager for file handling.
    """
    data = "anchor,1.0,2.0\n" + \
           "abc=123.456\n" + \
           "c=1,2,Word,6\n" + \
           "d=C:/abc/def,a+b*c^2,(%#%),!true\n" + \
           "a^33 1.#QNAN^#$%^"
    with open(self.filename, 'w') as outfile:
        outfile.write(data)

    op = FileParser()
    op.set_file(self.filename)
    op.set_delimiters(' \t,=')
    op.mark_anchor('anchor')
    val = op.transfer_var(0, 1)
    self.assertEqual(val, 'anchor')
    val = op.transfer_var(0, 2)
    self.assertEqual(val, 1.0)
    val = op.transfer_var(1, 1)
    self.assertEqual(val, 'abc')
    val = op.transfer_var(1, 2)
    self.assertEqual(val, 123.456)
    val = op.transfer_var(2, 4)
    self.assertEqual(val, 'Word')
    val = op.transfer_var(2, 5)
    self.assertEqual(val, 6)
    val = op.transfer_var(3, 2)
    self.assertEqual(val, 'C:/abc/def')
    val = op.transfer_var(3, 3)
    self.assertEqual(val, 'a+b*c^2')
    val = op.transfer_var(3, 4)
    self.assertEqual(val, '(%#%)')
    val = op.transfer_var(3, 5)
    self.assertEqual(val, '!true')

    # Switch to '^' as an additional delimiter.
    op.set_delimiters(' \t^')
    val = op.transfer_var(4, 1)
    self.assertEqual(val, 'a')
    val = op.transfer_var(4, 2)
    self.assertEqual(val, 33)
    val = op.transfer_var(4, 3)
    self.assertEqual(isnan(val), True)
    val = op.transfer_var(4, 4)
    self.assertEqual(val, '#$%')
if __name__ == '__main__':
    # Run this module's tests under nose, with coverage restricted to
    # the openmdao package and previous coverage data erased first.
    import nose
    import sys
    sys.argv.append('--cover-package=openmdao')
    sys.argv.append('--cover-erase')
    nose.runmodule()
|
|
"""
Base classes for writing management commands (named commands which can
be executed through ``django-admin.py`` or ``manage.py``).
"""
from __future__ import unicode_literals
import os
import sys
from optparse import make_option, OptionParser
import django
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import color_style, no_style
from django.utils.encoding import force_str
from django.utils.six import StringIO
class CommandError(Exception):
    """
    Exception raised to signal a failure inside a management command.

    When this exception is raised while a management command runs, it
    is caught and rendered as a nicely-printed error message on the
    appropriate output stream (i.e., stderr); raising it with a
    sensible description of the problem is therefore the preferred way
    to report that something has gone wrong in a command.
    """
def handle_default_options(options):
    """
    Apply the options that every command accepts (``--settings`` and
    ``--pythonpath``) so that ManagementUtility can honor them before
    searching for user commands.
    """
    settings_module = options.settings
    if settings_module:
        os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
    extra_path = options.pythonpath
    if extra_path:
        sys.path.insert(0, extra_path)
class OutputWrapper(object):
    """
    Thin wrapper around an output stream (stdout/stderr) that appends
    a line ending and optionally colorizes messages.
    """

    def __init__(self, out, style_func=None, ending='\n'):
        self._out = out
        # Only keep the style function when writing to a real terminal;
        # otherwise output stays uncolored.
        if hasattr(out, 'isatty') and out.isatty():
            self.style_func = style_func
        else:
            self.style_func = None
        self.ending = ending

    def __getattr__(self, name):
        # Delegate every other attribute to the wrapped stream.
        return getattr(self._out, name)

    def write(self, msg, style_func=None, ending=None):
        if ending is None:
            ending = self.ending
        if ending and not msg.endswith(ending):
            msg += ending
        # First non-None of: per-call style, instance style, identity.
        for candidate in (style_func, self.style_func):
            if candidate is not None:
                styler = candidate
                break
        else:
            styler = lambda text: text
        self._out.write(force_str(styler(msg)))
class BaseCommand(object):
    """
    The base class from which all management commands ultimately
    derive.

    Use this class if you want access to all of the mechanisms which
    parse the command-line arguments and work out what code to call in
    response; if you don't need to change any of that behavior,
    consider using one of the subclasses defined in this file.

    If you are interested in overriding/customizing various aspects of
    the command-parsing and -execution behavior, the normal flow works
    as follows:

    1. ``django-admin.py`` or ``manage.py`` loads the command class
       and calls its ``run_from_argv()`` method.

    2. The ``run_from_argv()`` method calls ``create_parser()`` to get
       an ``OptionParser`` for the arguments, parses them, performs
       any environment changes requested by options like
       ``pythonpath``, and then calls the ``execute()`` method,
       passing the parsed arguments.

    3. The ``execute()`` method attempts to carry out the command by
       calling the ``handle()`` method with the parsed arguments; any
       output produced by ``handle()`` will be printed to standard
       output and, if the command is intended to produce a block of
       SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.

    4. If ``handle()`` or ``execute()`` raised any exception (e.g.
       ``CommandError``), ``run_from_argv()`` will instead print an
       error message to ``stderr``.

    Thus, the ``handle()`` method is typically the starting point for
    subclasses; many built-in commands and command types either place
    all of their logic in ``handle()``, or perform some additional
    parsing work in ``handle()`` and then delegate from it to more
    specialized methods as needed.

    Several attributes affect behavior at various steps along the way:

    ``args``
        A string listing the arguments accepted by the command,
        suitable for use in help messages; e.g., a command which takes
        a list of application names might set this to '<appname
        appname ...>'.

    ``can_import_settings``
        A boolean indicating whether the command needs to be able to
        import Django settings; if ``True``, ``execute()`` will verify
        that this is possible before proceeding. Default value is
        ``True``.

    ``help``
        A short description of the command, which will be printed in
        help messages.

    ``option_list``
        This is the list of ``optparse`` options which will be fed
        into the command's ``OptionParser`` for parsing arguments.

    ``output_transaction``
        A boolean indicating whether the command outputs SQL
        statements; if ``True``, the output will automatically be
        wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
        ``False``.

    ``requires_model_validation``
        A boolean; if ``True``, validation of installed models will be
        performed prior to executing the command. Default value is
        ``True``. To validate an individual application's models
        rather than all applications' models, call
        ``self.validate(app)`` from ``handle()``, where ``app`` is the
        application's Python module.

    ``leave_locale_alone``
        A boolean indicating whether the locale set in settings should be
        preserved during the execution of the command instead of being
        forcibly set to 'en-us'.

        Default value is ``False``.

        Make sure you know what you are doing if you decide to change the value
        of this option in your custom command if it creates database content
        that is locale-sensitive and such content shouldn't contain any
        translations (like it happens e.g. with django.contrib.auth
        permissions) as making the locale differ from the de facto default
        'en-us' might cause unintended effects.

        This option can't be False when the can_import_settings option is set
        to False too because attempting to set the locale needs access to
        settings. This condition will generate a CommandError.
    """
    # Metadata about this command.
    option_list = (
        make_option('-v', '--verbosity', action='store', dest='verbosity', default='1',
            type='choice', choices=['0', '1', '2', '3'],
            help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output'),
        make_option('--settings',
            help='The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.'),
        make_option('--pythonpath',
            help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".'),
        make_option('--traceback', action='store_true',
            help='Raise on exception'),
        make_option('--no-color', action='store_true', dest='no_color', default=False,
            help="Don't colorize the command output."),
    )
    help = ''
    args = ''

    # Configuration shortcuts that alter various logic.
    can_import_settings = True
    requires_model_validation = True
    output_transaction = False  # Whether to wrap the output in a "BEGIN; COMMIT;"
    leave_locale_alone = False

    def __init__(self):
        # Colorized output style; execute() may replace it with
        # no_style() when --no-color is given.
        self.style = color_style()

    def get_version(self):
        """
        Return the Django version, which should be correct for all
        built-in Django commands. User-supplied commands should
        override this method.
        """
        return django.get_version()

    def usage(self, subcommand):
        """
        Return a brief description of how to use this command, by
        default from the attribute ``self.help``.
        """
        usage = '%%prog %s [options] %s' % (subcommand, self.args)
        if self.help:
            return '%s\n\n%s' % (usage, self.help)
        else:
            return usage

    def create_parser(self, prog_name, subcommand):
        """
        Create and return the ``OptionParser`` which will be used to
        parse the arguments to this command.
        """
        return OptionParser(prog=prog_name,
                            usage=self.usage(subcommand),
                            version=self.get_version(),
                            option_list=self.option_list)

    def print_help(self, prog_name, subcommand):
        """
        Print the help message for this command, derived from
        ``self.usage()``.
        """
        parser = self.create_parser(prog_name, subcommand)
        parser.print_help()

    def run_from_argv(self, argv):
        """
        Set up any environment changes requested (e.g., Python path
        and Django settings), then run this command. If the
        command raises a ``CommandError``, intercept it and print it sensibly
        to stderr. If the ``--traceback`` option is present or the raised
        ``Exception`` is not ``CommandError``, raise it.
        """
        parser = self.create_parser(argv[0], argv[1])
        options, args = parser.parse_args(argv[2:])
        handle_default_options(options)
        try:
            self.execute(*args, **options.__dict__)
        except Exception as e:
            # Only a CommandError without --traceback is turned into a
            # printed message; everything else propagates unchanged.
            if options.traceback or not isinstance(e, CommandError):
                raise

            # self.stderr is not guaranteed to be set here
            stderr = getattr(self, 'stderr', OutputWrapper(sys.stderr, self.style.ERROR))
            stderr.write('%s: %s' % (e.__class__.__name__, e))
            sys.exit(1)

    def execute(self, *args, **options):
        """
        Try to execute this command, performing model validation if
        needed (as controlled by the attribute
        ``self.requires_model_validation``, except if force-skipped).
        """
        self.stdout = OutputWrapper(options.get('stdout', sys.stdout))
        if options.get('no_color'):
            self.style = no_style()
            self.stderr = OutputWrapper(options.get('stderr', sys.stderr))
        else:
            self.stderr = OutputWrapper(options.get('stderr', sys.stderr), self.style.ERROR)

        if self.can_import_settings:
            # Import check only: verifies a settings module is reachable.
            from django.conf import settings  # NOQA

        saved_locale = None
        if not self.leave_locale_alone:
            # Only mess with locales if we can assume we have a working
            # settings file, because django.utils.translation requires settings
            # (The final saying about whether the i18n machinery is active will be
            # found in the value of the USE_I18N setting)
            if not self.can_import_settings:
                raise CommandError("Incompatible values of 'leave_locale_alone' "
                                   "(%s) and 'can_import_settings' (%s) command "
                                   "options." % (self.leave_locale_alone,
                                                 self.can_import_settings))
            # Switch to US English, because django-admin.py creates database
            # content like permissions, and those shouldn't contain any
            # translations.
            from django.utils import translation
            saved_locale = translation.get_language()
            translation.activate('en-us')

        try:
            if self.requires_model_validation and not options.get('skip_validation'):
                self.validate()
            output = self.handle(*args, **options)
            if output:
                if self.output_transaction:
                    # This needs to be imported here, because it relies on
                    # settings.
                    from django.db import connections, DEFAULT_DB_ALIAS
                    connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
                    if connection.ops.start_transaction_sql():
                        self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()))
                self.stdout.write(output)
                if self.output_transaction:
                    self.stdout.write('\n' + self.style.SQL_KEYWORD("COMMIT;"))
        finally:
            # Always restore the caller's locale, even if handle() raised.
            if saved_locale is not None:
                translation.activate(saved_locale)

    def validate(self, app=None, display_num_errors=False):
        """
        Validates the given app, raising CommandError for any errors.

        If app is None, then this will validate all installed apps.
        """
        from django.core.management.validation import get_validation_errors
        s = StringIO()
        num_errors = get_validation_errors(s, app)
        if num_errors:
            s.seek(0)
            error_text = s.read()
            raise CommandError("One or more models did not validate:\n%s" % error_text)
        if display_num_errors:
            self.stdout.write("%s error%s found" % (num_errors, '' if num_errors == 1 else 's'))

    def handle(self, *args, **options):
        """
        The actual logic of the command. Subclasses must implement
        this method.
        """
        raise NotImplementedError('subclasses of BaseCommand must provide a handle() method')
class AppCommand(BaseCommand):
    """
    A management command that receives one or more installed
    application names and acts on each of them.

    Subclasses implement ``handle_app()`` instead of ``handle()``; it
    is invoked once per application module.
    """
    args = '<appname appname ...>'

    def handle(self, *app_labels, **options):
        from django.db import models
        if not app_labels:
            raise CommandError('Enter at least one appname.')
        try:
            app_list = [models.get_app(app_label) for app_label in app_labels]
        except (ImproperlyConfigured, ImportError) as e:
            raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
        # Collect the non-empty per-app results and join them.
        pieces = []
        for app_module in app_list:
            piece = self.handle_app(app_module, **options)
            if piece:
                pieces.append(piece)
        return '\n'.join(pieces)

    def handle_app(self, app, **options):
        """
        Perform the command's actions for ``app``, which will be the
        Python module corresponding to an application name given on
        the command line.
        """
        raise NotImplementedError('subclasses of AppCommand must provide a handle_app() method')
class LabelCommand(BaseCommand):
    """
    A management command that receives one or more arbitrary
    command-line arguments (labels) and acts on each of them.

    Subclasses implement ``handle_label()`` instead of ``handle()``;
    it is invoked once per label. If the arguments should be names of
    installed applications, use ``AppCommand`` instead.
    """
    args = '<label label ...>'
    label = 'label'

    def handle(self, *labels, **options):
        if not labels:
            raise CommandError('Enter at least one %s.' % self.label)
        # Keep only non-empty per-label results, preserving order.
        results = [out for out in
                   (self.handle_label(label, **options) for label in labels)
                   if out]
        return '\n'.join(results)

    def handle_label(self, label, **options):
        """
        Perform the command's actions for ``label``, which will be the
        string as given on the command line.
        """
        raise NotImplementedError('subclasses of LabelCommand must provide a handle_label() method')
class NoArgsCommand(BaseCommand):
    """
    A management command that accepts no positional arguments.

    Subclasses implement ``handle_noargs()``; ``handle()`` is
    overridden to reject any positional arguments with a
    ``CommandError`` before delegating.
    """
    args = ''

    def handle(self, *args, **options):
        if len(args) > 0:
            raise CommandError("Command doesn't accept any arguments")
        return self.handle_noargs(**options)

    def handle_noargs(self, **options):
        """
        Perform this command's actions.
        """
        raise NotImplementedError('subclasses of NoArgsCommand must provide a handle_noargs() method')
|
|
"""Support for MQTT sensors."""
from __future__ import annotations
from datetime import timedelta
import functools
import logging
import voluptuous as vol
from homeassistant.components import sensor
from homeassistant.components.sensor import (
CONF_STATE_CLASS,
DEVICE_CLASSES_SCHEMA,
STATE_CLASSES_SCHEMA,
SensorEntity,
)
from homeassistant.const import (
CONF_DEVICE_CLASS,
CONF_FORCE_UPDATE,
CONF_NAME,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from . import CONF_QOS, CONF_STATE_TOPIC, DOMAIN, PLATFORMS, subscription
from .. import mqtt
from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttAvailability,
MqttEntity,
async_setup_entry_helper,
)
_LOGGER = logging.getLogger(__name__)

# Extra configuration keys understood by this sensor platform.
CONF_EXPIRE_AFTER = "expire_after"
CONF_LAST_RESET_TOPIC = "last_reset_topic"
CONF_LAST_RESET_VALUE_TEMPLATE = "last_reset_value_template"

# Entity attributes that discovery/attribute payloads may not override.
MQTT_SENSOR_ATTRIBUTES_BLOCKED = frozenset(
    {
        sensor.ATTR_LAST_RESET,
        sensor.ATTR_STATE_CLASS,
    }
)

# Defaults applied by PLATFORM_SCHEMA when the user omits the option.
DEFAULT_NAME = "MQTT Sensor"
DEFAULT_FORCE_UPDATE = False
def validate_options(conf):
    """Validate options.

    If last reset topic is present it must be same as the state topic.
    """
    has_last_reset_topic = CONF_LAST_RESET_TOPIC in conf
    if has_last_reset_topic and CONF_STATE_TOPIC in conf:
        # Mismatched topics are only warned about, not rejected.
        if conf[CONF_LAST_RESET_TOPIC] != conf[CONF_STATE_TOPIC]:
            _LOGGER.warning(
                "'%s' must be same as '%s'", CONF_LAST_RESET_TOPIC, CONF_STATE_TOPIC
            )
    if has_last_reset_topic and CONF_LAST_RESET_VALUE_TEMPLATE not in conf:
        _LOGGER.warning(
            "'%s' must be set if '%s' is set",
            CONF_LAST_RESET_VALUE_TEMPLATE,
            CONF_LAST_RESET_TOPIC,
        )
    return conf
# Config schema: the shared read-only-topic MQTT schema extended with
# sensor-specific options and the common entity options, with the
# deprecated last_reset_topic flagged and cross-field checks applied
# by validate_options.
PLATFORM_SCHEMA = vol.All(
    cv.deprecated(CONF_LAST_RESET_TOPIC),
    mqtt.MQTT_RO_PLATFORM_SCHEMA.extend(
        {
            vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
            vol.Optional(CONF_EXPIRE_AFTER): cv.positive_int,
            vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean,
            vol.Optional(CONF_LAST_RESET_TOPIC): mqtt.valid_subscribe_topic,
            vol.Optional(CONF_LAST_RESET_VALUE_TEMPLATE): cv.template,
            vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
            vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
            vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
        }
    ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema),
    validate_options,
)
async def async_setup_platform(
    hass: HomeAssistant, config: ConfigType, async_add_entities, discovery_info=None
):
    """Set up MQTT sensors through configuration.yaml."""
    # Register the reload service for this platform, then add the entity.
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
    await _async_setup_entity(hass, async_add_entities, config)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up MQTT sensors dynamically through MQTT discovery."""
    # Bind the entity factory to this config entry; the helper calls it
    # for every discovered sensor config.
    entity_factory = functools.partial(
        _async_setup_entity, hass, async_add_entities, config_entry=config_entry
    )
    await async_setup_entry_helper(hass, sensor.DOMAIN, entity_factory, PLATFORM_SCHEMA)
async def _async_setup_entity(
    hass, async_add_entities, config: ConfigType, config_entry=None, discovery_data=None
):
    """Set up MQTT sensor."""
    # One validated config dict -> one MqttSensor entity.
    async_add_entities([MqttSensor(hass, config, config_entry, discovery_data)])
class MqttSensor(MqttEntity, SensorEntity):
    """Representation of a sensor that can be updated using MQTT."""

    # Only set when last_reset messages are received and parse cleanly.
    _attr_last_reset = None
    # Attributes that JSON attribute payloads may not override.
    _attributes_extra_blocked = MQTT_SENSOR_ATTRIBUTES_BLOCKED

    def __init__(self, hass, config, config_entry, discovery_data):
        """Initialize the sensor."""
        self._state = None
        self._expiration_trigger = None

        expire_after = config.get(CONF_EXPIRE_AFTER)
        if expire_after is not None and expire_after > 0:
            # Expiry enabled: treat the value as expired until the first
            # message arrives.
            self._expired = True
        else:
            # Tri-state flag: None means expiry is disabled entirely.
            self._expired = None

        MqttEntity.__init__(self, hass, config, config_entry, discovery_data)

    @staticmethod
    def config_schema():
        """Return the config schema."""
        return PLATFORM_SCHEMA

    def _setup_from_config(self, config):
        """(Re)Setup the entity."""
        # Attach hass to the configured templates so they can render.
        template = self._config.get(CONF_VALUE_TEMPLATE)
        if template is not None:
            template.hass = self.hass
        last_reset_template = self._config.get(CONF_LAST_RESET_VALUE_TEMPLATE)
        if last_reset_template is not None:
            last_reset_template.hass = self.hass

    async def _subscribe_topics(self):
        """(Re)Subscribe to topics."""
        topics = {}

        def _update_state(msg):
            # Update self._state from an incoming state-topic payload,
            # optionally rendering it through the value template.
            payload = msg.payload
            # auto-expire enabled?
            expire_after = self._config.get(CONF_EXPIRE_AFTER)
            if expire_after is not None and expire_after > 0:
                # When expire_after is set, and we receive a message, assume device is not expired since it has to be to receive the message
                self._expired = False

                # Reset old trigger
                if self._expiration_trigger:
                    self._expiration_trigger()
                    self._expiration_trigger = None

                # Set new trigger
                expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)

                self._expiration_trigger = async_track_point_in_utc_time(
                    self.hass, self._value_is_expired, expiration_at
                )

            template = self._config.get(CONF_VALUE_TEMPLATE)
            if template is not None:
                variables = {"entity_id": self.entity_id}
                payload = template.async_render_with_possible_json_value(
                    payload,
                    self._state,
                    variables=variables,
                )
            self._state = payload

        def _update_last_reset(msg):
            # Parse a last_reset timestamp from the payload; empty or
            # unparseable payloads are logged and ignored.
            payload = msg.payload

            template = self._config.get(CONF_LAST_RESET_VALUE_TEMPLATE)
            if template is not None:
                variables = {"entity_id": self.entity_id}
                payload = template.async_render_with_possible_json_value(
                    payload,
                    self._state,
                    variables=variables,
                )
            if not payload:
                _LOGGER.debug("Ignoring empty last_reset message from '%s'", msg.topic)
                return
            try:
                last_reset = dt_util.parse_datetime(payload)
                if last_reset is None:
                    raise ValueError
                self._attr_last_reset = last_reset
            except ValueError:
                _LOGGER.warning(
                    "Invalid last_reset message '%s' from '%s'", msg.payload, msg.topic
                )

        @callback
        @log_messages(self.hass, self.entity_id)
        def message_received(msg):
            """Handle new MQTT messages."""
            _update_state(msg)
            # last_reset rides on the state topic when no dedicated
            # last_reset topic is configured (or it equals the state topic).
            if CONF_LAST_RESET_VALUE_TEMPLATE in self._config and (
                CONF_LAST_RESET_TOPIC not in self._config
                or self._config[CONF_LAST_RESET_TOPIC] == self._config[CONF_STATE_TOPIC]
            ):
                _update_last_reset(msg)
            self.async_write_ha_state()

        topics["state_topic"] = {
            "topic": self._config[CONF_STATE_TOPIC],
            "msg_callback": message_received,
            "qos": self._config[CONF_QOS],
        }

        @callback
        @log_messages(self.hass, self.entity_id)
        def last_reset_message_received(msg):
            """Handle new last_reset messages."""
            _update_last_reset(msg)
            self.async_write_ha_state()

        # Separate subscription only for a distinct (deprecated)
        # last_reset topic.
        if (
            CONF_LAST_RESET_TOPIC in self._config
            and self._config[CONF_LAST_RESET_TOPIC] != self._config[CONF_STATE_TOPIC]
        ):
            topics["last_reset_topic"] = {
                "topic": self._config[CONF_LAST_RESET_TOPIC],
                "msg_callback": last_reset_message_received,
                "qos": self._config[CONF_QOS],
            }

        self._sub_state = await subscription.async_subscribe_topics(
            self.hass, self._sub_state, topics
        )

    @callback
    def _value_is_expired(self, *_):
        """Triggered when value is expired."""
        self._expiration_trigger = None
        self._expired = True

        self.async_write_ha_state()

    @property
    def native_unit_of_measurement(self):
        """Return the unit this state is expressed in."""
        return self._config.get(CONF_UNIT_OF_MEASUREMENT)

    @property
    def force_update(self):
        """Force update."""
        return self._config[CONF_FORCE_UPDATE]

    @property
    def native_value(self):
        """Return the state of the entity."""
        return self._state

    @property
    def device_class(self) -> str | None:
        """Return the device class of the sensor."""
        return self._config.get(CONF_DEVICE_CLASS)

    @property
    def state_class(self) -> str | None:
        """Return the state class of the sensor."""
        return self._config.get(CONF_STATE_CLASS)

    @property
    def available(self) -> bool:
        """Return true if the device is available and value has not expired."""
        expire_after = self._config.get(CONF_EXPIRE_AFTER)
        # mypy doesn't know about fget: https://github.com/python/mypy/issues/6185
        return MqttAvailability.available.fget(self) and (  # type: ignore[attr-defined]
            expire_after is None or not self._expired
        )
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from karbor.common import constants
from karbor.context import RequestContext
from karbor import exception
from karbor.resource import Resource
from karbor.services.protection.bank_plugin import Bank
from karbor.services.protection.bank_plugin import BankPlugin
from karbor.services.protection.bank_plugin import BankSection
from karbor.services.protection import client_factory
from karbor.services.protection.protection_plugins.volume.\
volume_glance_plugin import VolumeGlanceProtectionPlugin
from karbor.services.protection.protection_plugins.volume import \
volume_glance_plugin_schemas
from karbor.tests import base
import mock
from oslo_config import cfg
from oslo_config import fixture
class FakeBankPlugin(BankPlugin):
    """No-op BankPlugin backing the shared fake bank in these tests."""

    def update_object(self, key, value, context=None):
        """Pretend to store *value* under *key*."""
        return None

    def get_object(self, key, context=None):
        """Pretend to fetch the object stored under *key*."""
        return None

    def list_objects(self, prefix=None, limit=None, marker=None,
                     sort_dir=None, context=None):
        """Pretend to list stored object keys."""
        return None

    def delete_object(self, key, context=None):
        """Pretend to delete the object stored under *key*."""
        return None

    def get_owner_id(self, context=None):
        """Pretend to report the owning project id."""
        return None
# Shared fake bank/section instances used by FakeCheckpoint and the tests below.
fake_bank = Bank(FakeBankPlugin())
fake_bank_section = BankSection(bank=fake_bank, section="fake")
# Lightweight stand-ins for karbor/cinder/glance resource objects.
ResourceNode = collections.namedtuple("ResourceNode", "value child_nodes")
Volume = collections.namedtuple("Volume", "id status size")
Snapshot = collections.namedtuple("Snapshot", "id status size")
Image = collections.namedtuple(
    "Image", "disk_format container_format status id")
def call_hooks(operation, checkpoint, resource, context, parameters, **kwargs):
    """Invoke each protection-operation lifecycle hook in order.

    Looks up 'on_prepare_begin', 'on_prepare_finish', 'on_main' and
    'on_complete' on *operation*; any hook the operation does not define
    is replaced by a no-op.
    """
    def _skip(*_args, **_kwargs):
        pass

    for hook_name in ('on_prepare_begin', 'on_prepare_finish',
                      'on_main', 'on_complete'):
        getattr(operation, hook_name, _skip)(
            checkpoint, resource, context, parameters, **kwargs)
class FakeCheckpoint(object):
    """Checkpoint double that always hands back the shared fake bank section."""

    def __init__(self):
        super(FakeCheckpoint, self).__init__()
        self.bank_section = fake_bank_section

    def get_resource_bank_section(self, resource_id=None):
        """Return the shared bank section regardless of *resource_id*."""
        return self.bank_section
class VolumeGlanceProtectionPluginTest(base.TestCase):
    """Unit tests for VolumeGlanceProtectionPlugin."""

    def setUp(self):
        """Build the plugin under test plus real client objects to mock."""
        super(VolumeGlanceProtectionPluginTest, self).setUp()
        plugin_config = cfg.ConfigOpts()
        plugin_config_fixture = self.useFixture(fixture.Config(plugin_config))
        plugin_config_fixture.load_raw_values(
            group='volume_glance_plugin',
            poll_interval=0,
            backup_image_object_size=65536
        )
        self.plugin = VolumeGlanceProtectionPlugin(plugin_config)
        cfg.CONF.set_default('glance_endpoint',
                             'http://127.0.0.1:9292',
                             'glance_client')

        cfg.CONF.set_default('cinder_endpoint',
                             'http://127.0.0.1:8774/v2.1',
                             'cinder_client')
        self.cntxt = RequestContext(user_id='demo',
                                    project_id='abcd',
                                    auth_token='efgh'
                                    )
        self.cinder_client = client_factory.ClientFactory.create_client(
            "cinder", self.cntxt)
        self.glance_client = client_factory.ClientFactory.create_client(
            "glance", self.cntxt)
        self.checkpoint = FakeCheckpoint()

    def test_get_options_schema(self):
        """Options schema matches the published plugin schema."""
        options_schema = self.plugin.get_options_schema(
            constants.VOLUME_RESOURCE_TYPE)
        self.assertEqual(options_schema,
                         volume_glance_plugin_schemas.OPTIONS_SCHEMA)

    def test_get_restore_schema(self):
        """Restore schema matches the published plugin schema."""
        options_schema = self.plugin.get_restore_schema(
            constants.VOLUME_RESOURCE_TYPE)
        self.assertEqual(options_schema,
                         volume_glance_plugin_schemas.RESTORE_SCHEMA)

    def test_get_saved_info_schema(self):
        """Saved-info schema matches the published plugin schema."""
        options_schema = self.plugin.get_saved_info_schema(
            constants.VOLUME_RESOURCE_TYPE)
        self.assertEqual(options_schema,
                         volume_glance_plugin_schemas.SAVED_INFO_SCHEMA)

    @mock.patch('karbor.services.protection.protection_plugins'
                '.utils.status_poll')
    @mock.patch('karbor.services.protection.clients.glance.create')
    @mock.patch('karbor.services.protection.clients.cinder.create')
    def test_create_backup(self, mock_cinder_create,
                           mock_glance_create, mock_status_poll):
        """Protect op snapshots the volume and uploads it as an image."""
        resource = Resource(id="123",
                            type=constants.VOLUME_RESOURCE_TYPE,
                            name='fake')

        fake_bank_section.update_object = mock.MagicMock()

        protect_operation = self.plugin.get_protect_operation(resource)
        mock_cinder_create.return_value = self.cinder_client
        mock_glance_create.return_value = self.glance_client

        mock_status_poll.return_value = True
        self.cinder_client.volume_snapshots.create = mock.MagicMock()
        self.cinder_client.volume_snapshots.create.return_value = Snapshot(
            id="1234",
            status="available",
            size='100000000'
        )
        self.cinder_client.volume_snapshots.get = mock.MagicMock()
        self.cinder_client.volume_snapshots.get.return_value = Snapshot(
            id="1234",
            status="available",
            size='100000000'
        )
        self.cinder_client.volumes.create = mock.MagicMock()
        self.cinder_client.volumes.create.return_value = Volume(
            id='2345',
            status='available',
            size=1
        )
        self.cinder_client.volumes.get = mock.MagicMock()
        self.cinder_client.volumes.get.return_value = Volume(
            id='2345',
            status='available',
            size=1
        )
        self.cinder_client.volumes.upload_to_image = mock.MagicMock()
        self.cinder_client.volumes.upload_to_image.return_value = [202, {
            'os-volume_upload_image': {
                'image_id': "3456"
            }
        }]
        self.glance_client.images.get = mock.MagicMock()
        # BUG FIX: the canned Image must be the return value of the
        # mocked images.get() call; assigning images.return_value (as
        # before) left images.get() returning a bare MagicMock.
        self.glance_client.images.get.return_value = Image(
            disk_format="raw",
            container_format="bare",
            status="active",
            id="3456"
        )
        fake_bank_section.update_object = mock.MagicMock()
        self.glance_client.images.data = mock.MagicMock()
        self.glance_client.images.data.return_value = []

        mock_status_poll.return_value = True
        self.cinder_client.volume_snapshots.delete = mock.MagicMock()
        self.cinder_client.volumes.delete = mock.MagicMock()
        self.glance_client.images.delete = mock.MagicMock()

        call_hooks(protect_operation, self.checkpoint, resource, self.cntxt,
                   {})

        self.cinder_client.volumes.upload_to_image.assert_called_with(
            volume=Volume(id='2345', status='available', size=1),
            force=True,
            image_name='temporary_image_of_2345',
            container_format="bare",
            disk_format="raw",
            visibility="private",
            protected=False
        )

    def test_delete_backup(self):
        """Delete op removes every object listed in the bank section."""
        resource = Resource(id="123",
                            type=constants.VOLUME_RESOURCE_TYPE,
                            name='fake')

        fake_bank_section.list_objects = mock.MagicMock()
        fake_bank_section.list_objects.return_value = ["data_1", "data_2"]
        fake_bank_section.delete_object = mock.MagicMock()
        delete_operation = self.plugin.get_delete_operation(resource)
        call_hooks(delete_operation, self.checkpoint, resource, self.cntxt,
                   {})

    def test_get_supported_resources_types(self):
        """Plugin declares support for exactly the volume resource type."""
        types = self.plugin.get_supported_resources_types()
        self.assertEqual([constants.VOLUME_RESOURCE_TYPE], types)

    @mock.patch('karbor.services.protection.protection_plugins.utils.'
                'update_resource_verify_result')
    def test_verify_backup(self, mock_update_verify):
        """Verify op reports 'available' backups as verified."""
        resource = Resource(id="123",
                            type=constants.VOLUME_RESOURCE_TYPE,
                            name='fake')

        fake_bank_section.get_object = mock.MagicMock()
        fake_bank_section.get_object.return_value = 'available'

        verify_operation = self.plugin.get_verify_operation(resource)
        call_hooks(verify_operation, self.checkpoint, resource, self.cntxt,
                   {})
        mock_update_verify.assert_called_with(
            None, resource.type, resource.id, 'available')

    @mock.patch('karbor.services.protection.protection_plugins.utils.'
                'update_resource_verify_result')
    def test_verify_backup_with_error_status(self, mock_update_verify):
        """Verify op raises VerifyResourceFailed for an 'error' backup."""
        resource = Resource(id="123",
                            type=constants.VOLUME_RESOURCE_TYPE,
                            name='fake')

        fake_bank_section.get_object = mock.MagicMock()
        fake_bank_section.get_object.return_value = 'error'

        verify_operation = self.plugin.get_verify_operation(resource)
        self.assertRaises(
            exception.VerifyResourceFailed, call_hooks, verify_operation,
            self.checkpoint, resource, self.cntxt, {})
        mock_update_verify.assert_called_with(
            None, resource.type, resource.id, 'error',
            'The status of volume backup status is error.')
|
|
# Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A simple shell that uses the IPython messaging system."""
# Override platform information.
import platform
platform.system = lambda: "pnacl"
platform.release = lambda: "chrome"
import time
import json
import logging
import sys
import Queue
import thread
# Message queues decoupling the pepper message pump (bottom of this file)
# from the kernel thread: *_input queues feed the kernel, *_output queues
# are drained back to the embedder over the three Jupyter channels.
stdin_input = Queue.Queue()
shell_input = Queue.Queue()
stdin_output = Queue.Queue()
shell_output = Queue.Queue()
iopub_output = Queue.Queue()
# Keep the real streams so debug output still reaches the console after
# sys.stdout / sys.stderr are replaced below.
sys_stdout = sys.stdout
sys_stderr = sys.stderr
def emit(s):
    """Debug helper: log *s* to the real stderr, then pause briefly."""
    print >> sys_stderr, "EMITTING: %s" % (s)
    time.sleep(1)
import IPython
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
from IPython.utils.traitlets import Type, Dict, Instance
from IPython.core.displayhook import DisplayHook
from IPython.utils import py3compat
from IPython.utils.py3compat import builtin_mod
from IPython.utils.jsonutil import json_clean, encode_images
from IPython.core.displaypub import DisplayPublisher
from IPython.config.configurable import Configurable
# module defined in shell.cc for communicating via pepper API
from pyppapi import nacl_instance
def CreateMessage(msg_type, parent_header=None, content=None):
    """Build a minimal Jupyter-style message dict of *msg_type*.

    ``parent_header`` and ``content`` default to fresh empty dicts.
    """
    header = {} if parent_header is None else parent_header
    body = {} if content is None else content
    return {
        'header': {'msg_type': msg_type},
        'parent_header': header,
        'content': body,
        'msg_type': msg_type,
    }
class MsgOutStream(object):
    """File-like object that forwards writes to iopub as 'stream' messages."""

    def __init__(self, stream_name):
        self._stream_name = stream_name
        self._parent_header = {}

    def SetParentHeader(self, parent_header):
        """Remember which request subsequent output belongs to."""
        self._parent_header = parent_header

    def close(self):
        pass

    def flush(self):
        pass

    def write(self, string):
        msg = CreateMessage(
            'stream',
            parent_header=self._parent_header,
            content={'name': self._stream_name, 'data': string})
        iopub_output.put(msg)

    def writelines(self, sequence):
        for item in sequence:
            self.write(item)
# override sys.stdout and sys.stderr to broadcast on iopub
stdout_stream = MsgOutStream('stdout')
stderr_stream = MsgOutStream('stderr')
sys.stdout = stdout_stream
sys.stderr = stderr_stream
class PepperShellDisplayHook(DisplayHook):
    """DisplayHook that accumulates one 'pyout' message per execution.

    IPython calls start_displayhook / write_output_prompt /
    write_format_data / finish_displayhook in that order; ``content``
    is built up across those calls and published on iopub at the end.
    """
    # Header of the execute_request the result belongs to.
    parent_header = Dict({})
    def set_parent_header(self, parent_header):
        """Set the parent for outbound messages."""
        self.parent_header = parent_header
    def start_displayhook(self):
        # Begin a fresh result message.
        self.content = {}
    def write_output_prompt(self):
        self.content['execution_count'] = self.prompt_count
    def write_format_data(self, format_dict, md_dict=None):
        # encode_images base64-encodes raw image payloads for JSON transport.
        self.content['data'] = encode_images(format_dict)
        self.content['metadata'] = md_dict
    def finish_displayhook(self):
        # Flush streams first so stdout/stderr arrive before the result.
        sys.stdout.flush()
        sys.stderr.flush()
        iopub_output.put(CreateMessage('pyout', parent_header=self.parent_header,
                                   content=self.content))
        self.content = None
class PepperDisplayPublisher(DisplayPublisher):
    """DisplayPublisher that emits display_data / clear_output on iopub."""
    # Header of the execute_request the published output belongs to.
    parent_header = Dict({})
    def set_parent_header(self, parent_header):
        """Set the parent for outbound messages."""
        self.parent_header = parent_header
    def _flush_streams(self):
        """flush IO Streams prior to display"""
        sys.stdout.flush()
        sys.stderr.flush()
    def publish(self, source, data, metadata=None):
        """Broadcast a 'display_data' message for *data* from *source*."""
        self._flush_streams()
        if metadata is None:
            metadata = {}
        self._validate_data(source, data, metadata)
        content = {}
        content['source'] = source
        # base64-encode raw image payloads so the dict is JSON-safe
        content['data'] = encode_images(data)
        content['metadata'] = metadata
        iopub_output.put(CreateMessage('display_data', content=json_clean(content),
                                   parent_header=self.parent_header))
    def clear_output(self, stdout=True, stderr=True, other=True):
        """Broadcast a 'clear_output' message, resetting selected streams."""
        content = dict(stdout=stdout, stderr=stderr, other=other)
        if stdout:
            sys.stdout.write('\r')
        if stderr:
            sys.stderr.write('\r')
        self._flush_streams()
        iopub_output.put(CreateMessage('clear_output', content=content,
                                   parent_header=self.parent_header))
class PepperInteractiveShell(InteractiveShell):
    """A subclass of InteractiveShell for the Pepper Messaging API."""
    # Route result display and rich output through the pepper iopub queue.
    displayhook_class = Type(PepperShellDisplayHook)
    display_pub_class = Type(PepperDisplayPublisher)
    @staticmethod
    def enable_gui(gui):
        # GUI event-loop integration is meaningless inside the NaCl sandbox.
        pass
InteractiveShellABC.register(PepperInteractiveShell)
class PepperKernel(Configurable):
    """Owns the shell instance and prepares the sandboxed environment."""
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
    shell_class = Type(PepperInteractiveShell)
    def __init__(self):
        self.shell = self.shell_class.instance(parent=self)
        # Point matplotlib config and temp files at writable in-sandbox
        # paths before matplotlib is imported for the first time.
        self.shell.run_cell("""
import os
matplotlib_config_dir = '/mplconfigdir'
os.environ['XDG_CONFIG_HOME'] = matplotlib_config_dir
os.environ['TMP'] = ''
import matplotlib
import matplotlib.cbook
""")
shell = PepperKernel().shell
# Taken from IPython 2.x branch, IPython/kernel/zmq/ipykernel.py
def _complete(msg):
    """Return shell completions for a 'complete_request' message.

    Mirrors the IPython 2.x ipykernel handler: when the cursor position
    in the request is missing or not convertible to an integer, guess
    that the cursor sits at the end of the text (or of the line when
    there is no text).
    """
    c = msg['content']
    try:
        cpos = int(c['cursor_pos'])
    except (KeyError, TypeError, ValueError):
        # Narrowed from a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit; these three cover a missing key
        # and any value int() cannot handle.
        cpos = len(c['text'])
        if cpos == 0:
            cpos = len(c['line'])
    return shell.complete(c['text'], c['line'], cpos)
# Special message to indicate the NaCl kernel is ready.
iopub_output.put(CreateMessage('status', content={'execution_state': 'nacl_ready'}))
def _no_raw_input(self=None):
    """Raise StdinNotImplementedError: this frontend has no stdin.

    *self* is accepted (and ignored) for compatibility with the IPython
    method this was copied from; the call site in main_loop invokes it
    with no arguments, which previously failed with TypeError before
    the intended error could be raised.
    """
    # NOTE(review): StdinNotImplementedError is never imported in this
    # module, so hitting this path raises NameError instead.  It should
    # come from IPython.core.error -- confirm and add the import.
    raise StdinNotImplementedError("raw_input was called, but this "
                                   "frontend does not support stdin.")
def _raw_input(prompt, parent_header):
    """Request one line of input from the frontend and block for the reply.

    Sends an 'input_request' on the stdin channel tagged with
    *parent_header*, then waits on the stdin_input queue for an answer.
    Raises EOFError when the reply is the EOF control character.
    """
    # Flush output before making the request.
    sys.stderr.flush()
    sys.stdout.flush()
    # flush the stdin socket, to purge stale replies
    while True:
        try:
            stdin_input.get_nowait()
        except Queue.Empty:
            break
    # Send the input request.
    content = json_clean(dict(prompt=prompt))
    stdin_output.put(CreateMessage('input_request', content=content,
                                   parent_header=parent_header))
    # Await a response.
    while True:
        try:
            reply = stdin_input.get()
        except Exception:
            print "Invalid Message"
        except KeyboardInterrupt:
            # re-raise KeyboardInterrupt, to truncate traceback
            raise KeyboardInterrupt
        else:
            break
    try:
        value = py3compat.unicode_to_str(reply['content']['value'])
    except:
        # Malformed reply: fall back to an empty answer rather than dying.
        print "Got bad raw_input reply: "
        print reply
        value = ''
    if value == '\x04':
        # EOF
        raise EOFError
    return value
def main_loop():
    """Kernel main loop: service shell-channel requests until restart/kill.

    Handles execute_request, complete_request and object_info_request,
    plus the NaCl-specific 'restart' (break, so shell.cc reruns this
    script) and 'kill' (raise, so the embedder does not restart it).
    """
    execution_count = 1
    while 1:
        iopub_output.put(CreateMessage('status', content={'execution_state': 'idle'}))
        msg = shell_input.get()
        iopub_output.put(CreateMessage('status', content={'execution_state': 'busy'}))
        # Ignore messages without the fields we dispatch on.
        if 'header' not in msg:
            continue
        request_header = msg['header']
        if 'msg_type' not in request_header:
            continue
        msg_type = request_header['msg_type']
        if msg_type == 'execute_request':
            try:
                content = msg[u'content']
                code = content[u'code']
                silent = content[u'silent']
                store_history = content.get(u'store_history', not silent)
            except (KeyError, TypeError):
                # BUG FIX: this is a module-level function with no
                # ``self``; the original ``self.log.error(...)`` raised
                # NameError on any malformed message.  Use logging.
                logging.error("Got bad msg: ")
                logging.error("%s", msg)
                continue
            # Replace raw_input. Note that is not sufficient to replace
            # raw_input in the user namespace.
            if content.get('allow_stdin', False):
                raw_input = lambda prompt='': _raw_input(prompt, request_header)
                input = lambda prompt='': eval(raw_input(prompt))
            else:
                raw_input = input = lambda prompt='' : _no_raw_input()
            if py3compat.PY3:
                _sys_raw_input = builtin_mod.input
                builtin_mod.input = raw_input
            else:
                _sys_raw_input = builtin_mod.raw_input
                _sys_eval_input = builtin_mod.input
                builtin_mod.raw_input = raw_input
                builtin_mod.input = input
            # Let output streams know which message the output is for
            stdout_stream.SetParentHeader(request_header)
            stderr_stream.SetParentHeader(request_header)
            shell.displayhook.set_parent_header(request_header)
            shell.display_pub.set_parent_header(request_header)
            status = 'ok'
            content = {}
            try:
                shell.run_cell(msg['content']['code'],
                               store_history=store_history,
                               silent=silent)
            except Exception as ex:
                # ``as`` form works on Python 2.6+ and 3 (old comma form
                # was Python-2-only).
                status = 'error'
                logging.exception('Exception occured while running cell')
            finally:
                # Restore raw_input.
                if py3compat.PY3:
                    builtin_mod.input = _sys_raw_input
                else:
                    builtin_mod.raw_input = _sys_raw_input
                    builtin_mod.input = _sys_eval_input
            content = {'status': status,
                       'execution_count': execution_count}
            if status == 'ok':
                content['payload'] = []
                content['user_variables'] = {}
                content['user_expressions'] = {}
            elif status == 'error':
                content['ename'] = type(ex).__name__
                content['evalue'] = str(ex)
                content['traceback'] = []
            execution_count += 1
            if status == 'error':
                # Broadcast the error on iopub as well as replying on shell.
                iopub_output.put(CreateMessage('pyerr', parent_header=request_header,
                                               content={
                                                   'execution_count': execution_count,
                                                   'ename': type(ex).__name__,
                                                   'evalue': str(ex),
                                                   'traceback': []
                                               }
                                               ))
            shell_output.put(CreateMessage('execute_reply', parent_header=request_header,
                                           content=content))
        elif msg_type == 'complete_request':
            # Taken from IPython 2.x branch, IPython/kernel/zmq/ipykernel.py
            txt, matches = _complete(msg)
            matches = {'matches' : matches,
                       'matched_text' : txt,
                       'status' : 'ok'}
            matches = json_clean(matches)
            shell_output.put(CreateMessage('complete_reply',
                                           parent_header = request_header,
                                           content = matches))
        elif msg_type == 'object_info_request':
            # Taken from IPython 2.x branch, IPython/kernel/zmq/ipykernel.py
            content = msg['content']
            object_info = shell.object_inspect(content['oname'],
                                               detail_level = content.get('detail_level', 0))
            # Before we send this object over, we scrub it for JSON usage
            oinfo = json_clean(object_info)
            shell_output.put(CreateMessage('object_info_reply',
                                           parent_header = request_header,
                                           content = oinfo))
        elif msg_type == 'restart':
            # break out of this loop, ending this program.
            # The main event loop in shell.cc will then
            # run this program again.
            break
        elif msg_type == 'kill':
            # Raise an exception so that the function
            # running this script will return -1, resulting
            # in no restart of this script.
            raise RuntimeError
thread.start_new_thread(main_loop, ())
def deal_message(msg):
    """Route one raw pepper message onto the matching kernel input queue."""
    channel = msg['stream']
    content = json.loads(msg['json'])
    routes = {'shell': shell_input, 'stdin': stdin_input}
    routes[channel].put(content)
def send_message(stream, msg):
    """Serialize *msg* and hand it to the pepper API tagged with *stream*."""
    payload = {'stream': stream, 'json': json.dumps(msg)}
    nacl_instance.send_raw_object(payload)
# Main pepper message pump: poll the embedder for an inbound message,
# dispatch it onto the kernel queues, then drain one pending message from
# each of the three output channels back to the embedder.
while 1:
    msg = nacl_instance.wait_for_message(timeout=1, sleeptime=10000)
    try:
        deal_message(msg)
    except:
        # wait_for_message can return junk on timeout; drop anything we
        # cannot route rather than killing the pump.
        pass
    output_streams = [
        (stdin_output, 'stdin'),
        (shell_output, 'shell'),
        (iopub_output, 'iopub')
    ]
    for msg_queue, stream in output_streams:
        msg = None
        try:
            msg = msg_queue.get_nowait()
            send_message(stream, msg)
        except Queue.Empty:
            pass
|
|
#!/usr/bin/env python
""" md5s3stash
content addressable storage in AWS S3
"""
import sys
import os
import argparse
import tempfile
import urllib.request, urllib.error, urllib.parse
import urllib.request, urllib.parse, urllib.error
import urllib.parse
import base64
import logging
import hashlib
import basin
import boto
import magic
from PIL import Image
from collections import namedtuple
import re
regex_s3 = re.compile(r's3.*amazonaws.com')
def main(argv=None):
    """CLI entry point: stash each URL argument and print a report line.

    *argv*, when supplied, must already be a parsed argparse namespace
    (the command line is only parsed when *argv* is None).  The bucket
    base comes from ``-b`` or the BUCKET_BASE environment variable.
    """
    parser = argparse.ArgumentParser(
        description='content addressable storage in AWS S3')
    parser.add_argument('url', nargs='+',
                        help='URL or path of source file to stash')
    parser.add_argument('-b', '--bucket_base', nargs="?",
                        help='this must be a unique name in all of AWS S3')
    parser.add_argument('-s', '--bucket_scheme', nargs="?",
                        default="simple", choices=['simple', 'multivalue'],
                        help='this must be a unique name in all of AWS S3')
    parser.add_argument(
        '-t', '--tempdir',
        required=False,
        help="if your files might be large, make sure this is on a big disk"
    )
    parser.add_argument(
        '-w', '--warnings',
        default=False,
        help='show python `DeprecationWarning`s supressed by default',
        required=False,
        action='store_true',
    )
    parser.add_argument('--loglevel', default='ERROR', required=False)
    parser.add_argument('-u', '--username', required=False,
                        help='username for downloads requiring BasicAuth')
    parser.add_argument('-p', '--password', required=False,
                        help='password for downloads requiring BasicAuth')
    if argv is None:
        argv = parser.parse_args()
    if argv.bucket_base:
        bucket_base = argv.bucket_base
    else:
        assert 'BUCKET_BASE' in os.environ, "`-b` or `BUCKET_BASE` must be set"
        bucket_base = os.environ['BUCKET_BASE']
    if not argv.warnings:
        # supress warnings
        # http://stackoverflow.com/a/2047600/1763984
        import warnings
        warnings.simplefilter("ignore", DeprecationWarning)
    if argv.tempdir:
        tempfile.tempdir = argv.tempdir
    auth = None
    if argv.username:
        auth = (argv.username, argv.password)
    # set debugging level
    numeric_level = getattr(logging, argv.loglevel.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError('Invalid log level: %s' % argv.loglevel)
    logging.basicConfig(level=numeric_level, )
    # if being used in a library, probably want to be able to recycle
    # connection?
    conn = boto.connect_s3()
    for url in argv.url:
        # one tab-separated report line per stashed URL
        print(("{0}\t{1}\t{2}\t{3}".format(
            *md5s3stash(url, bucket_base, conn, url_auth=auth, bucket_scheme=argv.bucket_scheme)
        )))
def md5s3stash(
    url,
    bucket_base,
    conn=None,
    url_auth=None,
    url_cache={},
    hash_cache={},
    bucket_scheme='simple'
):
    """Download *url*, then store it content-addressed (by md5) in S3.

    `conn` is an optional boto.connect_s3() connection to reuse.
    `url_auth` is an optional ('<username>', '<password>') tuple for
    URLs that require Basic auth.
    `url_cache` is a dict-like keyed on url:
        url_cache[url] = {md5: ..., If-None-Match: etag,
                          If-Modified-Since: date}
    `hash_cache` is a dict-like keyed on md5:
        hash_cache[md5] = (s3_url, mime_type, dimensions)
    (the mutable defaults are intentional shared cross-call caches).
    `bucket_scheme` is 'simple' or 'multibucket'.

    Returns StashReport(url, md5, s3_url, mime_type, dimensions).
    """
    StashReport = namedtuple(
        'StashReport', 'url, md5, s3_url, mime_type, dimensions')
    file_path, md5, mime_type = checkChunks(url, url_auth, url_cache)
    cached = hash_cache.get(md5)
    if cached is not None:
        # Seen this exact content before: nothing to upload.
        return StashReport(url, md5, *cached)
    s3_url = md5_to_s3_url(md5, bucket_base, bucket_scheme=bucket_scheme)
    if conn is None:
        conn = boto.connect_s3()
    s3move(file_path, s3_url, mime_type, conn)
    (mime, dimensions) = image_info(file_path)
    os.remove(file_path)  # safer than rmtree
    hash_cache[md5] = (s3_url, mime, dimensions)
    report = StashReport(url, md5, *hash_cache[md5])
    logging.getLogger('MD5S3:stash').info(report)
    return report
# think about refactoring the next two functions
def md5_to_s3_url(md5, bucket_base, bucket_scheme='multibucket'):
    """Return the s3:// URL for *md5* within *bucket_base*.

    `bucket_scheme` selects the bucket layout:
      - 'simple':      s3://<bucket_base>/<md5>
      - 'multibucket': s3://<shard>.<bucket_base>/<md5>

    Raises ValueError for an unknown scheme (previously an unknown
    scheme fell through and raised UnboundLocalError on `url`).
    """
    if bucket_scheme == 'simple':
        return "s3://{0}/{1}".format(
            bucket_base,
            md5
        )
    if bucket_scheme == 'multibucket':
        return "s3://{0}.{1}/{2}".format(
            md5_to_bucket_shard(md5),
            bucket_base,
            md5
        )
    raise ValueError("unknown bucket_scheme: {0!r}".format(bucket_scheme))
def md5_to_http_url(md5, bucket_base, bucket_scheme='multibucket', s3_endpoint='s3.amazonaws.com'):
    """Return the http:// URL for *md5* within *bucket_base*.

    `bucket_scheme` selects the layout ('simple' or 'multibucket',
    mirroring md5_to_s3_url); `s3_endpoint` is the S3 HTTP host.

    Raises ValueError for an unknown scheme (previously an unknown
    scheme fell through and raised UnboundLocalError on `url`).
    """
    if bucket_scheme == 'simple':
        return "http://{0}/{1}/{2}".format(
            s3_endpoint,
            bucket_base,
            md5
        )
    if bucket_scheme == 'multibucket':
        return "http://{1}.{2}.{0}/{3}".format(
            s3_endpoint,
            md5_to_bucket_shard(md5),
            bucket_base,
            md5
        )
    raise ValueError("unknown bucket_scheme: {0!r}".format(bucket_scheme))
def md5_to_bucket_shard(md5):
    """Return a base-36 shard label derived from the md5's first two hex digits.

    Sharding rationale:
    # "Consider utilizing multiple buckets that start with different
    # alphanumeric characters. This will ensure a degree of partitioning
    # from the start. The higher your volume of concurrent PUT and
    # GET requests, the more impact this will likely have."
    # -- http://aws.amazon.com/articles/1904
    # "Bucket names must be a series of one or more labels. Adjacent
    # labels are separated by a single period (.). [...] Each label must
    # start and end with a lowercase letter or a number. "
    # -- http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html
    # see also: http://en.wikipedia.org/wiki/Base_36
    """
    alphabet = "0123456789abcdefghijklmnopqrstuvwxyz"
    # Fold the first two hex digits of the hash into one small integer;
    # md5 digits are evenly distributed, so the shards are too
    # (http://stats.stackexchange.com/a/70884/14900).
    value = int(md5[0], 16) + 10 * int(md5[1], 16)
    # Reduce modulo the alphabet size to pick a single-character label.
    shard = value % len(alphabet)
    return basin.encode(alphabet, shard)
def is_s3_url(url):
    '''Return True when *url* points at S3 (matches s3*.amazonaws.com).

    S3 replies "400 Bad Request" when HTTP auth headers are sent, so
    callers use this to skip Basic auth for S3 URLs (including regional
    hosts such as s3-us-west-2.amazonaws.com).
    '''
    return re.search(r's3.*amazonaws.com', url) is not None
def urlopen_with_auth(url, auth=None, cache={}):
    '''Open *url* as a conditional GET, optionally with HTTP Basic auth.

    `auth` is a (username, password) tuple; it is ignored for S3 URLs
    (S3 rejects requests carrying auth headers) and otherwise requires
    https.  `cache` (shared mutable default, intentional) supplies
    stored ETag / Last-Modified values so 304s can be served.
    '''
    opener = urllib.request.build_opener(DefaultErrorHandler())
    req = urllib.request.Request(url)
    p = urllib.parse.urlparse(url)
    # try to set headers for conditional get request
    try:
        here = cache[url]
        if 'If-None-Match' in here:
            req.add_header('If-None-Match', cache[url]['If-None-Match'],)
        if 'If-Modified-Since' in here:
            req.add_header('If-Modified-Since', cache[url]['If-Modified-Since'],)
    except KeyError:
        pass
    if not auth or is_s3_url(url):
        if p.scheme not in ['http', 'https']:
            return urllib.request.urlopen(url)  # urllib works with normal file paths
    else:
        # make sure https
        if p.scheme != 'https':
            raise urllib.error.URLError('Basic auth not over https is bad idea! \
                   scheme:{0}'.format(p.scheme))
        # Need to add header so it gets sent with first request,
        # else redirected to shib
        # BUG FIX: base64.b64encode requires bytes on Python 3; passing
        # the formatted str raised TypeError after the 2to3 port.
        # Encode to bytes, then decode the result back to str for the
        # header value.
        credentials = '{0}:{1}'.format(*auth).encode('utf-8')
        b64authstr = base64.b64encode(credentials).decode('ascii')
        req.add_header('Authorization', 'Basic {0}'.format(b64authstr))
    return opener.open(req)
def checkChunks(url, auth=None, cache={}):
    """Download *url* in chunks to a temp file, md5-hashing as we go.

    Returns (temp_file_path, md5_hexdigest, mime_type).  On a 304 Not
    Modified response the path and mime type are None and the md5 is
    taken from *cache*.  The mutable default ``cache={}`` is
    intentional: it persists conditional-GET headers (ETag /
    Last-Modified) and md5s across calls.

    NOTE(review): on HTTPError/URLError this returns bare ``False``,
    but the caller (md5s3stash) unpacks the result as a 3-tuple, which
    would raise TypeError -- confirm the intended error contract.

    based on downloadChunks@https://gist.github.com/gourneau/1430932
    and http://www.pythoncentral.io/hashing-files-with-python/
    """
    temp_file = tempfile.NamedTemporaryFile(delete=False, prefix='md5s3_')
    logging.getLogger('MD5S3').info("temp file path %s" % temp_file.name)
    hasher = hashlib.new('md5')
    BLOCKSIZE = 1024 * hasher.block_size
    try:
        req = urlopen_with_auth(url, auth=auth, cache=cache)
        thisurl = cache.get(url, dict())
        if req.getcode() == 304:
            # Not modified: reuse the md5 recorded on a previous fetch.
            return None, thisurl['md5'], None
        mime_type = req.info()['Content-type']
        # record these headers, they will let us pretend like we are a cacheing
        # proxy server, and send conditional GETs next time we see this file
        etag = req.info().get('ETag', None);
        if etag:
            thisurl['If-None-Match'] = etag
        lmod = req.info().get('Last-Modified', None);
        if lmod:
            thisurl['If-Modified-Since'] = lmod
        downloaded = 0
        with temp_file:
            while True:
                chunk = req.read(BLOCKSIZE)
                hasher.update(chunk)
                downloaded += len(chunk)
                if not chunk:
                    break
                temp_file.write(chunk)
    except urllib.error.HTTPError as e:
        print("HTTP Error:", e.code, url)
        return False
    except urllib.error.URLError as e:
        print("URL Error:", e.reason, url)
        return False
    md5 = hasher.hexdigest()
    thisurl['md5'] = md5
    cache[url] = thisurl
    return temp_file.name, md5, mime_type
def s3move(place1, place2, mime, s3):
    """Upload local file *place1* to s3:// URL *place2* with Content-Type *mime*.

    *s3* is a boto S3 connection.  The bucket is created on demand; an
    already-existing key is left untouched (content-addressed storage,
    so the contents would be identical).
    """
    l = logging.getLogger('MD5S3:s3move')
    l.debug({
        'place1': place1,
        'place2': place2,
        'mime': mime,
        's3': s3,
    })
    parts = urllib.parse.urlsplit(place2)
    # SplitResult(scheme='s3', netloc='test.pdf', path='/dkd', query=''
    # , fragment='')
    try:
        bucket = s3.get_bucket(parts.netloc, validate=False)
        l.debug('bucket exists')
    except boto.exception.S3ResponseError:
        bucket = s3.create_bucket(parts.netloc)
        l.debug('bucket created')
    if not(bucket.get_key(parts.path, validate=False)):
        key = bucket.new_key(parts.path)
        # metadata has to be set before setting contents/creating object.
        # See https://gist.github.com/garnaat/1791086
        key.set_metadata("Content-Type", mime)
        key.set_contents_from_filename(place1)
        # key.set_acl('public-read')
        l.debug('file sent to s3')
    else:
        l.info('key existed already')
def image_info(filepath):
    ''' get image info

    `filepath` path to a file

    returns a tuple of two values
        1. mime/type if an image; otherwise None
        2. a tuple of (width, height) from PIL if an image; otherwise (0,0)
    '''
    try:
        return (
            magic.Magic(mime=True).from_file(filepath),
            Image.open(filepath).size
        )
    except IOError as e:
        # BUG FIX: Python 3 removed exception ``.message``; the old
        # ``e.message.startswith(...)`` raised AttributeError inside the
        # handler.  str(e) carries the same text.
        if not str(e).startswith('cannot identify image file'):
            raise  # re-raise non-image errors with original traceback
        return (None, (0, 0))
# example 11.7 Defining URL handlers
# http://www.diveintopython.net/http_web_services/etags.html
class DefaultErrorHandler(urllib.request.HTTPDefaultErrorHandler):
    """Return 304 responses instead of raising, so callers can detect
    Not Modified (example 11.7,
    http://www.diveintopython.net/http_web_services/etags.html)."""

    def http_error_304(self, req, fp, code, msg, headers):
        """Build the HTTPError but hand it back as a normal response."""
        result = urllib.error.HTTPError(
            req.get_full_url(), code, msg, headers, fp)
        # BUG FIX: ``status`` became a read-only property in Python 3.9
        # (urllib.response.addinfourl), so the unconditional assignment
        # raised AttributeError there.  The property mirrors ``code``
        # anyway; only assign where the attribute is still writable.
        try:
            result.status = code
        except AttributeError:
            pass
        return result
# main() idiom for importing into REPL for debugging
if __name__ == "__main__":
sys.exit(main())
"""
Copyright (c) 2015, Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
|
|
"""AVL Binary Search Tree module."""
import math
import shutil
class BinaryTree(object):
"""
AVL Binary search tree.
"""
def __init__(self, iterable=None, autobalance=True):
"""Initialize bst with root and size."""
self.root = None
self._size = 0
self.autobalance = autobalance
self.rotations = 0
if iterable:
if isinstance(iterable, range):
iterable = treegen(iterable)
try:
for val in iterable:
self.insert(val)
except:
raise Exception
    def insert(self, val):
        """Insert a new node for *val*; duplicates are silently ignored.

        Walks down from the root comparing values, attaches a new
        BinaryTreeNode at the first empty slot, refreshes cached depths
        and, when autobalance is on, rebalances at the first unbalanced
        ancestor found.
        """
        cur = self.root
        if cur is None:
            # Empty tree: new value becomes the root.
            self.root = BinaryTreeNode(val)
            self._size += 1
        else:
            while True:
                if val == cur.val:
                    # Duplicate: nothing to do.
                    return
                # left_or_right picks the child slot *val* belongs in.
                child, left_or_right = cur.left_or_right(val)
                if child is None:
                    self._size += 1
                    new_node = BinaryTreeNode(val, parent=cur)
                    setattr(cur, left_or_right, new_node)
                    # Refresh depth bookkeeping up the ancestor chain
                    # before checking balance.
                    self._bubble_up_depth_from(cur)
                    if self.autobalance:
                        r_root = self._find_unbalanced(new_node)
                        if r_root:
                            self._rebalance(r_root)
                    break
                else:
                    cur = child
def search(self, val):
"""Return node with value val if it exists, otherwise None."""
cur_node = self.root
while cur_node:
if cur_node.val == val:
return cur_node
cur_node, trash = cur_node.left_or_right(val)
def delete(self, val, error=False):
"""Delete a node and reorganize tree as needed."""
to_d = self.search(val)
if error and to_d is None:
raise ValueError('Not in tree.')
if to_d:
replacement = None
check_from = to_d.parent
self._size -= 1
if to_d.is_leaf():
to_d.set_parents_child(None)
else:
children = to_d.children()
if len(children) == 1:
child = children[0]
child.parent = to_d.parent
to_d.set_parents_child(child)
replacement = child
else:
lmost = self.node_furthest('left', from_=to_d.right)
replacement = lmost
if lmost.parent is to_d:
check_from = lmost
else:
check_from = lmost.parent
if lmost.right:
lmost.right.parent = lmost.parent
lmost.set_parents_child(lmost.right)
if to_d.right:
to_d.right.parent = lmost
if to_d.left:
to_d.left.parent = lmost
lmost.right = to_d.right
lmost.left = to_d.left
to_d.set_parents_child(lmost)
lmost.parent = to_d.parent
if to_d.parent is None:
self.root = replacement
if check_from:
self._bubble_up_depth_from(check_from)
if self.autobalance:
r_root = self._find_unbalanced(check_from)
if r_root:
self._rebalance(r_root)
def contains(self, val):
"""Return whether val in bst."""
return bool(self.search(val))
def balance(self, from_='root'):
"""Return left vs right balance from a node on the bst."""
if from_ == 'root':
from_ = self.root
if from_ is None:
return 0
left_depth = from_.left.depth if from_.left else 0
right_depth = from_.right.depth if from_.right else 0
return left_depth - right_depth
def pre_order(self, start='root', attr='val'):
"""Return a generator of pre-order traversal."""
if start == 'root':
start = self.root
if start:
yield getattr(start, attr) if attr else start
for node in start.children():
for child in self.pre_order(start=node, attr=attr):
yield child
def post_order(self, start='root', attr='val'):
"""Return a generator of post-order traversal."""
if start == 'root':
start = self.root
if start:
for node in start.children():
for child in self.post_order(start=node, attr=attr):
yield child
yield getattr(start, attr) if attr else start
def in_order(self, start='root', attr='val'):
"""Return a generator of tree's nodes in order."""
if start == 'root':
start = self.root
if start:
for child in self.in_order(start=start.left, attr=attr):
yield child
yield getattr(start, attr) if attr else start
for child in self.in_order(start=start.right, attr=attr):
yield child
def breadth_first(self, start='root', attr='val'):
"""Return generator of breadth first traversal of tree rooted at root."""
if start == 'root':
start = self.root
parent_queue = []
if start:
parent_queue.append(start)
while parent_queue:
parent = parent_queue.pop(0)
yield getattr(parent, attr) if attr else parent
parent_queue.extend(parent.children())
def _rebalance(self, node):
"""Rebalance a tree starting at node."""
self.rotations += 1
if self.balance(from_=node) < 0:
if self.balance(from_=node.right) <= 0:
r_root = self._lr(node)
else:
r_root = self._rlr(node)
else:
if self.balance(from_=node.left) >= 0:
r_root = self._rr(node)
else:
r_root = self._lrr(node)
self._reset_depths_from(r_root)
self._bubble_up_depth_from(r_root)
next_node = self._find_unbalanced(node)
if next_node:
self._rebalance(next_node)
def _find_unbalanced(self, node):
"""Bubble up from a node and check for unbalanced trees."""
while node:
if abs(self.balance(from_=node)) > 1:
return node
node = node.parent
def _lr(self, old_root):
r"""
Perform a left rotation (lr) around the old_root (old_root).
3 5
\ / \
5 --> 3 9
/ \ \
4 9 4
"""
new_root = old_root.right
old_root.right = new_root.left
if new_root.left:
new_root.left.parent = old_root
if old_root.parent is None:
self.root = new_root
old_root.set_parents_child(new_root)
new_root.parent = old_root.parent
new_root.left = old_root
old_root.parent = new_root
return new_root
def _rr(self, old_root):
r"""
Perform a right rotation (rl) around the rotation_root (r_root).
7 5
/ / \
5 --> 1 7
/ \ /
1 6 6
"""
new_root = old_root.left
old_root.left = new_root.right
if new_root.right:
new_root.right.parent = old_root
if old_root.parent is None:
self.root = new_root
old_root.set_parents_child(new_root)
new_root.right = old_root
new_root.parent = old_root.parent
old_root.parent = new_root
return new_root
def _lrr(self, old_root):
"""
Perform left-right rotation.
Equivalent to left rotation on old_root.left then right rotation
on old_root, but faster.
"""
left_root = old_root.left
new_root = left_root.right
left_root.right = new_root.left
old_root.left = new_root.right
if new_root.left:
new_root.left.parent = left_root
if new_root.right:
new_root.right.parent = old_root
old_root.set_parents_child(new_root)
if old_root.parent is None:
self.root = new_root
new_root.parent = old_root.parent
new_root.right = old_root
new_root.left = left_root
old_root.parent = new_root
left_root.parent = new_root
return new_root
def _rlr(self, old_root):
"""
Perform right-left rotation.
Equivalent to right rotation on old_root.right then left rotation
on old_root, but faster.
"""
right_root = old_root.right
new_root = right_root.left
right_root.left = new_root.right
old_root.right = new_root.left
if new_root.right:
new_root.right.parent = right_root
if new_root.left:
new_root.left.parent = old_root
old_root.set_parents_child(new_root)
if old_root.parent is None:
self.root = new_root
new_root.parent = old_root.parent
new_root.left = old_root
new_root.right = right_root
old_root.parent = new_root
right_root.parent = new_root
return new_root
def _bubble_up_depth_from(self, node):
children = node.children()
node.depth = 1
if children:
node.depth += max(c.depth for c in children)
if node.parent:
self._bubble_up_depth_from(node.parent)
def _reset_depths_from(self, node):
children = node.children()
node.depth = 1
if children:
node.depth += max(self._reset_depths_from(c) for c in children)
return node.depth
def node_furthest(self, direction, from_='root'):
"""Traverse as far as possible in direction from from_."""
if from_ == 'root':
from_ = self.root
while True:
nxt = getattr(from_, direction)
if nxt is None:
break
from_ = nxt
return from_
def __len__(self):
"""Return number of nodes in bst."""
return self._size
def __repr__(self):
"""Return representation of tree instance."""
instance, iD = super(BinaryTree, self).__repr__().split('object')
return '''%s {
size: %d,
depth: %d,
rotations: %d
} %s
''' % (
instance,
len(self),
0 if not self.root else self.root.depth,
self.rotations,
iD
)
def __str__(self):
"""Return string depiction of tree."""
if self.root is None:
return 'Empty'
head = self.display_rows_from(self.root, 5, lambda n: n.val)
if self.root.depth > 5:
head += '\n\t...'
return head
class BinaryTreeNode(object):
    """Node object with helper methods for use in a Binary Tree."""

    def __init__(self, val, left=None, right=None, parent=None):
        """Store the value, optional children/parent links, and a depth of 1."""
        self.val = val
        self.left = left
        self.right = right
        self.parent = parent
        self.depth = 1

    @property
    def balance(self):
        """Right-subtree depth minus left-subtree depth (missing child = 0)."""
        depth_right = self.right.depth if self.right is not None else 0
        depth_left = self.left.depth if self.left is not None else 0
        return depth_right - depth_left

    def is_leaf(self):
        """Return whether node has no children."""
        return self.left is None and self.right is None

    def children(self):
        """Return the node's non-None children as a list."""
        return [child for child in (self.left, self.right) if child is not None]

    def left_or_right(self, val):
        """Return (child, side-name) for the branch a search for val should take."""
        side = 'left' if val < self.val else 'right'
        return getattr(self, side), side

    def set_parents_child(self, new_child):
        """Reassign parent's pointer to this node to new_child node."""
        parent = self.parent
        if not parent:
            return
        if parent.left is self:
            parent.left = new_child
        else:
            parent.right = new_child
def display_rows_from(root, num_rows, node_func, max_len=4, args=()):
    """Return a printable string of up to num_rows levels of the tree.

    Each node is rendered via ``node_func(node, *args)``; missing slots
    show as '_'. Layout width comes from the terminal (fallback 96 cols).
    """
    width = shutil.get_terminal_size((96, 20)).columns
    rows = [[root]]
    while len(rows) < num_rows:
        next_row = children_of(rows[-1])
        rows.append(next_row)
        if not any(next_row):
            # All slots empty: the tree ended before num_rows levels.
            break
    labels = [[node_func(node, *args) if node else '_' for node in row]
              for row in rows]
    return stringify_rows(labels, width, max_len)
def display(binary_tree, num_rows=5, node_func=None, args=(), attrs=None):
    """Interactive print loop.

    Opens a curses screen showing ``num_rows`` levels of the tree and lets
    the user navigate: 'a' descends left, 'd' descends right, 'w' moves back
    up, 'q'/'quit'/'exit' leaves. Any other input is treated as a
    colon-separated list of (dotted) attribute names to display per node.
    """
    if binary_tree.root is None:
        print('Empty')
        return
    # Imported lazily so merely importing this module never requires curses.
    import curses
    stdscr = curses.initscr()
    stdscr.keypad(True)
    curses.nonl()
    if node_func is None:
        # Default renderer: show the listed attributes joined by ':'.
        if attrs is None:
            attrs = ['val']
        elif isinstance(attrs, str):
            attrs = [attrs]
        node_func = attrs_func
        args = [attrs]
    # Widest label in the tree; rightmost node is used as the width probe.
    # NOTE(review): assumes the rightmost node renders the longest label
    # (true for the default 'val' display of an ordered tree) — confirm for
    # custom node_funcs.
    furthest_right = binary_tree.node_furthest('right')
    root = binary_tree.root
    top, prev_inp = 0, ''  # top: how many levels we have descended
    try:
        while True:
            max_len = len(str(node_func(furthest_right, *args)))
            hunk = display_rows_from(root,
                                     num_rows,
                                     node_func,
                                     max_len=max_len,
                                     args=args)
            stdscr.clear()
            stdscr.addstr(hunk)
            # Build the set of currently legal navigation keys.
            valid_moves = ''
            if root.left and root.left.depth > (num_rows - 2):
                valid_moves += 'a'
            if root.right and root.right.depth > (num_rows - 2):
                valid_moves += 'd'
            if top != 0:
                valid_moves += 'w'
            quits = {'q', 'quit', 'exit'}
            stdscr.addstr('q(uit)/attr/' + valid_moves + ': ')
            while True:
                inp = stdscr.getstr().decode()
                if inp == '':
                    # Bare Enter repeats the previous command.
                    inp = prev_inp
                else:
                    prev_inp = inp
                if inp in set(valid_moves) | quits:
                    break
                else:
                    # Otherwise accept the input only if every listed
                    # attribute actually resolves on the current root.
                    try:
                        for attr in inp.split(':'):
                            getr(root, attr)
                        break
                    except AttributeError:
                        pass
            if inp in quits:
                break
            if inp in valid_moves:
                top += {'a': 1, 'w': -1, 'd': 1}[inp]
                root = {'a': root.left,
                        'w': root.parent,
                        'd': root.right}[inp]
            else:
                # Switch the renderer to show the requested attributes.
                inp_list = inp.split(':')
                node_func = attrs_func
                args = [inp_list]
    except KeyboardInterrupt:
        pass
    # Restore the terminal before returning.
    stdscr.keypad(False)
    curses.endwin()
def stringify_rows(rows, width, max_len):
    """Create tree string.

    ``rows`` is a list of per-level label lists (row k has 2**k entries).
    Each label is centered in a cell of ``max_len`` characters, and cells
    are padded so every level spans roughly the same terminal ``width``.
    """
    # Alternate floor/ceil so fractional padding still sums to whole columns.
    funcs = [math.floor, math.ceil]
    width += width % 2  # force an even width so halving stays integral
    num_per = 1  # number of slots on the current level (doubles each row)
    res = []
    for row in rows:
        rowstr = ''
        each_gets = width / num_per  # target columns per slot (float)
        split_len = (width - (max_len * num_per)) / (num_per * 2)
        for x in range(num_per):
            # Surround the centered label with left/right padding; the inner
            # generator's own x (0, 1) picks floor for the left pad and ceil
            # for the right pad, and shadows the loop's x only inside the
            # generator expression.
            rowstr += str(row[x]).center(max_len).join(' ' * funcs[x % 2](split_len) for x in range(2))
            # Nudge the accumulated width by one column toward the target.
            each_currently_getting = len(rowstr) / (x + 1)
            if each_gets < each_currently_getting:
                rowstr = rowstr[:-1]
            elif each_gets > each_currently_getting:
                rowstr += ' '
        res.append(rowstr)
        num_per *= 2
    res = '\n'.join(res)
    return res
def children_of(nodes):
    """Return children of a list of nodes.

    Every input slot expands to exactly two output slots; a None input
    yields two None placeholders so level positions stay aligned.
    """
    expanded = []
    for node in nodes:
        expanded.extend([node.left, node.right] if node else [None, None])
    return expanded
def attrs_func(node, attrs):
    """Get all attrs of a node, separated by colons."""
    rendered = ['{}'.format(getr(node, attr)) for attr in attrs]
    return ':'.join(rendered)
def getr(obj, toget):
    """Getattr that follows dot-separated paths, stopping early at None."""
    target = obj
    for name in toget.split('.'):
        target = getattr(target, name)
        if target is None:
            break
    return target
def treegen(iterable):
    """Yield items from sorted iterable to build tree balanced.

    The median is yielded first, then values alternate between the left and
    right halves (each recursively ordered the same way), so inserting the
    yielded values one-by-one into a BST produces a balanced tree.
    """
    items = sorted(iterable)
    if len(items) < 2:
        # Zero or one element: nothing to split.
        for item in items:
            yield item
        return
    mid = len(items) // 2
    left = treegen(items[:mid])
    right = treegen(items[mid + 1:])
    yield items[mid]
    left_alive = right_alive = True
    while left_alive or right_alive:
        # Catch only generator exhaustion; the original bare ``except:``
        # also swallowed unrelated errors (and even KeyboardInterrupt).
        try:
            yield next(left)
        except StopIteration:
            left_alive = False
        try:
            yield next(right)
        except StopIteration:
            right_alive = False
|
|
# Copyright 2013-2016 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from cassandra.policies import WhiteListRoundRobinPolicy, FallthroughRetryPolicy
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from cassandra.query import SimpleStatement
from cassandra import ConsistencyLevel, WriteTimeout, Unavailable, ReadTimeout
from cassandra.protocol import SyntaxException
from cassandra.cluster import Cluster, NoHostAvailable
from tests.integration import get_cluster, get_node, use_singledc, PROTOCOL_VERSION, execute_until_pass
from greplin import scales
from tests.integration import BasicSharedKeyspaceUnitTestCaseWTable, BasicExistingKeyspaceUnitTestCase
def setup_module():
    """Module-level test hook: spin up the shared single-DC test cluster."""
    use_singledc()
class MetricsTests(unittest.TestCase):
    """Integration tests asserting that driver request metrics
    (connection errors, write/read timeouts, unavailables) are counted.

    Requires a live multi-node test cluster (see ``setup_module``).
    """

    def setUp(self):
        """Connect a metrics-enabled cluster pinned to a single coordinator.

        FallthroughRetryPolicy makes errors surface to the caller instead of
        being retried, keeping the metric counts deterministic.
        """
        contact_point = ['127.0.0.2']
        self.cluster = Cluster(contact_points=contact_point, metrics_enabled=True, protocol_version=PROTOCOL_VERSION,
                               load_balancing_policy=WhiteListRoundRobinPolicy(contact_point),
                               default_retry_policy=FallthroughRetryPolicy())
        self.session = self.cluster.connect("test3rf", wait_for_all_pools=True)

    def tearDown(self):
        """Shut down the per-test cluster."""
        self.cluster.shutdown()

    def test_connection_error(self):
        """
        Trigger and ensure connection_errors are counted
        Stop all node with the driver knowing about the "DOWN" states.
        """
        # Test writes
        for i in range(0, 100):
            self.session.execute_async("INSERT INTO test (k, v) VALUES ({0}, {1})".format(i, i))
        # Stop the cluster
        get_cluster().stop(wait=True, gently=False)
        try:
            # Ensure the nodes are actually down
            query = SimpleStatement("SELECT * FROM test", consistency_level=ConsistencyLevel.ALL)
            with self.assertRaises(NoHostAvailable):
                self.session.execute(query)
        finally:
            get_cluster().start(wait_for_binary_proto=True, wait_other_notice=True)
            # Give some time for the cluster to come back up, for the next test
            time.sleep(5)
        self.assertGreater(self.cluster.metrics.stats.connection_errors, 0)

    def test_write_timeout(self):
        """
        Trigger and ensure write_timeouts are counted
        Write a key, value pair. Pause a node without the coordinator node knowing about the "DOWN" state.
        Attempt a write at cl.ALL and receive a WriteTimeout.
        """
        # Test write
        self.session.execute("INSERT INTO test (k, v) VALUES (1, 1)")
        # Assert read
        query = SimpleStatement("SELECT * FROM test WHERE k=1", consistency_level=ConsistencyLevel.ALL)
        results = execute_until_pass(self.session, query)
        self.assertTrue(results)
        # Pause node so it shows as unreachable to coordinator
        get_node(1).pause()
        try:
            # Test write
            query = SimpleStatement("INSERT INTO test (k, v) VALUES (2, 2)", consistency_level=ConsistencyLevel.ALL)
            with self.assertRaises(WriteTimeout):
                self.session.execute(query, timeout=None)
            self.assertEqual(1, self.cluster.metrics.stats.write_timeouts)
        finally:
            get_node(1).resume()

    def test_read_timeout(self):
        """
        Trigger and ensure read_timeouts are counted
        Write a key, value pair. Pause a node without the coordinator node knowing about the "DOWN" state.
        Attempt a read at cl.ALL and receive a ReadTimeout.
        """
        # Test write
        self.session.execute("INSERT INTO test (k, v) VALUES (1, 1)")
        # Assert read
        query = SimpleStatement("SELECT * FROM test WHERE k=1", consistency_level=ConsistencyLevel.ALL)
        results = execute_until_pass(self.session, query)
        self.assertTrue(results)
        # Pause node so it shows as unreachable to coordinator
        get_node(1).pause()
        try:
            # Test read
            query = SimpleStatement("SELECT * FROM test", consistency_level=ConsistencyLevel.ALL)
            with self.assertRaises(ReadTimeout):
                self.session.execute(query, timeout=None)
            self.assertEqual(1, self.cluster.metrics.stats.read_timeouts)
        finally:
            get_node(1).resume()

    def test_unavailable(self):
        """
        Trigger and ensure unavailables are counted
        Write a key, value pair. Stop a node with the coordinator node knowing about the "DOWN" state.
        Attempt an insert/read at cl.ALL and receive a Unavailable Exception.
        """
        # Test write
        self.session.execute("INSERT INTO test (k, v) VALUES (1, 1)")
        # Assert read
        query = SimpleStatement("SELECT * FROM test WHERE k=1", consistency_level=ConsistencyLevel.ALL)
        results = execute_until_pass(self.session, query)
        self.assertTrue(results)
        # Stop node gracefully
        get_node(1).stop(wait=True, wait_other_notice=True)
        try:
            # Test write
            query = SimpleStatement("INSERT INTO test (k, v) VALUES (2, 2)", consistency_level=ConsistencyLevel.ALL)
            with self.assertRaises(Unavailable):
                self.session.execute(query)
            self.assertEqual(self.cluster.metrics.stats.unavailables, 1)
            # Test write
            query = SimpleStatement("SELECT * FROM test", consistency_level=ConsistencyLevel.ALL)
            with self.assertRaises(Unavailable):
                self.session.execute(query, timeout=None)
            self.assertEqual(self.cluster.metrics.stats.unavailables, 2)
        finally:
            get_node(1).start(wait_other_notice=True, wait_for_binary_proto=True)
            # Give some time for the cluster to come back up, for the next test
            time.sleep(5)
        self.cluster.shutdown()

    # def test_other_error(self):
    #     # TODO: Bootstrapping or Overloaded cases
    #     pass
    #
    # def test_ignore(self):
    #     # TODO: Look for ways to generate ignores
    #     pass
    #
    # def test_retry(self):
    #     # TODO: Look for ways to generate retries
    #     pass
class MetricsNamespaceTest(BasicSharedKeyspaceUnitTestCaseWTable):
    """Integration tests for per-cluster metric scoping and naming."""

    def test_metrics_per_cluster(self):
        """
        Test to validate that metrics can be scopped to invdividual clusters
        @since 3.6.0
        @jira_ticket PYTHON-561
        @expected_result metrics should be scopped to a cluster level
        @test_category metrics
        """
        cluster2 = Cluster(metrics_enabled=True, protocol_version=PROTOCOL_VERSION,
                           default_retry_policy=FallthroughRetryPolicy())
        cluster2.connect(self.ks_name, wait_for_all_pools=True)
        query = SimpleStatement("SELECT * FROM {0}.{0}".format(self.ks_name), consistency_level=ConsistencyLevel.ALL)
        self.session.execute(query)
        # Pause node so it shows as unreachable to coordinator
        get_node(1).pause()
        try:
            # Test write
            query = SimpleStatement("INSERT INTO {0}.{0} (k, v) VALUES (2, 2)".format(self.ks_name), consistency_level=ConsistencyLevel.ALL)
            with self.assertRaises(WriteTimeout):
                self.session.execute(query, timeout=None)
        finally:
            get_node(1).resume()
        # Change the scales stats_name of the cluster2
        cluster2.metrics.set_stats_name('cluster2-metrics')
        stats_cluster1 = self.cluster.metrics.get_stats()
        stats_cluster2 = cluster2.metrics.get_stats()
        # Test direct access to stats: only cluster1 saw the write timeout.
        self.assertEqual(1, self.cluster.metrics.stats.write_timeouts)
        self.assertEqual(0, cluster2.metrics.stats.write_timeouts)
        # Test direct access to a child stats
        self.assertNotEqual(0.0, self.cluster.metrics.request_timer['mean'])
        self.assertEqual(0.0, cluster2.metrics.request_timer['mean'])
        # Test access via metrics.get_stats()
        self.assertNotEqual(0.0, stats_cluster1['request_timer']['mean'])
        self.assertEqual(0.0, stats_cluster2['request_timer']['mean'])
        # Test access by stats_name
        self.assertEqual(0.0, scales.getStats()['cluster2-metrics']['request_timer']['mean'])
        cluster2.shutdown()

    def test_duplicate_metrics_per_cluster(self):
        """
        Test to validate that cluster metrics names can't overlap.
        @since 3.6.0
        @jira_ticket PYTHON-561
        @expected_result metric names should not be allowed to be same.
        @test_category metrics
        """
        cluster2 = Cluster(metrics_enabled=True, protocol_version=PROTOCOL_VERSION,
                           default_retry_policy=FallthroughRetryPolicy())
        cluster3 = Cluster(metrics_enabled=True, protocol_version=PROTOCOL_VERSION,
                           default_retry_policy=FallthroughRetryPolicy())
        # Ensure duplicate metric names are not allowed
        # NOTE(review): the repeated call below appears to check that
        # re-setting the *same* name on the same cluster is tolerated, while
        # reusing it from another cluster raises — confirm intent.
        cluster2.metrics.set_stats_name("appcluster")
        cluster2.metrics.set_stats_name("appcluster")
        with self.assertRaises(ValueError):
            cluster3.metrics.set_stats_name("appcluster")
        cluster3.metrics.set_stats_name("devops")
        session2 = cluster2.connect(self.ks_name, wait_for_all_pools=True)
        session3 = cluster3.connect(self.ks_name, wait_for_all_pools=True)
        # Basic validation that naming metrics doesn't impact their segration or accuracy
        for i in range(10):
            query = SimpleStatement("SELECT * FROM {0}.{0}".format(self.ks_name), consistency_level=ConsistencyLevel.ALL)
            session2.execute(query)
        for i in range(5):
            query = SimpleStatement("SELECT * FROM {0}.{0}".format(self.ks_name), consistency_level=ConsistencyLevel.ALL)
            session3.execute(query)
        self.assertEqual(cluster2.metrics.get_stats()['request_timer']['count'], 10)
        self.assertEqual(cluster3.metrics.get_stats()['request_timer']['count'], 5)
        # Check scales to ensure they are appropriately named
        self.assertTrue("appcluster" in scales._Stats.stats.keys())
        self.assertTrue("devops" in scales._Stats.stats.keys())
class RequestAnalyzer(object):
    """
    Class used to track request and error counts for a Session.
    Also computes statistics on encoded request size.
    """

    # scales stats are class attributes; scales.init() in __init__ roots
    # them under '/request' for each instance.
    requests = scales.PmfStat('request size')
    errors = scales.IntStat('errors')
    successful = scales.IntStat("success")

    # Throw exceptions when invoked (used to exercise callback error paths).
    throw_on_success = False
    throw_on_fail = False

    def __init__(self, session, throw_on_success=False, throw_on_fail=False):
        """Register this analyzer as a request-init listener on *session*."""
        scales.init(self, '/request')
        # each instance will be registered with a session, and receive a callback for each request generated
        session.add_request_init_listener(self.on_request)
        self.throw_on_fail = throw_on_fail
        self.throw_on_success = throw_on_success

    def on_request(self, rf):
        # This callback is invoked each time a request is created, on the thread creating the request.
        # We can use this to count events, or add callbacks
        rf.add_callbacks(self.on_success, self.on_error, callback_args=(rf,), errback_args=(rf,))

    def on_success(self, _, response_future):
        # future callback on a successful request; just record the size
        self.requests.addValue(response_future.request_encoded_size)
        self.successful += 1
        if self.throw_on_success:
            raise AttributeError

    def on_error(self, _, response_future):
        # future callback for failed; record size and increment errors
        self.requests.addValue(response_future.request_encoded_size)
        self.errors += 1
        if self.throw_on_fail:
            raise AttributeError

    def remove_ra(self, session):
        """Detach this analyzer from *session*."""
        session.remove_request_init_listener(self.on_request)

    def __str__(self):
        # just extracting request count from the size stats (which are recorded on all requests)
        # BUGFIX: the name 'pp' was never defined (NameError); use the
        # stdlib pprint module instead. Local import keeps it off the
        # module's hot path.
        import pprint
        request_sizes = dict(self.requests)
        count = request_sizes.pop('count')
        return "%d requests (%d errors)\nRequest size statistics:\n%s" % (
            count, self.errors, pprint.pformat(request_sizes))
class MetricsRequestSize(BasicExistingKeyspaceUnitTestCase):
    """Integration tests for the RequestAnalyzer request-init listener."""

    def wait_for_count(self, ra, expected_count, error=False):
        """Poll up to ~0.1s for the analyzer's success (or error) counter
        to reach expected_count; return True on success, False on timeout.
        """
        for _ in range(10):
            if not error:
                # BUGFIX: compare values with '==', not identity with 'is'.
                # 'is' on ints only works by CPython's small-int caching
                # accident and breaks for larger counts.
                if ra.successful == expected_count:
                    return True
            else:
                if ra.errors == expected_count:
                    return True
            time.sleep(.01)
        return False

    def test_metrics_per_cluster(self):
        """
        Test to validate that requests listeners.
        This test creates a simple metrics based request listener to track request size, it then
        check to ensure that on_success and on_error methods are invoked appropriately.
        @since 3.7.0
        @jira_ticket PYTHON-284
        @expected_result in_error, and on_success should be invoked apropriately
        @test_category metrics
        """
        ra = RequestAnalyzer(self.session)
        for _ in range(10):
            self.session.execute("SELECT release_version FROM system.local")
        for _ in range(3):
            try:
                self.session.execute("nonesense")
            except SyntaxException:
                continue
        self.assertTrue(self.wait_for_count(ra, 10))
        self.assertTrue(self.wait_for_count(ra, 3, error=True))
        ra.remove_ra(self.session)
        # Make sure a poorly coded RA doesn't cause issues
        RequestAnalyzer(self.session, throw_on_success=False, throw_on_fail=True)
        self.session.execute("SELECT release_version FROM system.local")
        try:
            self.session.execute("nonesense")
        except SyntaxException:
            pass
|
|
# coding: utf-8
from __future__ import unicode_literals
import json
from .base_object import BaseObject
from boxsdk.config import API
from boxsdk.exception import BoxAPIException
class Item(BaseObject):
    """Box API endpoint for interacting with files and folders."""

    def _get_accelerator_upload_url(self, file_id=None):
        """
        Make an API call to get the Accelerator upload url for either upload a new file or updating an existing file.

        :param file_id:
            Box id of the file to be uploaded. Not required for new file uploads.
        :type file_id:
            `unicode` or None
        :return:
            The Accelerator upload url or None if cannot get the Accelerator upload url.
        :rtype:
            `unicode` or None
        """
        endpoint = '{0}/content'.format(file_id) if file_id else 'content'
        url = '{0}/files/{1}'.format(API.BASE_API_URL, endpoint)
        try:
            response_json = self._session.options(
                url=url,
                expect_json_response=True,
            ).json()
            return response_json.get('upload_url', None)
        except BoxAPIException:
            # Accelerator support is optional; callers fall back to the
            # default upload host when this returns None.
            return None

    def _preflight_check(self, size, name=None, file_id=None, parent_id=None):
        """
        Make an API call to check if certain file can be uploaded to Box or not.
        (https://developers.box.com/docs/#files-preflight-check)

        :param size:
            The size of the file to be uploaded in bytes. Specify 0 for unknown file sizes.
        :type size:
            `int`
        :param name:
            The name of the file to be uploaded. This is optional if `file_id` is specified,
            but required for new file uploads.
        :type name:
            `unicode`
        :param file_id:
            Box id of the file to be uploaded. Not required for new file uploads.
        :type file_id:
            `unicode`
        :param parent_id:
            The ID of the parent folder. Required only for new file uploads.
        :type parent_id:
            `unicode`
        :raises:
            :class:`BoxAPIException` when preflight check fails.
        """
        endpoint = '{0}/content'.format(file_id) if file_id else 'content'
        url = '{0}/files/{1}'.format(API.BASE_API_URL, endpoint)
        data = {'size': size}
        if name:
            data['name'] = name
        if parent_id:
            data['parent'] = {'id': parent_id}
        self._session.options(
            url=url,
            expect_json_response=False,
            data=json.dumps(data),
        )

    def update_info(self, data, etag=None):
        """Baseclass override.

        :param etag:
            If specified, instruct the Box API to perform the update only if
            the current version's etag matches.
        :type etag:
            `unicode` or None
        :return:
            The updated object.
            Return a new object of the same type, without modifying the original object passed as self.
            Construct the new object with all the default attributes that are returned from the endpoint.
        :rtype:
            :class:`BaseObject`
        """
        # pylint:disable=arguments-differ
        headers = {'If-Match': etag} if etag is not None else None
        return super(Item, self).update_info(data, headers=headers)

    def rename(self, name):
        """
        Rename the item to a new name.

        :param name:
            The new name, you want the item to be renamed to.
        :type name:
            `unicode`
        """
        data = {
            'name': name,
        }
        return self.update_info(data)

    def get(self, fields=None, etag=None):
        """Base class override.

        :param etag:
            If specified, instruct the Box API to get the info only if the current version's etag doesn't match.
        :type etag:
            `unicode` or None
        :returns:
            Information about the file or folder.
        :rtype:
            `dict`
        :raises: :class:`BoxAPIException` if the specified etag matches the latest version of the item.
        """
        # pylint:disable=arguments-differ
        headers = {'If-None-Match': etag} if etag is not None else None
        return super(Item, self).get(fields=fields, headers=headers)

    def copy(self, parent_folder):
        """Copy the item to the given folder.

        :param parent_folder:
            The folder to which the item should be copied.
        :type parent_folder:
            :class:`Folder`
        """
        url = self.get_url('copy')
        data = {
            'parent': {'id': parent_folder.object_id}
        }
        box_response = self._session.post(url, data=json.dumps(data))
        response = box_response.json()
        return self.__class__(
            session=self._session,
            object_id=response['id'],
            response_object=response,
        )

    def move(self, parent_folder):
        """
        Move the item to the given folder.

        :param parent_folder:
            The parent `Folder` object, where the item will be moved to.
        :type parent_folder:
            `Folder`
        """
        data = {
            'parent': {'id': parent_folder.object_id}
        }
        return self.update_info(data)

    def get_shared_link(self, access=None, etag=None):
        """Get a shared link for the item with the given access permissions.

        :param access:
            Determines who can access the shared link. May be open, company, or collaborators. If no access is
            specified, the default access will be used.
        :type access:
            `unicode` or None
        :param etag:
            If specified, instruct the Box API to create the link only if the current version's etag matches.
        :type etag:
            `unicode` or None
        :returns:
            The URL of the shared link.
        :rtype:
            `unicode`
        :raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item.
        """
        data = {
            'shared_link': {} if not access else {
                'access': access
            }
        }
        item = self.update_info(data, etag=etag)
        return item.shared_link['url']

    def remove_shared_link(self, etag=None):
        """Delete the shared link for the item.

        :param etag:
            If specified, instruct the Box API to delete the link only if the current version's etag matches.
        :type etag:
            `unicode` or None
        :returns:
            Whether or not the update was successful.
        :rtype:
            `bool`
        :raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item.
        """
        data = {'shared_link': None}
        item = self.update_info(data, etag=etag)
        # BUGFIX: after removal the refreshed item carries no shared link, so
        # the old ``return item.shared_link['url']`` raised TypeError on every
        # successful call. Return the documented bool instead.
        return item.shared_link is None

    def delete(self, params=None, etag=None):
        """Delete the item.

        :param params:
            Additional parameters to send with the request.
        :type params:
            `dict`
        :param etag:
            If specified, instruct the Box API to delete the item only if the current version's etag matches.
        :type etag:
            `unicode` or None
        :returns:
            Whether or not the delete was successful.
        :rtype:
            `bool`
        :raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item.
        """
        headers = {'If-Match': etag} if etag is not None else None
        return super(Item, self).delete(params, headers)
|
|
# Copyright 2014
# The Cloudscaling Group, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import datetime
import json
import re
from glanceclient.common import exceptions as glance_exception
from lxml import etree
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import timeutils
import six
from ec2api import clients
from ec2api.db import api as db_api
from ec2api import exception
from ec2api.i18n import _, _LE
LOG = logging.getLogger(__name__)
ec2_opts = [
cfg.StrOpt('external_network',
default=None,
help='Name of the external network, which is used to connect'
'VPCs to Internet and to allocate Elastic IPs.'),
]
CONF = cfg.CONF
CONF.register_opts(ec2_opts)
LEGACY_BDM_FIELDS = set(['device_name', 'delete_on_termination', 'snapshot_id',
'volume_id', 'volume_size', 'no_device'])
_c2u = re.compile('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))')
def camelcase_to_underscore(str):
    """Convert a CamelCase identifier to snake_case.

    NOTE: the parameter name shadows the builtin ``str``; it is kept
    unchanged for backward compatibility with keyword callers.
    """
    underscored = _c2u.sub(r'_\1', str)
    return underscored.lower().strip('_')
def _try_convert(value):
"""Return a non-string from a string or unicode, if possible.
============= =====================================================
When value is returns
============= =====================================================
zero-length ''
'None' None
'True' True case insensitive
'False' False case insensitive
'0', '-0' 0
0xN, -0xN int from hex (positive) (N is any number)
0bN, -0bN int from binary (positive) (N is any number)
* try conversion to int, float, complex, fallback value
"""
def _negative_zero(value):
epsilon = 1e-7
return 0 if abs(value) < epsilon else value
if len(value) == 0:
return ''
if value == 'None':
return None
lowered_value = value.lower()
if lowered_value == 'true':
return True
if lowered_value == 'false':
return False
for prefix, base in [('0x', 16), ('0b', 2), ('0', 8), ('', 10)]:
try:
if lowered_value.startswith((prefix, "-" + prefix)):
return int(lowered_value, base)
except ValueError:
pass
try:
return _negative_zero(float(value))
except ValueError:
return value
def dict_from_dotted_str(items):
    """parse multi dot-separated argument into dict.

    EBS boot uses multi dot-separated arguments like
    BlockDeviceMapping.1.DeviceName=snap-id
    Convert the above into
    {'block_device_mapping': {'1': {'device_name': snap-id}}}
    """
    args = {}
    for key, value in items:
        parts = key.split(".")
        # First path segment becomes the (snake_cased) top-level key.
        key = str(camelcase_to_underscore(parts[0]))
        if isinstance(value, six.string_types):
            # NOTE(vish): Automatically convert strings back
            #             into their respective values
            value = _try_convert(value)
        if len(parts) > 1:
            # Walk/create the nested dicts for the intermediate segments.
            # ``d`` always aliases a dict that is already reachable from
            # ``args``, so mutating it updates the result in place.
            d = args.get(key, {})
            args[key] = d
            for k in parts[1:-1]:
                k = camelcase_to_underscore(k)
                v = d.get(k, {})
                d[k] = v
                d = v
            # Last segment holds the actual value.
            d[camelcase_to_underscore(parts[-1])] = value
        else:
            args[key] = value
    return args
def _render_dict(el, data):
    """Render every key/value pair of *data* as a child element of *el*."""
    try:
        for tag, value in six.iteritems(data):
            child = etree.SubElement(el, tag)
            _render_data(child, value)
    except Exception:
        # Log the offending payload before re-raising to aid debugging.
        LOG.debug(data)
        raise
def _render_data(el, data):
    """Serialize *data* into element *el*, recursing into containers."""
    if isinstance(data, list):
        # EC2 XML convention: list entries become a sequence of <item>s.
        for entry in data:
            item_el = etree.SubElement(el, 'item')
            _render_data(item_el, entry)
    elif isinstance(data, dict):
        _render_dict(el, data)
    elif hasattr(data, '__dict__'):
        # Arbitrary objects are rendered via their attribute dict.
        _render_dict(el, data.__dict__)
    elif isinstance(data, bool):
        # AWS expects lowercase 'true'/'false'.
        el.text = str(data).lower()
    elif isinstance(data, datetime.datetime):
        el.text = _database_to_isoformat(data)
    elif isinstance(data, six.binary_type):
        el.text = data.decode("utf-8")
    elif data is not None:
        el.text = six.text_type(data)
def _database_to_isoformat(datetimeobj):
"""Return a xs:dateTime parsable string from datatime."""
return datetimeobj.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + 'Z'
def dict_to_xml(data_dict, root_tag):
    """Build an XML tree rooted at *root_tag* from a (nested) dict."""
    root_el = etree.Element(root_tag)
    _render_dict(root_el, data_dict)
    return root_el
_ms_time_regex = re.compile('^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3,6}Z$')
def is_ec2_timestamp_expired(request, expires=None):
    """Checks the timestamp or expiry time included in an EC2 request
    and returns true if the request is expired.

    :param request: dict-like request parameters; reads 'Timestamp' and
        'Expires'.
    :param expires: allowed clock skew in seconds for 'Timestamp' checks.
    :returns: True when the request is expired (or its time is invalid).
    :raises exception.InvalidRequest: if both Timestamp and Expires are set.
    """
    query_time = None
    timestamp = request.get('Timestamp')
    expiry_time = request.get('Expires')
    def parse_strtime(strtime):
        # Pick the format that matches the presence of fractional seconds.
        if _ms_time_regex.match(strtime):
            # NOTE(MotoKen): time format for aws-sdk-java contains millisecond
            time_format = "%Y-%m-%dT%H:%M:%S.%fZ"
        else:
            time_format = "%Y-%m-%dT%H:%M:%SZ"
        return timeutils.parse_strtime(strtime, time_format)
    try:
        if timestamp and expiry_time:
            msg = _("Request must include either Timestamp or Expires,"
                    " but cannot contain both")
            LOG.error(msg)
            raise exception.InvalidRequest(msg)
        elif expiry_time:
            query_time = parse_strtime(expiry_time)
            # Expired when the explicit expiry time is already in the past.
            return timeutils.is_older_than(query_time, -1)
        elif timestamp:
            query_time = parse_strtime(timestamp)
            # Check if the difference between the timestamp in the request
            # and the time on our servers is larger than 5 minutes, the
            # request is too old (or too new).
            if query_time and expires:
                return (timeutils.is_older_than(query_time, expires) or
                        timeutils.is_newer_than(query_time, expires))
        return False
    except ValueError:
        # Unparsable time value: reject the request rather than trust it.
        LOG.exception(_("Timestamp is invalid: "))
        return True
# NOTE(ft): extra functions to use in vpc specific code or instead of
# malformed existed functions
def get_ec2_id_kind(obj_id):
    """Return the kind prefix of an EC2 id (e.g. 'vpc' for 'vpc-xxxxxxxx')."""
    kind, _sep, _tail = obj_id.partition('-')
    return kind
def change_ec2_id_kind(obj_id, new_kind):
    """Return *obj_id* with its kind prefix replaced by *new_kind*."""
    # Keep only the part after the last dash, mirroring get_ec2_id_kind.
    suffix = obj_id.split('-')[-1]
    return '%s-%s' % (new_kind, suffix)
# Maps an EC2 id kind prefix to the AWS-compliant "not found" exception
# raised when a DB item of that kind cannot be located (see get_db_item,
# get_db_items). Each exception is constructed with an 'id' keyword.
NOT_FOUND_EXCEPTION_MAP = {
    'vpc': exception.InvalidVpcIDNotFound,
    'igw': exception.InvalidInternetGatewayIDNotFound,
    'subnet': exception.InvalidSubnetIDNotFound,
    'eni': exception.InvalidNetworkInterfaceIDNotFound,
    'dopt': exception.InvalidDhcpOptionsIDNotFound,
    'eipalloc': exception.InvalidAllocationIDNotFound,
    'sg': exception.InvalidGroupNotFound,
    'rtb': exception.InvalidRouteTableIDNotFound,
    'i': exception.InvalidInstanceIDNotFound,
    'kp': exception.InvalidKeypairNotFound,
    'az': exception.InvalidAvailabilityZoneNotFound,
    'vol': exception.InvalidVolumeNotFound,
    'snap': exception.InvalidSnapshotNotFound,
    # All image kinds (machine/kernel/ramdisk) share one exception type.
    'ami': exception.InvalidAMIIDNotFound,
    'aki': exception.InvalidAMIIDNotFound,
    'ari': exception.InvalidAMIIDNotFound,
    'vgw': exception.InvalidVpnGatewayIDNotFound,
    'cgw': exception.InvalidCustomerGatewayIDNotFound,
    'vpn': exception.InvalidVpnConnectionIDNotFound,
}
def get_db_item(context, ec2_id, expected_kind=None):
    """Get a DB item, raising an AWS-compliant exception if it's not found.
    Args:
        context (RequestContext): The request context.
        ec2_id (str): The ID of the requested item.
        expected_kind (str): The expected kind of the requested item.
            It should be specified for a kind of ec2_id to be validated,
            if you need it.
    Returns:
        The DB item.
    """
    kind = get_ec2_id_kind(ec2_id)
    item = db_api.get_item_by_id(context, ec2_id)
    kind_mismatch = bool(expected_kind) and kind != expected_kind
    if item is None or kind_mismatch:
        raise NOT_FOUND_EXCEPTION_MAP[expected_kind or kind](id=ec2_id)
    return item
def get_db_items(context, kind, ec2_ids):
    """Return DB items of *kind*; when ids are given, every id must exist.
    Raises the kind's AWS-style not-found exception (naming one missing id)
    if any requested item is absent.
    """
    if not ec2_ids:
        return db_api.get_items(context, kind)
    wanted = ec2_ids if isinstance(ec2_ids, set) else set(ec2_ids)
    items = db_api.get_items_by_ids(context, wanted)
    if len(items) < len(wanted):
        found = set(item['id'] for item in items)
        missing = wanted - found
        raise NOT_FOUND_EXCEPTION_MAP[kind](id=next(iter(missing)))
    return items
# Registry of per-kind hooks invoked by auto_create_db_item() to fill in
# extra fields when an item is created on the fly.
_auto_create_db_item_extensions = {}
def register_auto_create_db_item_extension(kind, extension):
    # Register *extension* (callable(context, item, **kwargs)) to run when a
    # DB item of *kind* is auto-created; last registration wins.
    _auto_create_db_item_extensions[kind] = extension
def auto_create_db_item(context, kind, os_id, **extension_kwargs):
    """Create and store a DB item for an OS object, applying any
    registered per-kind extension hook before persisting it."""
    new_item = {'os_id': os_id}
    hook = _auto_create_db_item_extensions.get(kind)
    if hook:
        hook(context, new_item, **extension_kwargs)
    return db_api.add_item(context, kind, new_item)
def get_db_item_by_os_id(context, kind, os_id, items_by_os_id=None,
                         **extension_kwargs):
    """Get a DB item by OS id, creating it if it doesn't exist.
    Args:
        context (RequestContext): The request context.
        kind (str): The kind of item.
        os_id (str): OS id of an object.
        items_by_os_id (dict of items): The dict of known DB items,
            OS id is used as a key.
        extension_kwargs (dict): Additional parameters passed to
            a registered extension at creating item.
    Returns:
        A found or created item.
    When a cache dict is passed, it is searched first and updated with any
    newly created item; otherwise the DB is scanned directly. A registered
    extension for the kind is applied on creation.
    """
    if os_id is None:
        return None
    found = None
    if items_by_os_id is not None:
        found = items_by_os_id.get(os_id)
        if found:
            return found
    else:
        found = next((entry for entry in db_api.get_items(context, kind)
                      if entry['os_id'] == os_id), None)
    if not found:
        found = auto_create_db_item(context, kind, os_id, **extension_kwargs)
    if items_by_os_id is not None:
        items_by_os_id[os_id] = found
    return found
# TODO(Alex): The project_id passing mechanism can be potentially
# reconsidered in future.
def os_id_to_ec2_id(context, kind, os_id, items_by_os_id=None,
                    ids_by_os_id=None, project_id=None):
    """Map an OS object id to its EC2 id, creating the id mapping if needed.
    Optional caches (ids_by_os_id, items_by_os_id) are consulted first and
    ids_by_os_id is updated with the result.
    """
    if os_id is None:
        return None
    # Fast paths: caller-provided caches.
    if ids_by_os_id is not None:
        cached_id = ids_by_os_id.get(os_id)
        if cached_id:
            return cached_id
    if items_by_os_id is not None:
        cached_item = items_by_os_id.get(os_id)
        if cached_item:
            return cached_item['id']
    id_pairs = db_api.get_items_ids(context, kind, item_os_ids=(os_id,))
    if id_pairs:
        item_id = id_pairs[0][0]
    else:
        # No mapping exists yet - allocate a new EC2 id for this OS object.
        item_id = db_api.add_item_id(context, kind, os_id,
                                     project_id=project_id)
    if ids_by_os_id is not None:
        ids_by_os_id[os_id] = item_id
    return item_id
def get_os_image(context, ec2_image_id):
    """Return the Glance image backing an EC2 image id.
    Returns None when the id mapping exists but has no OS image yet;
    raises InvalidAMIIDNotFound when the id or image is unknown.
    """
    kind = get_ec2_id_kind(ec2_image_id)
    id_pairs = db_api.get_items_ids(context, kind, item_ids=(ec2_image_id,))
    if not id_pairs:
        raise exception.InvalidAMIIDNotFound(id=ec2_image_id)
    os_id = id_pairs[0][1]
    if not os_id:
        # Mapping reserved but not yet bound to a Glance image.
        return None
    glance = clients.glance(context)
    try:
        return glance.images.get(os_id)
    except glance_exception.HTTPNotFound:
        raise exception.InvalidAMIIDNotFound(id=ec2_image_id)
def deserialize_os_image_properties(os_image):
    """Return a copy of the image's properties with the JSON-encoded
    block-device fields ('mappings', 'block_device_mapping') decoded."""
    properties = copy.copy(os_image.properties)
    for json_field in ('mappings', 'block_device_mapping'):
        if json_field in properties:
            properties[json_field] = json.loads(properties[json_field])
    return properties
def create_virtual_bdm(device_name, virtual_name):
    """Build a block-device-mapping dict for a virtual (ephemeral/swap)
    device backed by local blank storage."""
    bdm = {
        'device_name': device_name,
        'source_type': 'blank',
        'destination_type': 'local',
        'device_type': 'disk',
        'delete_on_termination': True,
        'boot_index': -1,
        'virtual_name': virtual_name,
    }
    if virtual_name == 'swap':
        # Swap devices additionally need the guest-side swap format.
        bdm['guest_format'] = 'swap'
    return bdm
def get_os_image_mappings(os_image_properties):
    """Build the effective list of block device mappings for an image.

    Combines virtual-device 'mappings' and 'block_device_mapping' image
    properties (converting legacy-format entries when 'bdm_v2' is absent).
    A later mapping for the same device name replaces the earlier one.
    """
    mappings = []
    names = set()
    # TODO(ft): validate device names for both virtual and block device
    # mappings
    def is_virtual(virtual_name):
        return virtual_name == 'swap' or (virtual_name and
                                          _ephemeral.match(virtual_name))
    # NOTE(ft): substitute mapping if the same device name is specified
    def add_mapping(mapping):
        device_name = block_device_strip_dev(mapping.get('device_name'))
        if device_name in names:
            for i, m in enumerate(mappings):
                if (device_name ==
                        block_device_strip_dev(m.get('device_name'))):
                    mappings[i] = mapping
                    break
        else:
            if device_name:
                names.add(device_name)
            mappings.append(mapping)
    # TODO(ft): From Juno virtual device mapping has precedence of block one
    # in boot logic. This function should do the same, despite Nova EC2
    # behavior.
    # NOTE(ft): Nova EC2 prepended device names for virtual device mappings.
    # But AWS doesn't do it.
    for vdm in os_image_properties.get('mappings', []):
        if is_virtual(vdm.get('virtual')):
            add_mapping(create_virtual_bdm(
                block_device_prepend_dev(vdm.get('device')), vdm['virtual']))
    # Without the 'bdm_v2' marker, entries use the legacy field set and
    # must be converted to the modern source/destination form.
    legacy_mapping = not os_image_properties.get('bdm_v2', False)
    for bdm in os_image_properties.get('block_device_mapping', []):
        if legacy_mapping:
            virtual_name = bdm.get('virtual_name')
            if is_virtual(virtual_name):
                new_bdm = create_virtual_bdm(bdm.get('device_name'),
                                             virtual_name)
            else:
                new_bdm = {key: val for key, val in six.iteritems(bdm)
                           if key in LEGACY_BDM_FIELDS}
                if bdm.get('snapshot_id'):
                    new_bdm.update({'source_type': 'snapshot',
                                    'destination_type': 'volume'})
                elif bdm.get('volume_id'):
                    new_bdm.update({'source_type': 'volume',
                                    'destination_type': 'volume'})
            bdm = new_bdm
        bdm.setdefault('delete_on_termination', False)
        add_mapping(bdm)
    return mappings
def get_os_public_network(context):
    """Return the single external (public) Neutron network.

    Raises exception.Unsupported (with details only in the server log)
    when zero or more than one candidate network is found.
    """
    neutron = clients.neutron(context)
    search_opts = {'router:external': True, 'name': CONF.external_network}
    os_networks = neutron.list_networks(**search_opts)['networks']
    if len(os_networks) != 1:
        # Log a precise admin-facing reason before raising a generic error.
        if CONF.external_network:
            if len(os_networks) == 0:
                msg = _LE("No external network with name '%s' is found")
            else:
                msg = _LE("More than one external network with name '%s' "
                          "is found")
            LOG.error(msg, CONF.external_network)
        else:
            if len(os_networks) == 0:
                msg = _LE('No external network is found')
            else:
                msg = _LE('More than one external network is found')
            LOG.error(msg)
        raise exception.Unsupported(_('Feature is restricted by OS admin'))
    return os_networks[0]
def get_attached_gateway(context, vpc_id, gateway_kind):
    """Return the gateway of *gateway_kind* attached to *vpc_id*, or None."""
    # TODO(ft): move search by vpc_id to DB api
    for gateway in db_api.get_items(context, gateway_kind):
        if gateway['vpc_id'] == vpc_id:
            return gateway
    return None
# NOTE(ft): following functions are copied from various parts of Nova
_ephemeral = re.compile('^ephemeral(\d|[1-9]\d+)$')
_dev = re.compile('^/dev/')
def block_device_strip_dev(device_name):
"""remove leading '/dev/'."""
return _dev.sub('', device_name) if device_name else device_name
def block_device_prepend_dev(device_name):
"""Make sure there is a leading '/dev/'."""
return device_name and '/dev/' + block_device_strip_dev(device_name)
def block_device_properties_root_device_name(properties):
    """Extract the root device name from image meta data.
    Returns None when no root device is specified.
    """
    if 'root_device_name' in properties:
        return properties.get('root_device_name')
    if 'mappings' not in properties:
        return None
    # Fall back to the virtual-device mappings: the 'root' entry names it.
    for bdm in properties['mappings']:
        if bdm['virtual'] == 'root':
            return bdm['device']
    return None
|
|
from __future__ import absolute_import
import re
from functools import partial
from inspect import getargspec
from django.conf import settings
from django.template.context import (Context, RequestContext,
ContextPopException)
from django.utils.importlib import import_module
from django.utils.itercompat import is_iterable
from django.utils.text import (smart_split, unescape_string_literal,
get_text_list)
from django.utils.encoding import smart_unicode, force_unicode, smart_str
from django.utils.translation import ugettext_lazy, pgettext_lazy
from django.utils.safestring import (SafeData, EscapeData, mark_safe,
mark_for_escaping)
from django.utils.formats import localize
from django.utils.html import escape
from django.utils.module_loading import module_has_submodule
from django.utils.timezone import template_localtime
# Lexer token types: literal text, {{ var }}, {% block %}, {# comment #}.
TOKEN_TEXT = 0
TOKEN_VAR = 1
TOKEN_BLOCK = 2
TOKEN_COMMENT = 3
# Human-readable names for the token types, used by Token.__str__.
TOKEN_MAPPING = {
    TOKEN_TEXT: 'Text',
    TOKEN_VAR: 'Var',
    TOKEN_BLOCK: 'Block',
    TOKEN_COMMENT: 'Comment',
}
# template syntax constants
FILTER_SEPARATOR = '|'
FILTER_ARGUMENT_SEPARATOR = ':'
VARIABLE_ATTRIBUTE_SEPARATOR = '.'
BLOCK_TAG_START = '{%'
BLOCK_TAG_END = '%}'
VARIABLE_TAG_START = '{{'
VARIABLE_TAG_END = '}}'
COMMENT_TAG_START = '{#'
COMMENT_TAG_END = '#}'
TRANSLATOR_COMMENT_MARK = 'Translators'
SINGLE_BRACE_START = '{'
SINGLE_BRACE_END = '}'
# Characters permitted in a variable name (includes the attribute separator).
ALLOWED_VARIABLE_CHARS = ('abcdefghijklmnopqrstuvwxyz'
                          'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.')
# what to report as the origin for templates that come from non-loader sources
# (e.g. strings)
UNKNOWN_SOURCE = '<unknown source>'
# match a variable or block tag and capture the entire tag, including start/end
# delimiters
tag_re = (re.compile('(%s.*?%s|%s.*?%s|%s.*?%s)' %
          (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END),
           re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END),
           re.escape(COMMENT_TAG_START), re.escape(COMMENT_TAG_END))))
# global dictionary of libraries that have been loaded using get_library
libraries = {}
# global list of libraries to load by default for a new parser
builtins = []
# True if TEMPLATE_STRING_IF_INVALID contains a format string (%s). None means
# uninitialised.
invalid_var_format_string = None
class TemplateSyntaxError(Exception):
    """Raised when template compilation encounters invalid syntax."""
    pass
class TemplateDoesNotExist(Exception):
    """Raised when a named template cannot be found by any loader."""
    pass
class TemplateEncodingError(Exception):
    """Raised when template source is not unicode or UTF-8 decodable."""
    pass
class VariableDoesNotExist(Exception):
    """Raised by Variable resolution when a lookup fails.

    ``msg`` is a %-format string and ``params`` its arguments; formatting
    is deferred until the message is actually rendered.
    """
    def __init__(self, msg, params=()):
        self.msg = msg
        self.params = params
    def __str__(self):
        # Python 2 idiom: delegate to __unicode__ and encode to bytes.
        return unicode(self).encode('utf-8')
    def __unicode__(self):
        # errors='replace' keeps formatting from raising on badly-encoded
        # params while building the error message.
        return self.msg % tuple([force_unicode(p, errors='replace')
                                 for p in self.params])
class InvalidTemplateLibrary(Exception):
    """Raised when a template tag library fails to load or register."""
    pass
class Origin(object):
    """Identifies where a template's source text came from."""
    def __init__(self, name):
        self.name = name
    def reload(self):
        """Re-read and return the template source; subclasses override."""
        raise NotImplementedError
    def __str__(self):
        return self.name
class StringOrigin(Origin):
    """Origin for templates constructed directly from a string."""
    def __init__(self, source):
        # String templates have no meaningful name, only a source body.
        super(StringOrigin, self).__init__(UNKNOWN_SOURCE)
        self.source = source
    def reload(self):
        # The source is already in memory; simply hand it back.
        return self.source
class Template(object):
    """A compiled template: a NodeList plus identifying metadata."""
    def __init__(self, template_string, origin=None,
                 name='<Unknown Template>'):
        try:
            template_string = smart_unicode(template_string)
        except UnicodeDecodeError:
            raise TemplateEncodingError("Templates can only be constructed "
                                        "from unicode or UTF-8 strings.")
        # Only track origins in debug mode; they cost memory per template.
        if settings.TEMPLATE_DEBUG and origin is None:
            origin = StringOrigin(template_string)
        self.nodelist = compile_string(template_string, origin)
        self.name = name
    def __iter__(self):
        # Yield every node in the tree, recursing via each node's __iter__.
        for node in self.nodelist:
            for subnode in node:
                yield subnode
    def _render(self, context):
        return self.nodelist.render(context)
    def render(self, context):
        "Display stage -- can be called many times"
        # Push a fresh render_context so stateful tags (e.g. cycle) don't
        # leak state between renders of the same compiled template.
        context.render_context.push()
        try:
            return self._render(context)
        finally:
            context.render_context.pop()
def compile_string(template_string, origin):
    """Compile template_string into a NodeList ready for rendering."""
    if settings.TEMPLATE_DEBUG:
        # Debug variants annotate tokens/nodes with source positions.
        from django.template.debug import DebugLexer, DebugParser
        tokens = DebugLexer(template_string, origin).tokenize()
        return DebugParser(tokens).parse()
    tokens = Lexer(template_string, origin).tokenize()
    return Parser(tokens).parse()
class Token(object):
    """One lexed chunk of a template: text, variable, block or comment."""
    def __init__(self, token_type, contents):
        # token_type must be TOKEN_TEXT, TOKEN_VAR, TOKEN_BLOCK or
        # TOKEN_COMMENT.
        self.token_type, self.contents = token_type, contents
        # Filled in by the Lexer after construction.
        self.lineno = None
    def __str__(self):
        token_name = TOKEN_MAPPING[self.token_type]
        return ('<%s token: "%s...">' %
                (token_name, self.contents[:20].replace('\n', '')))
    def split_contents(self):
        # Split contents on whitespace while keeping quoted and
        # translation-marked ("_('...')") chunks together as single bits.
        split = []
        bits = iter(smart_split(self.contents))
        for bit in bits:
            # Handle translation-marked template pieces
            if bit.startswith('_("') or bit.startswith("_('"):
                # Consume bits until the matching closing quote + paren.
                # NOTE: bits.next() is the Python 2 iterator protocol.
                sentinal = bit[2] + ')'
                trans_bit = [bit]
                while not bit.endswith(sentinal):
                    bit = bits.next()
                    trans_bit.append(bit)
                bit = ' '.join(trans_bit)
            split.append(bit)
        return split
class Lexer(object):
    """Tokenize a template string into a list of Token objects."""
    def __init__(self, template_string, origin):
        self.template_string = template_string
        self.origin = origin
        self.lineno = 1
    def tokenize(self):
        """
        Return a list of tokens from a given template_string.
        """
        in_tag = False
        result = []
        # tag_re captures the tag delimiters, so split() alternates between
        # literal text and complete tag strings.
        for bit in tag_re.split(self.template_string):
            if bit:
                result.append(self.create_token(bit, in_tag))
            in_tag = not in_tag
        return result
    def create_token(self, token_string, in_tag):
        """
        Convert the given token string into a new Token object and return it.
        If in_tag is True, we are processing something that matched a tag,
        otherwise it should be treated as a literal string.
        """
        if in_tag:
            # The [2:-2] ranges below strip off *_TAG_START and *_TAG_END.
            # We could do len(BLOCK_TAG_START) to be more "correct", but we've
            # hard-coded the 2s here for performance. And it's not like
            # the TAG_START values are going to change anytime, anyway.
            if token_string.startswith(VARIABLE_TAG_START):
                token = Token(TOKEN_VAR, token_string[2:-2].strip())
            elif token_string.startswith(BLOCK_TAG_START):
                token = Token(TOKEN_BLOCK, token_string[2:-2].strip())
            elif token_string.startswith(COMMENT_TAG_START):
                content = ''
                # BUGFIX: str.find() returns -1 when the marker is absent,
                # which is truthy, so the original bare-truthiness test kept
                # the content of *every* comment. Only translator comments
                # ("{# Translators: ... #}") need their content preserved
                # (for message extraction), so test against -1 explicitly.
                if token_string.find(TRANSLATOR_COMMENT_MARK) != -1:
                    content = token_string[2:-2].strip()
                token = Token(TOKEN_COMMENT, content)
        else:
            token = Token(TOKEN_TEXT, token_string)
        token.lineno = self.lineno
        self.lineno += token_string.count('\n')
        return token
class Parser(object):
    """Turn a token stream into a NodeList, dispatching block tags and
    filters through this parser's tag/filter libraries."""
    def __init__(self, tokens):
        self.tokens = tokens
        self.tags = {}
        self.filters = {}
        # Preload every default library's tags and filters.
        for lib in builtins:
            self.add_library(lib)
    def parse(self, parse_until=None):
        # Parse tokens into a NodeList, stopping before any block tag named
        # in parse_until (the terminating token is pushed back for the
        # caller to inspect).
        if parse_until is None:
            parse_until = []
        nodelist = self.create_nodelist()
        while self.tokens:
            token = self.next_token()
            # Use the raw values here for TOKEN_* for a tiny performance boost.
            if token.token_type == 0: # TOKEN_TEXT
                self.extend_nodelist(nodelist, TextNode(token.contents), token)
            elif token.token_type == 1: # TOKEN_VAR
                if not token.contents:
                    self.empty_variable(token)
                filter_expression = self.compile_filter(token.contents)
                var_node = self.create_variable_node(filter_expression)
                self.extend_nodelist(nodelist, var_node, token)
            elif token.token_type == 2: # TOKEN_BLOCK
                try:
                    command = token.contents.split()[0]
                except IndexError:
                    self.empty_block_tag(token)
                if command in parse_until:
                    # put token back on token list so calling
                    # code knows why it terminated
                    self.prepend_token(token)
                    return nodelist
                # execute callback function for this tag and append
                # resulting node
                self.enter_command(command, token)
                try:
                    compile_func = self.tags[command]
                except KeyError:
                    self.invalid_block_tag(token, command, parse_until)
                try:
                    compiled_result = compile_func(self, token)
                except TemplateSyntaxError as e:
                    if not self.compile_function_error(token, e):
                        raise
                self.extend_nodelist(nodelist, compiled_result, token)
                self.exit_command()
        if parse_until:
            self.unclosed_block_tag(parse_until)
        return nodelist
    def skip_past(self, endtag):
        # Discard tokens up to and including the named end tag.
        while self.tokens:
            token = self.next_token()
            if token.token_type == TOKEN_BLOCK and token.contents == endtag:
                return
        self.unclosed_block_tag([endtag])
    def create_variable_node(self, filter_expression):
        return VariableNode(filter_expression)
    def create_nodelist(self):
        return NodeList()
    def extend_nodelist(self, nodelist, node, token):
        # must_be_first nodes may only be preceded by TextNodes.
        if node.must_be_first and nodelist:
            try:
                if nodelist.contains_nontext:
                    raise AttributeError
            except AttributeError:
                raise TemplateSyntaxError("%r must be the first tag "
                                          "in the template." % node)
        if isinstance(nodelist, NodeList) and not isinstance(node, TextNode):
            nodelist.contains_nontext = True
        nodelist.append(node)
    def enter_command(self, command, token):
        # Hook for subclasses (e.g. DebugParser); no-op here.
        pass
    def exit_command(self):
        # Hook for subclasses (e.g. DebugParser); no-op here.
        pass
    def error(self, token, msg):
        # Subclasses may attach source position via token; base returns
        # (not raises) the exception for the caller to raise.
        return TemplateSyntaxError(msg)
    def empty_variable(self, token):
        raise self.error(token, "Empty variable tag")
    def empty_block_tag(self, token):
        raise self.error(token, "Empty block tag")
    def invalid_block_tag(self, token, command, parse_until=None):
        if parse_until:
            raise self.error(token, "Invalid block tag: '%s', expected %s" %
                (command, get_text_list(["'%s'" % p for p in parse_until])))
        raise self.error(token, "Invalid block tag: '%s'" % command)
    def unclosed_block_tag(self, parse_until):
        raise self.error(None, "Unclosed tags: %s " % ', '.join(parse_until))
    def compile_function_error(self, token, e):
        # Hook: return truthy to suppress a tag compilation error.
        pass
    def next_token(self):
        return self.tokens.pop(0)
    def prepend_token(self, token):
        self.tokens.insert(0, token)
    def delete_first_token(self):
        del self.tokens[0]
    def add_library(self, lib):
        self.tags.update(lib.tags)
        self.filters.update(lib.filters)
    def compile_filter(self, token):
        """
        Convenient wrapper for FilterExpression
        """
        return FilterExpression(token, self)
    def find_filter(self, filter_name):
        if filter_name in self.filters:
            return self.filters[filter_name]
        else:
            raise TemplateSyntaxError("Invalid filter: '%s'" % filter_name)
class TokenParser(object):
    """
    Subclass this and implement the top() method to parse a template line.
    When instantiating the parser, pass in the line from the Django template
    parser.
    The parser's "tagname" instance-variable stores the name of the tag that
    the filter was called with.
    """
    def __init__(self, subject):
        self.subject = subject
        # Current scan position and a stack of prior positions (for back()).
        self.pointer = 0
        self.backout = []
        self.tagname = self.tag()
    def top(self):
        """
        Overload this method to do the actual parsing and return the result.
        """
        raise NotImplementedError()
    def more(self):
        """
        Returns True if there is more stuff in the tag.
        """
        return self.pointer < len(self.subject)
    def back(self):
        """
        Undoes the last microparser. Use this for lookahead and backtracking.
        """
        if not len(self.backout):
            raise TemplateSyntaxError("back called without some previous "
                                      "parsing")
        self.pointer = self.backout.pop()
    def tag(self):
        """
        A microparser that just returns the next tag from the line.
        """
        subject = self.subject
        i = self.pointer
        if i >= len(subject):
            raise TemplateSyntaxError("expected another tag, found "
                                      "end of string: %s" % subject)
        p = i
        # Consume the non-whitespace run, then skip trailing whitespace.
        while i < len(subject) and subject[i] not in (' ', '\t'):
            i += 1
        s = subject[p:i]
        while i < len(subject) and subject[i] in (' ', '\t'):
            i += 1
        self.backout.append(self.pointer)
        self.pointer = i
        return s
    def value(self):
        """
        A microparser that parses for a value: some string constant or
        variable name.
        """
        subject = self.subject
        i = self.pointer
        def next_space_index(subject, i):
            """
            Increment pointer until a real space (i.e. a space not within
            quotes) is encountered
            """
            while i < len(subject) and subject[i] not in (' ', '\t'):
                if subject[i] in ('"', "'"):
                    c = subject[i]
                    i += 1
                    while i < len(subject) and subject[i] != c:
                        i += 1
                    if i >= len(subject):
                        raise TemplateSyntaxError("Searching for value. "
                            "Unexpected end of string in column %d: %s" %
                            (i, subject))
                i += 1
            return i
        if i >= len(subject):
            raise TemplateSyntaxError("Searching for value. Expected another "
                                      "value but found end of string: %s" %
                                      subject)
        if subject[i] in ('"', "'"):
            # Quoted value: scan to the matching close quote first.
            p = i
            i += 1
            while i < len(subject) and subject[i] != subject[p]:
                i += 1
            if i >= len(subject):
                raise TemplateSyntaxError("Searching for value. Unexpected "
                                          "end of string in column %d: %s" %
                                          (i, subject))
            i += 1
            # Continue parsing until next "real" space,
            # so that filters are also included
            i = next_space_index(subject, i)
            res = subject[p:i]
            while i < len(subject) and subject[i] in (' ', '\t'):
                i += 1
            self.backout.append(self.pointer)
            self.pointer = i
            return res
        else:
            # Unquoted value: runs until the next unquoted space.
            p = i
            i = next_space_index(subject, i)
            s = subject[p:i]
            while i < len(subject) and subject[i] in (' ', '\t'):
                i += 1
            self.backout.append(self.pointer)
            self.pointer = i
            return s
# This only matches constant *strings* (things in quotes or marked for
# translation). Numbers are treated as variables for implementation reasons
# (so that they retain their type when passed to filters).
constant_string = r"""
(?:%(i18n_open)s%(strdq)s%(i18n_close)s|
%(i18n_open)s%(strsq)s%(i18n_close)s|
%(strdq)s|
%(strsq)s)
""" % {
    'strdq': r'"[^"\\]*(?:\\.[^"\\]*)*"', # double-quoted string
    'strsq': r"'[^'\\]*(?:\\.[^'\\]*)*'", # single-quoted string
    'i18n_open': re.escape("_("),
    'i18n_close': re.escape(")"),
}
constant_string = constant_string.replace("\n", "")
filter_raw_string = r"""
^(?P<constant>%(constant)s)|
^(?P<var>[%(var_chars)s]+|%(num)s)|
 (?:%(filter_sep)s
     (?P<filter_name>\w+)
         (?:%(arg_sep)s
             (?:
              (?P<constant_arg>%(constant)s)|
              (?P<var_arg>[%(var_chars)s]+|%(num)s)
             )
         )?
 )""" % {
    'constant': constant_string,
    'num': r'[-+\.]?\d[\d\.e]*',
    # Raw string: '\w' and '\.' are regex escapes, not (invalid) string
    # escape sequences.
    'var_chars': r"\w\.",
    'filter_sep': re.escape(FILTER_SEPARATOR),
    'arg_sep': re.escape(FILTER_ARGUMENT_SEPARATOR),
}
filter_re = re.compile(filter_raw_string, re.UNICODE | re.VERBOSE)
class FilterExpression(object):
    """
    Parses a variable token and its optional filters (all as a single string),
    and return a list of tuples of the filter name and arguments.
    Sample::
        >>> token = 'variable|default:"Default value"|date:"Y-m-d"'
        >>> p = Parser('')
        >>> fe = FilterExpression(token, p)
        >>> len(fe.filters)
        2
        >>> fe.var
        <Variable: 'variable'>
    This class should never be instantiated outside of the
    get_filters_from_token helper function.
    """
    def __init__(self, token, parser):
        self.token = token
        matches = filter_re.finditer(token)
        var_obj = None
        filters = []
        upto = 0
        for match in matches:
            start = match.start()
            # A gap between matches means unparsable characters in between.
            if upto != start:
                raise TemplateSyntaxError("Could not parse some characters: "
                                          "%s|%s|%s" %
                                          (token[:upto], token[upto:start],
                                           token[start:]))
            if var_obj is None:
                # First match is the variable (or constant) being filtered.
                var, constant = match.group("var", "constant")
                if constant:
                    try:
                        var_obj = Variable(constant).resolve({})
                    except VariableDoesNotExist:
                        var_obj = None
                elif var is None:
                    raise TemplateSyntaxError("Could not find variable at "
                                              "start of %s." % token)
                else:
                    var_obj = Variable(var)
            else:
                filter_name = match.group("filter_name")
                args = []
                constant_arg, var_arg = match.group("constant_arg", "var_arg")
                # Each arg is (needs_lookup, value): constants are resolved
                # now, variable args at render time.
                if constant_arg:
                    args.append((False, Variable(constant_arg).resolve({})))
                elif var_arg:
                    args.append((True, Variable(var_arg)))
                filter_func = parser.find_filter(filter_name)
                self.args_check(filter_name, filter_func, args)
                filters.append((filter_func, args))
            upto = match.end()
        if upto != len(token):
            raise TemplateSyntaxError("Could not parse the remainder: '%s' "
                                      "from '%s'" % (token[upto:], token))
        self.filters = filters
        self.var = var_obj
    def resolve(self, context, ignore_failures=False):
        # Resolve the underlying variable, then run the filter chain on it.
        if isinstance(self.var, Variable):
            try:
                obj = self.var.resolve(context)
            except VariableDoesNotExist:
                if ignore_failures:
                    obj = None
                else:
                    if settings.TEMPLATE_STRING_IF_INVALID:
                        global invalid_var_format_string
                        # Cache whether the setting is a '%s' format string.
                        if invalid_var_format_string is None:
                            invalid_var_format_string = '%s' in settings.TEMPLATE_STRING_IF_INVALID
                        if invalid_var_format_string:
                            return settings.TEMPLATE_STRING_IF_INVALID % self.var
                        return settings.TEMPLATE_STRING_IF_INVALID
                    else:
                        obj = settings.TEMPLATE_STRING_IF_INVALID
        else:
            obj = self.var
        for func, args in self.filters:
            arg_vals = []
            for lookup, arg in args:
                if not lookup:
                    arg_vals.append(mark_safe(arg))
                else:
                    arg_vals.append(arg.resolve(context))
            # Honor the filter's declared needs (set by decorators).
            if getattr(func, 'expects_localtime', False):
                obj = template_localtime(obj, context.use_tz)
            if getattr(func, 'needs_autoescape', False):
                new_obj = func(obj, autoescape=context.autoescape, *arg_vals)
            else:
                new_obj = func(obj, *arg_vals)
            # Propagate safe/escape markers through the filter chain.
            if getattr(func, 'is_safe', False) and isinstance(obj, SafeData):
                obj = mark_safe(new_obj)
            elif isinstance(obj, EscapeData):
                obj = mark_for_escaping(new_obj)
            else:
                obj = new_obj
        return obj
    def args_check(name, func, provided):
        # Validate that *provided* matches the filter function's signature
        # (the filter input itself fills the first parameter).
        provided = list(provided)
        plen = len(provided)
        # Check to see if a decorator is providing the real function.
        func = getattr(func, '_decorated_function', func)
        args, varargs, varkw, defaults = getargspec(func)
        # First argument is filter input.
        args.pop(0)
        if defaults:
            nondefs = args[:-len(defaults)]
        else:
            nondefs = args
        # Args without defaults must be provided.
        try:
            for arg in nondefs:
                provided.pop(0)
        except IndexError:
            # Not enough
            raise TemplateSyntaxError("%s requires %d arguments, %d provided" %
                                      (name, len(nondefs), plen))
        # Defaults can be overridden.
        defaults = defaults and list(defaults) or []
        try:
            for parg in provided:
                defaults.pop(0)
        except IndexError:
            # Too many.
            raise TemplateSyntaxError("%s requires %d arguments, %d provided" %
                                      (name, len(nondefs), plen))
        return True
    # Pre-decorator-syntax idiom: make args_check a staticmethod.
    args_check = staticmethod(args_check)
    def __str__(self):
        return self.token
def resolve_variable(path, context):
    """
    Returns the resolved variable, which may contain attribute syntax, within
    the given context.
    Deprecated; use the Variable class instead.
    """
    # Thin legacy wrapper kept for backwards compatibility.
    return Variable(path).resolve(context)
class Variable(object):
    """
    A template variable, resolvable against a given context. The variable may
    be a hard-coded string (if it begins and ends with single or double quote
    marks)::
        >>> c = {'article': {'section':u'News'}}
        >>> Variable('article.section').resolve(c)
        u'News'
        >>> Variable('article').resolve(c)
        {'section': u'News'}
        >>> class AClass: pass
        >>> c = AClass()
        >>> c.article = AClass()
        >>> c.article.section = u'News'
    (The example assumes VARIABLE_ATTRIBUTE_SEPARATOR is '.')
    """
    def __init__(self, var):
        self.var = var
        # Exactly one of literal/lookups ends up set: literal for numbers
        # and quoted strings, lookups for dotted variable paths.
        self.literal = None
        self.lookups = None
        self.translate = False
        self.message_context = None
        try:
            # First try to treat this variable as a number.
            #
            # Note that this could cause an OverflowError here that we're not
            # catching. Since this should only happen at compile time, that's
            # probably OK.
            self.literal = float(var)
            # So it's a float... is it an int? If the original value contained a
            # dot or an "e" then it was a float, not an int.
            if '.' not in var and 'e' not in var.lower():
                self.literal = int(self.literal)
            # "2." is invalid
            if var.endswith('.'):
                raise ValueError
        except ValueError:
            # A ValueError means that the variable isn't a number.
            if var.startswith('_(') and var.endswith(')'):
                # The result of the lookup should be translated at rendering
                # time.
                self.translate = True
                var = var[2:-1]
            # If it's wrapped with quotes (single or double), then
            # we're also dealing with a literal.
            try:
                self.literal = mark_safe(unescape_string_literal(var))
            except ValueError:
                # Otherwise we'll set self.lookups so that resolve() knows we're
                # dealing with a bonafide variable
                if var.find(VARIABLE_ATTRIBUTE_SEPARATOR + '_') > -1 or var[0] == '_':
                    raise TemplateSyntaxError("Variables and attributes may "
                                              "not begin with underscores: '%s'" %
                                              var)
                self.lookups = tuple(var.split(VARIABLE_ATTRIBUTE_SEPARATOR))
    def resolve(self, context):
        """Resolve this variable against a given context."""
        if self.lookups is not None:
            # We're dealing with a variable that needs to be resolved
            value = self._resolve_lookup(context)
        else:
            # We're dealing with a literal, so it's already been "resolved"
            value = self.literal
        if self.translate:
            # Translation was requested via the _( ) wrapper at parse time.
            if self.message_context:
                return pgettext_lazy(self.message_context, value)
            else:
                return ugettext_lazy(value)
        return value
    def __repr__(self):
        return "<%s: %r>" % (self.__class__.__name__, self.var)
    def __str__(self):
        return self.var
    def _resolve_lookup(self, context):
        """
        Performs resolution of a real variable (i.e. not a literal) against the
        given context.
        As indicated by the method's name, this method is an implementation
        detail and shouldn't be called by external code. Use Variable.resolve()
        instead.
        """
        current = context
        try: # catch-all for silent variable failures
            for bit in self.lookups:
                try: # dictionary lookup
                    current = current[bit]
                except (TypeError, AttributeError, KeyError):
                    try: # attribute lookup
                        current = getattr(current, bit)
                    except (TypeError, AttributeError):
                        try: # list-index lookup
                            current = current[int(bit)]
                        except (IndexError, # list index out of range
                                ValueError, # invalid literal for int()
                                KeyError, # current is a dict without `int(bit)` key
                                TypeError): # unsubscriptable object
                            raise VariableDoesNotExist("Failed lookup for key "
                                                       "[%s] in %r",
                                                       (bit, current)) # missing attribute
                if callable(current):
                    if getattr(current, 'do_not_call_in_templates', False):
                        pass
                    elif getattr(current, 'alters_data', False):
                        current = settings.TEMPLATE_STRING_IF_INVALID
                    else:
                        try: # method call (assuming no args required)
                            current = current()
                        except TypeError: # arguments *were* required
                            # GOTCHA: This will also catch any TypeError
                            # raised in the function itself.
                            current = settings.TEMPLATE_STRING_IF_INVALID # invalid method call
        except Exception as e:
            if getattr(e, 'silent_variable_failure', False):
                current = settings.TEMPLATE_STRING_IF_INVALID
            else:
                raise
        return current
class Node(object):
    """Base class for a single node of a compiled template tree."""

    # Nodes that must appear first in the template set this to True
    # (text nodes may still precede them).
    must_be_first = False
    # Attribute names that may hold child NodeLists, walked by
    # get_nodes_by_type().
    child_nodelists = ('nodelist',)

    def render(self, context):
        """
        Return the node rendered as a string.
        """
        pass

    def __iter__(self):
        yield self

    def get_nodes_by_type(self, nodetype):
        """
        Return a list of all nodes (within this node and its nodelist)
        of the given type
        """
        matches = [self] if isinstance(self, nodetype) else []
        for attr_name in self.child_nodelists:
            child_list = getattr(self, attr_name, None)
            if child_list:
                matches.extend(child_list.get_nodes_by_type(nodetype))
        return matches
class NodeList(list):
    """A list of template nodes that renders its members in sequence."""

    # Flipped to True the first time a non-TextNode is inserted by
    # extend_nodelist().
    contains_nontext = False

    def render(self, context):
        # Non-Node entries (plain strings) are emitted as-is; Nodes go
        # through render_node() so subclasses can hook rendering.
        rendered = [force_unicode(self.render_node(entry, context)
                                  if isinstance(entry, Node) else entry)
                    for entry in self]
        return mark_safe(u''.join(rendered))

    def get_nodes_by_type(self, nodetype):
        "Return a list of all nodes of the given type"
        collected = []
        for entry in self:
            collected.extend(entry.get_nodes_by_type(nodetype))
        return collected

    def render_node(self, node, context):
        return node.render(context)
class TextNode(Node):
    """A node holding one literal chunk of template text."""

    def __init__(self, s):
        self.s = s

    def __repr__(self):
        # Show at most the first 25 characters, ascii-coerced for safety.
        return "<Text Node: '%s'>" % smart_str(self.s[:25], 'ascii',
                                               errors='replace')

    def render(self, context):
        # Literal text renders as itself; the context is irrelevant.
        return self.s
def _render_value_in_context(value, context):
    """
    Converts any value to a string to become part of a rendered template. This
    means escaping, if required, and conversion to a unicode object. If value
    is a string, it is expected to have already been translated.
    """
    # Apply timezone and l10n formatting per the context flags before the
    # unicode coercion.
    value = template_localtime(value, use_tz=context.use_tz)
    value = localize(value, use_l10n=context.use_l10n)
    value = force_unicode(value)
    # SafeData skips escaping under autoescape; EscapeData forces it even
    # when autoescape is off.
    if ((context.autoescape and not isinstance(value, SafeData)) or
            isinstance(value, EscapeData)):
        return escape(value)
    else:
        return value
class VariableNode(Node):
    """A node that renders the resolved value of a filter expression."""

    def __init__(self, filter_expression):
        self.filter_expression = filter_expression

    def __repr__(self):
        return "<Variable Node: %s>" % self.filter_expression

    def render(self, context):
        try:
            resolved = self.filter_expression.resolve(context)
        except UnicodeDecodeError:
            # Unicode conversion can fail for reasons outside our control
            # (e.g. while rendering an exception); fail quietly.
            return ''
        return _render_value_in_context(resolved, context)
# Regex for token keyword arguments
kwarg_re = re.compile(r"(?:(\w+)=)?(.+)")


def token_kwargs(bits, parser, support_legacy=False):
    """
    A utility method for parsing token keyword arguments.

    :param bits: A list containing remainder of the token (split by spaces)
        that is to be checked for arguments. Valid arguments will be removed
        from this list.
    :param support_legacy: If set to ``True``, the legacy format
        ``1 as foo`` will be accepted. Otherwise, only the standard ``foo=1``
        format is allowed.
    :returns: A dictionary of the arguments retrieved from the ``bits`` token
        list. Parsing stops (and the dict is returned) at the first bit that
        does not fit the detected argument format.
    """
    if not bits:
        return {}
    # The first bit decides which syntax the whole token uses:
    # ``foo=1`` (kwarg_format truthy) or the legacy ``1 as foo``.
    first_match = kwarg_re.match(bits[0])
    kwarg_format = first_match and first_match.group(1)
    if not kwarg_format:
        if not support_legacy:
            return {}
        if len(bits) < 3 or bits[1] != 'as':
            return {}

    kwargs = {}
    while bits:
        if kwarg_format:
            m = kwarg_re.match(bits[0])
            if not m or not m.group(1):
                return kwargs
            key, value = m.groups()
            del bits[:1]
        else:
            if len(bits) < 3 or bits[1] != 'as':
                return kwargs
            key, value = bits[2], bits[0]
            del bits[:3]
        kwargs[key] = parser.compile_filter(value)
        # Legacy syntax joins successive arguments with 'and'.
        if bits and not kwarg_format:
            if bits[0] != 'and':
                return kwargs
            del bits[:1]
    return kwargs
def parse_bits(parser, bits, params, varargs, varkw, defaults,
               takes_context, name):
    """
    Parses bits for template tag helpers (simple_tag, include_tag and
    assignment_tag), in particular by detecting syntax errors and by
    extracting positional and keyword arguments.

    Returns an ``(args, kwargs)`` pair of compiled filter expressions.
    Raises ``TemplateSyntaxError`` whenever the bits do not match the
    decorated function's signature (``params``/``varargs``/``varkw``/
    ``defaults``).
    """
    if takes_context:
        if params[0] == 'context':
            params = params[1:]
        else:
            raise TemplateSyntaxError(
                "'%s' is decorated with takes_context=True so it must "
                "have a first argument of 'context'" % name)
    args = []
    kwargs = {}
    unhandled_params = list(params)
    for bit in bits:
        # First we try to extract a potential kwarg from the bit
        kwarg = token_kwargs([bit], parser)
        if kwarg:
            # The kwarg was successfully extracted; it holds exactly one item.
            # FIX: dict.items() is a non-indexable view on Python 3, so
            # items()[0] would raise TypeError there; next(iter(...)) is
            # equivalent on both Python 2 and 3.
            param, value = next(iter(kwarg.items()))
            if param not in params and varkw is None:
                # An unexpected keyword argument was supplied
                raise TemplateSyntaxError(
                    "'%s' received unexpected keyword argument '%s'" %
                    (name, param))
            elif param in kwargs:
                # The keyword argument has already been supplied once
                raise TemplateSyntaxError(
                    "'%s' received multiple values for keyword argument '%s'" %
                    (name, param))
            else:
                # All good, record the keyword argument
                kwargs[str(param)] = value
                if param in unhandled_params:
                    # If using the keyword syntax for a positional arg, then
                    # consume it.
                    unhandled_params.remove(param)
        else:
            if kwargs:
                raise TemplateSyntaxError(
                    "'%s' received some positional argument(s) after some "
                    "keyword argument(s)" % name)
            else:
                # Record the positional argument
                args.append(parser.compile_filter(bit))
                try:
                    # Consume from the list of expected positional arguments
                    unhandled_params.pop(0)
                except IndexError:
                    if varargs is None:
                        raise TemplateSyntaxError(
                            "'%s' received too many positional arguments" %
                            name)
    if defaults is not None:
        # Consider the last n params handled, where n is the
        # number of defaults.
        unhandled_params = unhandled_params[:-len(defaults)]
    if unhandled_params:
        # Some positional arguments were not supplied
        raise TemplateSyntaxError(
            u"'%s' did not receive value(s) for the argument(s): %s" %
            (name, u", ".join([u"'%s'" % p for p in unhandled_params])))
    return args, kwargs
def generic_tag_compiler(parser, token, params, varargs, varkw, defaults,
                         name, takes_context, node_class):
    """
    Returns a template.Node subclass.
    """
    # Drop the tag name itself; the remaining bits are the tag's arguments.
    tag_bits = token.split_contents()[1:]
    tag_args, tag_kwargs = parse_bits(parser, tag_bits, params, varargs,
                                      varkw, defaults, takes_context, name)
    return node_class(takes_context, tag_args, tag_kwargs)
class TagHelperNode(Node):
    """
    Base class for tag helper nodes such as SimpleNode, InclusionNode and
    AssignmentNode. Manages the positional and keyword arguments to be passed
    to the decorated function.
    """

    def __init__(self, takes_context, args, kwargs):
        self.takes_context = takes_context
        self.args = args
        self.kwargs = kwargs

    def get_resolved_arguments(self, context):
        # Resolve every stored filter expression against the context.
        resolved_args = [expr.resolve(context) for expr in self.args]
        if self.takes_context:
            # The rendering context is always the first positional argument.
            resolved_args = [context] + resolved_args
        resolved_kwargs = dict((key, expr.resolve(context))
                               for key, expr in self.kwargs.items())
        return resolved_args, resolved_kwargs
class Library(object):
    """
    Registry of template tags and filters.

    ``self.tags`` maps tag names to compile functions and ``self.filters``
    maps filter names to filter callables.  The decorator helpers below
    (``tag``, ``filter``, ``simple_tag``, ``assignment_tag``,
    ``inclusion_tag``) register callables under an explicit name or, by
    default, the decorated callable's own ``__name__``.
    """
    def __init__(self):
        self.filters = {}
        self.tags = {}

    def tag(self, name=None, compile_function=None):
        # Dispatch on which of the four supported decorator/call forms is
        # being used; see the inline comments on each branch.
        if name is None and compile_function is None:
            # @register.tag()
            return self.tag_function
        elif name is not None and compile_function is None:
            if callable(name):
                # @register.tag
                return self.tag_function(name)
            else:
                # @register.tag('somename') or @register.tag(name='somename')
                def dec(func):
                    return self.tag(name, func)
                return dec
        elif name is not None and compile_function is not None:
            # register.tag('somename', somefunc)
            self.tags[name] = compile_function
            return compile_function
        else:
            raise InvalidTemplateLibrary("Unsupported arguments to "
                "Library.tag: (%r, %r)", (name, compile_function))

    def tag_function(self, func):
        # Register under the innermost function's name even if it was wrapped.
        self.tags[getattr(func, "_decorated_function", func).__name__] = func
        return func

    def filter(self, name=None, filter_func=None, **flags):
        # Mirrors tag(): four call forms, plus optional filter flags.
        if name is None and filter_func is None:
            # @register.filter()
            def dec(func):
                return self.filter_function(func, **flags)
            return dec
        elif name is not None and filter_func is None:
            if callable(name):
                # @register.filter
                return self.filter_function(name, **flags)
            else:
                # @register.filter('somename') or @register.filter(name='somename')
                def dec(func):
                    return self.filter(name, func, **flags)
                return dec
        elif name is not None and filter_func is not None:
            # register.filter('somename', somefunc)
            self.filters[name] = filter_func
            for attr in ('expects_localtime', 'is_safe', 'needs_autoescape'):
                if attr in flags:
                    value = flags[attr]
                    # set the flag on the filter for FilterExpression.resolve
                    setattr(filter_func, attr, value)
                    # set the flag on the innermost decorated function
                    # for decorators that need it e.g. stringfilter
                    if hasattr(filter_func, "_decorated_function"):
                        setattr(filter_func._decorated_function, attr, value)
            return filter_func
        else:
            raise InvalidTemplateLibrary("Unsupported arguments to "
                "Library.filter: (%r, %r)", (name, filter_func))

    def filter_function(self, func, **flags):
        name = getattr(func, "_decorated_function", func).__name__
        return self.filter(name, func, **flags)

    def simple_tag(self, func=None, takes_context=None, name=None):
        # Registers a tag whose node simply calls *func* with the resolved
        # arguments and renders its return value.
        def dec(func):
            params, varargs, varkw, defaults = getargspec(func)

            class SimpleNode(TagHelperNode):

                def render(self, context):
                    resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
                    return func(*resolved_args, **resolved_kwargs)

            function_name = (name or
                getattr(func, '_decorated_function', func).__name__)
            compile_func = partial(generic_tag_compiler,
                params=params, varargs=varargs, varkw=varkw,
                defaults=defaults, name=function_name,
                takes_context=takes_context, node_class=SimpleNode)
            compile_func.__doc__ = func.__doc__
            self.tag(function_name, compile_func)
            return func

        if func is None:
            # @register.simple_tag(...)
            return dec
        elif callable(func):
            # @register.simple_tag
            return dec(func)
        else:
            raise TemplateSyntaxError("Invalid arguments provided to simple_tag")

    def assignment_tag(self, func=None, takes_context=None, name=None):
        # Like simple_tag, but stores the result into the context variable
        # named after a trailing "as <varname>" clause.
        def dec(func):
            params, varargs, varkw, defaults = getargspec(func)

            class AssignmentNode(TagHelperNode):
                def __init__(self, takes_context, args, kwargs, target_var):
                    super(AssignmentNode, self).__init__(takes_context, args, kwargs)
                    self.target_var = target_var

                def render(self, context):
                    resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
                    context[self.target_var] = func(*resolved_args, **resolved_kwargs)
                    return ''

            function_name = (name or
                getattr(func, '_decorated_function', func).__name__)

            def compile_func(parser, token):
                bits = token.split_contents()[1:]
                if len(bits) < 2 or bits[-2] != 'as':
                    raise TemplateSyntaxError(
                        "'%s' tag takes at least 2 arguments and the "
                        "second last argument must be 'as'" % function_name)
                target_var = bits[-1]
                bits = bits[:-2]
                args, kwargs = parse_bits(parser, bits, params,
                    varargs, varkw, defaults, takes_context, function_name)
                return AssignmentNode(takes_context, args, kwargs, target_var)

            compile_func.__doc__ = func.__doc__
            self.tag(function_name, compile_func)
            return func

        if func is None:
            # @register.assignment_tag(...)
            return dec
        elif callable(func):
            # @register.assignment_tag
            return dec(func)
        else:
            raise TemplateSyntaxError("Invalid arguments provided to assignment_tag")

    def inclusion_tag(self, file_name, context_class=Context, takes_context=False, name=None):
        # Registers a tag that renders template *file_name* with the dict
        # returned by the decorated function.
        def dec(func):
            params, varargs, varkw, defaults = getargspec(func)

            class InclusionNode(TagHelperNode):

                def render(self, context):
                    resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
                    _dict = func(*resolved_args, **resolved_kwargs)

                    # The template is loaded lazily on first render and
                    # cached on the node instance.
                    if not getattr(self, 'nodelist', False):
                        from django.template.loader import get_template, select_template
                        if isinstance(file_name, Template):
                            t = file_name
                        elif not isinstance(file_name, basestring) and is_iterable(file_name):
                            t = select_template(file_name)
                        else:
                            t = get_template(file_name)
                        self.nodelist = t.nodelist
                    new_context = context_class(_dict, **{
                        'autoescape': context.autoescape,
                        'current_app': context.current_app,
                        'use_l10n': context.use_l10n,
                        'use_tz': context.use_tz,
                    })
                    # Copy across the CSRF token, if present, because
                    # inclusion tags are often used for forms, and we need
                    # instructions for using CSRF protection to be as simple
                    # as possible.
                    csrf_token = context.get('csrf_token', None)
                    if csrf_token is not None:
                        new_context['csrf_token'] = csrf_token
                    return self.nodelist.render(new_context)

            function_name = (name or
                getattr(func, '_decorated_function', func).__name__)
            compile_func = partial(generic_tag_compiler,
                params=params, varargs=varargs, varkw=varkw,
                defaults=defaults, name=function_name,
                takes_context=takes_context, node_class=InclusionNode)
            compile_func.__doc__ = func.__doc__
            self.tag(function_name, compile_func)
            return func
        return dec
def is_library_missing(name):
    """Check if library that failed to load cannot be found under any
    templatetags directory or does exist but fails to import.

    Non-existing condition is checked recursively for each subpackage in cases
    like <appdir>/templatetags/subpackage/package/module.py.
    """
    # Don't bother to check if '.' is in name since any name will be prefixed
    # with some template root.
    parent, child = name.rsplit('.', 1)
    try:
        pkg = import_module(parent)
        return not module_has_submodule(pkg, child)
    except ImportError:
        # The parent itself failed to import: recurse one level up to decide
        # whether it is missing or merely broken.
        return is_library_missing(parent)
def import_library(taglib_module):
    """
    Load a template tag library module.

    Verifies that the library contains a 'register' attribute, and
    returns that attribute as the representation of the library
    """
    try:
        module = import_module(taglib_module)
    except ImportError as e:
        # If the ImportError is because the taglib submodule does not exist,
        # that's not an error that should be raised. If the submodule exists
        # and raised an ImportError on the attempt to load it, that we want
        # to raise.
        if is_library_missing(taglib_module):
            return None
        raise InvalidTemplateLibrary("ImportError raised loading %s: %s" %
                                     (taglib_module, e))
    try:
        return module.register
    except AttributeError:
        raise InvalidTemplateLibrary("Template library %s does not have "
                                     "a variable named 'register'" %
                                     taglib_module)
# Per-process cache of discovered "<app>.templatetags" module paths.
templatetags_modules = []
def get_templatetags_modules():
    """
    Return the list of all available template tag modules.

    Caches the result for faster access.
    """
    global templatetags_modules
    if not templatetags_modules:
        _templatetags_modules = []
        # Populate list once per process. Mutate the local list first, and
        # then assign it to the global name to ensure there are no cases where
        # two threads try to populate it simultaneously.
        for app_module in ['django'] + list(settings.INSTALLED_APPS):
            try:
                templatetag_module = '%s.templatetags' % app_module
                import_module(templatetag_module)
                _templatetags_modules.append(templatetag_module)
            except ImportError:
                # Apps without a templatetags package are simply skipped.
                continue
        templatetags_modules = _templatetags_modules
    return templatetags_modules
def get_library(library_name):
    """
    Load the template library module with the given name.

    If library is not already loaded loop over all templatetags modules
    to locate it.

    {% load somelib %} and {% load someotherlib %} loops twice.

    Subsequent loads eg. {% load somelib %} in the same process will grab
    the cached module from libraries.
    """
    lib = libraries.get(library_name, None)
    if lib:
        return lib
    tried_modules = []
    for module in get_templatetags_modules():
        taglib_module = '%s.%s' % (module, library_name)
        tried_modules.append(taglib_module)
        lib = import_library(taglib_module)
        if lib:
            # Cache for subsequent {% load %}s in this process.
            libraries[library_name] = lib
            return lib
    raise InvalidTemplateLibrary("Template library %s not found, "
                                 "tried %s" %
                                 (library_name,
                                  ','.join(tried_modules)))
def add_to_builtins(module):
    # Register a tag/filter library that is available without {% load %}.
    builtins.append(import_library(module))
# The default tags and filters are always built in.
add_to_builtins('django.template.defaulttags')
add_to_builtins('django.template.defaultfilters')
|
|
# coding=utf-8
import unittest
import sys
import os
import json
import re
import time
from StringIO import StringIO
sys.path.append( os.path.join("..", "src", "bin") )
from send_insteon_command import InsteonCommandField, SendInsteonCommandAlert, InsteonDeviceField, InsteonMultipleDeviceField, InsteonExtendedDataField
from insteon_control_app.modular_alert import ModularAlert, Field, BooleanField, IPAddressField, FieldValidationException
class FakeInputStream:
    """
    A stand-in for an input stream, used to test code that reads its payload
    from a stream (only read() is needed by the code under test).
    """

    def __init__(self):
        self._data = ""

    def setValue(self, value):
        # Stash the payload that subsequent read() calls will return.
        self._data = value

    def read(self):
        return self._data
class ModularAlertTest(unittest.TestCase):
    """
    Test the modular alert base class.
    """

    def test_field_empty_not_allowed(self):
        # An empty string must be rejected when empty_allowed is False.
        field = Field('test', none_allowed=True, empty_allowed=False)

        with self.assertRaises(FieldValidationException) as context:
            field.to_python("")

    def test_field_none_not_allowed(self):
        # None must be rejected when none_allowed is False.
        field = Field('test', none_allowed=False, empty_allowed=True)

        with self.assertRaises(FieldValidationException) as context:
            field.to_python(None)

    def get_modular_alert_instance(self):
        # Helper: build a minimal concrete ModularAlert subclass with a
        # string field ("foo") and a boolean field ("bar").
        class TestModularAlert(ModularAlert):

            def __init__(self, **kwargs):
                params = [
                    Field("foo", empty_allowed=False),
                    BooleanField("bar", empty_allowed=False)
                ]

                ModularAlert.__init__( self, params, "test_modular_alert" )

            def run(self, cleaned_params, payload):
                return "Alert ran successfully " + self.create_event_string(cleaned_params)

        return TestModularAlert()

    def test_modular_alert_validation(self):
        test_instance = self.get_modular_alert_instance()

        # Ok input
        self.assertTrue(test_instance.validate({'foo' : 'foo', 'bar' : 'true'})['bar'])

        # Bad input: '65' is not a valid boolean for "bar".
        with self.assertRaises(FieldValidationException) as context:
            test_instance.validate({'foo' : 'foo', 'bar' : '65'})

    def test_modular_alert_run(self):
        # Feed a JSON payload through a fake stream and check run() output.
        in_stream = FakeInputStream()

        test_instance = self.get_modular_alert_instance()

        input = {
            "result": {
                "_kv":"1",
                "_raw": "something"
            },
            "configuration":{
                'foo' : 'FOO',
                'bar' : 'true'
            }
        }

        in_stream.setValue(json.dumps(input))

        result = test_instance.execute(in_stream)
        self.assertEquals( len(re.findall("Alert ran successfully", result)), 1)
class IPAddressFieldTest(unittest.TestCase):
    """Exercise IPAddressField validation."""

    def test_validate_good_input(self):
        fld = IPAddressField('device')

        # Well-formed addresses pass through unchanged (zero padding kept).
        self.assertEqual(fld.to_python('192.168.4.1'), '192.168.4.1')
        self.assertEqual(fld.to_python('192.168.004.001'), '192.168.004.001')

    def test_validate_bad_input(self):
        fld = IPAddressField('device')

        # Truncated address
        with self.assertRaises(FieldValidationException) as ctx:
            fld.to_python('192.')

        # Not an address at all
        with self.assertRaises(FieldValidationException) as ctx:
            fld.to_python('abc')

        # Illegal separator/characters
        with self.assertRaises(FieldValidationException) as ctx:
            fld.to_python('10-0.0.1')
class InsteonExtendedDataFieldTest(unittest.TestCase):
    """Exercise InsteonExtendedDataField normalization."""

    def test_validate_good_input(self):
        fld = InsteonExtendedDataField('device')

        # Values are left-padded with zeros to 28 hex digits and upper-cased.
        self.assertEqual(fld.to_python('9296'), '0000000000000000000000009296')
        self.assertEqual(fld.to_python('0000000000000000000000009296'), '0000000000000000000000009296')
        self.assertEqual(fld.to_python('af'), '00000000000000000000000000AF')

    def test_validate_bad_input(self):
        fld = InsteonExtendedDataField('device')

        # More than 28 hex digits
        with self.assertRaises(FieldValidationException) as ctx:
            fld.to_python('00000000000000000000000092961')

        # Non-hex characters
        with self.assertRaises(FieldValidationException) as ctx:
            fld.to_python('0g')
class SendInsteonCommandAlertTest(unittest.TestCase):
"""
Test the send_insteon_command modular alert.
"""
def toInt(self, str_int):
if str_int is None:
return None
else:
return int(str_int)
def loadConfig(self, properties_file=None):
if properties_file is None:
properties_file = os.path.join( "..", "local.properties")
fp = open(properties_file)
regex = re.compile("(?P<key>[^=]+)[=](?P<value>.*)")
settings = {}
for l in fp.readlines():
r = regex.search(l)
if r is not None:
d = r.groupdict()
settings[ d["key"] ] = d["value"]
self.username = settings.get("value.test.insteon_hub.username", None)
self.password = settings.get("value.test.insteon_hub.password", None)
self.address = settings.get("value.test.insteon_hub.address", None)
self.port = self.toInt(settings.get("value.test.insteon_hub.port", 25105))
self.device = settings.get("value.test.insteon_hub.device", None)
def setUp(self):
self.loadConfig()
def is_configured(self):
if self.username is not None and self.password is not None and self.address is not None and self.port is not None and self.device is not None:
return True
else:
return False
"""
def test_send_command(self):
# Only perform this test if requested
if self.is_configured():
time.sleep(2)
insteon_alert = SendInsteonCommandAlert()
results = insteon_alert.call_insteon_web_api_repeatedly(self.address, self.port, self.username, self.password, self.device, "30", "01", 3)
self.assertEquals( results[0]['success'], True)
def test_execute(self):
# Only perform this test if requested
if self.is_configured():
time.sleep(2)
insteon_alert = SendInsteonCommandAlert()
in_stream = FakeInputStream()
input = {
"result": {
"_kv":"1",
"_raw": "something"
},
"configuration":{
'address' : self.address,
'port' : self.port,
'username' : self.username,
'password' : self.password,
'device' : self.device,
'command' : 'beep'
}
}
in_stream.setValue(json.dumps(input))
self.assertEquals(insteon_alert.execute(in_stream), 1)
def test_parse_raw_response_sd_command(self):
response = SendInsteonCommandAlert.parse_raw_response("02622C86260F15FF0602502C86262CB84E2F1900")
self.assertEqual(response['last_command'], "02622C86260F15FF")
self.assertEqual(response['last_command_cmd1'], "15")
self.assertEqual(response['last_command_cmd2'], "FF")
self.assertEqual(response['full_response'], "0602502C86262CB84E2F1900")
self.assertEqual(response['target_device'], "2C8626")
self.assertEqual(response['source_device'], "2CB84E")
self.assertEqual(response['cmd1'], "19")
self.assertEqual(response['cmd2'], "00")
"""
def test_get_response(self):
response = SendInsteonCommandAlert.call_insteon_web_api(self.address, self.port, self.username, self.password, self.device, '19', '02', True)
print response
self.assertEquals(response['full_response'][6:12], InsteonDeviceField.normalize_device_id(self.device, False))
class InsteonCommandFieldTest(unittest.TestCase):
    """
    Test the InsteonCommandField that maps command shortcuts (e.g. 'on')
    to real Insteon command byte pairs.
    """

    def test_validate_good_input(self):
        cmd_field = InsteonCommandField('command')

        # A known shortcut resolves to its command bytes.
        self.assertEqual(cmd_field.to_python('on').cmd1, '11')
        self.assertEqual(cmd_field.to_python('on').cmd2, 'FF')

    def test_validate_ok_input_wrong_case(self):
        cmd_field = InsteonCommandField('command')

        # Shortcuts are matched case-insensitively.
        self.assertEqual(cmd_field.to_python('ON').cmd1, '11')
        self.assertEqual(cmd_field.to_python('OFF').cmd1, '13')

    def test_validate_ok_input_extra_space(self):
        cmd_field = InsteonCommandField('command')

        # Leading whitespace is ignored...
        self.assertEqual(cmd_field.to_python(' on').cmd1, '11')

        # ...and so is trailing whitespace.
        self.assertEqual(cmd_field.to_python('on ').cmd1, '11')

    def test_validate_bad_input(self):
        cmd_field = InsteonCommandField('command')

        # Unknown shortcuts are rejected.
        with self.assertRaises(FieldValidationException) as ctx:
            cmd_field.to_python('self_destruct')

    def test_extended_commands(self):
        cmd_field = InsteonCommandField('command')

        # A command that carries an extended data section...
        self.assertEqual(cmd_field.to_python('thermostat_info').cmd1, '2E')
        self.assertEqual(cmd_field.to_python('thermostat_info').cmd2, '02')
        self.assertEqual(cmd_field.to_python('thermostat_info').extended, True)
        self.assertEqual(cmd_field.to_python('thermostat_info').data, '0000000000000000000000009296')

        # ...and one without.
        self.assertEqual(cmd_field.to_python('ping').cmd1, '0F')
        self.assertEqual(cmd_field.to_python('ping').cmd2, '00')
        self.assertEqual(cmd_field.to_python('ping').extended, False)
        self.assertEqual(cmd_field.to_python('ping').data, None)
class InsteonDeviceFieldTest(unittest.TestCase):
    """
    Test the InsteonDeviceField that normalizes an Insteon device ID to six
    upper-case hex digits.
    """

    def test_validate_good_input(self):
        dev_field = InsteonDeviceField('device')

        # Separators (:, -, .), letter case and surrounding whitespace are
        # all normalized away.
        self.assertEqual(dev_field.to_python('56:78:9a'), '56789A')
        self.assertEqual(dev_field.to_python('56:78:9A'), '56789A')
        self.assertEqual(dev_field.to_python('56-78-9f'), '56789F')
        self.assertEqual(dev_field.to_python('56.78.9f'), '56789F')
        self.assertEqual(dev_field.to_python('56789f'), '56789F')
        self.assertEqual(dev_field.to_python('56.789f'), '56789F')
        self.assertEqual(dev_field.to_python('56.789f '), '56789F')
        self.assertEqual(dev_field.to_python(' 56.789f'), '56789F')

    def test_validate_bad_input(self):
        dev_field = InsteonDeviceField('device')

        # Too few hex digits
        with self.assertRaises(FieldValidationException) as ctx:
            dev_field.to_python('56:78:9')

        # Too many hex digits
        with self.assertRaises(FieldValidationException) as ctx:
            dev_field.to_python('56:78:9a1')

        # Non-hex characters
        with self.assertRaises(FieldValidationException) as ctx:
            dev_field.to_python('56:78:9g')
class InsteonMultipleDeviceFieldTest(unittest.TestCase):
    """
    Test the InsteonMultipleDeviceField that normalizes a comma-separated
    list of Insteon device IDs into a set.
    """

    def test_validate_good_input(self):
        devs_field = InsteonMultipleDeviceField('device')

        # Duplicates collapse into a single set entry.
        self.assertEqual(devs_field.to_python('56:78:9a,56:78:9a'), set(['56789A']))
        self.assertEqual(devs_field.to_python('56:78:9A'), set(['56789A']))
        self.assertEqual(len(devs_field.to_python('56-78-9f,56-78-9a')), 2)

    def test_validate_bad_input(self):
        devs_field = InsteonMultipleDeviceField('device')

        # Too few hex digits
        with self.assertRaises(FieldValidationException) as ctx:
            devs_field.to_python('56:78:9')

        # A single bad entry poisons the whole list.
        with self.assertRaises(FieldValidationException) as ctx:
            devs_field.to_python('56:78:9a,56:78:9a1')

        # Non-hex characters
        with self.assertRaises(FieldValidationException) as ctx:
            devs_field.to_python('56:78:9g')
if __name__ == "__main__":
    # Build one suite per TestCase class (same order as before) and run
    # them all in a single TextTestRunner pass.
    loader = unittest.TestLoader()
    case_classes = [
        ModularAlertTest,
        SendInsteonCommandAlertTest,
        InsteonCommandFieldTest,
        InsteonDeviceFieldTest,
        InsteonMultipleDeviceFieldTest,
        IPAddressFieldTest,
        InsteonExtendedDataFieldTest,
    ]
    suites = [loader.loadTestsFromTestCase(cls) for cls in case_classes]
    unittest.TextTestRunner(verbosity=2).run(unittest.TestSuite(suites))
|
|
import json
import re
import errno
from os.path import getmtime, join, exists
from urllib import urlencode
from ConfigParser import ConfigParser
from django.shortcuts import render_to_response
from django.http import QueryDict
from django.conf import settings
from django.contrib.auth import login, authenticate, logout
from django.contrib.staticfiles import finders
from django.utils.safestring import mark_safe
from graphite.compat import HttpResponse
from graphite.dashboard.models import Dashboard, Template
from graphite.render.views import renderView
from send_graph import send_graph_email
# Extracts <fieldname> placeholders from a naming-scheme pattern.
fieldRegex = re.compile(r'<([^>]+)>')
# Catch-all scheme used when dashboard.conf defines no sections of its own.
defaultScheme = {
  'name' : 'Everything',
  'pattern' : '<category>',
  'fields' : [ dict(name='category', label='Category') ],
}
# UI defaults; each may be overridden in the [ui] section of dashboard.conf.
defaultUIConfig = {
  'default_graph_width' : 400,
  'default_graph_height' : 250,
  'refresh_interval' : 60,
  'autocomplete_delay' : 375,
  'merge_hover_delay' : 700,
  'theme' : 'default',
}
# Defaults for the [keyboard-shortcuts] section of dashboard.conf.
defaultKeyboardShortcuts = {
  'toggle_toolbar' : 'ctrl-z',
  'toggle_metrics_panel' : 'ctrl-space',
  'erase_all_graphs' : 'alt-x',
  'save_dashboard' : 'alt-s',
  'completer_add_metrics' : 'alt-enter',
  'completer_del_metrics' : 'alt-backspace',
  'give_completer_focus' : 'shift-space',
}
# Permissions a fully-authorized user may hold on dashboards.
ALL_PERMISSIONS = ['change', 'delete']
class DashboardConfig:
  """
  Cached view of settings.DASHBOARD_CONF.

  check() re-parses the file only when its mtime is newer than the last
  parse; load() rebuilds the naming schemes and UI config from the file.
  """
  def __init__(self):
    self.last_read = 0
    self.schemes = [defaultScheme]
    self.ui_config = defaultUIConfig.copy()

  def check(self):
    conf_mtime = getmtime(settings.DASHBOARD_CONF)
    if conf_mtime > self.last_read:
      self.load()
      # FIX: record the parsed mtime. last_read was never updated before,
      # so the conf file was re-parsed on every single request and the
      # cache never took effect.
      self.last_read = conf_mtime

  def load(self):
    """Parse DASHBOARD_CONF into self.schemes and self.ui_config."""
    schemes = [defaultScheme]
    parser = ConfigParser()
    parser.read(settings.DASHBOARD_CONF)

    # [ui] options: prefer integers, fall back to the raw string, then
    # to the hard-coded default.
    for option, default_value in defaultUIConfig.items():
      if parser.has_option('ui', option):
        try:
          self.ui_config[option] = parser.getint('ui', option)
        except ValueError:
          self.ui_config[option] = parser.get('ui', option)
      else:
        self.ui_config[option] = default_value

    if parser.has_option('ui', 'automatic_variants'):
      self.ui_config['automatic_variants'] = parser.getboolean('ui', 'automatic_variants')
    else:
      self.ui_config['automatic_variants'] = True

    self.ui_config['keyboard_shortcuts'] = defaultKeyboardShortcuts.copy()
    if parser.has_section('keyboard-shortcuts'):
      self.ui_config['keyboard_shortcuts'].update( parser.items('keyboard-shortcuts') )

    # Every other section defines a metric naming scheme whose pattern may
    # contain <field> placeholders, optionally labelled via "<field>.label".
    for section in parser.sections():
      if section in ('ui', 'keyboard-shortcuts'):
        continue

      scheme = parser.get(section, 'scheme')
      fields = []

      for match in fieldRegex.finditer(scheme):
        field = match.group(1)
        if parser.has_option(section, '%s.label' % field):
          label = parser.get(section, '%s.label' % field)
        else:
          label = field

        fields.append({
          'name' : field,
          'label' : label
        })

      schemes.append({
        'name' : section,
        'pattern' : scheme,
        'fields' : fields,
      })

    self.schemes = schemes


# Module-level singleton; views call config.check() per request.
config = DashboardConfig()
def dashboard(request, name=None):
  # Render the dashboard UI, optionally pre-loading the named dashboard's
  # saved state into the page context.
  dashboard_conf_missing = False

  try:
    config.check()
  except OSError as e:
    # A missing dashboard.conf is reported to the UI; anything else is fatal.
    if e.errno == errno.ENOENT:
      dashboard_conf_missing = True
    else:
      raise

  initialError = None
  debug = request.GET.get('debug', False)
  theme = request.GET.get('theme', config.ui_config['theme'])
  # Validate the requested theme by locating its static css file.
  css_file = finders.find('css/dashboard-%s.css' % theme)
  if css_file is None:
    initialError = "Invalid theme '%s'" % theme
    theme = config.ui_config['theme']

  # JSON payloads are mark_safe'd so the template embeds them unescaped.
  context = {
    'schemes_json': mark_safe(json.dumps(config.schemes)),
    'ui_config_json': mark_safe(json.dumps(config.ui_config)),
    'jsdebug': debug or settings.JAVASCRIPT_DEBUG,
    'debug': debug,
    'theme': theme,
    'initialError': initialError,
    'querystring': mark_safe(json.dumps(dict(request.GET.items()))),
    'dashboard_conf_missing': dashboard_conf_missing,
    'userName': '',
    'permissions': mark_safe(json.dumps(getPermissions(request.user))),
    'permissionsUnauthenticated': mark_safe(json.dumps(getPermissions(None)))
  }
  user = request.user
  if user:
    context['userName'] = user.username

  if name is not None:
    try:
      dashboard = Dashboard.objects.get(name=name)
    except Dashboard.DoesNotExist:
      context['initialError'] = "Dashboard '%s' does not exist." % name
    else:
      context['initialState'] = dashboard.state

  return render_to_response("dashboard.html", context)
def template(request, name, val):
  """
  Render the dashboard UI pre-loaded with template *name* applied to *val*.

  Mirrors dashboard() but loads a Template record instead of a Dashboard.
  """
  template_conf_missing = False

  try:
    config.check()
  # FIX: "except OSError, e" is Python-2-only syntax; the 'as' form is what
  # dashboard() above already uses and works on every supported version.
  except OSError as e:
    if e.errno == errno.ENOENT:
      template_conf_missing = True
    else:
      raise

  initialError = None
  debug = request.GET.get('debug', False)
  theme = request.GET.get('theme', config.ui_config['theme'])
  # NOTE(review): dashboard() resolves the css via staticfiles finders and
  # wraps the JSON context values in mark_safe(); this view still uses
  # CSS_DIR and plain json.dumps -- confirm whether it should be aligned.
  css_file = join(settings.CSS_DIR, 'dashboard-%s.css' % theme)
  if not exists(css_file):
    initialError = "Invalid theme '%s'" % theme
    theme = config.ui_config['theme']

  context = {
    'schemes_json' : json.dumps(config.schemes),
    'ui_config_json' : json.dumps(config.ui_config),
    'jsdebug' : debug or settings.JAVASCRIPT_DEBUG,
    'debug' : debug,
    'theme' : theme,
    'initialError' : initialError,
    'querystring' : json.dumps( dict( request.GET.items() ) ),
    'template_conf_missing' : template_conf_missing,
    'userName': '',
    'permissions': json.dumps(getPermissions(request.user)),
    'permissionsUnauthenticated': json.dumps(getPermissions(None))
  }
  user = request.user
  if user:
    context['userName'] = user.username

  try:
    template = Template.objects.get(name=name)
  except Template.DoesNotExist:
    context['initialError'] = "Template '%s' does not exist." % name
  else:
    # Instantiate the template against *val* and hand the state to the page.
    state = json.loads(template.loadState(val))
    state['name'] = '%s/%s' % (name, val)
    context['initialState'] = json.dumps(state)

  return render_to_response("dashboard.html", context)
def getPermissions(user):
  """Return [change, delete] based on authorisation model and user privileges/groups"""
  # Anonymous users are treated as "no user" for permission purposes.
  if user and not user.is_authenticated():
    user = None
  if not settings.DASHBOARD_REQUIRE_AUTHENTICATION:
    # Authentication disabled: everyone may change and delete.
    return ALL_PERMISSIONS
  if not user:
    return []
  # From here on we have an authenticated user.
  granted = ALL_PERMISSIONS
  if settings.DASHBOARD_REQUIRE_PERMISSIONS:
    granted = [perm for perm in ALL_PERMISSIONS
               if user.has_perm('dashboard.%s_dashboard' % perm)]
  required_group = settings.DASHBOARD_REQUIRE_EDIT_GROUP
  if required_group and len(user.groups.filter(name = required_group)) == 0:
    granted = []
  return granted
def save(request, name):
    """Persist the dashboard state posted by the client under *name*."""
    if 'change' not in getPermissions(request.user):
        return json_response( dict(error="Must be logged in with appropriate permissions to save") )
    # Round-trip through the JSON parser so malformed payloads fail here.
    state = str(json.dumps(json.loads(request.POST['state'])))
    try:
        dashboard = Dashboard.objects.get(name=name)
    except Dashboard.DoesNotExist:
        # First save under this name: create the row.
        Dashboard.objects.create(name=name, state=state)
    else:
        dashboard.state = state
        dashboard.save()
    return json_response(dict(success=True))
def save_template(request, name, key):
    """Persist the template state posted by the client under *name*/*key*."""
    if 'change' not in getPermissions(request.user):
        return json_response( dict(error="Must be logged in with appropriate permissions to save the template") )
    # Round-trip through the JSON parser so malformed payloads fail here.
    state = str(json.dumps(json.loads(request.POST['state'])))
    try:
        template = Template.objects.get(name=name)
    except Template.DoesNotExist:
        # New template: store the state without a key.
        template = Template.objects.create(name=name)
        template.setState(state)
    else:
        # Existing template: store the state under the given key.
        template.setState(state, key)
    template.save()
    return json_response(dict(success=True))
def load(request, name):
    """Return the stored state of dashboard *name* as JSON."""
    try:
        stored = Dashboard.objects.get(name=name)
    except Dashboard.DoesNotExist:
        return json_response( dict(error="Dashboard '%s' does not exist. " % name) )
    return json_response(dict(state=json.loads(stored.state)))
def load_template(request, name, val):
    """Return the state of template *name* specialised with *val*."""
    try:
        stored = Template.objects.get(name=name)
    except Template.DoesNotExist:
        return json_response( dict(error="Template '%s' does not exist. " % name) )
    state = json.loads(stored.loadState(val))
    state['name'] = '%s/%s' % (name, val)
    return json_response(dict(state=state))
def delete(request, name):
    """Delete dashboard *name*, subject to the 'delete' permission."""
    if 'delete' not in getPermissions(request.user):
        return json_response( dict(error="Must be logged in with appropriate permissions to delete") )
    try:
        doomed = Dashboard.objects.get(name=name)
    except Dashboard.DoesNotExist:
        return json_response( dict(error="Dashboard '%s' does not exist. " % name) )
    doomed.delete()
    return json_response(dict(success=True))
def delete_template(request, name):
    """Delete template *name*, subject to the 'delete' permission."""
    if 'delete' not in getPermissions(request.user):
        return json_response( dict(error="Must be logged in with appropriate permissions to delete the template") )
    try:
        template = Template.objects.get(name=name)
    except Template.DoesNotExist:
        # BUG FIX: this previously caught Dashboard.DoesNotExist, which a
        # failed Template lookup never raises, so a missing template escaped
        # as an unhandled Template.DoesNotExist instead of returning the
        # error JSON below.
        return json_response( dict(error="Template '%s' does not exist. " % name) )
    else:
        template.delete()
    return json_response( dict(success=True) )
def find(request):
    """List dashboards whose names contain every whitespace-separated query term."""
    query = request.REQUEST['query']
    terms = set(query.lower().split())
    matches = []
    for dashboard in Dashboard.objects.all():
        lowered = dashboard.name.lower()
        # Scratch dashboards are never surfaced in search results.
        if lowered.startswith('temporary-'):
            continue
        # all() over an empty term set is True, so a blank query matches everything.
        if all(term in lowered for term in terms):
            matches.append(dict(name=dashboard.name))
    return json_response(dict(dashboards=matches))
def find_template(request):
    """List templates whose names contain every whitespace-separated query term."""
    query = request.REQUEST['query']
    terms = set(query.lower().split())
    matches = []
    for template in Template.objects.all():
        lowered = template.name.lower()
        # all() over an empty term set is True, so a blank query matches everything.
        if all(term in lowered for term in terms):
            matches.append(dict(name=template.name))
    return json_response(dict(templates=matches))
def help(request):
    """Render the static dashboard help page."""
    # NOTE: shadows the 'help' builtin, but the name is part of the view API.
    return render_to_response("dashboardHelp.html", {})
def email(request):
    """Render the posted graph and mail it as a PNG attachment."""
    sender = request.POST['sender']
    recipients = request.POST['recipients'].split()
    subject = request.POST['subject']
    message = request.POST['message']
    # Rebuild request.POST so renderView sees the graph parameters;
    # parse_int=str keeps numbers as strings for QueryDict consumption.
    graph_params = json.loads(request.POST['graph_params'], parse_int=str)
    target = QueryDict(graph_params.pop('target'))
    graph_params = QueryDict(urlencode(graph_params))
    merged = request.POST.copy()
    merged.update(graph_params)
    merged.update(target)
    request.POST = merged
    response = renderView(request)
    img = response.content
    if img:
        attachments = [('graph.png', img, 'image/png')]
        send_graph_email(subject, sender, recipients, attachments, message)
    return json_response(dict(success=True))
def create_temporary(request):
    """Store the posted state under the first free 'temporary-N' name."""
    # Round-trip through the JSON parser so malformed payloads fail here.
    state = str(json.dumps(json.loads(request.POST['state'])))
    index = 0
    while True:
        candidate = "temporary-%d" % index
        try:
            Dashboard.objects.get(name=candidate)
        except Dashboard.DoesNotExist:
            # Name is free: claim it.
            dashboard = Dashboard.objects.create(name=candidate, state=state)
            break
        index += 1
    return json_response(dict(name=dashboard.name))
def json_response(obj):
    """Serialise *obj* to JSON and wrap it in an HttpResponse."""
    payload = json.dumps(obj)
    return HttpResponse(content=payload, content_type='application/json')
def user_login(request):
    """Authenticate the posted credentials and report the outcome as JSON."""
    result = dict(errors={}, text={}, success=False, permissions=[])
    user = authenticate(username=request.POST['username'],
                        password=request.POST['password'])
    if user is None:
        result['errors']['reason'] = 'Username and/or password invalid.'
    elif not user.is_active:
        result['errors']['reason'] = 'Account disabled.'
    else:
        login(request, user)
        result['success'] = True
        result['permissions'].extend(getPermissions(user))
    return json_response(result)
def user_logout(request):
    """End the current session and acknowledge with JSON."""
    logout(request)
    return json_response(dict(errors={}, text={}, success=True))
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name,unused-argument, not-context-manager
"""QNN dialect operators."""
from __future__ import absolute_import as _abs
import tvm
import tvm.ir
from tvm import relay
from tvm.relay.expr import Tuple, TupleWrapper
from tvm.relay.op.nn.utils import get_pad_tuple2d
from tvm.runtime import Object
from tvm.target import Target
from tvm.topi.nn.qnn import SQNN_DTYPE_TO_CODE
from tvm.topi.x86.utils import target_has_sse41
from ... import op as reg
from ...op import OpPattern
from . import _make, _requantize
@tvm._ffi.register_object("relay.qnn.op.RequantizeConfig")
class RequantizeConfig(Object):
    """Configure the requantization behavior by setting config variables.
    Note
    ----
    This object is backed by node system in C++, with arguments that can be
    exchanged between python and C++.
    Do not construct directly, use requantize_config instead.
    The fields that are backed by the C++ node are immutable once an instance
    is constructed. Use _node_defaults getters to get results for the fields.
    """
    @staticmethod
    def _get_node_default_rounding():
        # Default rounding mode used when the caller does not specify one.
        return "UPWARD"
    @staticmethod
    def _get_node_default_compute_dtype():
        # Pick float32 as the compute dtype on llvm targets whose mcpu
        # supports SSE4.1; otherwise fall back to the 64-bit integer path.
        target = Target.current(True)
        if target and str(target.kind) == "llvm" and target_has_sse41(target.mcpu):
            return "float32"
        return "int64"
    # Maps each config field name to a zero-argument callable producing its
    # default value; .__func__ unwraps the staticmethods into plain functions.
    _node_defaults = {
        "rounding": _get_node_default_rounding.__func__,
        "compute_dtype": _get_node_default_compute_dtype.__func__,
    }
    # pylint: disable=no-member
    def __init__(self, handle):
        """Initialize the function with handle
        Parameters
        ----------
        handle : SymbolHandle
            the handle to the underlying C++ Symbol
        """
        super(RequantizeConfig, self).__init__(handle)
        self.handle = handle
    def __enter__(self):
        # pylint: disable=protected-access
        # Entering the `with` block pushes this config onto the C++ scope stack.
        _requantize._EnterRequantizeConfigScope(self)
        return self
    def __exit__(self, ptype, value, trace):
        # Leaving the `with` block restores the previously active config.
        _requantize._ExitRequantizeConfigScope()
    def __setattr__(self, name, value):
        # Fields backed by the C++ node are immutable from Python.
        if name in RequantizeConfig._node_defaults:
            raise AttributeError("'%s' object cannot set attribute '%s'" % (str(type(self)), name))
        return super(RequantizeConfig, self).__setattr__(name, value)
def current_requantize_config():
    """Return the requantization configuration active in the current scope."""
    config = _requantize._GetCurrentRequantizeConfig()
    return config
def requantize_config(**kwargs):
    """Build a RequantizeConfig node, filling unspecified fields with defaults.

    Parameters
    ---------
    rounding: "UPWARD" or "TONEAREST"
        Rounding direction for fixed point multiplications.
    compute_dtype:
        Specifies the data type used during requantize.
        Supported options: \"int64\", \"float32\", \"float64\"
    Returns
    -------
    config: RequantizeConfig
        The requantization configuration
    """
    node_args = {}
    for key, default_fn in RequantizeConfig._node_defaults.items():
        # Only evaluate the default factory when the caller gave no value.
        node_args[key] = kwargs[key] if key in kwargs else default_fn()
    return tvm.ir.make_node("relay.qnn.op.RequantizeConfig", **node_args)
def requantize(
    data,
    input_scale,
    input_zero_point,
    output_scale,
    output_zero_point,
    axis=-1,
    rounding="None",
    compute_dtype="None",
    out_dtype="int8",
):
    r"""Convert one quantized tensor representation into another.

    Computes ``Q_output = zp_output + (scale_input / scale_output) * (Q_input - zp_input)``.

    Parameters
    ----------
    data : tvm.relay.Expr
        The quantized input tensor.
    input_scale, input_zero_point : tvm.relay.Expr
        Quantization scale and zero point of the input tensor.
    output_scale, output_zero_point : tvm.relay.Expr
        Quantization scale and zero point of the output tensor.
    axis : int
        Channel axis for quantization; -1 means the last axis.
    rounding : string, optional
        Rounding direction for values midway between two representables.
    compute_dtype : string, optional
        Data type used internally ("int64", "float32", "float64").
    out_dtype : str, optional
        Output data type.

    Returns
    -------
    result : tvm.relay.Expr
        The requantized tensor.
    """
    args = (data, input_scale, input_zero_point, output_scale,
            output_zero_point, axis, rounding, compute_dtype, out_dtype)
    return _make.requantize(*args)
def quantize(data, output_scale, output_zero_point, axis=-1, out_dtype="int8"):
    r"""Quantize a float32 tensor into an integer representation.

    ``Q = clamp(round(data / output_scale) + output_zero_point, dtype_min, dtype_max)``;
    the output shape equals the input shape.

    Parameters
    ----------
    data : tvm.relay.Expr
        Float32 tensor to be quantized.
    output_scale : tvm.relay.Expr
        The output scale.
    output_zero_point : tvm.relay.Expr
        The output zero_point.
    axis : int
        Channel axis for quantization; -1 means the last axis.
    out_dtype : str, optional
        Target dtype; can be [int8, uint8, int32].

    Returns
    -------
    result : tvm.relay.Expr
        The quantized tensor.
    """
    args = (data, output_scale, output_zero_point, axis, out_dtype)
    return _make.quantize(*args)
def simulated_quantize(data, output_scale, output_zero_point, axis=-1, out_dtype="int8"):
    r"""Simulated quantize: mimics quantize but always outputs the input type.

    The relaxed typing makes it useful for calibrating or training a
    quantized network.

    Parameters
    ----------
    data : tvm.relay.Expr
        Tensor to be quantized, e.g. float32.
    output_scale : tvm.relay.Expr
        The output scale.
    output_zero_point : tvm.relay.Expr
        The output zero_point.
    axis : int
        Channel axis for quantization; -1 means the last axis.
    out_dtype : string or tvm.relay.Expr
        Datatype to simulate quantization to.

    Returns
    -------
    result : tvm.relay.Expr
        The computed result.
    """
    if isinstance(out_dtype, str):
        # String dtypes are converted to their integer type-code constant.
        out_dtype = relay.const(SQNN_DTYPE_TO_CODE[out_dtype], dtype="int32")
    # Flatten the qnn parameter tensors to guarantee shape compatibility.
    scale_vec = relay.op.reshape(output_scale, [-1])
    zp_vec = relay.op.reshape(output_zero_point, [-1])
    return _make.simulated_quantize(data, out_dtype, scale_vec, zp_vec, axis)
def dequantize(data, input_scale, input_zero_point, axis=-1):
    r"""Dequantize an integer tensor back to float32.

    The output shape equals the input shape.

    Parameters
    ----------
    data : tvm.relay.Expr
        Tensor to be dequantized; can be of type [int8, uint8, int32].
    input_scale : tvm.relay.Expr
        The input scale.
    input_zero_point : tvm.relay.Expr
        The input zero_point.
    axis : int
        Channel axis for quantization; -1 means the last axis.

    Returns
    -------
    result : tvm.relay.Expr
        The dequantized tensor.
    """
    args = (data, input_scale, input_zero_point, axis)
    return _make.dequantize(*args)
def simulated_dequantize(data, input_scale, input_zero_point, axis=-1, in_dtype="int8"):
    r"""Simulated dequantize: mimics dequantize but always outputs the input type.

    The relaxed typing makes it useful for calibrating or training a
    quantized network.

    Parameters
    ----------
    data : tvm.relay.Expr
        Tensor to be dequantized.
    input_scale : tvm.relay.Expr
        The input scale.
    input_zero_point : tvm.relay.Expr
        The input zero_point.
    axis : int
        Channel axis for quantization; -1 means the last axis.
    in_dtype : string or tvm.relay.Expr
        Datatype to simulate dequantization from.

    Returns
    -------
    result : tvm.relay.Expr
        The computed result.
    """
    if isinstance(in_dtype, str):
        # String dtypes are converted to their integer type-code constant.
        in_dtype = relay.const(SQNN_DTYPE_TO_CODE[in_dtype], dtype="int32")
    # Flatten the qnn parameter tensors to guarantee shape compatibility.
    scale_vec = relay.op.reshape(input_scale, [-1])
    zp_vec = relay.op.reshape(input_zero_point, [-1])
    return _make.simulated_dequantize(data, in_dtype, scale_vec, zp_vec, axis)
def concatenate(data, input_scales, input_zero_points, output_scale, output_zero_point, axis):
    """Concatenate quantized tensors along *axis*.

    Parameters
    ----------
    data : Union(List[relay.Expr], Tuple[relay.Expr], TupleWrapper[relay.Expr])
        The quantized tensors to concatenate.
    input_scales : List[relay.Expr]
        Scales of the input quantized tensors.
    input_zero_points : List[relay.Expr]
        Zero points of the input quantized tensors.
    output_scale : relay.Expr
        Scale of the output quantized tensor.
    output_zero_point : relay.Expr
        Zero point of the output quantized tensor.
    axis : int
        Axis along which the tensors are concatenated.

    Returns
    -------
    result: relay.Expr
        The concatenated quantized tensor.
    """
    # Normalise the input container to a relay Tuple.
    if isinstance(data, (list, tuple)):
        data = Tuple(data)
    elif isinstance(data, TupleWrapper):
        data = data.tuple_value
    if not isinstance(axis, int):
        raise ValueError("For now, we only support integer axis")
    scales = Tuple(list(input_scales))
    zero_points = Tuple(list(input_zero_points))
    return _make.concatenate(data, scales, zero_points, output_scale, output_zero_point, axis)
def conv2d(
    data,
    kernel,
    input_zero_point,
    kernel_zero_point,
    input_scale,
    kernel_scale,
    kernel_size,
    channels,
    strides=(1, 1),
    padding=(0, 0),
    dilation=(1, 1),
    groups=1,
    data_layout="NCHW",
    kernel_layout="OIHW",
    out_layout="",
    out_dtype="int32",
):
    r"""Quantized 2D convolution.

    Convolves quantized data with a quantized kernel. For per-channel
    quantization, qnn expects the kernel scale (and optionally the kernel
    zero point) as 1-D vectors rather than scalars. The output scale is
    kernel_scale * input_scale and the output zero point is 0; the default
    int32 output is typically scaled back to (u)int8 via Requantize.

    Parameters
    ----------
    data : tvm.relay.Expr
        The quantized input data.
    kernel : tvm.relay.Expr
        The quantized kernel.
    input_zero_point, kernel_zero_point : tvm.relay.Expr
        Zero points of the data / kernel distributions.
    input_scale, kernel_scale : tvm.relay.Expr
        Scales of the input / kernel tensors; kept here for convenience and
        for later lowering passes (see input_scale in Requantize).
    kernel_size : tuple of int
        Spatial width and height of the convolution kernel.
    channels : int
        Number of output channels.
    strides, padding, dilation : tuple of int, optional
        Convolution strides, input padding and dilation rate.
    groups : int, optional
        Number of groups for grouped convolution.
    data_layout, kernel_layout, out_layout : str, optional
        Layouts of the input, kernel and output; an empty out_layout means
        "same as data_layout".
    out_dtype : str, optional
        Output data type for mixed precision conv2d.

    Returns
    -------
    result : tvm.relay.Expr
        The computed result.
    """
    # TODO enforce 4-way padding in topi/nn/conv2d after #4644 merged
    # Normalise 2-way padding to 4-way padding.
    padding = get_pad_tuple2d(padding)
    args = (data, kernel, input_zero_point, kernel_zero_point, input_scale,
            kernel_scale, strides, padding, dilation, groups, channels,
            kernel_size, data_layout, kernel_layout, out_layout, out_dtype)
    return _make.conv2d(*args)
def conv2d_transpose(
    data,
    weight,
    input_zero_point,
    kernel_zero_point,
    input_scale,
    kernel_scale,
    strides=(1, 1),
    padding=(0, 0),
    dilation=(1, 1),
    groups=1,
    channels=None,
    kernel_size=None,
    data_layout="NCHW",
    kernel_layout="IOHW",
    out_layout="",
    output_padding=(0, 0),
    out_dtype="int32",
):
    """Quantized transposed 2D convolution (deconvolution).

    Deconvolves quantized data with a quantized kernel. The output scale is
    kernel_scale * input_scale and the output zero point is 0; the default
    int32 output is typically scaled back to (u)int8 via Requantize.

    Parameters
    ----------
    data : tvm.relay.Expr
        The quantized input data.
    weight : tvm.relay.Expr
        The quantized weight.
    input_zero_point, kernel_zero_point : tvm.relay.Expr
        Zero points of the data / kernel distributions.
    input_scale, kernel_scale : tvm.relay.Expr
        Scales of the input / weight tensors; kept here for convenience and
        for later lowering passes (see input_scale in Requantize).
    strides, padding, dilation : Tuple[int], optional
        Convolution strides, padding and dilation rate.
    groups : int, optional
        Number of groups for grouped convolution.
    channels : int, optional
        Number of output channels.
    kernel_size : tuple of int, optional
        Spatial dimensions of the convolution kernel.
    data_layout, kernel_layout, out_layout : str, optional
        Layouts of the input, weight and output; an empty out_layout means
        "same as data_layout".
    output_padding : Tuple[int], optional
        Identifies the padding within the output shape (used in training,
        where transpose_conv represents the gradient of a convolution).
    out_dtype : str, optional
        Output data type for mixed precision conv2d.

    Returns
    -------
    result : tvm.relay.Expr
        The computed result.
    """
    # Normalise 2-way padding to 4-way padding.
    padding = get_pad_tuple2d(padding)
    args = (data, weight, input_zero_point, kernel_zero_point, input_scale,
            kernel_scale, strides, padding, dilation, groups, channels,
            kernel_size, data_layout, kernel_layout, out_layout,
            output_padding, out_dtype)
    return _make.conv2d_transpose(*args)
def add(
    lhs, rhs, lhs_scale, lhs_zero_point, rhs_scale, rhs_zero_point, output_scale, output_zero_point
):
    """Quantized addition with numpy-style broadcasting.

    Parameters
    ----------
    lhs, rhs : relay.Expr
        The quantized operands.
    lhs_scale, lhs_zero_point : relay.Expr
        Quantization parameters of *lhs*.
    rhs_scale, rhs_zero_point : relay.Expr
        Quantization parameters of *rhs*.
    output_scale, output_zero_point : relay.Expr
        Quantization parameters of the result.

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    return _make.add(lhs, rhs, lhs_scale, lhs_zero_point, rhs_scale,
                     rhs_zero_point, output_scale, output_zero_point)
def dense(
    data,
    weight,
    input_zero_point,
    kernel_zero_point,
    input_scale,
    kernel_scale,
    units,
    out_dtype="int32",
):
    """Qnn Dense operator: applies the quantized linear transform Y = X * W.

    For per-channel quantization, qnn expects the kernel scale (and
    optionally the kernel zero point) as 1-D vectors rather than scalars.

    Parameters
    ----------
    data : tvm.relay.Expr
        The quantized input data.
    weight : tvm.relay.Expr
        The quantized weight.
    input_zero_point, kernel_zero_point : tvm.relay.Expr
        Zero points of the input / kernel.
    input_scale, kernel_scale : tvm.relay.Expr
        Scales of the input / weight tensors; the kernel scale is kept for
        later lowering passes (see input_scale in Requantize).
    units : int
        Number of hidden units of the dense transformation.
    out_dtype : str, optional
        Output data type for mixed precision dense; int32 or int16.

    Returns
    -------
    result : tvm.relay.Expr
        The computed result.
    """
    args = (data, weight, input_zero_point, kernel_zero_point,
            input_scale, kernel_scale, units, out_dtype)
    return _make.dense(*args)
def mul(
    lhs, rhs, lhs_scale, lhs_zero_point, rhs_scale, rhs_zero_point, output_scale, output_zero_point
):
    """Quantized multiplication with numpy-style broadcasting.

    Parameters
    ----------
    lhs, rhs : relay.Expr
        The quantized operands.
    lhs_scale, lhs_zero_point : relay.Expr
        Quantization parameters of *lhs*.
    rhs_scale, rhs_zero_point : relay.Expr
        Quantization parameters of *rhs*.
    output_scale, output_zero_point : relay.Expr
        Quantization parameters of the result.

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    return _make.mul(lhs, rhs, lhs_scale, lhs_zero_point, rhs_scale,
                     rhs_zero_point, output_scale, output_zero_point)
def tanh(x, scale, zero_point, output_scale, output_zero_point):
    """Quantized hyperbolic tangent.

    Parameters
    ----------
    x : relay.Expr
        The quantized input tensor.
    scale, zero_point : relay.Expr
        Quantization parameters of *x*.
    output_scale, output_zero_point : relay.Expr
        Quantization parameters of the result.

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    return _make.tanh(x, scale, zero_point, output_scale, output_zero_point)
def exp(x, scale, zero_point, output_scale, output_zero_point):
    """Quantized exponential function.

    Parameters
    ----------
    x : relay.Expr
        The quantized input tensor.
    scale, zero_point : relay.Expr
        Quantization parameters of *x*.
    output_scale, output_zero_point : relay.Expr
        Quantization parameters of the result.

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    return _make.exp(x, scale, zero_point, output_scale, output_zero_point)
def sqrt(x, scale, zero_point, output_scale, output_zero_point):
    """Quantized square root.

    Parameters
    ----------
    x : relay.Expr
        The quantized input tensor.
    scale, zero_point : relay.Expr
        Quantization parameters of *x*.
    output_scale, output_zero_point : relay.Expr
        Quantization parameters of the result.

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    return _make.sqrt(x, scale, zero_point, output_scale, output_zero_point)
def rsqrt(x, scale, zero_point, output_scale, output_zero_point):
    """Quantized reciprocal square root.

    Parameters
    ----------
    x : relay.Expr
        The quantized input tensor.
    scale, zero_point : relay.Expr
        Quantization parameters of *x*.
    output_scale, output_zero_point : relay.Expr
        Quantization parameters of the result.

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    return _make.rsqrt(x, scale, zero_point, output_scale, output_zero_point)
def erf(x, scale, zero_point, output_scale, output_zero_point):
    """Quantized error function.

    Parameters
    ----------
    x : relay.Expr
        The quantized input tensor.
    scale, zero_point : relay.Expr
        Quantization parameters of *x*.
    output_scale, output_zero_point : relay.Expr
        Quantization parameters of the result.

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    return _make.erf(x, scale, zero_point, output_scale, output_zero_point)
def sigmoid(x, scale, zero_point, output_scale, output_zero_point):
    """Quantized sigmoid.

    Parameters
    ----------
    x : relay.Expr
        The quantized input tensor.
    scale, zero_point : relay.Expr
        Quantization parameters of *x*.
    output_scale, output_zero_point : relay.Expr
        Quantization parameters of the result.

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    return _make.sigmoid(x, scale, zero_point, output_scale, output_zero_point)
def subtract(
    lhs, rhs, lhs_scale, lhs_zero_point, rhs_scale, rhs_zero_point, output_scale, output_zero_point
):
    """Quantized subtraction with numpy-style broadcasting.

    Parameters
    ----------
    lhs, rhs : relay.Expr
        The quantized operands.
    lhs_scale, lhs_zero_point : relay.Expr
        Quantization parameters of *lhs*.
    rhs_scale, rhs_zero_point : relay.Expr
        Quantization parameters of *rhs*.
    output_scale, output_zero_point : relay.Expr
        Quantization parameters of the result.

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    return _make.subtract(lhs, rhs, lhs_scale, lhs_zero_point, rhs_scale,
                          rhs_zero_point, output_scale, output_zero_point)
def batch_matmul(x, y, x_zero_point, y_zero_point, x_scale, y_scale, out_dtype="int32"):
    r"""Quantized batch matrix multiplication of *x* and *y*.

    .. math::
        \mbox{batch_matmul}(x, y)[i, :, :] = \mbox{matmul}(x[i, :, :], y[i, :, :]^T)

    Each quantized tensor is represented as ``A = scale_a x (QA - zp_A)``,
    where QA is the quantized tensor and scale_a / zp_A its parameters.

    Parameters
    ----------
    x, y : tvm.relay.Expr
        The quantized input batches.
    x_zero_point, y_zero_point : tvm.relay.Expr
        Zero points of *x* and *y*.
    x_scale, y_scale : tvm.relay.Expr
        Scales of *x* and *y*.
    out_dtype : str, optional
        Output data type for mixed precision; int32 or int16.

    Returns
    -------
    result: tvm.relay.Expr
        The computed result.
    """
    args = (x, y, x_zero_point, y_zero_point, x_scale, y_scale, out_dtype)
    return _make.batch_matmul(*args)
# register fuse pattern for qnn ops
# quantize/dequantize are registered as OPAQUE so the fusion pass treats
# them as standalone ops rather than fusing them with their neighbours.
reg.register_pattern("qnn.quantize", OpPattern.OPAQUE)
reg.register_pattern("qnn.dequantize", OpPattern.OPAQUE)
|
|
from __future__ import absolute_import
import logging
from typing import Any, Dict, List, Set, Tuple, Optional, Text
from django.contrib.auth.backends import RemoteUserBackend
from django.conf import settings
from django.http import HttpResponse
import django.contrib.auth
from django_auth_ldap.backend import LDAPBackend, _LDAPUser
from zerver.lib.actions import do_create_user
from zerver.models import UserProfile, Realm, get_user_profile_by_id, \
get_user_profile_by_email, remote_user_to_email, email_to_username, \
get_realm, get_realm_by_email_domain
from apiclient.sample_tools import client as googleapiclient
from oauth2client.crypt import AppIdentityError
from social_core.backends.github import GithubOAuth2, GithubOrganizationOAuth2, \
GithubTeamOAuth2
from social_core.exceptions import AuthFailed, SocialAuthBaseException
from django.contrib.auth import authenticate
from zerver.lib.users import check_full_name
from zerver.lib.request import JsonableError
from zerver.lib.utils import check_subdomain, get_subdomain
def pad_method_dict(method_dict):
    # type: (Dict[Text, bool]) -> Dict[Text, bool]
    """Ensure every supported auth backend has an entry in *method_dict*.

    Backends known to the software but missing from the dict are added as
    disabled, regardless of whether they are configured on this server.
    The (mutated) dict is also returned for convenience.
    """
    for backend_name in AUTH_BACKEND_NAME_MAP:
        method_dict.setdefault(backend_name, False)
    return method_dict
def auth_enabled_helper(backends_to_check, realm):
    # type: (List[Text], Optional[Realm]) -> bool
    """True if any backend in *backends_to_check* is installed and enabled.

    With a realm, the realm's own authentication-methods dict decides which
    backends count as enabled; without one, every method is treated as
    enabled. The dict is padded so all known backends have an entry.
    """
    if realm is None:
        enabled_method_dict = dict((method, True) for method in Realm.AUTHENTICATION_FLAGS)
    else:
        enabled_method_dict = realm.authentication_methods_dict()
    pad_method_dict(enabled_method_dict)
    for installed_backend in django.contrib.auth.get_backends():
        for backend_name in backends_to_check:
            backend_class = AUTH_BACKEND_NAME_MAP[backend_name]
            if enabled_method_dict[backend_name] and isinstance(installed_backend, backend_class):
                return True
    return False
def ldap_auth_enabled(realm=None):
    # type: (Optional[Realm]) -> bool
    """Whether the LDAP auth backend is enabled (for *realm*, if given)."""
    backend_names = [u'LDAP']
    return auth_enabled_helper(backend_names, realm)
def email_auth_enabled(realm=None):
    # type: (Optional[Realm]) -> bool
    """Whether the Email auth backend is enabled (for *realm*, if given)."""
    backend_names = [u'Email']
    return auth_enabled_helper(backend_names, realm)
def password_auth_enabled(realm=None):
    # type: (Optional[Realm]) -> bool
    """Whether any password-based backend (LDAP or Email) is enabled."""
    if ldap_auth_enabled(realm):
        return True
    return email_auth_enabled(realm)
def dev_auth_enabled(realm=None):
    # type: (Optional[Realm]) -> bool
    """Whether the Dev auth backend is enabled (for *realm*, if given)."""
    backend_names = [u'Dev']
    return auth_enabled_helper(backend_names, realm)
def google_auth_enabled(realm=None):
    # type: (Optional[Realm]) -> bool
    """Whether the Google auth backend is enabled (for *realm*, if given)."""
    backend_names = [u'Google']
    return auth_enabled_helper(backend_names, realm)
def github_auth_enabled(realm=None):
    # type: (Optional[Realm]) -> bool
    """Whether the GitHub auth backend is enabled (for *realm*, if given)."""
    backend_names = [u'GitHub']
    return auth_enabled_helper(backend_names, realm)
def common_get_active_user_by_email(email, return_data=None):
    # type: (Text, Optional[Dict[str, Any]]) -> Optional[UserProfile]
    """Look up an active user in an active realm by email address.

    Returns None when the account is missing, deactivated, or belongs to
    a deactivated realm; when ``return_data`` is provided, the reason is
    recorded in it ('inactive_user' or 'inactive_realm').
    """
    try:
        user_profile = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        return None

    # Determine why (if at all) this account cannot log in.
    failure_reason = None
    if not user_profile.is_active:
        failure_reason = 'inactive_user'
    elif user_profile.realm.deactivated:
        failure_reason = 'inactive_realm'

    if failure_reason is None:
        return user_profile
    if return_data is not None:
        return_data[failure_reason] = True
    return None
class ZulipAuthMixin(object):
    """Common base for Zulip auth backends: id-based user lookup for Django."""

    def get_user(self, user_profile_id):
        # type: (int) -> Optional[UserProfile]
        """ Get a UserProfile object from the user_profile_id. """
        try:
            return get_user_profile_by_id(user_profile_id)
        except UserProfile.DoesNotExist:
            # Django expects None (not an exception) for unknown ids.
            return None
class SocialAuthMixin(ZulipAuthMixin):
    """Shared logic for python-social-auth based backends (e.g. GitHub).

    Subclasses must set ``auth_backend_name`` and implement
    get_email_address/get_full_name in terms of the OAuth response.
    """

    auth_backend_name = None  # type: Text

    def get_email_address(self, *args, **kwargs):
        # type: (*Any, **Any) -> Text
        """Return the user's email address from the social-auth response."""
        raise NotImplementedError

    def get_full_name(self, *args, **kwargs):
        # type: (*Any, **Any) -> Text
        """Return the user's full name from the social-auth response."""
        raise NotImplementedError

    def authenticate(self, *args, **kwargs):
        # type: (*Any, **Any) -> Optional[UserProfile]
        """Map the social-auth response to an active UserProfile, or None.

        Failure reasons are recorded in kwargs['return_data'] when the
        caller supplied one.
        """
        return_data = kwargs.get('return_data', {})

        email_address = self.get_email_address(*args, **kwargs)
        if not email_address:
            return None

        try:
            user_profile = get_user_profile_by_email(email_address)
        except UserProfile.DoesNotExist:
            # The identity attestation itself was fine; there is simply
            # no matching account (consumed by the registration flow).
            return_data["valid_attestation"] = True
            return None

        if not user_profile.is_active:
            return_data["inactive_user"] = True
            return None

        if user_profile.realm.deactivated:
            return_data["inactive_realm"] = True
            return None

        if not check_subdomain(kwargs.get("realm_subdomain"),
                               user_profile.realm.subdomain):
            return_data["invalid_subdomain"] = True
            return None

        if not auth_enabled_helper([self.auth_backend_name], user_profile.realm):
            return_data["auth_backend_disabled"] = True
            return None

        return user_profile

    def process_do_auth(self, user_profile, *args, **kwargs):
        # type: (UserProfile, *Any, **Any) -> Optional[HttpResponse]
        """Turn the (possibly None) authenticated user into an HTTP response."""
        # This function needs to be imported from here due to the cyclic
        # dependency.
        from zerver.views.auth import (login_or_register_remote_user,
                                       redirect_to_subdomain_login_url)
        from zerver.views.registration import redirect_and_log_into_subdomain

        return_data = kwargs.get('return_data', {})

        inactive_user = return_data.get('inactive_user')
        inactive_realm = return_data.get('inactive_realm')
        invalid_subdomain = return_data.get('invalid_subdomain')

        if inactive_user or inactive_realm:
            # Deactivated accounts/realms get no login or registration flow.
            return None

        strategy = self.strategy  # type: ignore # This comes from Python Social Auth.
        request = strategy.request
        email_address = self.get_email_address(*args, **kwargs)
        full_name = self.get_full_name(*args, **kwargs)

        subdomain = strategy.session_get('subdomain')
        if not subdomain:
            return login_or_register_remote_user(request, email_address,
                                                 user_profile, full_name,
                                                 bool(invalid_subdomain))
        try:
            realm = Realm.objects.get(string_id=subdomain)
        except Realm.DoesNotExist:
            return redirect_to_subdomain_login_url()

        return redirect_and_log_into_subdomain(realm, full_name, email_address)

    def auth_complete(self, *args, **kwargs):
        # type: (*Any, **Any) -> Optional[HttpResponse]
        """Wrap python-social-auth's auth_complete, mapping failures to None."""
        try:
            # Call the auth_complete method of BaseOAuth2 is Python Social Auth
            return super(SocialAuthMixin, self).auth_complete(*args, **kwargs)  # type: ignore
        except AuthFailed:
            return None
        except SocialAuthBaseException as e:
            # Log unexpected social-auth errors rather than 500ing.
            logging.exception(e)
            return None
class ZulipDummyBackend(ZulipAuthMixin):
    """
    Used when we want to log you in but we don't know which backend to use.
    """

    def authenticate(self, username=None, realm_subdomain=None, use_dummy_backend=False,
                     return_data=None):
        # type: (Optional[Text], Optional[Text], bool, Optional[Dict[str, Any]]) -> Optional[UserProfile]
        """Log in ``username`` directly, but only when explicitly requested
        via use_dummy_backend=True; otherwise always fail."""
        if use_dummy_backend:
            user_profile = common_get_active_user_by_email(username)
            if user_profile is None:
                return None
            if not check_subdomain(realm_subdomain, user_profile.realm.subdomain):
                # Bug fix: return_data defaults to None; the old code wrote
                # into it unconditionally and raised TypeError here.
                if return_data is not None:
                    return_data["invalid_subdomain"] = True
                return None
            return user_profile
        return None
class EmailAuthBackend(ZulipAuthMixin):
    """
    Email Authentication Backend

    Allows a user to sign in using an email/password pair rather than
    a username/password pair.
    """

    def authenticate(self, username=None, password=None, realm_subdomain=None, return_data=None):
        # type: (Optional[Text], Optional[str], Optional[Text], Optional[Dict[str, Any]]) -> Optional[UserProfile]
        """ Authenticate a user based on email address as the user name. """
        if username is None or password is None:
            # Return immediately.  Otherwise we will look for a SQL row with
            # NULL username.  While that's probably harmless, it's needless
            # exposure.
            return None

        user_profile = common_get_active_user_by_email(username, return_data=return_data)
        if user_profile is None:
            return None
        if not password_auth_enabled(user_profile.realm):
            if return_data is not None:
                return_data['password_auth_disabled'] = True
            return None
        if not email_auth_enabled(user_profile.realm):
            if return_data is not None:
                return_data['email_auth_disabled'] = True
            return None
        if user_profile.check_password(password):
            if not check_subdomain(realm_subdomain, user_profile.realm.subdomain):
                # Bug fix: guard against return_data=None (the default); the
                # other return_data writes in this method already did, but
                # this one raised TypeError instead of failing cleanly.
                if return_data is not None:
                    return_data["invalid_subdomain"] = True
                return None
            return user_profile
        return None
class GoogleMobileOauth2Backend(ZulipAuthMixin):
    """
    Google Apps authentication for mobile devices

    Allows a user to sign in using a Google-issued OAuth2 token.

    Ref:
        https://developers.google.com/+/mobile/android/sign-in#server-side_access_for_your_app
        https://developers.google.com/accounts/docs/CrossClientAuth#offlineAccess
    """

    def authenticate(self, google_oauth2_token=None, realm_subdomain=None, return_data=None):
        # type: (Optional[str], Optional[Text], Optional[Dict[str, Any]]) -> Optional[UserProfile]
        """Verify a Google OAuth2 id token and map it to an active user.

        Failure reasons are recorded in ``return_data``.
        """
        # Bug fix: the old signature used a mutable default (return_data={}),
        # which is shared across calls; create a fresh dict per call instead.
        if return_data is None:
            return_data = {}
        try:
            token_payload = googleapiclient.verify_id_token(google_oauth2_token, settings.GOOGLE_CLIENT_ID)
        except AppIdentityError:
            return None
        if token_payload["email_verified"] in (True, "true"):
            try:
                user_profile = get_user_profile_by_email(token_payload["email"])
            except UserProfile.DoesNotExist:
                # Token was valid; there's just no matching account.
                return_data["valid_attestation"] = True
                return None
            if not user_profile.is_active:
                return_data["inactive_user"] = True
                return None
            if user_profile.realm.deactivated:
                return_data["inactive_realm"] = True
                return None
            if not check_subdomain(realm_subdomain, user_profile.realm.subdomain):
                return_data["invalid_subdomain"] = True
                return None
            if not google_auth_enabled(realm=user_profile.realm):
                return_data["google_auth_disabled"] = True
                return None
            return user_profile
        else:
            return_data["valid_attestation"] = False
            return None
class ZulipRemoteUserBackend(RemoteUserBackend):
    """Authenticate via the REMOTE_USER value set by the web server."""

    # Never auto-create Django users for unknown REMOTE_USER values.
    create_unknown_user = False

    def authenticate(self, remote_user, realm_subdomain=None):
        # type: (str, Optional[Text]) -> Optional[UserProfile]
        """Map a REMOTE_USER value to an active user, or return None."""
        if not remote_user:
            return None

        user_profile = common_get_active_user_by_email(remote_user_to_email(remote_user))
        if user_profile is None:
            return None

        if check_subdomain(realm_subdomain, user_profile.realm.subdomain) and \
                auth_enabled_helper([u"RemoteUser"], user_profile.realm):
            return user_profile
        return None
class ZulipLDAPException(Exception):
    """Raised for LDAP-specific login/provisioning failures in the backends below."""
    pass
class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend):
    """Shared LDAP plumbing: neuters django-auth-ldap's permission system
    and maps between Zulip emails and LDAP usernames."""

    # Don't use Django LDAP's permissions functions
    def has_perm(self, user, perm, obj=None):
        # type: (UserProfile, Any, Any) -> bool
        return False

    def has_module_perms(self, user, app_label):
        # type: (UserProfile, str) -> bool
        return False

    def get_all_permissions(self, user, obj=None):
        # type: (UserProfile, Any) -> Set
        return set()

    def get_group_permissions(self, user, obj=None):
        # type: (UserProfile, Any) -> Set
        return set()

    def django_to_ldap_username(self, username):
        # type: (Text) -> Text
        """Strip the configured LDAP_APPEND_DOMAIN from a Zulip email."""
        if not settings.LDAP_APPEND_DOMAIN:
            return username
        if username.endswith("@" + settings.LDAP_APPEND_DOMAIN):
            return email_to_username(username)
        raise ZulipLDAPException("Username does not match LDAP domain.")

    def ldap_to_django_username(self, username):
        # type: (str) -> str
        """Re-append the configured LDAP_APPEND_DOMAIN to form a Zulip email."""
        if settings.LDAP_APPEND_DOMAIN:
            return username + "@" + settings.LDAP_APPEND_DOMAIN
        return username
class ZulipLDAPAuthBackend(ZulipLDAPAuthBackendBase):
    """LDAP backend that actually authenticates (and can create) users."""

    def authenticate(self, username, password, realm_subdomain=None, return_data=None):
        # type: (Text, str, Optional[Text], Optional[Dict[str, Any]]) -> Optional[UserProfile]
        """Authenticate against LDAP; returns None on any failure."""
        try:
            # self._realm is consumed later by get_or_create_user() during
            # django-auth-ldap's user-population step.
            if settings.REALMS_HAVE_SUBDOMAINS:
                self._realm = get_realm(realm_subdomain)
            else:
                self._realm = get_realm_by_email_domain(username)
            username = self.django_to_ldap_username(username)
            user_profile = ZulipLDAPAuthBackendBase.authenticate(self, username, password)
            if user_profile is None:
                return None
            if not check_subdomain(realm_subdomain, user_profile.realm.subdomain):
                return None
            return user_profile
        except Realm.DoesNotExist:
            return None
        except ZulipLDAPException:
            return None

    def get_or_create_user(self, username, ldap_user):
        # type: (str, _LDAPUser) -> Tuple[UserProfile, bool]
        """Fetch the matching user, or create one from LDAP attributes.

        Returns (user_profile, created).  Raises ZulipLDAPException when
        the account or realm is deactivated or LDAP auth is disabled.
        """
        try:
            user_profile = get_user_profile_by_email(username)
            if not user_profile.is_active or user_profile.realm.deactivated:
                raise ZulipLDAPException("Realm has been deactivated")
            if not ldap_auth_enabled(user_profile.realm):
                raise ZulipLDAPException("LDAP Authentication is not enabled")
            return user_profile, False
        except UserProfile.DoesNotExist:
            # No need to check for an inactive user since they don't exist yet
            if self._realm.deactivated:
                raise ZulipLDAPException("Realm has been deactivated")

            # Full name comes from the LDAP attribute configured in
            # AUTH_LDAP_USER_ATTR_MAP; short_name falls back to it.
            full_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["full_name"]
            short_name = full_name = ldap_user.attrs[full_name_attr][0]
            try:
                full_name = check_full_name(full_name)
            except JsonableError as e:
                raise ZulipLDAPException(e.error)
            if "short_name" in settings.AUTH_LDAP_USER_ATTR_MAP:
                short_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["short_name"]
                short_name = ldap_user.attrs[short_name_attr][0]

            user_profile = do_create_user(username, None, self._realm, full_name, short_name)
            return user_profile, True
# Just like ZulipLDAPAuthBackend, but doesn't let you log in.
class ZulipLDAPUserPopulator(ZulipLDAPAuthBackendBase):
    """Used only to sync/populate users from LDAP; never grants a login."""
    def authenticate(self, username, password, realm_subdomain=None):
        # type: (Text, str, Optional[Text]) -> None
        # Always fail authentication: this backend exists solely so the
        # LDAP user-population machinery can run.
        return None
class DevAuthBackend(ZulipAuthMixin):
    """Development-only convenience backend: log in as any active user
    with no password, provided Dev auth is enabled for the realm."""

    def authenticate(self, username, realm_subdomain=None, return_data=None):
        # type: (Text, Optional[Text], Optional[Dict[str, Any]]) -> Optional[UserProfile]
        user_profile = common_get_active_user_by_email(username, return_data=return_data)
        if user_profile is not None and dev_auth_enabled(user_profile.realm):
            return user_profile
        return None
class GitHubAuthBackend(SocialAuthMixin, GithubOAuth2):
    """GitHub OAuth2 backend; optionally restricted to a team or organization."""

    auth_backend_name = u"GitHub"

    def get_email_address(self, *args, **kwargs):
        # type: (*Any, **Any) -> Optional[Text]
        """Return the email from the OAuth response, or None if absent."""
        try:
            return kwargs['response']['email']
        except KeyError:
            return None

    def get_full_name(self, *args, **kwargs):
        # type: (*Any, **Any) -> Text
        # In case of any error return an empty string. Name is used by
        # the registration page to pre-populate the name field. However,
        # if it is not supplied, our registration process will make sure
        # that the user enters a valid name.
        try:
            name = kwargs['response']['name']
        except KeyError:
            name = ''
        if name is None:
            return ''
        return name

    def do_auth(self, *args, **kwargs):
        # type: (*Any, **Any) -> Optional[HttpResponse]
        """Run GitHub auth, dispatching on server settings: plain OAuth2,
        team-restricted, or organization-restricted."""
        kwargs['return_data'] = {}

        request = self.strategy.request
        kwargs['realm_subdomain'] = get_subdomain(request)

        user_profile = None

        team_id = settings.SOCIAL_AUTH_GITHUB_TEAM_ID
        org_name = settings.SOCIAL_AUTH_GITHUB_ORG_NAME

        if (team_id is None and org_name is None):
            # No membership restriction configured.
            try:
                user_profile = GithubOAuth2.do_auth(self, *args, **kwargs)
            except AuthFailed:
                logging.info("User authentication failed.")
                user_profile = None
        elif (team_id):
            # Require membership in the configured GitHub team.
            backend = GithubTeamOAuth2(self.strategy, self.redirect_uri)
            try:
                user_profile = backend.do_auth(*args, **kwargs)
            except AuthFailed:
                logging.info("User is not member of GitHub team.")
                user_profile = None
        elif (org_name):
            # Require membership in the configured GitHub organization.
            backend = GithubOrganizationOAuth2(self.strategy, self.redirect_uri)
            try:
                user_profile = backend.do_auth(*args, **kwargs)
            except AuthFailed:
                logging.info("User is not member of GitHub organization.")
                user_profile = None

        return self.process_do_auth(user_profile, *args, **kwargs)
# Maps the name used in a realm's authentication_methods configuration to
# the backend class implementing it; consumed by pad_method_dict and
# auth_enabled_helper above.
AUTH_BACKEND_NAME_MAP = {
    u'Dev': DevAuthBackend,
    u'Email': EmailAuthBackend,
    u'GitHub': GitHubAuthBackend,
    u'Google': GoogleMobileOauth2Backend,
    u'LDAP': ZulipLDAPAuthBackend,
    u'RemoteUser': ZulipRemoteUserBackend,
}  # type: Dict[Text, Any]
|
|
from collections import defaultdict
from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Set, Union
import orjson
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ValidationError
from django.db import transaction
from django.http import HttpRequest, HttpResponse
from django.utils.translation import gettext as _
from django.utils.translation import override as override_language
from zerver.context_processors import get_valid_realm_from_request
from zerver.decorator import (
authenticated_json_view,
require_non_guest_user,
require_post,
require_realm_admin,
)
from zerver.lib.actions import (
bulk_add_subscriptions,
bulk_remove_subscriptions,
do_add_default_stream,
do_add_streams_to_default_stream_group,
do_change_default_stream_group_description,
do_change_default_stream_group_name,
do_change_stream_description,
do_change_stream_invite_only,
do_change_stream_message_retention_days,
do_change_stream_post_policy,
do_change_subscription_property,
do_create_default_stream_group,
do_deactivate_stream,
do_delete_messages,
do_get_streams,
do_remove_default_stream,
do_remove_default_stream_group,
do_remove_streams_from_default_stream_group,
do_rename_stream,
do_send_messages,
gather_subscriptions,
get_default_streams_for_realm,
get_subscriber_emails,
internal_prep_private_message,
internal_prep_stream_message,
)
from zerver.lib.exceptions import (
ErrorCode,
JsonableError,
OrganizationOwnerRequired,
ResourceNotFoundError,
)
from zerver.lib.request import REQ, get_request_notes, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.retention import parse_message_retention_days
from zerver.lib.streams import (
StreamDict,
access_default_stream_group_by_id,
access_stream_by_id,
access_stream_by_name,
access_stream_for_delete_or_update,
access_web_public_stream,
check_stream_name,
check_stream_name_available,
filter_stream_authorization,
list_to_streams,
)
from zerver.lib.topic import (
get_topic_history_for_public_stream,
get_topic_history_for_stream,
messages_for_topic,
)
from zerver.lib.types import Validator
from zerver.lib.validator import (
check_bool,
check_capped_string,
check_color,
check_dict,
check_dict_only,
check_int,
check_int_in,
check_list,
check_string,
check_string_or_int,
check_union,
to_non_negative_int,
)
from zerver.models import (
Realm,
Stream,
UserMessage,
UserProfile,
get_active_user,
get_active_user_profile_by_id_in_realm,
get_system_bot,
)
class PrincipalError(JsonableError):
    """Raised when the requester may not act on behalf of the given principal."""
    code = ErrorCode.UNAUTHORIZED_PRINCIPAL
    data_fields = ["principal"]
    http_status_code = 403

    def __init__(self, principal: Union[int, str]) -> None:
        # The offending principal (email or user id); echoed in the payload
        # via data_fields above.
        self.principal: Union[int, str] = principal

    @staticmethod
    def msg_format() -> str:
        return _("User not authorized to execute queries on behalf of '{principal}'")
def principal_to_user_profile(agent: UserProfile, principal: Union[str, int]) -> UserProfile:
    """Resolve an email or user id to an active user in the agent's realm.

    Raises PrincipalError for any principal that cannot be resolved,
    deliberately not distinguishing "unknown" from "in another realm".
    """
    lookup = get_active_user if isinstance(principal, str) else get_active_user_profile_by_id_in_realm
    try:
        return lookup(principal, agent.realm)
    except UserProfile.DoesNotExist:
        # We have to make sure we don't leak information about which users
        # are registered for Zulip in a different realm. We could do
        # something a little more clever and check the domain part of the
        # principal to maybe give a better error message
        raise PrincipalError(principal)
def check_if_removing_someone_else(
    user_profile: UserProfile, principals: Optional[Union[List[str], List[int]]]
) -> bool:
    """Return True if the request would unsubscribe any user other than the requester."""
    if not principals:
        # None or empty list: only the requester is affected.
        return False
    if len(principals) > 1:
        return True
    (only_principal,) = principals
    if isinstance(only_principal, int):
        return only_principal != user_profile.id
    return only_principal != user_profile.email
def deactivate_stream_backend(
    request: HttpRequest, user_profile: UserProfile, stream_id: int
) -> HttpResponse:
    """Deactivate (archive) a stream; the access helper raises if not permitted."""
    (stream, sub) = access_stream_for_delete_or_update(user_profile, stream_id)
    do_deactivate_stream(stream, acting_user=user_profile)
    return json_success()
@require_realm_admin
@has_request_variables
def add_default_stream(
    request: HttpRequest, user_profile: UserProfile, stream_id: int = REQ(json_validator=check_int)
) -> HttpResponse:
    """Add a stream to the realm's default streams (public streams only)."""
    (stream, sub) = access_stream_by_id(user_profile, stream_id)
    if stream.invite_only:
        raise JsonableError(_("Private streams cannot be made default."))
    do_add_default_stream(stream)
    return json_success()
@require_realm_admin
@has_request_variables
def create_default_stream_group(
    request: HttpRequest,
    user_profile: UserProfile,
    group_name: str = REQ(),
    description: str = REQ(),
    stream_names: List[str] = REQ(json_validator=check_list(check_string)),
) -> HttpResponse:
    """Create a default-stream group from the named streams.

    Note: the return annotation was previously ``None``, but this view
    returns a JSON success response like its siblings.
    """
    streams = []
    for stream_name in stream_names:
        # Raises for streams the admin cannot access.
        (stream, sub) = access_stream_by_name(user_profile, stream_name)
        streams.append(stream)
    do_create_default_stream_group(user_profile.realm, group_name, description, streams)
    return json_success()
@require_realm_admin
@has_request_variables
def update_default_stream_group_info(
    request: HttpRequest,
    user_profile: UserProfile,
    group_id: int,
    new_group_name: Optional[str] = REQ(default=None),
    new_description: Optional[str] = REQ(default=None),
) -> HttpResponse:
    """Rename a default-stream group and/or change its description.

    At least one of the two fields must be supplied.  (Return annotation
    fixed from ``None``: this view returns a JSON success response.)
    """
    if not new_group_name and not new_description:
        raise JsonableError(_('You must pass "new_description" or "new_group_name".'))

    group = access_default_stream_group_by_id(user_profile.realm, group_id)
    if new_group_name is not None:
        do_change_default_stream_group_name(user_profile.realm, group, new_group_name)
    if new_description is not None:
        do_change_default_stream_group_description(user_profile.realm, group, new_description)
    return json_success()
@require_realm_admin
@has_request_variables
def update_default_stream_group_streams(
    request: HttpRequest,
    user_profile: UserProfile,
    group_id: int,
    op: str = REQ(),
    stream_names: List[str] = REQ(json_validator=check_list(check_string)),
) -> HttpResponse:
    """Add or remove streams from a default-stream group, per ``op``.

    (Return annotation fixed from ``None``: this view returns a JSON
    success response.)
    """
    group = access_default_stream_group_by_id(user_profile.realm, group_id)
    streams = []
    for stream_name in stream_names:
        # Raises for streams the admin cannot access.
        (stream, sub) = access_stream_by_name(user_profile, stream_name)
        streams.append(stream)

    if op == "add":
        do_add_streams_to_default_stream_group(user_profile.realm, group, streams)
    elif op == "remove":
        do_remove_streams_from_default_stream_group(user_profile.realm, group, streams)
    else:
        raise JsonableError(_('Invalid value for "op". Specify one of "add" or "remove".'))
    return json_success()
@require_realm_admin
@has_request_variables
def remove_default_stream_group(
    request: HttpRequest, user_profile: UserProfile, group_id: int
) -> HttpResponse:
    """Delete an entire default-stream group.  (Return annotation fixed
    from ``None``: this view returns a JSON success response.)"""
    group = access_default_stream_group_by_id(user_profile.realm, group_id)
    do_remove_default_stream_group(user_profile.realm, group)
    return json_success()
@require_realm_admin
@has_request_variables
def remove_default_stream(
    request: HttpRequest, user_profile: UserProfile, stream_id: int = REQ(json_validator=check_int)
) -> HttpResponse:
    """Remove a stream from the realm's default streams."""
    (stream, sub) = access_stream_by_id(
        user_profile,
        stream_id,
        # Admins may remove a default stream even without a subscription.
        allow_realm_admin=True,
    )
    do_remove_default_stream(stream)
    return json_success()
@has_request_variables
def update_stream_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int,
    description: Optional[str] = REQ(
        str_validator=check_capped_string(Stream.MAX_DESCRIPTION_LENGTH), default=None
    ),
    is_private: Optional[bool] = REQ(json_validator=check_bool, default=None),
    is_announcement_only: Optional[bool] = REQ(json_validator=check_bool, default=None),
    stream_post_policy: Optional[int] = REQ(
        json_validator=check_int_in(Stream.STREAM_POST_POLICY_TYPES), default=None
    ),
    history_public_to_subscribers: Optional[bool] = REQ(json_validator=check_bool, default=None),
    new_name: Optional[str] = REQ(default=None),
    message_retention_days: Optional[Union[int, str]] = REQ(
        json_validator=check_string_or_int, default=None
    ),
) -> HttpResponse:
    """Update a stream's properties; each parameter is applied only if supplied.

    Message-retention changes additionally require organization ownership
    and a plan that permits them.
    """
    # We allow realm administrators to update the stream name and
    # description even for private streams.
    (stream, sub) = access_stream_for_delete_or_update(user_profile, stream_id)
    if message_retention_days is not None:
        if not user_profile.is_realm_owner:
            raise OrganizationOwnerRequired()
        user_profile.realm.ensure_not_on_limited_plan()
        message_retention_days_value = parse_message_retention_days(
            message_retention_days, Stream.MESSAGE_RETENTION_SPECIAL_VALUES_MAP
        )
        do_change_stream_message_retention_days(stream, message_retention_days_value)

    if description is not None:
        if "\n" in description:
            # We don't allow newline characters in stream descriptions.
            description = description.replace("\n", " ")
        do_change_stream_description(stream, description)
    if new_name is not None:
        new_name = new_name.strip()
        if stream.name == new_name:
            raise JsonableError(_("Stream already has that name!"))
        if stream.name.lower() != new_name.lower():
            # Check that the stream name is available (unless we are
            # are only changing the casing of the stream name).
            check_stream_name_available(user_profile.realm, new_name)
        do_rename_stream(stream, new_name, user_profile)
    if is_announcement_only is not None:
        # is_announcement_only is a legacy way to specify
        # stream_post_policy. We can probably just delete this code,
        # since we're not aware of clients that used it, but we're
        # keeping it for backwards-compatibility for now.
        stream_post_policy = Stream.STREAM_POST_POLICY_EVERYONE
        if is_announcement_only:
            stream_post_policy = Stream.STREAM_POST_POLICY_ADMINS
    if stream_post_policy is not None:
        do_change_stream_post_policy(stream, stream_post_policy)

    # But we require even realm administrators to be actually
    # subscribed to make a private stream public.
    if is_private is not None:
        default_stream_ids = {s.id for s in get_default_streams_for_realm(stream.realm_id)}
        (stream, sub) = access_stream_by_id(user_profile, stream_id)
        if is_private and stream.id in default_stream_ids:
            raise JsonableError(_("Default streams cannot be made private."))
        do_change_stream_invite_only(stream, is_private, history_public_to_subscribers)
    return json_success()
@has_request_variables
def list_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    include_subscribers: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    """Return the requester's subscribed streams (optionally with subscriber lists)."""
    subscribed, _ = gather_subscriptions(
        user_profile,
        include_subscribers=include_subscribers,
    )
    return json_success({"subscriptions": subscribed})
# Validator for the "add" payload: a list of dicts, each with a required
# "name" plus optional "color" and (length-capped) "description".
add_subscriptions_schema = check_list(
    check_dict_only(
        required_keys=[("name", check_string)],
        optional_keys=[
            ("color", check_color),
            ("description", check_capped_string(Stream.MAX_DESCRIPTION_LENGTH)),
        ],
    ),
)

# Validator for the "delete" payload: a plain list of stream names.
remove_subscriptions_schema = check_list(check_string)
@has_request_variables
def update_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    delete: Sequence[str] = REQ(json_validator=remove_subscriptions_schema, default=[]),
    add: Sequence[Mapping[str, str]] = REQ(json_validator=add_subscriptions_schema, default=[]),
) -> HttpResponse:
    """Add and/or remove subscriptions in one request.

    Both operations run inside a single transaction via compose_views, and
    their JSON results are merged into one response.
    """
    if not add and not delete:
        raise JsonableError(_('Nothing to do. Specify at least one of "add" or "delete".'))

    thunks = [
        lambda: add_subscriptions_backend(request, user_profile, streams_raw=add),
        lambda: remove_subscriptions_backend(request, user_profile, streams_raw=delete),
    ]
    return compose_views(thunks)
def compose_views(thunks: List[Callable[[], HttpResponse]]) -> HttpResponse:
    """Run several view thunks in one transaction and merge their JSON results.

    The thunks are called in order inside a single atomic block; the JSON
    payloads of their responses are merged (later keys win) into a single
    success response, saving clients extra round trips.  If any thunk
    raises, the whole transaction rolls back.
    """
    merged: Dict[str, Any] = {}
    with transaction.atomic():
        for view_thunk in thunks:
            merged.update(orjson.loads(view_thunk().content))
    return json_success(merged)
# A "principals" argument is either a list of emails or a list of user ids
# (never a mix); used by the add/remove subscription endpoints below.
check_principals: Validator[Union[List[str], List[int]]] = check_union(
    [check_list(check_string), check_list(check_int)],
)
@has_request_variables
def remove_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    streams_raw: Sequence[str] = REQ("subscriptions", json_validator=remove_subscriptions_schema),
    principals: Optional[Union[List[str], List[int]]] = REQ(
        json_validator=check_principals, default=None
    ),
) -> HttpResponse:
    """Unsubscribe the requester (or the given principals) from streams.

    Removing someone other than yourself requires admin access, which is
    enforced via list_to_streams(admin_access_required=...).  The response
    lists which streams were actually removed vs. not subscribed.
    """
    removing_someone_else = check_if_removing_someone_else(user_profile, principals)

    streams_as_dict: List[StreamDict] = []
    for stream_name in streams_raw:
        streams_as_dict.append({"name": stream_name.strip()})

    streams, __ = list_to_streams(
        streams_as_dict, user_profile, admin_access_required=removing_someone_else
    )

    if principals:
        people_to_unsub = {
            principal_to_user_profile(user_profile, principal) for principal in principals
        }
    else:
        people_to_unsub = {user_profile}

    result: Dict[str, List[str]] = dict(removed=[], not_removed=[])
    client = get_request_notes(request).client
    assert client is not None

    (removed, not_subscribed) = bulk_remove_subscriptions(
        people_to_unsub, streams, client, acting_user=user_profile
    )

    for (subscriber, removed_stream) in removed:
        result["removed"].append(removed_stream.name)
    for (subscriber, not_subscribed_stream) in not_subscribed:
        result["not_removed"].append(not_subscribed_stream.name)

    return json_success(result)
def you_were_just_subscribed_message(
    acting_user: UserProfile, recipient_user: UserProfile, stream_names: Set[str]
) -> str:
    """Build the "you were subscribed" notification body, localized to the
    recipient's default language."""
    subscriptions = sorted(stream_names)
    if len(subscriptions) == 1:
        with override_language(recipient_user.default_language):
            return _("{user_full_name} subscribed you to the stream {stream_name}.").format(
                user_full_name=f"@**{acting_user.full_name}**",
                stream_name=f"#**{subscriptions[0]}**",
            )

    with override_language(recipient_user.default_language):
        message = _("{user_full_name} subscribed you to the following streams:").format(
            user_full_name=f"@**{acting_user.full_name}**",
        )
    message += "\n\n"
    # One Markdown bullet per stream, in sorted order.
    for stream_name in subscriptions:
        message += f"* #**{stream_name}**\n"
    return message
# Sentinel default for message_retention_days meaning "use the realm's policy".
RETENTION_DEFAULT: Union[str, int] = "realm_default"
# Module-level default for the "principals" request argument.
EMPTY_PRINCIPALS: Union[Sequence[str], Sequence[int]] = []
@require_non_guest_user
@has_request_variables
def add_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    streams_raw: Sequence[Mapping[str, str]] = REQ(
        "subscriptions", json_validator=add_subscriptions_schema
    ),
    invite_only: bool = REQ(json_validator=check_bool, default=False),
    stream_post_policy: int = REQ(
        json_validator=check_int_in(Stream.STREAM_POST_POLICY_TYPES),
        default=Stream.STREAM_POST_POLICY_EVERYONE,
    ),
    history_public_to_subscribers: Optional[bool] = REQ(json_validator=check_bool, default=None),
    message_retention_days: Union[str, int] = REQ(
        json_validator=check_string_or_int, default=RETENTION_DEFAULT
    ),
    announce: bool = REQ(json_validator=check_bool, default=False),
    principals: Union[Sequence[str], Sequence[int]] = REQ(
        json_validator=check_principals,
        default=EMPTY_PRINCIPALS,
    ),
    authorization_errors_fatal: bool = REQ(json_validator=check_bool, default=True),
) -> HttpResponse:
    """Create (if needed) and subscribe users to the given streams.

    Subscribes the requester by default, or the given principals when
    supplied (which requires permission to subscribe others).  The
    response maps each affected user's email to the stream names they
    were newly subscribed / already subscribed to.
    """
    realm = user_profile.realm
    stream_dicts = []
    color_map = {}
    for stream_dict in streams_raw:
        # 'color' field is optional
        # check for its presence in the streams_raw first
        if "color" in stream_dict:
            color_map[stream_dict["name"]] = stream_dict["color"]

        stream_dict_copy: StreamDict = {}
        stream_dict_copy["name"] = stream_dict["name"].strip()

        # We don't allow newline characters in stream descriptions.
        if "description" in stream_dict:
            stream_dict_copy["description"] = stream_dict["description"].replace("\n", " ")

        stream_dict_copy["invite_only"] = invite_only
        stream_dict_copy["stream_post_policy"] = stream_post_policy
        stream_dict_copy["history_public_to_subscribers"] = history_public_to_subscribers
        stream_dict_copy["message_retention_days"] = parse_message_retention_days(
            message_retention_days, Stream.MESSAGE_RETENTION_SPECIAL_VALUES_MAP
        )

        stream_dicts.append(stream_dict_copy)

    # Validation of the streams arguments, including enforcement of
    # can_create_streams policy and check_stream_name policy is inside
    # list_to_streams.
    existing_streams, created_streams = list_to_streams(stream_dicts, user_profile, autocreate=True)
    authorized_streams, unauthorized_streams = filter_stream_authorization(
        user_profile, existing_streams
    )
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        raise JsonableError(
            _("Unable to access stream ({stream_name}).").format(
                stream_name=unauthorized_streams[0].name,
            )
        )
    # Newly created streams are also authorized for the creator
    streams = authorized_streams + created_streams

    if len(principals) > 0:
        if realm.is_zephyr_mirror_realm and not all(stream.invite_only for stream in streams):
            raise JsonableError(
                _("You can only invite other Zephyr mirroring users to private streams.")
            )
        if not user_profile.can_subscribe_other_users():
            # Guest users case will not be handled here as it will
            # be handled by the decorator above.
            raise JsonableError(_("Insufficient permission"))
        subscribers = {
            principal_to_user_profile(user_profile, principal) for principal in principals
        }
    else:
        subscribers = {user_profile}

    (subscribed, already_subscribed) = bulk_add_subscriptions(
        realm, streams, subscribers, acting_user=user_profile, color_map=color_map
    )

    # We can assume unique emails here for now, but we should eventually
    # convert this function to be more id-centric.
    email_to_user_profile: Dict[str, UserProfile] = {}

    result: Dict[str, Any] = dict(
        subscribed=defaultdict(list), already_subscribed=defaultdict(list)
    )
    for sub_info in subscribed:
        subscriber = sub_info.user
        stream = sub_info.stream
        result["subscribed"][subscriber.email].append(stream.name)
        email_to_user_profile[subscriber.email] = subscriber
    for sub_info in already_subscribed:
        subscriber = sub_info.user
        stream = sub_info.stream
        result["already_subscribed"][subscriber.email].append(stream.name)

    # Convert the defaultdicts to plain dicts for serialization.  (The old
    # code repeated this conversion again after the notification step; that
    # second pass was a redundant no-op and has been removed.)
    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])

    send_messages_for_new_subscribers(
        user_profile=user_profile,
        subscribers=subscribers,
        new_subscriptions=result["subscribed"],
        email_to_user_profile=email_to_user_profile,
        created_streams=created_streams,
        announce=announce,
    )

    if not authorization_errors_fatal:
        result["unauthorized"] = [s.name for s in unauthorized_streams]
    return json_success(result)
def send_messages_for_new_subscribers(
    user_profile: UserProfile,
    subscribers: Set[UserProfile],
    new_subscriptions: Dict[str, List[str]],
    email_to_user_profile: Dict[str, UserProfile],
    created_streams: List[Stream],
    announce: bool,
) -> None:
    """
    If you are subscribing lots of new users to new streams,
    this function can be pretty expensive in terms of generating
    lots of queries and sending lots of messages. We isolate
    the code partly to make it easier to test things like
    excessive query counts by mocking this function so that it
    doesn't drown out query counts from other code.

    Three kinds of notification messages may be queued:
    * a private message to each (non-bot) user someone else subscribed,
      listing the pre-existing streams they were added to;
    * one announcement in the realm's notifications stream when new
      streams were created with announce=True;
    * a "Stream created by ..." message inside each newly created stream
      (skipped on Zephyr mirror realms).
    """
    # Per-subscriber bot flag, so we can skip notifying bots below.
    bots = {subscriber.email: subscriber.is_bot for subscriber in subscribers}

    newly_created_stream_names = {s.name for s in created_streams}

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if new_subscriptions:
        for email, subscribed_stream_names in new_subscriptions.items():
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue
            if bots[email]:
                # Don't send invitation Zulips to bots
                continue

            # For each user, we notify them about newly subscribed streams, except for
            # streams that were newly created.
            notify_stream_names = set(subscribed_stream_names) - newly_created_stream_names

            if not notify_stream_names:
                continue

            sender = get_system_bot(settings.NOTIFICATION_BOT)
            recipient_user = email_to_user_profile[email]

            msg = you_were_just_subscribed_message(
                acting_user=user_profile,
                recipient_user=recipient_user,
                stream_names=notify_stream_names,
            )

            notifications.append(
                internal_prep_private_message(
                    realm=user_profile.realm,
                    sender=sender,
                    recipient_user=recipient_user,
                    content=msg,
                )
            )

    if announce and len(created_streams) > 0:
        notifications_stream = user_profile.realm.get_notifications_stream()
        if notifications_stream is not None:
            # Translate the announcement using the notification stream
            # realm's default language, not the acting user's.
            with override_language(notifications_stream.realm.default_language):
                if len(created_streams) > 1:
                    content = _("{user_name} created the following streams: {stream_str}.")
                else:
                    content = _("{user_name} created a new stream {stream_str}.")
                topic = _("new streams")

            content = content.format(
                user_name=f"@_**{user_profile.full_name}|{user_profile.id}**",
                stream_str=", ".join(f"#**{s.name}**" for s in created_streams),
            )

            sender = get_system_bot(settings.NOTIFICATION_BOT)

            notifications.append(
                internal_prep_stream_message(
                    sender=sender,
                    stream=notifications_stream,
                    topic=topic,
                    content=content,
                ),
            )

    if not user_profile.realm.is_zephyr_mirror_realm and len(created_streams) > 0:
        sender = get_system_bot(settings.NOTIFICATION_BOT)
        for stream in created_streams:
            # Each stream gets its creation notice in that stream's
            # realm default language.
            with override_language(stream.realm.default_language):
                notifications.append(
                    internal_prep_stream_message(
                        sender=sender,
                        stream=stream,
                        topic=Realm.STREAM_EVENTS_NOTIFICATION_TOPIC,
                        content=_("Stream created by {user_name}.").format(
                            user_name=f"@_**{user_profile.full_name}|{user_profile.id}**",
                        ),
                    ),
                )

    if len(notifications) > 0:
        do_send_messages(notifications, mark_as_read=[user_profile.id])
@has_request_variables
def get_subscribers_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ("stream", converter=to_non_negative_int),
) -> HttpResponse:
    """Return the email addresses of all subscribers of the given stream."""
    # Realm admins may inspect subscriber lists even for streams they
    # could not otherwise access.
    (stream, sub) = access_stream_by_id(
        user_profile,
        stream_id,
        allow_realm_admin=True,
    )
    return json_success({"subscribers": get_subscriber_emails(stream, user_profile)})
# By default, lists all streams that the user has access to --
# i.e. public streams plus invite-only streams that the user is on
@has_request_variables
def get_streams_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    include_public: bool = REQ(json_validator=check_bool, default=True),
    include_web_public: bool = REQ(json_validator=check_bool, default=False),
    include_subscribed: bool = REQ(json_validator=check_bool, default=True),
    include_all_active: bool = REQ(json_validator=check_bool, default=False),
    include_default: bool = REQ(json_validator=check_bool, default=False),
    include_owner_subscribed: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    """List the streams visible to the requester, filtered by include_* flags."""
    stream_filters = dict(
        include_public=include_public,
        include_web_public=include_web_public,
        include_subscribed=include_subscribed,
        include_all_active=include_all_active,
        include_default=include_default,
        include_owner_subscribed=include_owner_subscribed,
    )
    return json_success({"streams": do_get_streams(user_profile, **stream_filters)})
@has_request_variables
def get_topics_backend(
    request: HttpRequest,
    maybe_user_profile: Union[UserProfile, AnonymousUser],
    stream_id: int = REQ(converter=to_non_negative_int, path_only=True),
) -> HttpResponse:
    """Return the topic history for a stream.

    Supports both authenticated users and anonymous access to web-public
    streams; the two cases use different access checks and history queries.
    """
    if not maybe_user_profile.is_authenticated:
        # Anonymous request: only web-public streams are reachable.
        is_web_public_query = True
        user_profile: Optional[UserProfile] = None
    else:
        is_web_public_query = False
        assert isinstance(maybe_user_profile, UserProfile)
        user_profile = maybe_user_profile
        assert user_profile is not None

    if is_web_public_query:
        realm = get_valid_realm_from_request(request)
        stream = access_web_public_stream(stream_id, realm)
        result = get_topic_history_for_public_stream(recipient_id=stream.recipient_id)

    else:
        assert user_profile is not None
        (stream, sub) = access_stream_by_id(user_profile, stream_id)

        # public_history controls whether messages sent before the user
        # subscribed are included in the topic history.
        result = get_topic_history_for_stream(
            user_profile=user_profile,
            recipient_id=stream.recipient_id,
            public_history=stream.is_history_public_to_subscribers(),
        )

    return json_success(dict(topics=result))
@transaction.atomic
@require_realm_admin
@has_request_variables
def delete_in_topic(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ(converter=to_non_negative_int),
    topic_name: str = REQ("topic_name"),
) -> HttpResponse:
    """Delete every message in a stream topic that the acting realm admin
    has access to. Runs in a single transaction with the rows locked."""
    (stream, sub) = access_stream_by_id(user_profile, stream_id)

    messages = messages_for_topic(stream.recipient_id, topic_name)
    if not stream.is_history_public_to_subscribers():
        # Don't allow the user to delete messages that they don't have access to.
        deletable_message_ids = UserMessage.objects.filter(
            user_profile=user_profile, message_id__in=messages
        ).values_list("message_id", flat=True)
        messages = messages.filter(id__in=deletable_message_ids)

    # Lock only the message rows themselves (of=("self",)) so concurrent
    # deletions of the same topic serialize instead of racing.
    messages = messages.select_for_update(of=("self",))

    do_delete_messages(user_profile.realm, messages)

    return json_success()
@require_post
@authenticated_json_view
@has_request_variables
def json_stream_exists(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_name: str = REQ("stream"),
    autosubscribe: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    """Report whether the named stream exists and whether the requester is
    subscribed, optionally auto-subscribing them to an accessible stream."""
    check_stream_name(stream_name)
    try:
        (stream, sub) = access_stream_by_name(user_profile, stream_name)
    except JsonableError as e:
        raise ResourceNotFoundError(e.msg)

    # access_stream functions return a subscription if and only if we
    # are already subscribed.
    already_subscribed = sub is not None

    # If we got here, we're either subscribed or the stream is public.
    # So if we're not yet subscribed and autosubscribe is enabled, join now.
    if autosubscribe and not already_subscribed:
        bulk_add_subscriptions(
            user_profile.realm, [stream], [user_profile], acting_user=user_profile
        )
        already_subscribed = True

    # Results are ignored for HEAD requests.
    return json_success({"subscribed": already_subscribed})
@has_request_variables
def json_get_stream_id(
    request: HttpRequest, user_profile: UserProfile, stream_name: str = REQ("stream")
) -> HttpResponse:
    """Resolve a stream name the user can access to its numeric ID."""
    stream, _sub = access_stream_by_name(user_profile, stream_name)
    return json_success({"stream_id": stream.id})
@has_request_variables
def update_subscriptions_property(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ(json_validator=check_int),
    property: str = REQ(),
    value: str = REQ(),
) -> HttpResponse:
    """Single-subscription convenience wrapper around the bulk
    update_subscription_properties_backend endpoint."""
    change = {"stream_id": stream_id, "property": property, "value": value}
    return update_subscription_properties_backend(
        request, user_profile, subscription_data=[change]
    )
@has_request_variables
def update_subscription_properties_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    subscription_data: List[Dict[str, Any]] = REQ(
        json_validator=check_list(
            check_dict(
                [
                    ("stream_id", check_int),
                    ("property", check_string),
                    ("value", check_union([check_string, check_bool])),
                ]
            ),
        ),
    ),
) -> HttpResponse:
    """
    This is the entry point to changing subscription properties. This
    is a bulk endpoint: requestors always provide a subscription_data
    list containing dictionaries for each stream of interest.

    Requests are of the form:

    [{"stream_id": "1", "property": "is_muted", "value": False},
     {"stream_id": "1", "property": "color", "value": "#c2c2c2"}]
    """
    # Allowed per-subscription properties and their value validators.
    property_converters = {
        "color": check_color,
        "in_home_view": check_bool,
        "is_muted": check_bool,
        "desktop_notifications": check_bool,
        "audible_notifications": check_bool,
        "push_notifications": check_bool,
        "email_notifications": check_bool,
        "pin_to_top": check_bool,
        "wildcard_mentions_notify": check_bool,
    }
    response_data = []

    for change in subscription_data:
        stream_id = change["stream_id"]
        # Named property_name to avoid shadowing the `property` builtin.
        property_name = change["property"]
        value = change["value"]

        if property_name not in property_converters:
            raise JsonableError(
                _("Unknown subscription property: {}").format(property_name)
            )

        (stream, sub) = access_stream_by_id(user_profile, stream_id)
        if sub is None:
            raise JsonableError(_("Not subscribed to stream id {}").format(stream_id))

        try:
            value = property_converters[property_name](property_name, value)
        except ValidationError as error:
            raise JsonableError(error.message)

        do_change_subscription_property(
            user_profile, sub, stream, property_name, value, acting_user=user_profile
        )

        response_data.append(
            {"stream_id": stream_id, "property": property_name, "value": value}
        )

    return json_success({"subscription_data": response_data})
|
|
#!/usr/bin/env python
from __future__ import print_function
import os
import sys
import requests
from openbabel import pybel
from openbabel import openbabel as ob
# TODO: process Open Babel resdata.txt
# if we can find certain non-standard residues
# Protonation/bonding variants of standard residues used by MD packages;
# not fetched from LigandExpo, kept for future resdata.txt processing.
mdLigands = [
    "ASH",  # Neutral ASP
    "CYX",  # SS-bonded CYS
    "CYM",  # Negative CYS
    "GLH",  # Neutral GLU
    "HIP",  # Positive HIS
    "HID",  # Neutral HIS, proton HD1 present
    "HIE",  # Neutral HIS, proton HE2 present
    "LYN",  # Neutral LYS
    "TYM",  # Negative TYR
]

# the location of the LigandExpo list by count
ligandURL = "http://ligand-expo.rcsb.org/dictionaries/cc-counts.tdd"

# URL for the ideal geometry (SDF)
# e.g http://ligand-expo.rcsb.org/reports/H/HEM/HEM_ideal.pdb
sdfTemplate = "http://ligand-expo.rcsb.org/reports/{}/{}/{}_ideal.sdf"
# URL for the ideal geometry (PDB)
pdbTemplate = "http://ligand-expo.rcsb.org/reports/{}/{}/{}_ideal.pdb"

# save ligands with at least this # of occurrences
ligandThresh = 500

# default ligand list -- always included regardless of occurrence count
ligands = [
    # amino acids
    "ALA", "CYS", "ASP", "GLU", "PHE", "GLY", "HIS", "ILE", "LYS", "LEU",
    "MET", "ASN", "PRO", "GLN", "ARG", "SER", "THR", "VAL", "TRP", "TYR",
    # DNA nucleic
    "DA", "DC", "DG", "DT", "DI",
    # RNA nucleic
    "A", "C", "G", "U", "I",
    # misc
    "HEM", "HOH"
]
# okay, we build up the list of ligands to fetch
# The .tdd file is "name<TAB>count" per line, sorted by descending count,
# with a header line containing "count".
# Use the response as a context manager so the connection is released,
# and skip blank lines (the old code crashed with ValueError on them).
with requests.get(ligandURL, stream=True) as r:
    for line in r.iter_lines(decode_unicode=True):
        if not line or 'count' in str(line):
            continue  # skip blanks and the header line
        name, count = line.split()
        if (int(count) < ligandThresh):
            # too rare, we'll skip the rest of the list
            break
        if str(name) not in ligands:
            ligands.append(str(name))
print(
'''
#ifndef AVOGADRO_CORE_RESIDUE_DATA
#define AVOGADRO_CORE_RESIDUE_DATA
#include <map>
#include <string>
#include <vector>
namespace Avogadro {
namespace Core {
class ResidueData
{
private:
std::string m_residueName;
std::map<std::string, int> m_residueAtomNames;
std::vector<std::pair<std::string, std::string>> m_residueSingleBonds;
std::vector<std::pair<std::string, std::string>> m_residueDoubleBonds;
public:
ResidueData() {}
ResidueData(std::string name,
std::map<std::string, int> atomNames,
std::vector<std::pair<std::string, std::string>> singleBonds,
std::vector<std::pair<std::string, std::string>> doubleBonds)
{
m_residueName = name;
m_residueAtomNames = atomNames;
m_residueSingleBonds = singleBonds;
m_residueDoubleBonds = doubleBonds;
}
ResidueData(const ResidueData& other)
{
m_residueName = other.m_residueName;
m_residueAtomNames = other.m_residueAtomNames;
m_residueSingleBonds = other.m_residueSingleBonds;
m_residueDoubleBonds = other.m_residueDoubleBonds;
}
ResidueData& operator=(ResidueData other)
{
using std::swap;
swap(*this, other);
return *this;
}
std::map<std::string, int> residueAtoms() {
return m_residueAtomNames;
}
std::vector<std::pair<std::string, std::string>> residueSingleBonds()
{
return m_residueSingleBonds;
}
std::vector<std::pair<std::string, std::string>> residueDoubleBonds()
{
return m_residueDoubleBonds;
}
};
'''
)
# For each candidate ligand, download the ideal-geometry SDF (bond orders)
# and PDB (residue atom IDs), then print a ResidueData initializer.
final_ligands = []
for ligand in ligands:
    sdf = requests.get(sdfTemplate.format(ligand[0], ligand, ligand))
    # there *must* be a way to do this from a requests buffer, but this works
    with open('temp.sdf', 'wb') as handle:
        for block in sdf.iter_content(1024):
            handle.write(block)

    try:
        mol_sdf = next(pybel.readfile("sdf", 'temp.sdf'))
    except StopIteration:
        continue
    if len(mol_sdf.atoms) < 2:
        continue

    pdb = requests.get(pdbTemplate.format(ligand[0], ligand, ligand))
    with open('temp.pdb', 'wb') as handle:
        for block in pdb.iter_content(1024):
            handle.write(block)

    try:
        mol_pdb = next(pybel.readfile("pdb", 'temp.pdb'))
    except StopIteration:
        continue

    # BUG FIX: only record the ligand once *both* files parsed. Previously
    # the ligand was appended before the PDB parse, so a parse failure left
    # an entry in residueDict referencing a ResidueData object that was
    # never emitted (broken generated C++).
    final_ligands.append(ligand)

    # build up a map between SDF atom index and (PDB atom ID, atomic number)
    atom_map = {}
    for i in range(len(mol_sdf.atoms)):
        idx = mol_sdf.atoms[i].idx
        atom = mol_pdb.atoms[i].OBAtom
        res = atom.GetResidue()
        # .rstrip() after .strip() was redundant; .strip() removes both sides
        atom_map[idx] = res.GetAtomID(atom).strip(), atom.GetAtomicNum()

    # go through bonds, splitting them by bond order
    single_bonds = []
    double_bonds = []
    for bond in ob.OBMolBondIter(mol_sdf.OBMol):
        begin = bond.GetBeginAtomIdx()
        end = bond.GetEndAtomIdx()
        if bond.GetBondOrder() == 2:
            double_bonds.append((atom_map[begin][0], atom_map[end][0]))
        elif bond.GetBondOrder() == 1:
            single_bonds.append((atom_map[begin][0], atom_map[end][0]))

    # print out the residue data
    atoms = list(atom_map.values())
    print('ResidueData %sData("%s",' % (ligand, ligand))
    print('// Atoms')
    print('{')
    for atom in atoms[:-1]:
        print('{ "%s", %d },' % (atom[0], atom[1]), end='')
    # BUG FIX: print the *last* atom here. The old code reused the loop
    # variable, which re-printed the second-to-last atom and silently
    # dropped the final one.
    print('{"%s", %d }' % (atoms[-1][0], atoms[-1][1]))
    print('},')

    print('// Single Bonds')
    print('{')
    # Guard against an empty list (the old single_bonds[-1] raised
    # IndexError for residues with no single bonds).
    if len(single_bonds):
        for bond in single_bonds[:-1]:
            print('{ "%s", "%s" },' % bond, end='')
        print('{ "%s", "%s" }' % single_bonds[-1])
    print('},')

    print('// Double Bonds')
    print('{')
    if len(double_bonds):
        for bond in double_bonds[:-1]:
            print('{ "%s", "%s" },' % bond, end='')
        print('{ "%s", "%s" }' % double_bonds[-1])
    print('}')
    print(');')
# Emit the residueDict lookup table mapping residue names to the
# ResidueData objects generated above, then close the header.
print('''std::map<std::string, ResidueData> residueDict = {''')
# print the list of ligands
for ligand in final_ligands:
    print('{ "%s", %sData },' % (ligand, ligand))
print('''
};
}
}
#endif
'''
)
# Clean up the scratch files used for format conversion.
os.remove("temp.sdf")
os.remove('temp.pdb')
|
|
from decimal import Decimal as D
from django.test import TestCase
from django.test.utils import override_settings
from oscar.apps.catalogue.models import ProductClass, Product
from oscar.apps.checkout import calculators
from oscar.apps.offer.utils import Applicator
from oscar.apps.order.models import Order
from oscar.apps.order.utils import OrderCreator
from oscar.apps.shipping.methods import Free, FixedPrice
from oscar.apps.shipping.repository import Repository
from oscar.core.loading import get_class
from oscar.test import factories
from oscar.test.basket import add_product
Range = get_class('offer.models', 'Range')
Benefit = get_class('offer.models', 'Benefit')
def place_order(creator, **kwargs):
    """
    Helper function to place an order without the boilerplate
    """
    # Fall back to free shipping unless the caller supplied a method.
    method = kwargs.setdefault('shipping_method', Free())
    charge = method.calculate(kwargs['basket'])
    kwargs['shipping_charge'] = charge
    kwargs['total'] = calculators.OrderTotalCalculator().calculate(
        basket=kwargs['basket'], shipping_charge=charge)
    return creator.place_order(**kwargs)
class TestOrderCreatorErrorCases(TestCase):
    """OrderCreator should reject invalid orders with ValueError."""

    def setUp(self):
        self.creator = OrderCreator()
        self.basket = factories.create_basket(empty=True)

    def test_raises_exception_when_empty_basket_passed(self):
        # Orders must contain at least one line.
        with self.assertRaises(ValueError):
            place_order(self.creator, basket=self.basket)

    def test_raises_exception_if_duplicate_order_number_passed(self):
        # Order numbers must be unique.
        add_product(self.basket, D('12.00'))
        place_order(self.creator, basket=self.basket, order_number='1234')
        with self.assertRaises(ValueError):
            place_order(self.creator, basket=self.basket, order_number='1234')
class TestSuccessfulOrderCreation(TestCase):
    """Happy-path checks for OrderCreator.place_order."""

    def setUp(self):
        self.creator = OrderCreator()
        self.basket = factories.create_basket(empty=True)

    def test_saves_shipping_code(self):
        add_product(self.basket, D('12.00'))
        free_method = Free()
        order = place_order(self.creator, basket=self.basket,
                            order_number='1234', shipping_method=free_method)
        self.assertEqual(order.shipping_code, free_method.code)

    def test_creates_order_and_line_models(self):
        add_product(self.basket, D('12.00'))
        place_order(self.creator, basket=self.basket, order_number='1234')
        order = Order.objects.get(number='1234')
        lines = order.lines.all()
        self.assertEqual(1, len(lines))

    def test_sets_correct_order_status(self):
        add_product(self.basket, D('12.00'))
        place_order(self.creator, basket=self.basket,
                    order_number='1234', status='Active')
        order = Order.objects.get(number='1234')
        self.assertEqual('Active', order.status)

    def test_defaults_to_using_free_shipping(self):
        # With free shipping, order totals equal the basket totals.
        add_product(self.basket, D('12.00'))
        place_order(self.creator, basket=self.basket, order_number='1234')
        order = Order.objects.get(number='1234')
        self.assertEqual(order.total_incl_tax, self.basket.total_incl_tax)
        self.assertEqual(order.total_excl_tax, self.basket.total_excl_tax)

    def test_uses_default_order_status_from_settings(self):
        add_product(self.basket, D('12.00'))
        with override_settings(OSCAR_INITIAL_ORDER_STATUS='A'):
            place_order(self.creator, basket=self.basket, order_number='1234')
        order = Order.objects.get(number='1234')
        self.assertEqual('A', order.status)

    def test_uses_default_line_status_from_settings(self):
        add_product(self.basket, D('12.00'))
        with override_settings(OSCAR_INITIAL_LINE_STATUS='A'):
            place_order(self.creator, basket=self.basket, order_number='1234')
        order = Order.objects.get(number='1234')
        line = order.lines.all()[0]
        self.assertEqual('A', line.status)

    def test_partner_name_is_optional(self):
        # An empty partner name on the stockrecord should propagate to the
        # order line unchanged.
        for partner_name, order_number in [('', 'A'), ('p1', 'B')]:
            self.basket = factories.create_basket(empty=True)
            product = factories.create_product(partner_name=partner_name)
            add_product(self.basket, D('12.00'), product=product)
            place_order(
                self.creator, basket=self.basket, order_number=order_number)
            line = Order.objects.get(number=order_number).lines.all()[0]
            partner = product.stockrecords.all()[0].partner
            self.assertTrue(partner_name == line.partner_name == partner.name)
class TestPlacingOrderForDigitalGoods(TestCase):
    """Digital products (track_stock=False) must not allocate stock."""

    def setUp(self):
        self.creator = OrderCreator()
        self.basket = factories.create_basket(empty=True)

    def test_does_not_allocate_stock(self):
        ProductClass.objects.create(
            name="Digital", track_stock=False)
        product = factories.create_product(product_class="Digital")
        record = factories.create_stockrecord(product, num_in_stock=None)
        # assertIsNone gives a clearer failure message than
        # assertTrue(x is None), which the old code used.
        self.assertIsNone(record.num_allocated)

        add_product(self.basket, D('12.00'), product=product)
        place_order(self.creator, basket=self.basket, order_number='1234')

        # Reload from the DB and check stock levels are untouched.
        product = Product.objects.get(id=product.id)
        stockrecord = product.stockrecords.all()[0]
        self.assertIsNone(stockrecord.num_in_stock)
        self.assertIsNone(stockrecord.num_allocated)
class TestShippingOfferForOrder(TestCase):
    """Shipping discounts should be recorded on (or omitted from) orders."""

    def setUp(self):
        self.creator = OrderCreator()
        self.basket = factories.create_basket(empty=True)

    def apply_20percent_shipping_offer(self):
        """Shipping offer 20% off"""
        # Renamed from `range`: the old local shadowed the builtin.
        product_range = Range.objects.create(name="All products range",
                                             includes_all_products=True)
        benefit = Benefit.objects.create(
            range=product_range, type=Benefit.SHIPPING_PERCENTAGE, value=20)
        offer = factories.create_offer(range=product_range, benefit=benefit)
        Applicator().apply_offers(self.basket, [offer])
        return offer

    def test_shipping_offer_is_applied(self):
        add_product(self.basket, D('12.00'))
        offer = self.apply_20percent_shipping_offer()

        shipping = FixedPrice(D('5.00'), D('5.00'))
        shipping = Repository().apply_shipping_offer(
            self.basket, shipping, offer)

        place_order(self.creator,
                    basket=self.basket,
                    order_number='1234',
                    shipping_method=shipping)
        order = Order.objects.get(number='1234')

        # 20% off a 5.00 charge -> 4.00 shipping, 16.00 total.
        self.assertEqual(1, len(order.shipping_discounts))
        self.assertEqual(D('4.00'), order.shipping_incl_tax)
        self.assertEqual(D('16.00'), order.total_incl_tax)

    def test_zero_shipping_discount_is_not_created(self):
        add_product(self.basket, D('12.00'))
        offer = self.apply_20percent_shipping_offer()

        shipping = Free()
        shipping = Repository().apply_shipping_offer(
            self.basket, shipping, offer)

        place_order(self.creator,
                    basket=self.basket,
                    order_number='1234',
                    shipping_method=shipping)
        order = Order.objects.get(number='1234')

        # No shipping discount should be recorded when shipping was free.
        self.assertEqual(0, len(order.shipping_discounts))
        self.assertEqual(D('0.00'), order.shipping_incl_tax)
        self.assertEqual(D('12.00'), order.total_incl_tax)
class TestMultiSiteOrderCreation(TestCase):
    """Orders should record the site they were placed on."""

    def setUp(self):
        self.creator = OrderCreator()
        self.basket = factories.create_basket(empty=True)

    def test_default_site(self):
        add_product(self.basket, D('12.00'))
        place_order(self.creator,
                    basket=self.basket,
                    order_number='1234')
        order = Order.objects.get(number='1234')
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(order.site_id, 1)

    def test_multi_sites(self):
        site1 = factories.SiteFactory()
        site2 = factories.SiteFactory()
        add_product(self.basket, D('12.00'))
        place_order(self.creator,
                    basket=self.basket,
                    order_number='12345',
                    site=site1)
        order1 = Order.objects.get(number='12345')
        self.assertEqual(order1.site, site1)

        add_product(self.basket, D('12.00'))
        place_order(self.creator,
                    basket=self.basket,
                    order_number='12346',
                    site=site2)
        order2 = Order.objects.get(number='12346')
        self.assertEqual(order2.site, site2)
|
|
# Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Execution API endpoints."""
import tornado.escape
from zoe_api.rest_api.request_handler import ZoeAPIRequestHandler
from zoe_api.exceptions import ZoeException
class ExecutionAPI(ZoeAPIRequestHandler):
    """The Execution API endpoint."""

    def get(self, execution_id):
        """GET a single execution by its ID."""
        if self.current_user is None:
            return

        try:
            execution_id = int(execution_id)
        except ValueError:
            self.set_status(400, "Parameter must be an integer")
            return  # BUG FIX: was missing; the handler continued with a non-integer ID

        try:
            execution = self.api_endpoint.execution_by_id(self.current_user, execution_id)
        except ZoeException as e:
            self.set_status(e.status_code, e.message)
            return

        self.write(execution.serialize())

    def delete(self, execution_id: int):
        """
        Terminate an execution.

        :param execution_id: the execution to be terminated
        """
        if self.current_user is None:
            return

        try:
            execution_id = int(execution_id)
        except ValueError:
            self.set_status(400, "Parameter must be an integer")
            return  # BUG FIX: was missing; termination proceeded with a bogus ID

        try:
            self.api_endpoint.execution_terminate(self.current_user, execution_id, 'user {} request from API'.format(self.current_user))
        except ZoeException as e:
            self.set_status(e.status_code, e.message)
        else:
            self.set_status(204)
class ExecutionDeleteAPI(ZoeAPIRequestHandler):
    """The ExecutionDelete API endpoints."""

    def delete(self, execution_id: int):
        """
        Delete an execution.

        :param execution_id: the execution to be deleted
        """
        if self.current_user is None:
            return

        try:
            execution_id = int(execution_id)
        except ValueError:
            self.set_status(400, "Parameter must be an integer")
            return  # BUG FIX: was missing; deletion proceeded with a bogus ID

        try:
            self.api_endpoint.execution_delete(self.current_user, execution_id)
        except ZoeException as e:
            self.set_status(e.status_code, e.message)
        else:
            self.set_status(204)
class ExecutionCollectionAPI(ZoeAPIRequestHandler):
    """The Execution Collection API endpoints."""

    def get(self):
        """
        Returns a list of all active executions.

        The list can be filtered by passing a non-empty JSON dictionary. Any combination of the following filters is supported:

        * status: one of submitted, queued, starting, error, running, cleaning up, terminated
        * name: execution name
        * user_id: user_id owning the execution (admin only)
        * limit: limit the number of returned entries
        * earlier_than_submit: all execution that where submitted earlier than this timestamp
        * earlier_than_start: all execution that started earlier than this timestamp
        * earlier_than_end: all execution that ended earlier than this timestamp
        * later_than_submit: all execution that where submitted later than this timestamp
        * later_than_start: all execution that started later than this timestamp
        * later_than_end: all execution that started later than this timestamp

        All timestamps should be passed as number of seconds since the epoch (UTC timezone).

        example: curl -u 'username:password' -X GET 'http://bf5:8080/api/0.6/execution?limit=1&status=terminated'

        :return:
        """
        if self.current_user is None:
            return

        filt_dict = {}
        # Recognized query-string filters and the type each value is
        # converted to before being passed to execution_list().
        filters = [
            ('status', str),
            ('name', str),
            ('user_id', str),
            ('limit', int),
            ('earlier_than_submit', int),
            ('earlier_than_start', int),
            ('earlier_than_end', int),
            ('later_than_submit', int),
            ('later_than_start', int),
            ('later_than_end', int)
        ]
        for filt in filters:
            if filt[0] in self.request.arguments:
                # Tornado stores argument values as lists of bytes; take
                # the first value and decode/convert it.
                if filt[1] == str:
                    filt_dict[filt[0]] = self.request.arguments[filt[0]][0].decode('utf-8')
                else:
                    filt_dict[filt[0]] = filt[1](self.request.arguments[filt[0]][0])

        try:
            execs = self.api_endpoint.execution_list(self.current_user, **filt_dict)
        except ZoeException as e:
            self.set_status(e.status_code, e.message)
            return

        self.write({e.id: e.serialize() for e in execs})

    def post(self):
        """
        Starts an execution, given an application description. Takes a JSON object.

        :return: the new execution_id
        """
        if self.current_user is None:
            return

        try:
            data = tornado.escape.json_decode(self.request.body)
        except ValueError:
            self.set_status(400, 'Error decoding JSON data')
            return

        application_description = data['application']
        exec_name = data['name']

        try:
            new_id = self.api_endpoint.execution_start(self.current_user, exec_name, application_description)
        except ZoeException as e:
            self.set_status(e.status_code, e.message)
            return

        self.set_status(201)
        self.write({'execution_id': new_id})
class ExecutionEndpointsAPI(ZoeAPIRequestHandler):
    """The ExecutionEndpoints API endpoint."""

    def get(self, execution_id: int):
        """
        Get a list of execution endpoints.

        :param execution_id: the execution whose endpoints are listed
        """
        if self.current_user is None:
            return

        try:
            execution_id = int(execution_id)
        except ValueError:
            self.set_status(400, "Parameter must be an integer")
            return  # BUG FIX: was missing; the lookup continued with a bogus ID

        try:
            execution = self.api_endpoint.execution_by_id(self.current_user, execution_id)
            # The first element of the tuple (the service list) is unused here.
            services_, endpoints = self.api_endpoint.execution_endpoints(self.current_user, execution)
        except ZoeException as e:
            self.set_status(e.status_code, e.message)
            return

        self.write({'endpoints': endpoints})
|
|
# ======================================================================
"""
Functions for reading and writing sets of fits files, including header
information.
"""
# ======================================================================
# Globally useful modules:
import numpy,os
import astropy.io.fits as pyfits
from PIL import Image
vb = 0
# ======================================================================
# BUG: This code implies one file one channel, whereas we want to make
# composites based on N images... Class should be image, not channel.
# RGB channels should be constructed *after* scaling but *before* stretching
class channel:
def __init__(self,fitsfile):
self.input = fitsfile
# Read in image and header:
hdulist = pyfits.open(self.input)
# self.hdr = hdulist[0].header
# self.image = hdulist[0].data
# Picking -1 header assumes we have 1 extension or PS1 (2 ext, image is last)
self.image = hdulist[-1].data
self.hdr = hdulist[-1].header
self.calibrate()
hdulist.close()
return
def calibrate(self):
# Which telescope took these data?
self.get_origin()
# Get zero point, exptime:
self.get_zeropoint()
# EXPTIME is 1.0 for images in counts/s - but headers do not always
# get this right... get_exptime gets this right.
self.get_exptime()
# Airmass? Gain? Should be included in zeropoint.
# print "Image statistics for "+self.input
# print " ",self.origin,self.exptime,self.zpt
# # Report 5 sigma depth:
# image = self.image.copy()
# mean = numpy.average(image)
# stdev = numpy.std(image)
# nsigma = 1
# clip = 3
# # Apply clipping:
# while nsigma > 0.01:
# index = numpy.where(abs((self.image - mean)/stdev) < clip)[0]
# image = image[index]
# newmean = numpy.average(image)
# newstdev = numpy.std(image)
# nsigma = abs(mean - newmean)/newstdev
# mean = newmean
# stdev = newstdev
# print " Before calibration, mean, rms = ",mean,stdev
# depth = -2.5*numpy.log10(5.0*stdev) + self.zpt
# print " Approximate 5-sigma limiting magnitude: ",depth
# Compute calibration factor for image pixel values to
# convert them into flux units. The 30 is arbitrary, and
# simply determines the absolute value of alpha required
# for a nice image.
self.calib = (10.0**(0.4*(30.0 - self.zpt))) / self.exptime
self.image *= self.calib
# # Report 5 sigma depth:
# image = self.image.copy()
# mean = numpy.average(image)
# stdev = numpy.std(image)
# nsigma = 1
# clip = 3
# # Apply clipping:
# while nsigma > 0.01:
# index = numpy.where(abs((self.image - mean)/stdev) < clip)[0]
# image = image[index]
# newmean = numpy.average(image)
# newstdev = numpy.std(image)
# nsigma = abs(mean - newmean)/newstdev
# mean = newmean
# stdev = newstdev
# print " After calibration, mean, rms = ",mean,stdev
return
def get_origin(self):
if 'TELESCOP' in self.hdr:
if self.hdr['TELESCOP'] == 'CFHT 3.6m':
self.origin = 'CFHT'
elif self.hdr['TELESCOP'] == 'ESO-VLT-U0':
# Assume that all data from ESO-VLT-U0 is from KIDS.
self.origin = "KIDS"
else:
self.origin = self.hdr['TELESCOP']
elif 'ORIGIN' in self.hdr:
if self.hdr['ORIGIN'] == 'CFHT':
self.origin = 'CFHT'
elif self.hdr['ORIGIN'] == 'DES':
self.origin = 'DES'
else:
self.origin = self.hdr['ORIGIN']
elif 'PSCAMERA' in self.hdr:
self.origin = 'PS1'
elif 'FID_ZP' in self.hdr:
self.origin = 'DES'
elif 'PROV' in self.hdr:
self.origin = 'VICS82'
else:
self.origin = 'UNKNOWN'
return
def get_zeropoint(self):
if self.origin == 'CFHT':
if 'MZP_AB' in self.hdr:
self.zpt = self.hdr['MZP_AB']
elif 'MAGZP' in self.hdr:
self.zpt = self.hdr['MAGZP']
# elif 'PHOT_C' in self.hdr:
# self.zpt = self.hdr['PHOT_C']
else:
self.zpt = 30.0
elif self.origin == 'PS1':
self.zpt = self.hdr['HIERARCH FPA.ZP']
elif self.origin == 'DES':
self.zpt = self.hdr['MZP_AB']
elif self.origin == 'VICS82':
if 'MZP_AB' in self.hdr:
self.zpt = self.hdr['MZP_AB']
else:
self.zpt = 30.0
elif self.origin == 'KIDS':
# KiDS coadds are calibrated to ZPT=0.
self.zpt = 0.0
else: # UNKNOWN
self.zpt = 30.0
return
def get_exptime(self):
# Here we assume that both CFHT and PS1 provide images with
# pixel values in counts per second... or that the zero point
# takes into account the exptime.
if self.origin == 'CFHT':
self.exptime = 1.0
elif self.origin == 'PS1':
# self.exptime = self.hdr['EXPTIME']
self.exptime = 1.0
elif self.origin == 'DES':
# self.exptime = self.hdr['EXPTIME']
self.exptime = 1.0
elif self.origin == 'VICS82':
# self.exptime = self.hdr['EXPTIME']
self.exptime = 1.0
elif self.origin == 'KIDS':
# self.exptime = self.hdr['EXPTIME']
self.exptime = 1.0
else: #UNKNOWN
# Use 1.0 as default to ensure the program doesn't crash.
self.exptime = 1.0
return
def set_scale(self,manually=False):
if manually:
self.scale = manually
else:
self.scale = 1.0
return
def apply_scale(self):
self.image *= self.scale
return
def subtract_background(self):
self.image -= numpy.median(self.image)
return
    def writefits(self):
        """Write the calibrated image to '<input stem>_calibrated.fits'.

        The output filename is everything before the FIRST '.' in
        self.input plus the suffix, so inputs with dots elsewhere in the
        path would be truncated — assumes simple filenames; confirm.
        """
        self.output = str.split(self.input,'.')[0]+'_calibrated.fits'
        # Remove a stale output first so writeto does not fail on an
        # existing file.
        if os.path.exists(self.output): os.remove(self.output)
        hdu = pyfits.PrimaryHDU()
        hdu.header = self.hdr
        hdu.data = self.image
        hdu.verify()
        hdu.writeto(self.output)
        return
# ======================================================================
def normalize_scales(scales):
    """Rescale three channel scale factors so that their mean is 1.0.

    Expects exactly three values; returns them as a 3-tuple divided by
    their arithmetic mean.
    """
    assert len(scales) == 3
    mean = sum(scales) / 3.0
    return tuple(value / mean for value in scales)
# ----------------------------------------------------------------------
def filter2wavelength(fname):
    """Return the central wavelength (Angstroms) for a known filter name.

    CFHT MegaCam values are from
    http://www.cfht.hawaii.edu/Instruments/Imaging/Megacam/specsinformation.html

    Raises:
        ValueError: if the filter name is not recognized.

    Bug fixed: the original used ``fname == 'i.MP9701' or 'i.MP9702'``,
    which is always truthy, so every filter other than u/g/r (including
    z.MP9801) returned 7700, and unrecognized names crashed with an
    UnboundLocalError.
    """
    # CFHT MegaCam
    wavelengths = {
        'u.MP9301': 3740,
        'g.MP9401': 4870,
        'r.MP9601': 6250,
        'i.MP9701': 7700,
        'i.MP9702': 7700,  # both i-band filter generations share a value
        'z.MP9801': 9000,
        # SDSS:
        # DES:
        # etc
    }
    try:
        return wavelengths[fname]
    except KeyError:
        raise ValueError("filter2wavelength: unrecognized filter "
                         "name '{}'".format(fname))
# ----------------------------------------------------------------------
def check_image_shapes(r, g, b):
    """Verify that the three channel arrays all have the same shape.

    Raises:
        ValueError: if any pair of arrays differs in shape.

    Bug fixed: the original raised a plain string, which is a TypeError
    ("exceptions must derive from BaseException") in Python 3.
    """
    if (numpy.shape(r) != numpy.shape(g)) or \
       (numpy.shape(r) != numpy.shape(b)):
        raise ValueError("Image arrays are of different shapes, exiting")
    return
# ----------------------------------------------------------------------
# Make an 8 bit integer image cube from three channels:
def pack_up(r,g,b):
    """Pack three 2-D channel arrays into an 8-bit RGB PIL image.

    Channel values are expected in [0, 1]; they are clipped to that
    range, scaled to [0, 255] and cast to uint8.
    """
    NX,NY = numpy.shape(r)
    x = numpy.zeros([NX,NY,3])
    # flipud: presumably converts FITS bottom-up row order to the
    # top-down order PIL expects — TODO confirm.
    x[:,:,0] = numpy.flipud(r)
    x[:,:,1] = numpy.flipud(g)
    x[:,:,2] = numpy.flipud(b)
    x = numpy.clip(x,0.0,1.0)
    x = x*255
    return Image.fromarray(x.astype(numpy.uint8))
# ======================================================================
|
|
# Copyright (c) 2009-2021 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
"""Implement type conversion helpers."""
import numpy as np
from itertools import repeat, cycle
from abc import ABC, abstractmethod
from collections.abc import Mapping, MutableMapping
from inspect import isclass
from hoomd.error import TypeConversionError
from hoomd.util import _is_iterable
from hoomd.variant import Variant, Constant
from hoomd.trigger import Trigger, Periodic
from hoomd.filter import ParticleFilter, CustomFilter
import hoomd
class RequiredArg:
    """Sentinel class marking a parameter as required (no default value)."""
    pass
def trigger_preprocessing(trigger):
    """Process triggers.

    Existing `Trigger` instances pass through; anything convertible to
    `int` becomes a `Periodic` trigger with phase 0.
    """
    if isinstance(trigger, Trigger):
        return trigger
    try:
        return Periodic(period=int(trigger), phase=0)
    except Exception:
        raise ValueError("Expected a hoomd.trigger.Trigger or int object.")
def variant_preprocessing(variant):
    """Process variants.

    Existing `Variant` instances pass through; anything convertible to
    `float` becomes a `Constant` variant.
    """
    if isinstance(variant, Variant):
        return variant
    try:
        return Constant(float(variant))
    except Exception:
        raise ValueError(
            "Expected a hoomd.variant.Variant or float object.")
def box_preprocessing(box):
    """Process boxes.

    Existing `hoomd.Box` instances pass through; anything else is handed
    to `hoomd.Box.from_box` for conversion.

    Raises:
        ValueError: when the value cannot be converted.
    """
    if isinstance(box, hoomd.Box):
        return box
    try:
        return hoomd.Box.from_box(box)
    except Exception:
        # Fixed the malformed message (was "...object. using ...").
        raise ValueError("{} is not convertible into a hoomd.Box object "
                         "using hoomd.Box.from_box".format(box))
def positive_real(number):
    """Validate that a value is a real number strictly greater than zero.

    Returns the value as a float; raises `TypeConversionError` when the
    value is not convertible to float or is not positive.
    """
    try:
        converted = float(number)
    except Exception as err:
        raise TypeConversionError(
            f"{number} not convertible to float.") from err
    if converted <= 0:
        raise TypeConversionError("Expected a number greater than zero.")
    return converted
def nonnegative_real(number):
    """Validate that a value is a real number greater than or equal to zero.

    Returns the value as a float; raises `TypeConversionError` when the
    value is not convertible to float or is negative.
    """
    try:
        converted = float(number)
    except Exception as err:
        raise TypeConversionError(
            f"{number} not convertible to float.") from err
    if converted < 0:
        raise TypeConversionError("Expected a nonnegative real number.")
    return converted
def identity(value):
    """Return the given value unchanged (no-op pre/post-processor)."""
    return value
class _HelpValidate(ABC):
    """Base class for classes that perform validation on an inputted value.

    Supports arbitrary pre- and post-processing as well as optionally
    allowing None values. The `_validate` hook should raise a `ValueError`
    or `TypeConverterValue` if validation fails, else return the
    validated/transformed value.
    """

    def __init__(self, preprocess=None, postprocess=None, allow_none=False):
        # Missing stages default to the no-op `identity` function.
        self._preprocess = preprocess if preprocess is not None else identity
        self._postprocess = postprocess if postprocess is not None else identity
        self._allow_none = allow_none

    def __call__(self, value):
        if value is None:
            if self._allow_none:
                return None
            raise ValueError("None is not allowed.")
        # Pipeline: preprocess -> subclass validation -> postprocess.
        preprocessed = self._preprocess(value)
        validated = self._validate(preprocessed)
        return self._postprocess(validated)

    @abstractmethod
    def _validate(self, value):
        pass
class Either(_HelpValidate):
    """Class that has multiple equally valid validation methods for an input.

    For instance if a parameter can either be a length 6 tuple or float then

    .. code-blocks:: python

        e = Either(to_type_converter((float,) * 6), to_type_converter(float))

    would allow either value to pass.
    """

    def __init__(self, specs, preprocess=None, postprocess=None):
        super().__init__(preprocess, postprocess)
        self.specs = specs

    def _validate(self, value):
        # First spec that accepts the value wins; errors only when all fail.
        for spec in self.specs:
            try:
                return spec(value)
            except Exception:
                pass
        raise ValueError("value {} not converible using {}".format(
            value, [str(spec) for spec in self.specs]))

    def __str__(self):
        """str: String representation of the validator."""
        return "Either({})".format([str(spec) for spec in self.specs])
class OnlyIf(_HelpValidate):
    """A wrapper around a validation function.

    Not strictly necessary, but keeps the theme of the other classes, and
    allows pre/post-processing and optionally allows None.
    """

    def __init__(self, cond, preprocess=None, postprocess=None,
                 allow_none=False):
        super().__init__(preprocess, postprocess, allow_none)
        self.cond = cond

    def _validate(self, value):
        # Validation is delegated entirely to the wrapped callable.
        return self.cond(value)

    def __str__(self):
        """str: String representation of the validator."""
        return "OnlyIf({})".format(str(self.cond))
class OnlyTypes(_HelpValidate):
    """Only allow values that are instances of type.

    Developers should consider the `collections.abc` module in using this
    type. In general `OnlyTypes(Sequence)` is more readable than the similar
    `OnlyIf(lambda x: hasattr(x, '__iter__'))`. If a sequence of types is
    provided and ``strict`` is ``False``, conversions will be attempted in
    the order of the ``types`` sequence.
    """

    def __init__(self, *types, disallow_types=None, strict=False,
                 preprocess=None, postprocess=None, allow_none=False):
        super().__init__(preprocess, postprocess, allow_none)
        self.types = types
        self.disallow_types = () if disallow_types is None else disallow_types
        self.strict = strict

    def _validate(self, value):
        if isinstance(value, self.disallow_types):
            raise ValueError(f"Value cannot be of type {type(value)}")
        if isinstance(value, self.types):
            return value
        if self.strict:
            raise ValueError(
                f"Value {value} not instance of any of {self.types}.")
        # Non-strict mode: try converting with each allowed type in turn.
        for candidate in self.types:
            try:
                return candidate(value)
            except Exception:
                continue
        raise ValueError(
            f"Value {value} is not convertable into any of these types "
            f"{self.types}")

    def __str__(self):
        """str: String representation of the validator."""
        return f"OnlyTypes({str(self.types)})"
class OnlyFrom(_HelpValidate):
    """Validates a value against a given set of options.

    An example that allows integers less than ten `OnlyFrom(range(10))`.
    Note that generator expressions are fine.
    """

    def __init__(self, options, preprocess=None, postprocess=None,
                 allow_none=False):
        super().__init__(preprocess, postprocess, allow_none)
        # Materialize once so generators can be tested repeatedly.
        self.options = set(options)

    def _validate(self, value):
        if value not in self:
            raise ValueError("Value {} not in options: {}".format(
                value, self.options))
        return value

    def __contains__(self, value):
        """bool: True when value is in the options."""
        return value in self.options

    def __str__(self):
        """str: String representation of the validator."""
        return "OnlyFrom[{}]".format(self.options)
class SetOnce:
    """Used to make properties read-only after setting."""

    def __init__(self, validation):
        # A bare class is wrapped in an OnlyTypes validator.
        self._validation = (OnlyTypes(validation)
                            if isclass(validation) else validation)

    def __call__(self, value):
        """Handle setting values."""
        if self._validation is None:
            raise ValueError("Attribute is read-only.")
        validated = self._validation(value)
        # Drop the validator so any subsequent set attempt errors out.
        self._validation = None
        return validated
class TypeConverter(ABC):
    """Base class for TypeConverter's encodes structure and validation.

    Subclasses represent validating a different data structure. When called
    they are to attempt to validate and transform the inputs as given by the
    specification set up at the initialization.

    Note:
        Subclasses should not be instantiated directly. Instead use
        `to_type_converter`.
    """
    @abstractmethod
    def __init__(self, *args, **kwargs):
        # Subclasses define how the validation specification is stored.
        pass
    @abstractmethod
    def __call__(self, value):
        """Called when values are set."""
        pass
class NDArrayValidator(_HelpValidate):
    """Validates array and array-like structures.

    Args:
        dtype (numpy.dtype): The type of individual items in the array.
        shape (`tuple` [`int`, ...], optional): The shape of the array. The
            number of dimensions is specified by the length of the tuple and
            the length of a dimension is specified by the value. A value of
            ``None`` in an index indicates any length is acceptable. Defaults
            to ``(None,)``.
        order (`str`, optional): The kind of ordering needed for the array.
            Options are ``["C", "F", "K", "A"]``. See `numpy.array`
            documentation for information about the orderings. Defaults to
            `"K"`.
        preprocess (callable, optional): An optional function like argument to
            use to preprocess arrays before general validation. Defaults to
            ``None`` which means no preprocessing.
        postprocess (callable, optional): An optional function like argument
            to use to postprocess arrays after general validation. Defaults
            to ``None`` which means no postprocessing.
        allow_none (`bool`, optional): Whether to allow ``None`` as a valid
            value. Defaults to ``False``.

    The validation will attempt to convert array-like objects to arrays. We
    will change the dtype and ordering if necessary, but do not reshape the
    given arrays since this is non-trivial depending on the shape
    specification passed in.
    """
    def __init__(self,
                 dtype,
                 shape=(None,),
                 order="K",
                 preprocess=None,
                 postprocess=None,
                 allow_none=False):
        """Create a NDArrayValidator object."""
        super().__init__(preprocess, postprocess, allow_none)
        self._dtype = dtype
        self._shape = shape
        self._order = order
    def _validate(self, arr):
        """Validate an array or array-like object."""
        # np.array copies only when dtype/order require it.
        typed_and_ordered = np.array(arr, dtype=self._dtype, order=self._order)
        if len(typed_and_ordered.shape) != len(self._shape):
            raise ValueError(
                f"Expected array of {len(self._shape)} dimensions, but "
                f"recieved array of {len(typed_and_ordered.shape)} dimensions.")
        # Check each constrained dimension; None means "any length".
        for i, dim in enumerate(self._shape):
            if dim is not None:
                if typed_and_ordered.shape[i] != dim:
                    raise ValueError(
                        f"In dimension {i}, expected size {dim}, but got size "
                        f"{typed_and_ordered.shape[i]}")
        return typed_and_ordered
class TypeConverterValue(TypeConverter):
    """Represents a scalar value of some kind.

    Parameters:
        value (Any): Whatever defines the validation. Many ways to specify the
            validation exist.

    Attributes:
        _conversion_func_dict (dict[type, Callable[Any]): A dictionary of type
            (e.g. list, str) - callable mappings. The callable is the default
            validation for a given type.

    Specification:
        The initialization specification goes through the following process.
        If the value is of a type in `self._conversion_func_dict` or is a type
        in `self._conversion_func_dict` then we use the mapping validation
        function. Otherwise if the value is a class we use `OnlyTypes(value)`.
        Generic callables just get used directly, and finally if no check
        passes we use `OnlyTypes(type(value))`.

        Examples of valid ways to specify an integer specification,

        .. code-block:: python

            TypeConverterValue(1)
            TypeConverterValue(int)

            def natural_number(value):
                if value < 1:
                    raise ValueError(
                        "Value {} must be a natural number.".format(value))

            TypeConverterValue(OnlyTypes(int, postprocess=natural_number))
    """
    _conversion_func_dict = {
        Variant:
            OnlyTypes(Variant, preprocess=variant_preprocessing),
        ParticleFilter:
            OnlyTypes(ParticleFilter, CustomFilter, strict=True),
        str:
            OnlyTypes(str, strict=True),
        Trigger:
            OnlyTypes(Trigger, preprocess=trigger_preprocessing),
        # arrays default to float of one dimension of arbitrary length and
        # ordering
        np.ndarray:
            NDArrayValidator(float),
    }
    def __init__(self, value):
        # If the value is a class object
        if isclass(value):
            # if constructor with special default setting logic
            for cls in self._conversion_func_dict:
                if issubclass(value, cls):
                    self.converter = self._conversion_func_dict[cls]
                    return None
            # constructor with no special logic
            self.converter = OnlyTypes(value)
            return None
        # If the value is a class instance
        # if value is a subtype of a type with special value setting logic
        for cls in self._conversion_func_dict:
            if isinstance(value, cls):
                self.converter = self._conversion_func_dict[cls]
                return None
        # if value is a callable assume that it is the validation function
        if callable(value):
            self.converter = value
        # if any other object
        else:
            self.converter = OnlyTypes(type(value))
    def __call__(self, value):
        """Called when the value is set."""
        try:
            return self.converter(value)
        except (TypeError, ValueError, TypeConversionError) as err:
            # RequiredArg reaching validation means the user never set it.
            if value is RequiredArg:
                raise TypeConversionError("Value is a required argument")
            raise TypeConversionError(
                "Value {} of type {} cannot be converted using {}. Raised "
                "error: {}".format(value, type(value), str(self.converter),
                                   str(err)))
class TypeConverterSequence(TypeConverter):
    """Validation for a generic any length sequence.

    Uses `to_type_converter` for constructing the validation. For each item
    in the inputted sequence, a corresponding `TypeConverter` object is
    constructed.

    Parameters:
        sequence (Sequence[Any]): Any sequence or iterator, anything else
            passed is an error.

    Specification:
        When validating, if a single element was given that element is
        repeated for every element of the inputted sequence. Otherwise, we
        cycle through the given values. This makes this class unsuited for
        fixed length sequences (`TypeConverterFixedLengthSequence` exists for
        this). Examples include,

        .. code-block:: python

            # All elements should be floats
            TypeConverterSequence([float])

            # All elements should be in a float int ordering
            TypeConverterSequence([float, int])
    """
    def __init__(self, sequence):
        # One converter per spec item; __iter__ repeats/cycles these over
        # the arbitrary-length validated sequence.
        self.converter = [to_type_converter(item) for item in sequence]
    def __call__(self, sequence):
        """Called when the value is set."""
        if not _is_iterable(sequence):
            raise TypeConversionError(
                "Expected a sequence like instance. Received {} of type {}."
                "".format(sequence, type(sequence)))
        else:
            new_sequence = []
            try:
                # `i` deliberately leaks from the loop so the except clause
                # can report which item failed validation.
                for i, (v, c) in enumerate(zip(sequence, self)):
                    new_sequence.append(c(v))
            except (TypeConversionError) as err:
                raise TypeConversionError("In list item number {}: {}"
                                          "".format(i, str(err)))
            return new_sequence
    def __iter__(self):
        """Iterate over converters in the sequence."""
        if len(self.converter) == 1:
            yield from repeat(self.converter[0])
        else:
            yield from cycle(self.converter)
class TypeConverterFixedLengthSequence(TypeConverter):
    """Validation for a fixed length sequence (read tuple).

    Uses `to_type_converter` for constructing the validation. For each item
    in the inputted sequence, a corresponding `TypeConverter` object is
    constructed.

    Parameters:
        sequence (Sequence[Any]): Any sequence or iterable, anything else
            passed is an error.

    Specification:
        When validating, a sequence of the exact length given on
        instantiation is expected, else an error is raised.

        .. code-block:: python

            # Three floats
            TypeConverterFixedLengthSequence((float, float, float))

            # a string followed by a float and int
            TypeConverterFixedLengthSequence((str, float, int))
    """
    def __init__(self, sequence):
        # Tuple of converters; length fixes the accepted sequence length.
        self.converter = tuple([to_type_converter(item) for item in sequence])
    def __call__(self, sequence):
        """Called when the value is set."""
        if not _is_iterable(sequence):
            raise TypeConversionError(
                "Expected a tuple like object. Received {} of type {}."
                "".format(sequence, type(sequence)))
        elif len(sequence) != len(self.converter):
            raise TypeConversionError(
                "Expected exactly {} items. Received {}.".format(
                    len(self.converter), len(sequence)))
        else:
            new_sequence = []
            try:
                # `i` deliberately leaks from the loop so the except clause
                # can report which item failed validation.
                for i, (v, c) in enumerate(zip(sequence, self)):
                    new_sequence.append(c(v))
            except (TypeConversionError) as err:
                raise TypeConversionError("In tuple item number {}: {}"
                                          "".format(i, str(err)))
            return tuple(new_sequence)
    def __iter__(self):
        """Iterate over converters in the sequence."""
        yield from self.converter
class TypeConverterMapping(TypeConverter, MutableMapping):
    """Validation for a mapping of string keys to any type values.

    Uses `to_type_converter` for constructing the validation. For each value
    in the inputted mapping, a corresponding `TypeConverter` object is
    constructed.

    Parameters:
        mapping (Mapping[str, Any]): Any mapping, anything else passed is an
            error.

    Specification:
        When validating, a subset of keys is expected to be used. No error is
        raised if not all keys are used in the validation. The validation
        either errors or returns a mapping with all the same keys as the
        inputted mapping. Keys without a converter are passed through
        unvalidated.

        .. code-block:: python

            t = TypeConverterMapping({'str': str, 'list_of_floats': [float]})

            # valid
            t({'str': 'hello'})

            # invalid
            t({'new_key': None})
    """
    def __init__(self, mapping):
        self.converter = {
            key: to_type_converter(value) for key, value in mapping.items()
        }
    def __call__(self, mapping):
        """Called when the value is set."""
        if not isinstance(mapping, Mapping):
            raise TypeConversionError(
                "Expected a dict like value. Recieved {} of type {}."
                "".format(mapping, type(mapping)))
        new_mapping = dict()
        try:
            for key, value in mapping.items():
                if key in self:
                    new_mapping[key] = self.converter[key](value)
                else:
                    # Unknown keys are kept as-is (subset validation).
                    new_mapping[key] = value
        except (TypeConversionError) as err:
            raise TypeConversionError("In key {}: {}"
                                      "".format(str(key), str(err)))
        return new_mapping
    def __iter__(self):
        """Iterate over converters in the mapping."""
        yield from self.converter
    def __getitem__(self, key):
        """Get a converter by key."""
        return self.converter[key]
    def __setitem__(self, key, value):
        """Set a converter by key."""
        self.converter[key] = value
    def __delitem__(self, key):
        """Remove a converter by key."""
        del self.converter[key]
    def __len__(self):
        """int: Number of converters."""
        return len(self.converter)
def to_type_converter(value):
    """The function to use for creating a structure of `TypeConverter` objects.

    This is the function to use when defining validation not any of the
    `TypeConverter` subclasses.

    .. code-block:: python

        # list take a list of tuples of 3 floats each
        validation = to_type_converter(
            {'str': str, 'list': [(float, float, float)]})
    """
    # Dispatch order matters: tuples are iterable but mean fixed length,
    # so they must be recognized before the generic iterable check.
    if isinstance(value, tuple):
        return TypeConverterFixedLengthSequence(value)
    if _is_iterable(value):
        return TypeConverterSequence(value)
    if isinstance(value, Mapping):
        return TypeConverterMapping(value)
    return TypeConverterValue(value)
|
|
# Copyright 2018-2019 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import json
from collections import defaultdict, OrderedDict
import inspect
import re
import tarfile
import uuid
import warnings
import zipfile
from typing import Callable, Set, List, Text, Dict, Tuple, Any, Union, Optional
import kfp.deprecated as kfp
from kfp.deprecated.dsl import _for_loop
from kfp.deprecated.compiler import _data_passing_rewriter, v2_compat
from kfp.deprecated import dsl
from kfp.deprecated.compiler._k8s_helper import convert_k8s_obj_to_json, sanitize_k8s_name
from kfp.deprecated.compiler._op_to_template import _op_to_template, _process_obj
from kfp.deprecated.compiler._default_transformers import add_pod_env, add_pod_labels
from kfp.deprecated.components.structures import InputSpec
from kfp.deprecated.components._yaml_utils import dump_yaml
from kfp.deprecated.dsl._metadata import _extract_pipeline_metadata
from kfp.deprecated.dsl._ops_group import OpsGroup
from kfp.deprecated.dsl._pipeline_param import extract_pipelineparams_from_any, PipelineParam
_SDK_VERSION_LABEL = 'pipelines.kubeflow.org/kfp_sdk_version'
_SDK_ENV_LABEL = 'pipelines.kubeflow.org/pipeline-sdk-type'
_SDK_ENV_DEFAULT = 'kfp'
class Compiler(object):
"""DSL Compiler that compiles pipeline functions into workflow yaml.
Example:
How to use the compiler to construct workflow yaml::
@dsl.pipeline(
name='name',
description='description'
)
def my_pipeline(a: int = 1, b: str = "default value"):
...
Compiler().compile(my_pipeline, 'path/to/workflow.yaml')
"""
    def __init__(self,
                 mode: dsl.PipelineExecutionMode = kfp.dsl.PipelineExecutionMode
                 .V1_LEGACY,
                 launcher_image: Optional[str] = None):
        """Creates a KFP compiler for compiling pipeline functions for
        execution.

        Args:
            mode: The pipeline execution mode to use, defaults to kfp.dsl.PipelineExecutionMode.V1_LEGACY.
            launcher_image: Configurable image for KFP launcher to use. Only applies
              when `mode == dsl.PipelineExecutionMode.V2_COMPATIBLE`. Should only be
              needed for tests or custom deployments right now.

        Raises:
            ValueError: if ``mode`` is V2_ENGINE (unsupported) or
                V2_COMPATIBLE (removed from this SDK version).
        """
        # Reject unsupported/removed modes up front with actionable messages.
        if mode == dsl.PipelineExecutionMode.V2_ENGINE:
            raise ValueError('V2_ENGINE execution mode is not supported yet.')
        if mode == dsl.PipelineExecutionMode.V2_COMPATIBLE:
            raise ValueError('V2_COMPATIBLE mode has been deprecated in KFP SDK'
                             ' 2.0. To use V2_COMPATIBLE mode, install KFP SDK'
                             ' 1.8.*.')
        self._mode = mode
        self._launcher_image = launcher_image
        # Populated during compilation; None until a pipeline is compiled.
        self._pipeline_name_param: Optional[dsl.PipelineParam] = None
        self._pipeline_root_param: Optional[dsl.PipelineParam] = None
    def _get_groups_for_ops(self, root_group):
        """Helper function to get belonging groups for each op.

        Each pipeline has a root group. Each group has a list of operators
        (leaf) and groups. This function traverses the tree and gets all
        ancestor groups for all operators.

        Returns:
          A dict. Key is the operator's name. Value is a list of ancestor groups including the
            op itself. The list of a given operator is sorted in a way that the farthest
            group is the first and operator itself is the last.
        """
        def _get_op_groups_helper(current_groups, ops_to_groups):
            # `current_groups` is the ancestor path from root to here; it is
            # mutated (append/del) as the recursion descends and unwinds.
            root_group = current_groups[-1]
            for g in root_group.groups:
                # Add recursive opsgroup in the ops_to_groups
                # such that the i/o dependency can be propagated to the ancester opsgroups
                if g.recursive_ref:
                    ops_to_groups[g.name] = [x.name for x in current_groups
                                            ] + [g.name]
                    continue
                current_groups.append(g)
                _get_op_groups_helper(current_groups, ops_to_groups)
                del current_groups[-1]
            for op in root_group.ops:
                ops_to_groups[op.name] = [x.name for x in current_groups
                                         ] + [op.name]
        ops_to_groups = {}
        current_groups = [root_group]
        _get_op_groups_helper(current_groups, ops_to_groups)
        return ops_to_groups
#TODO: combine with the _get_groups_for_ops
    def _get_groups_for_opsgroups(self, root_group):
        """Helper function to get belonging groups for each opsgroup.

        Each pipeline has a root group. Each group has a list of operators
        (leaf) and groups. This function traverses the tree and gets all
        ancestor groups for all opsgroups.

        Returns:
          A dict. Key is the opsgroup's name. Value is a list of ancestor groups including the
            opsgroup itself. The list of a given opsgroup is sorted in a way that the farthest
            group is the first and opsgroup itself is the last.
        """
        def _get_opsgroup_groups_helper(current_groups, opsgroups_to_groups):
            # Same depth-first walk as _get_groups_for_ops, but records
            # opsgroups instead of ops.
            root_group = current_groups[-1]
            for g in root_group.groups:
                # Add recursive opsgroup in the ops_to_groups
                # such that the i/o dependency can be propagated to the ancester opsgroups
                if g.recursive_ref:
                    continue
                opsgroups_to_groups[g.name] = [x.name for x in current_groups
                                              ] + [g.name]
                current_groups.append(g)
                _get_opsgroup_groups_helper(current_groups, opsgroups_to_groups)
                del current_groups[-1]
        opsgroups_to_groups = {}
        current_groups = [root_group]
        _get_opsgroup_groups_helper(current_groups, opsgroups_to_groups)
        return opsgroups_to_groups
def _get_groups(self, root_group):
"""Helper function to get all groups (not including ops) in a
pipeline."""
def _get_groups_helper(group):
groups = {group.name: group}
for g in group.groups:
# Skip the recursive opsgroup because no templates
# need to be generated for the recursive opsgroups.
if not g.recursive_ref:
groups.update(_get_groups_helper(g))
return groups
return _get_groups_helper(root_group)
    def _get_uncommon_ancestors(self, op_groups, opsgroup_groups, op1, op2):
        """Helper function to get unique ancestors between two ops.

        For example, op1's ancestor groups are [root, G1, G2, G3, op1],
        op2's ancestor groups are [root, G1, G4, op2], then it returns a
        tuple ([G2, G3, op1], [G4, op2]).
        """
        #TODO: extract a function for the following two code module
        if op1.name in op_groups:
            op1_groups = op_groups[op1.name]
        elif op1.name in opsgroup_groups:
            op1_groups = opsgroup_groups[op1.name]
        else:
            raise ValueError(op1.name + ' does not exist.')
        if op2.name in op_groups:
            op2_groups = op_groups[op2.name]
        elif op2.name in opsgroup_groups:
            op2_groups = opsgroup_groups[op2.name]
        else:
            raise ValueError(op2.name + ' does not exist.')
        both_groups = [op1_groups, op2_groups]
        # Length of the common prefix of the two ancestor chains: zip stops
        # at the shorter chain, and each position counts only if both names
        # are equal (x == (x[0],) * len(x) checks all elements identical).
        common_groups_len = sum(
            1 for x in zip(*both_groups) if x == (x[0],) * len(x))
        # Everything after the common prefix is unique to each op.
        group1 = op1_groups[common_groups_len:]
        group2 = op2_groups[common_groups_len:]
        return (group1, group2)
    def _get_condition_params_for_ops(self, root_group):
        """Get parameters referenced in conditions of ops.

        Returns:
            A defaultdict mapping op/opsgroup name to the set of
            PipelineParams used in any enclosing condition.
        """
        conditions = defaultdict(set)
        def _get_condition_params_for_ops_helper(group,
                                                 current_conditions_params):
            # Accumulate condition params down the tree; a fresh list is
            # made only when this group adds params, so siblings don't see
            # each other's additions.
            new_current_conditions_params = current_conditions_params
            if group.type == 'condition':
                new_current_conditions_params = list(current_conditions_params)
                if isinstance(group.condition.operand1, dsl.PipelineParam):
                    new_current_conditions_params.append(
                        group.condition.operand1)
                if isinstance(group.condition.operand2, dsl.PipelineParam):
                    new_current_conditions_params.append(
                        group.condition.operand2)
            for op in group.ops:
                for param in new_current_conditions_params:
                    conditions[op.name].add(param)
            for g in group.groups:
                # If the subgroup is a recursive opsgroup, propagate the pipelineparams
                # in the condition expression, similar to the ops.
                if g.recursive_ref:
                    for param in new_current_conditions_params:
                        conditions[g.name].add(param)
                else:
                    _get_condition_params_for_ops_helper(
                        g, new_current_conditions_params)
        _get_condition_params_for_ops_helper(root_group, [])
        return conditions
def _get_next_group_or_op(cls, to_visit: List, already_visited: Set):
"""Get next group or op to visit."""
if len(to_visit) == 0:
return None
next = to_visit.pop(0)
while next in already_visited:
next = to_visit.pop(0)
already_visited.add(next)
return next
    def _get_for_loop_ops(self, new_root) -> Dict[Text, dsl.ParallelFor]:
        """Collect all `dsl.ParallelFor` opsgroups reachable from ``new_root``.

        Performs a breadth-first traversal of groups/ops, guarding against
        revisiting nodes, and returns a dict mapping opsgroup name to the
        ParallelFor instance.
        """
        to_visit = self._get_all_subgroups_and_ops(new_root)
        op_name_to_op = {}
        already_visited = set()
        while len(to_visit):
            next_op = self._get_next_group_or_op(to_visit, already_visited)
            if next_op is None:
                break
            to_visit.extend(self._get_all_subgroups_and_ops(next_op))
            if isinstance(next_op, dsl.ParallelFor):
                op_name_to_op[next_op.name] = next_op
        return op_name_to_op
def _get_all_subgroups_and_ops(self, op):
"""Get all ops and groups contained within this group."""
subgroups = []
if hasattr(op, 'ops'):
subgroups.extend(op.ops)
if hasattr(op, 'groups'):
subgroups.extend(op.groups)
return subgroups
    def _get_inputs_outputs(
        self,
        pipeline,
        root_group,
        op_groups,
        opsgroup_groups,
        condition_params,
        op_name_to_for_loop_op: Dict[Text, dsl.ParallelFor],
    ):
        """Get inputs and outputs of each group and op.

        Returns:
          A tuple (inputs, outputs).
          inputs and outputs are dicts with key being the group/op names and values being list of
          tuples (param_name, producing_op_name). producing_op_name is the name of the op that
          produces the param. If the param is a pipeline param (no producer op), then
          producing_op_name is None.
        """
        inputs = defaultdict(set)
        outputs = defaultdict(set)
        for op in pipeline.ops.values():
            # op's inputs and all params used in conditions for that op are both considered.
            for param in op.inputs + list(condition_params[op.name]):
                # if the value is already provided (immediate value), then no need to expose
                # it as input for its parent groups.
                if param.value:
                    continue
                if param.op_name:
                    upstream_op = pipeline.ops[param.op_name]
                    upstream_groups, downstream_groups = \
                      self._get_uncommon_ancestors(op_groups, opsgroup_groups, upstream_op, op)
                    for i, group_name in enumerate(downstream_groups):
                        if i == 0:
                            # If it is the first uncommon downstream group, then the input comes from
                            # the first uncommon upstream group.
                            inputs[group_name].add(
                                (param.full_name, upstream_groups[0]))
                        else:
                            # If not the first downstream group, then the input is passed down from
                            # its ancestor groups so the upstream group is None.
                            inputs[group_name].add((param.full_name, None))
                    for i, group_name in enumerate(upstream_groups):
                        if i == len(upstream_groups) - 1:
                            # If last upstream group, it is an operator and output comes from container.
                            outputs[group_name].add((param.full_name, None))
                        else:
                            # If not last upstream group, output value comes from one of its child.
                            outputs[group_name].add(
                                (param.full_name, upstream_groups[i + 1]))
                else:
                    if not op.is_exit_handler:
                        # Walk ancestors from the op outward toward root.
                        for group_name in op_groups[op.name][::-1]:
                            # if group is for loop group and param is that loop's param, then the param
                            # is created by that for loop ops_group and it shouldn't be an input to
                            # any of its parent groups.
                            inputs[group_name].add((param.full_name, None))
                            if group_name in op_name_to_for_loop_op:
                                # for example:
                                #   loop_group.loop_args.name = 'loop-item-param-99ca152e'
                                #   param.name = 'loop-item-param-99ca152e--a'
                                loop_group = op_name_to_for_loop_op[group_name]
                                if loop_group.loop_args.name in param.name:
                                    break
        # Generate the input/output for recursive opsgroups
        # It propagates the recursive opsgroups IO to their ancester opsgroups
        def _get_inputs_outputs_recursive_opsgroup(group):
            #TODO: refactor the following codes with the above
            if group.recursive_ref:
                params = [(param, False) for param in group.inputs]
                params.extend([(param, True)
                               for param in list(condition_params[group.name])])
                for param, is_condition_param in params:
                    if param.value:
                        continue
                    full_name = param.full_name
                    if param.op_name:
                        upstream_op = pipeline.ops[param.op_name]
                        upstream_groups, downstream_groups = \
                          self._get_uncommon_ancestors(op_groups, opsgroup_groups, upstream_op, group)
                        for i, g in enumerate(downstream_groups):
                            if i == 0:
                                inputs[g].add((full_name, upstream_groups[0]))
                            # There is no need to pass the condition param as argument to the downstream ops.
                            #TODO: this might also apply to ops. add a TODO here and think about it.
                            elif i == len(downstream_groups
                                         ) - 1 and is_condition_param:
                                continue
                            else:
                                inputs[g].add((full_name, None))
                        for i, g in enumerate(upstream_groups):
                            if i == len(upstream_groups) - 1:
                                outputs[g].add((full_name, None))
                            else:
                                outputs[g].add(
                                    (full_name, upstream_groups[i + 1]))
                    elif not is_condition_param:
                        for g in op_groups[group.name]:
                            inputs[g].add((full_name, None))
            for subgroup in group.groups:
                _get_inputs_outputs_recursive_opsgroup(subgroup)
        _get_inputs_outputs_recursive_opsgroup(root_group)
        # Generate the input for SubGraph along with parallelfor
        for sub_graph in opsgroup_groups:
            if sub_graph in op_name_to_for_loop_op:
                # The opsgroup list is sorted with the farthest group as the first and
                # the opsgroup itself as the last. To get the latest opsgroup which is
                # not the opsgroup itself -2 is used.
                parent = opsgroup_groups[sub_graph][-2]
                if parent and parent.startswith('subgraph'):
                    # propagate only op's pipeline param from subgraph to parallelfor
                    loop_op = op_name_to_for_loop_op[sub_graph]
                    pipeline_param = loop_op.loop_args.items_or_pipeline_param
                    if loop_op.items_is_pipeline_param and pipeline_param.op_name:
                        param_name = '%s-%s' % (sanitize_k8s_name(
                            pipeline_param.op_name), pipeline_param.name)
                        inputs[parent].add((param_name, pipeline_param.op_name))
        return inputs, outputs
def _get_dependencies(self, pipeline, root_group, op_groups,
                      opsgroups_groups, opsgroups, condition_params):
    """Get dependent groups and ops for all ops and groups.

    Returns:
      A dict. Key is group/op name, value is a list of dependent groups/ops.
      The dependencies are calculated in the following way: if op2 depends on op1,
      and their ancestors are [root, G1, G2, op1] and [root, G1, G3, G4, op2],
      then G3 is dependent on G2. Basically dependency only exists in the first uncommon
      ancesters in their ancesters chain. Only sibling groups/ops can have dependencies.
    """
    dependencies = defaultdict(set)
    for op in pipeline.ops.values():
        # Upstream ops come from two places: data dependencies (an input
        # param produced by another op, including condition params) and
        # explicit ordering dependencies (op.dependent_names).
        upstream_op_names = set()
        for param in op.inputs + list(condition_params[op.name]):
            if param.op_name:
                upstream_op_names.add(param.op_name)
        upstream_op_names |= set(op.dependent_names)

        for upstream_op_name in upstream_op_names:
            # the dependent op could be either a BaseOp or an opsgroup
            if upstream_op_name in pipeline.ops:
                upstream_op = pipeline.ops[upstream_op_name]
            elif upstream_op_name in opsgroups:
                upstream_op = opsgroups[upstream_op_name]
            else:
                raise ValueError('compiler cannot find the ' +
                                 upstream_op_name)

            # Record the edge between the first pair of uncommon ancestors,
            # so only sibling groups/ops end up depending on each other.
            upstream_groups, downstream_groups = self._get_uncommon_ancestors(
                op_groups, opsgroups_groups, upstream_op, op)
            dependencies[downstream_groups[0]].add(upstream_groups[0])

    # Generate dependencies based on the recursive opsgroups
    #TODO: refactor the following codes with the above
    def _get_dependency_opsgroup(group, dependencies):
        # Explicit dependencies declared on the group itself.
        upstream_op_names = set(
            [dependency.name for dependency in group.dependencies])
        if group.recursive_ref:
            # Recursive references also pull in data dependencies.
            for param in group.inputs + list(condition_params[group.name]):
                if param.op_name:
                    upstream_op_names.add(param.op_name)

        for op_name in upstream_op_names:
            if op_name in pipeline.ops:
                upstream_op = pipeline.ops[op_name]
            elif op_name in opsgroups:
                upstream_op = opsgroups[op_name]
            else:
                raise ValueError('compiler cannot find the ' + op_name)
            upstream_groups, downstream_groups = \
                self._get_uncommon_ancestors(op_groups, opsgroups_groups, upstream_op, group)
            dependencies[downstream_groups[0]].add(upstream_groups[0])

        # Recurse so every nested opsgroup contributes its dependencies.
        for subgroup in group.groups:
            _get_dependency_opsgroup(subgroup, dependencies)

    _get_dependency_opsgroup(root_group, dependencies)

    return dependencies
def _resolve_value_or_reference(self, value_or_reference,
                                potential_references):
    """Resolve a value or a PipelineParam into an Argo template string.

    Args:
      value_or_reference: value or reference to be resolved. It could be basic python types or PipelineParam
      potential_references(dict{str->str}): a dictionary of parameter names to task names
    """
    if not isinstance(value_or_reference, dsl.PipelineParam):
        # Plain Python value: render it literally.
        return str(value_or_reference)

    parameter_name = value_or_reference.full_name
    # Collect every task known to provide this parameter; only the first
    # match matters below.
    providing_tasks = [
        candidate_task for name, candidate_task in potential_references
        if name == parameter_name
    ]
    if not providing_tasks:
        return '{{inputs.parameters.%s}}' % parameter_name

    chosen_task = providing_tasks[0]
    # A None task means the parameter comes from an ancient ancestor rather
    # than a parent, so it is resolved as an input of the current group.
    if chosen_task is None:
        return '{{inputs.parameters.%s}}' % parameter_name
    return '{{tasks.%s.outputs.parameters.%s}}' % (chosen_task,
                                                   parameter_name)
@staticmethod
def _resolve_task_pipeline_param(pipeline_param: PipelineParam,
                                 group_type) -> str:
    """Render a PipelineParam as an Argo reference string.

    A param without a producing op resolves to a workflow-level parameter;
    inside a subgraph it resolves to an input parameter, otherwise to the
    producing task's output parameter.
    """
    if pipeline_param.op_name is None:
        return '{{workflow.parameters.%s}}' % pipeline_param.name

    sanitized_op = sanitize_k8s_name(pipeline_param.op_name)
    qualified_name = '%s-%s' % (sanitized_op, pipeline_param.name)
    if group_type == 'subgraph':
        return '{{inputs.parameters.%s}}' % (qualified_name)
    return '{{tasks.%s.outputs.parameters.%s}}' % (sanitized_op,
                                                   qualified_name)
def _group_to_dag_template(self, group, inputs, outputs, dependencies):
    """Generate template given an OpsGroup.

    inputs, outputs, dependencies are all helper dicts keyed by group/op
    name (see _create_dag_templates for their shapes).
    """
    template = {'name': group.name}
    if group.parallelism != None:
        template["parallelism"] = group.parallelism

    # Generate inputs section.
    if inputs.get(group.name, None):
        template_inputs = [{'name': x[0]} for x in inputs[group.name]]
        # Sorted for deterministic yaml output.
        template_inputs.sort(key=lambda x: x['name'])
        template['inputs'] = {'parameters': template_inputs}

    # Generate outputs section.
    if outputs.get(group.name, None):
        template_outputs = []
        for param_name, dependent_name in outputs[group.name]:
            template_outputs.append({
                'name': param_name,
                'valueFrom': {
                    'parameter':
                        '{{tasks.%s.outputs.parameters.%s}}' %
                        (dependent_name, param_name)
                }
            })
        template_outputs.sort(key=lambda x: x['name'])
        template['outputs'] = {'parameters': template_outputs}

    # Generate tasks section.
    tasks = []
    sub_groups = group.groups + group.ops
    for sub_group in sub_groups:
        is_recursive_subgroup = (
            isinstance(sub_group, OpsGroup) and sub_group.recursive_ref)
        # Special handling for recursive subgroup: use the existing opsgroup name
        if is_recursive_subgroup:
            task = {
                'name': sub_group.recursive_ref.name,
                'template': sub_group.recursive_ref.name,
            }
        else:
            task = {
                'name': sub_group.name,
                'template': sub_group.name,
            }
        if isinstance(sub_group,
                      dsl.OpsGroup) and sub_group.type == 'condition':
            subgroup_inputs = inputs.get(sub_group.name, [])
            condition = sub_group.condition

            # Render both operands, then emit an Argo 'when' expression.
            operand1_value = self._resolve_value_or_reference(
                condition.operand1, subgroup_inputs)
            operand2_value = self._resolve_value_or_reference(
                condition.operand2, subgroup_inputs)
            if condition.operator in ['==', '!=']:
                # Equality comparisons are quoted so they compare as strings.
                operand1_value = '"' + operand1_value + '"'
                operand2_value = '"' + operand2_value + '"'
            task['when'] = '{} {} {}'.format(operand1_value,
                                             condition.operator,
                                             operand2_value)

        # Generate dependencies section for this task.
        if dependencies.get(sub_group.name, None):
            group_dependencies = list(dependencies[sub_group.name])
            group_dependencies.sort()
            task['dependencies'] = group_dependencies

        # Generate arguments section for this task.
        if inputs.get(sub_group.name, None):
            task['arguments'] = {
                'parameters':
                    self.get_arguments_for_sub_group(
                        sub_group, is_recursive_subgroup, inputs)
            }

        # additional task modifications for withItems and withParam
        if isinstance(sub_group, dsl.ParallelFor):
            if sub_group.items_is_pipeline_param:
                # these loop args are a 'withParam' rather than 'withItems'.
                # i.e., rather than a static list, they are either the output of another task or were input
                # as global pipeline parameters
                pipeline_param = sub_group.loop_args.items_or_pipeline_param
                withparam_value = self._resolve_task_pipeline_param(
                    pipeline_param, group.type)
                if pipeline_param.op_name:
                    # these loop args are the output of another task
                    if 'dependencies' not in task or task[
                            'dependencies'] is None:
                        task['dependencies'] = []
                    if sanitize_k8s_name(
                            pipeline_param.op_name
                    ) not in task[
                            'dependencies'] and group.type != 'subgraph':
                        task['dependencies'].append(
                            sanitize_k8s_name(pipeline_param.op_name))

                task['withParam'] = withparam_value
            else:
                # Need to sanitize the dict keys for consistency.
                loop_tasks = sub_group.loop_args.to_list_for_task_yaml()
                nested_pipeline_params = extract_pipelineparams_from_any(
                    loop_tasks)

                # Set dependencies in case of nested pipeline_params
                map_to_tmpl_var = {
                    str(p): self._resolve_task_pipeline_param(p, group.type)
                    for p in nested_pipeline_params
                }
                for pipeline_param in nested_pipeline_params:
                    if pipeline_param.op_name:
                        # these pipeline_param are the output of another task
                        if 'dependencies' not in task or task[
                                'dependencies'] is None:
                            task['dependencies'] = []
                        if sanitize_k8s_name(pipeline_param.op_name
                                            ) not in task['dependencies']:
                            task['dependencies'].append(
                                sanitize_k8s_name(pipeline_param.op_name))

                sanitized_tasks = []
                # NOTE(review): assumes loop_tasks is non-empty here —
                # loop_tasks[0] would raise IndexError otherwise; confirm
                # that to_list_for_task_yaml never returns [].
                if isinstance(loop_tasks[0], dict):
                    for argument_set in loop_tasks:
                        c_dict = {}
                        for k, v in argument_set.items():
                            c_dict[sanitize_k8s_name(k, True)] = v
                        sanitized_tasks.append(c_dict)
                else:
                    sanitized_tasks = loop_tasks
                # Replace pipeline param if map_to_tmpl_var not empty
                task['withItems'] = _process_obj(
                    sanitized_tasks,
                    map_to_tmpl_var) if map_to_tmpl_var else sanitized_tasks

        # We will sort dependencies to have determinitc yaml and thus stable tests
        if task.get('dependencies'):
            task['dependencies'].sort()

        tasks.append(task)
    tasks.sort(key=lambda x: x['name'])
    template['dag'] = {'tasks': tasks}
    return template
def get_arguments_for_sub_group(
        self,
        sub_group: Union[OpsGroup, dsl._container_op.BaseOp],
        is_recursive_subgroup: Optional[bool],
        inputs: Dict[Text, Tuple[Text, Text]],
):
    """Build the Argo 'arguments.parameters' list for one sub-group task.

    Each entry maps an argument name to a template reference: a task
    output, a group input, or a loop item ('{{item}}' / '{{item.x}}').
    """
    arguments = []
    for param_name, dependent_name in inputs[sub_group.name]:
        if is_recursive_subgroup:
            # Map the incoming param back to the matching argument declared
            # on the recursive reference, so the original opsgroup's
            # parameter name is used.
            for input_name, input in sub_group.arguments.items():
                if param_name == input.full_name:
                    break
            # NOTE(review): if no argument matches, input_name is simply the
            # last key iterated — presumably a match always exists for a
            # recursive subgroup; confirm upstream.
            referenced_input = sub_group.recursive_ref.arguments[input_name]
            argument_name = referenced_input.full_name
        else:
            argument_name = param_name

        # Preparing argument. It can be pipeline input reference, task output reference or loop item (or loop item attribute
        sanitized_loop_arg_full_name = '---'
        if isinstance(sub_group, dsl.ParallelFor):
            sanitized_loop_arg_full_name = sanitize_k8s_name(
                sub_group.loop_args.full_name)
        arg_ref_full_name = sanitize_k8s_name(param_name)
        # We only care about the reference to the current loop item, not the outer loops
        if isinstance(sub_group,
                      dsl.ParallelFor) and arg_ref_full_name.startswith(
                          sanitized_loop_arg_full_name):
            if arg_ref_full_name == sanitized_loop_arg_full_name:
                # Reference to the whole loop item.
                argument_value = '{{item}}'
            elif _for_loop.LoopArgumentVariable.name_is_loop_arguments_variable(
                    param_name):
                # Reference to one attribute of the loop item.
                subvar_name = _for_loop.LoopArgumentVariable.get_subvar_name(
                    param_name)
                argument_value = '{{item.%s}}' % subvar_name
            else:
                raise ValueError(
                    "Argument seems to reference the loop item, but not the item itself and not some attribute of the item. param_name: {}, "
                    .format(param_name))
        else:
            if dependent_name:
                # Produced by a sibling task: reference its output.
                argument_value = '{{tasks.%s.outputs.parameters.%s}}' % (
                    dependent_name, param_name)
            else:
                # Passed down from the enclosing group.
                argument_value = '{{inputs.parameters.%s}}' % param_name

        arguments.append({
            'name': argument_name,
            'value': argument_value,
        })

    # Sorted for deterministic yaml output.
    arguments.sort(key=lambda x: x['name'])
    return arguments
def _create_dag_templates(self,
                          pipeline,
                          op_transformers=None,
                          op_to_templates_handler=None):
    """Create all groups and ops templates in the pipeline.

    Args:
      pipeline: Pipeline context object to get all the pipeline data from.
      op_transformers: A list of functions that are applied to all ContainerOp instances that are being processed.
      op_to_templates_handler: Handler which converts a base op into a list of argo templates.

    Returns:
      A list of Argo template dicts: one per opsgroup plus the templates
      produced by op_to_templates_handler for every op.
    """
    op_to_templates_handler = op_to_templates_handler or (
        lambda op: [_op_to_template(op)])
    root_group = pipeline.groups[0]

    # Call the transformation functions before determining the inputs/outputs, otherwise
    # the user would not be able to use pipeline parameters in the container definition
    # (for example as pod labels) - the generated template is invalid.
    for op in pipeline.ops.values():
        for transformer in op_transformers or []:
            transformer(op)

    # Generate core data structures to prepare for argo yaml generation
    #   op_name_to_parent_groups: op name -> list of ancestor groups including the current op
    #   opsgroups: a dictionary of ospgroup.name -> opsgroup
    #   inputs, outputs: group/op names -> list of tuples (full_param_name, producing_op_name)
    #   condition_params: recursive_group/op names -> list of pipelineparam
    #   dependencies: group/op name -> list of dependent groups/ops.
    # Special Handling for the recursive opsgroup
    #   op_name_to_parent_groups also contains the recursive opsgroups
    #   condition_params from _get_condition_params_for_ops also contains the recursive opsgroups
    #   groups does not include the recursive opsgroups
    opsgroups = self._get_groups(root_group)
    op_name_to_parent_groups = self._get_groups_for_ops(root_group)
    opgroup_name_to_parent_groups = self._get_groups_for_opsgroups(
        root_group)
    condition_params = self._get_condition_params_for_ops(root_group)
    op_name_to_for_loop_op = self._get_for_loop_ops(root_group)
    inputs, outputs = self._get_inputs_outputs(
        pipeline,
        root_group,
        op_name_to_parent_groups,
        opgroup_name_to_parent_groups,
        condition_params,
        op_name_to_for_loop_op,
    )
    dependencies = self._get_dependencies(
        pipeline,
        root_group,
        op_name_to_parent_groups,
        opgroup_name_to_parent_groups,
        opsgroups,
        condition_params,
    )

    # One DAG template per opsgroup.
    templates = []
    for opsgroup in opsgroups.keys():
        template = self._group_to_dag_template(opsgroups[opsgroup], inputs,
                                               outputs, dependencies)
        templates.append(template)

    for op in pipeline.ops.values():
        if hasattr(op, 'importer_spec'):
            raise ValueError(
                'dsl.importer is not supported with v1 compiler.')
        if self._mode == dsl.PipelineExecutionMode.V2_COMPATIBLE:
            # Rewrite the op so it runs through the v2-compatible launcher.
            v2_compat.update_op(
                op,
                pipeline_name=self._pipeline_name_param,
                pipeline_root=self._pipeline_root_param,
                launcher_image=self._launcher_image)
        templates.extend(op_to_templates_handler(op))

        if hasattr(op, 'custom_job_spec'):
            warnings.warn(
                'CustomJob spec is not supported yet when running on KFP.'
                ' The component will execute within the KFP cluster.')

    return templates
def _create_pipeline_workflow(self,
                              parameter_defaults,
                              pipeline,
                              op_transformers=None,
                              pipeline_conf=None):
    """Create workflow for the pipeline.

    Assembles all group/op templates into a single Argo Workflow dict and
    applies the pipeline-level configuration from pipeline_conf.

    NOTE(review): pipeline_conf is dereferenced unconditionally below
    despite its None default — presumably callers always pass it; confirm.
    """
    # Input Parameters
    input_params = []
    for name, value in parameter_defaults.items():
        param = {'name': name}
        if value is not None:
            param['value'] = value
        input_params.append(param)

    # Making the pipeline group name unique to prevent name clashes with templates
    pipeline_group = pipeline.groups[0]
    temp_pipeline_group_name = uuid.uuid4().hex
    pipeline_group.name = temp_pipeline_group_name

    # Templates
    templates = self._create_dag_templates(pipeline, op_transformers)

    # Exit Handler
    exit_handler = None
    if pipeline.groups[0].groups:
        first_group = pipeline.groups[0].groups[0]
        if first_group.type == 'exit_handler':
            exit_handler = first_group.exit_op

    # The whole pipeline workflow
    # It must valid as a subdomain
    pipeline_name = pipeline.name or 'pipeline'

    # Workaround for pipeline name clashing with container template names
    # TODO: Make sure template names cannot clash at all (container, DAG, workflow)
    template_map = {
        template['name'].lower(): template for template in templates
    }
    from ..components._naming import _make_name_unique_by_adding_index
    pipeline_template_name = _make_name_unique_by_adding_index(
        pipeline_name, template_map, '-')

    # Restoring the name of the pipeline template
    pipeline_template = template_map[temp_pipeline_group_name]
    pipeline_template['name'] = pipeline_template_name

    # Sorted for deterministic yaml output.
    templates.sort(key=lambda x: x['name'])
    workflow = {
        'apiVersion': 'argoproj.io/v1alpha1',
        'kind': 'Workflow',
        'metadata': {
            'generateName': pipeline_template_name + '-'
        },
        'spec': {
            'entrypoint': pipeline_template_name,
            'templates': templates,
            'arguments': {
                'parameters': input_params
            },
            'serviceAccountName': 'pipeline-runner',
        }
    }
    # set parallelism limits at pipeline level
    if pipeline_conf.parallelism:
        workflow['spec']['parallelism'] = pipeline_conf.parallelism

    # set ttl after workflow finishes
    if pipeline_conf.ttl_seconds_after_finished >= 0:
        workflow['spec']['ttlStrategy'] = {
            'secondsAfterCompletion':
                pipeline_conf.ttl_seconds_after_finished
        }

    if pipeline_conf._pod_disruption_budget_min_available:
        pod_disruption_budget = {
            "minAvailable":
                pipeline_conf._pod_disruption_budget_min_available
        }
        workflow['spec']['podDisruptionBudget'] = pod_disruption_budget

    if len(pipeline_conf.image_pull_secrets) > 0:
        image_pull_secrets = []
        for image_pull_secret in pipeline_conf.image_pull_secrets:
            image_pull_secrets.append(
                convert_k8s_obj_to_json(image_pull_secret))
        workflow['spec']['imagePullSecrets'] = image_pull_secrets

    if pipeline_conf.timeout:
        workflow['spec']['activeDeadlineSeconds'] = pipeline_conf.timeout

    if exit_handler:
        workflow['spec']['onExit'] = exit_handler.name

    # This can be overwritten by the task specific
    # nodeselection, specified in the template.
    if pipeline_conf.default_pod_node_selector:
        workflow['spec'][
            'nodeSelector'] = pipeline_conf.default_pod_node_selector

    if pipeline_conf.dns_config:
        workflow['spec']['dnsConfig'] = convert_k8s_obj_to_json(
            pipeline_conf.dns_config)

    if pipeline_conf.image_pull_policy != None:
        if pipeline_conf.image_pull_policy in [
                "Always", "Never", "IfNotPresent"
        ]:
            # Apply the default policy only to containers that did not set
            # their own.
            for template in workflow["spec"]["templates"]:
                container = template.get('container', None)
                if container and "imagePullPolicy" not in container:
                    container[
                        "imagePullPolicy"] = pipeline_conf.image_pull_policy
        else:
            raise ValueError(
                'Invalid imagePullPolicy. Must be one of `Always`, `Never`, `IfNotPresent`.'
            )
    return workflow
def _validate_exit_handler(self, pipeline):
"""Makes sure there is only one global exit handler.
Note this is a temporary workaround until argo supports local
exit handler.
"""
def _validate_exit_handler_helper(group, exiting_op_names,
handler_exists):
if group.type == 'exit_handler':
if handler_exists or len(exiting_op_names) > 1:
raise ValueError(
'Only one global exit_handler is allowed and all ops need to be included.'
)
handler_exists = True
if group.ops:
exiting_op_names.extend([x.name for x in group.ops])
for g in group.groups:
_validate_exit_handler_helper(g, exiting_op_names,
handler_exists)
return _validate_exit_handler_helper(pipeline.groups[0], [], False)
def _sanitize_and_inject_artifact(self,
                                  pipeline: dsl.Pipeline,
                                  pipeline_conf=None):
    """Sanitize operator/param names and inject pipeline artifact
    location.

    Mutates the ops in place and rebuilds pipeline.ops keyed by the
    sanitized op names.

    NOTE(review): pipeline_conf is unused in this body — presumably kept
    for signature compatibility; confirm against callers.
    """
    # Sanitize operator names and param names
    sanitized_ops = {}

    for op in pipeline.ops.values():
        sanitized_name = sanitize_k8s_name(op.name)
        op.name = sanitized_name
        # Output params carry both their own name and the producing op's
        # name; both must be sanitized consistently.
        for param in op.outputs.values():
            param.name = sanitize_k8s_name(param.name, True)
            if param.op_name:
                param.op_name = sanitize_k8s_name(param.op_name)
        if op.output is not None and not isinstance(
                op.output, dsl._container_op._MultipleOutputsError):
            op.output.name = sanitize_k8s_name(op.output.name, True)
            op.output.op_name = sanitize_k8s_name(op.output.op_name)
        if op.dependent_names:
            op.dependent_names = [
                sanitize_k8s_name(name) for name in op.dependent_names
            ]
        if isinstance(op, dsl.ContainerOp) and op.file_outputs is not None:
            sanitized_file_outputs = {}
            for key in op.file_outputs.keys():
                sanitized_file_outputs[sanitize_k8s_name(
                    key, True)] = op.file_outputs[key]
            op.file_outputs = sanitized_file_outputs
        elif isinstance(
                op, dsl.ResourceOp) and op.attribute_outputs is not None:
            sanitized_attribute_outputs = {}
            for key in op.attribute_outputs.keys():
                sanitized_attribute_outputs[sanitize_k8s_name(key, True)] = \
                    op.attribute_outputs[key]
            op.attribute_outputs = sanitized_attribute_outputs
        if isinstance(op, dsl.ContainerOp):
            # Artifact keys are sanitized too; values (paths/arguments) are
            # left untouched.
            if op.input_artifact_paths:
                op.input_artifact_paths = {
                    sanitize_k8s_name(key, True): value
                    for key, value in op.input_artifact_paths.items()
                }
            if op.artifact_arguments:
                op.artifact_arguments = {
                    sanitize_k8s_name(key, True): value
                    for key, value in op.artifact_arguments.items()
                }
        sanitized_ops[sanitized_name] = op
    pipeline.ops = sanitized_ops
def _create_workflow(
    self,
    pipeline_func: Callable,
    pipeline_name: Optional[Text] = None,
    pipeline_description: Optional[Text] = None,
    params_list: Optional[List[dsl.PipelineParam]] = None,
    pipeline_conf: Optional[dsl.PipelineConf] = None,
) -> Dict[Text, Any]:
    """Internal implementation of create_workflow.

    Invokes pipeline_func under a dsl.Pipeline context to record the op
    graph, then compiles it into a complete Argo Workflow dict with
    compiler metadata annotations/labels attached.
    """
    params_list = params_list or []

    # Create the arg list with no default values and call pipeline function.
    # Assign type information to the PipelineParam
    pipeline_meta = _extract_pipeline_metadata(pipeline_func)
    pipeline_meta.name = pipeline_name or pipeline_meta.name
    pipeline_meta.description = pipeline_description or pipeline_meta.description
    pipeline_name = sanitize_k8s_name(pipeline_meta.name)

    # Need to first clear the default value of dsl.PipelineParams. Otherwise, it
    # will be resolved immediately in place when being to each component.
    default_param_values = OrderedDict()

    if self._pipeline_root_param:
        params_list.append(self._pipeline_root_param)
    if self._pipeline_name_param:
        params_list.append(self._pipeline_name_param)

    for param in params_list:
        default_param_values[param.name] = param.value
        param.value = None

    args_list = []
    kwargs_dict = dict()
    signature = inspect.signature(pipeline_func)
    for arg_name, arg in signature.parameters.items():
        arg_type = None
        for input in pipeline_meta.inputs or []:
            if arg_name == input.name:
                arg_type = input.type
                break
        param = dsl.PipelineParam(
            sanitize_k8s_name(arg_name, True), param_type=arg_type)
        # Keyword-only parameters must be passed by name.
        if arg.kind == inspect.Parameter.KEYWORD_ONLY:
            kwargs_dict[arg_name] = param
        else:
            args_list.append(param)

    # Running the pipeline function records all ops/groups on dsl_pipeline.
    with dsl.Pipeline(pipeline_name) as dsl_pipeline:
        pipeline_func(*args_list, **kwargs_dict)

    pipeline_conf = pipeline_conf or dsl_pipeline.conf  # Configuration passed to the compiler is overriding. Unfortunately, it's not trivial to detect whether the dsl_pipeline.conf was ever modified.

    self._validate_exit_handler(dsl_pipeline)
    self._sanitize_and_inject_artifact(dsl_pipeline, pipeline_conf)

    # Fill in the default values by merging two param lists.
    args_list_with_defaults = OrderedDict()
    if pipeline_meta.inputs:
        args_list_with_defaults = OrderedDict([
            (sanitize_k8s_name(input_spec.name, True), input_spec.default)
            for input_spec in pipeline_meta.inputs
        ])

    if params_list:
        # Or, if args are provided by params_list, fill in pipeline_meta.
        for k, v in default_param_values.items():
            args_list_with_defaults[k] = v

        pipeline_meta.inputs = pipeline_meta.inputs or []
        for param in params_list:
            pipeline_meta.inputs.append(
                InputSpec(
                    name=param.name,
                    type=param.param_type,
                    default=default_param_values[param.name]))

    op_transformers = [add_pod_env]

    # Stamp every pod with SDK version/environment labels.
    pod_labels = {
        _SDK_VERSION_LABEL: kfp.__version__,
        _SDK_ENV_LABEL: _SDK_ENV_DEFAULT
    }
    op_transformers.append(add_pod_labels(pod_labels))

    op_transformers.extend(pipeline_conf.op_transformers)

    if self._mode == dsl.PipelineExecutionMode.V2_COMPATIBLE:
        # Add self._pipeline_name_param and self._pipeline_root_param to ops inputs
        # if they don't exist already.
        for op in dsl_pipeline.ops.values():
            insert_pipeline_name_param = True
            insert_pipeline_root_param = True
            for param in op.inputs:
                if param.name == self._pipeline_name_param.name:
                    insert_pipeline_name_param = False
                elif param.name == self._pipeline_root_param.name:
                    insert_pipeline_root_param = False

            if insert_pipeline_name_param:
                op.inputs.append(self._pipeline_name_param)
            if insert_pipeline_root_param:
                op.inputs.append(self._pipeline_root_param)

    workflow = self._create_pipeline_workflow(
        args_list_with_defaults,
        dsl_pipeline,
        op_transformers,
        pipeline_conf,
    )

    from ._data_passing_rewriter import fix_big_data_passing
    workflow = fix_big_data_passing(workflow)

    if pipeline_conf and pipeline_conf.data_passing_method != None:
        workflow = pipeline_conf.data_passing_method(workflow)

    metadata = workflow.setdefault('metadata', {})
    annotations = metadata.setdefault('annotations', {})
    labels = metadata.setdefault('labels', {})

    annotations[_SDK_VERSION_LABEL] = kfp.__version__
    annotations[
        'pipelines.kubeflow.org/pipeline_compilation_time'] = datetime.datetime.now(
        ).isoformat()
    annotations['pipelines.kubeflow.org/pipeline_spec'] = json.dumps(
        pipeline_meta.to_dict(), sort_keys=True)

    if self._mode == dsl.PipelineExecutionMode.V2_COMPATIBLE:
        annotations['pipelines.kubeflow.org/v2_pipeline'] = "true"
        labels['pipelines.kubeflow.org/v2_pipeline'] = "true"

    # Labels might be logged better than annotations so adding some information here as well
    labels[_SDK_VERSION_LABEL] = kfp.__version__

    return workflow
def compile(self,
            pipeline_func,
            package_path,
            type_check: bool = True,
            pipeline_conf: Optional[dsl.PipelineConf] = None):
    """Compile the given pipeline function into workflow yaml.

    Args:
      pipeline_func: Pipeline functions with @dsl.pipeline decorator.
      package_path: The output workflow tar.gz file path. for example,
        "~/a.tar.gz"
      type_check: Whether to enable the type check or not, default: True.
      pipeline_conf: PipelineConf instance. Can specify op transforms, image
        pull secrets and other pipeline-level configuration options. Overrides
        any configuration that may be set by the pipeline.
    """
    pipeline_root_dir = getattr(pipeline_func, 'pipeline_root', None)
    if (pipeline_root_dir is not None or
            self._mode == dsl.PipelineExecutionMode.V2_COMPATIBLE):
        self._pipeline_root_param = dsl.PipelineParam(
            name=dsl.ROOT_PARAMETER_NAME, value=pipeline_root_dir or '')

    if self._mode == dsl.PipelineExecutionMode.V2_COMPATIBLE:
        pipeline_name = getattr(pipeline_func, '_component_human_name', '')
        if not pipeline_name:
            raise ValueError(
                '@dsl.pipeline decorator name field is required in v2 compatible mode'
            )
        # pipeline names have one of the following formats:
        # * pipeline/<name>
        # * namespace/<ns>/pipeline/<name>
        # when compiling, we will only have pipeline/<name>, but it will be overriden
        # when uploading the pipeline to KFP API server.
        self._pipeline_name_param = dsl.PipelineParam(
            name='pipeline-name', value=f'pipeline/{pipeline_name}')

    import kfp.deprecated as kfp
    # Save the module-level flags so they can be restored afterwards —
    # compilation temporarily overrides them.
    type_check_old_value = kfp.TYPE_CHECK
    compiling_for_v2_old_value = kfp.COMPILING_FOR_V2
    kfp.COMPILING_FOR_V2 = self._mode in [
        dsl.PipelineExecutionMode.V2_COMPATIBLE,
        dsl.PipelineExecutionMode.V2_ENGINE,
    ]

    try:
        kfp.TYPE_CHECK = type_check
        self._create_and_write_workflow(
            pipeline_func=pipeline_func,
            pipeline_conf=pipeline_conf,
            package_path=package_path)
    finally:
        # Restore the flags even if compilation raised.
        kfp.TYPE_CHECK = type_check_old_value
        kfp.COMPILING_FOR_V2 = compiling_for_v2_old_value
@staticmethod
def _write_workflow(workflow: Dict[Text, Any], package_path: Text = None):
    """Dump pipeline workflow into yaml spec and write out in the format
    specified by the user.

    Args:
      workflow: Workflow spec of the pipline, dict.
      package_path: file path to be written. If not specified, a yaml_text string will be returned.

    Returns:
      The YAML text when package_path is None, otherwise None (the spec is
      written to disk).

    Raises:
      ValueError: if package_path has an unsupported extension.
    """
    yaml_text = dump_yaml(workflow)

    if package_path is None:
        return yaml_text

    if package_path.endswith('.tar.gz') or package_path.endswith('.tgz'):
        from contextlib import closing
        from io import BytesIO
        with tarfile.open(package_path, "w:gz") as tar:
            with closing(BytesIO(yaml_text.encode())) as yaml_file:
                tarinfo = tarfile.TarInfo('pipeline.yaml')
                tarinfo.size = len(yaml_file.getvalue())
                tar.addfile(tarinfo, fileobj=yaml_file)
    elif package_path.endswith('.zip'):
        # Renamed from `zip` to avoid shadowing the builtin.
        with zipfile.ZipFile(package_path, "w") as zip_file:
            zipinfo = zipfile.ZipInfo('pipeline.yaml')
            zipinfo.compress_type = zipfile.ZIP_DEFLATED
            zip_file.writestr(zipinfo, yaml_text)
    elif package_path.endswith('.yaml') or package_path.endswith('.yml'):
        with open(package_path, 'w') as yaml_file:
            yaml_file.write(yaml_text)
    else:
        # Fixed grammar of the user-facing message ("should end with").
        raise ValueError('The output path ' + package_path +
                         ' should end with one of the following formats: '
                         '[.tar.gz, .tgz, .zip, .yaml, .yml]')
def _create_and_write_workflow(self,
                               pipeline_func: Callable,
                               pipeline_name: Text = None,
                               pipeline_description: Text = None,
                               params_list: List[dsl.PipelineParam] = None,
                               pipeline_conf: dsl.PipelineConf = None,
                               package_path: Text = None) -> None:
    """Compile the given pipeline function and dump it to specified file
    format."""
    compiled_workflow = self._create_workflow(
        pipeline_func,
        pipeline_name=pipeline_name,
        pipeline_description=pipeline_description,
        params_list=params_list,
        pipeline_conf=pipeline_conf)
    # Note: the package is written out before validation runs.
    self._write_workflow(compiled_workflow, package_path)
    _validate_workflow(compiled_workflow)
def _validate_workflow(workflow: dict):
    """Run best-effort sanity checks on a compiled Argo workflow dict.

    Checks for unresolved PipelineParam placeholders, and when the `argo`
    CLI is found on PATH, lints the workflow with it.

    Args:
      workflow: the compiled workflow spec. The caller's dict is not
        modified.

    Raises:
      RuntimeError: if an unresolved PipelineParam is found, or a working
        `argo lint` rejects the workflow.
    """
    import copy

    # Bug fix: the previous shallow .copy() still shared the nested
    # parameter dicts, so the Argo-lint workaround below mutated the
    # caller's workflow. A deep copy keeps this function side-effect free.
    workflow = copy.deepcopy(workflow)
    # Working around Argo lint issue
    for argument in workflow['spec'].get('arguments', {}).get('parameters', []):
        if 'value' not in argument:
            argument['value'] = ''

    yaml_text = dump_yaml(workflow)
    if '{{pipelineparam' in yaml_text:
        raise RuntimeError(
            '''Internal compiler error: Found unresolved PipelineParam.
Please create a new issue at https://github.com/kubeflow/pipelines/issues attaching the pipeline code and the pipeline package.'''
        )

    # Running Argo lint if available
    import shutil
    argo_path = shutil.which('argo')
    if argo_path:
        has_working_argo_lint = False
        try:
            # Probe with a known-good workflow first so a broken/old argo
            # install degrades to a warning instead of a hard failure.
            has_working_argo_lint = _run_argo_lint("""
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: hello-world-
spec:
  entrypoint: whalesay
  templates:
  - name: whalesay
    container:
      image: docker/whalesay:latest""")
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are not swallowed.
            warnings.warn(
                "Cannot validate the compiled workflow. Found the argo program in PATH, but it's not usable. argo CLI v3.1.1+ should work."
            )

        if has_working_argo_lint:
            _run_argo_lint(yaml_text)
def _run_argo_lint(yaml_text: str):
    """Lint the given workflow YAML with the `argo` CLI if it is on PATH.

    Returns True when argo accepted the workflow, False when the argo
    binary is unavailable; raises RuntimeError when argo rejects it.
    """
    # Running Argo lint if available
    import shutil
    import subprocess

    argo_path = shutil.which('argo')
    if not argo_path:
        return False

    result = subprocess.run(
        [
            argo_path, '--offline=true', '--kinds=workflows', 'lint',
            '/dev/stdin'
        ],
        input=yaml_text.encode('utf-8'),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    if not result.returncode:
        # Lint passed.
        return True

    stderr_text = result.stderr.decode('utf-8')
    if re.match(
            pattern=r'.+failed to resolve {{tasks\..+\.outputs\.artifacts\..+}}.+',
            string=stderr_text):
        raise RuntimeError(
            'Compiler has produced Argo-incompatible workflow due to '
            'unresolvable input artifact(s). Please check whether inputPath has'
            ' been connected to outputUri placeholder, which is not supported '
            'yet. Otherwise, please create a new issue at '
            'https://github.com/kubeflow/pipelines/issues attaching the '
            'pipeline code and the pipeline package. Error: {}'.format(
                stderr_text))
    print(result)
    raise RuntimeError(
        '''Internal compiler error: Compiler has produced Argo-incompatible workflow.
Please create a new issue at https://github.com/kubeflow/pipelines/issues attaching the pipeline code and the pipeline package.
Error: {}'''.format(result.stdout.decode('utf-8')))
|
|
#
# Bindings.py -- Bindings classes for Ginga FITS viewer.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
import math
from ginga.misc import Bunch, Settings, Callback
from ginga import AutoCuts, trcalc
from ginga import cmap, imap
class ImageViewBindings(object):
"""
Mouse Operation and Bindings
"""
def __init__(self, logger, settings=None):
    """Set up the bindings with all capabilities disabled.

    Args:
      logger: logger used for diagnostic output.
      settings: optional Settings.SettingGroup; when None a default
        'bindings' group is created and populated via initialize_settings().
    """
    super(ImageViewBindings, self).__init__()
    self.logger = logger

    # Capability flags; all off until explicitly enabled.
    self.canpan = False
    self.canzoom = False
    self._ispanning = False
    self.cancut = False
    self.cancmap = False
    self.canflip = False
    self.canrotate = False

    # For panning
    self._pantype = 1
    self._start_x = None
    self._start_y = None
    self._start_panx = 0
    self._start_pany = 0

    self._start_scale_x = 0
    self._start_scale_y = 0
    self._start_rot = 0

    # Fixed identity comparison: use `is None` rather than `== None`
    # (PEP 8; also robust against objects overriding __eq__).
    if settings is None:
        # No settings passed. Set up defaults.
        settings = Settings.SettingGroup(name='bindings',
                                         logger=self.logger)
        self.initialize_settings(settings)
    self.settings = settings

    self.autocuts = AutoCuts.ZScale(self.logger)

    self.features = dict(
        # name, attr pairs
        pan='canpan', zoom='canzoom', cuts='cancut', cmap='cancmap',
        flip='canflip', rotate='canrotate')
def initialize_settings(self, settings):
    """Populate *settings* with the default binding preferences.

    Keys are grouped as: button masks (btn_*), keyboard modifiers (mod_*),
    custom one-shot modifiers (dmod_*), keyboard bindings (kp_*),
    scroll/wheel bindings (sc_*), mouse button bindings (ms_*), gesture
    bindings (gs_*), and a few scalar tuning values.
    """
    settings.addSettings(
        # You should rarely have to change these.
        btn_nobtn = 0x0,
        btn_left  = 0x1,
        btn_middle= 0x2,
        btn_right = 0x4,

        # Set up our standard modifiers
        mod_shift = ['shift_l', 'shift_r'],
        mod_ctrl = ['control_l', 'control_r'],
        mod_draw = ['meta_right'],

        # Define our custom modifiers
        # format: each entry is [trigger key, mode, description-or-None]
        dmod_draw = ['space', 'oneshot', None],
        dmod_cmapwarp = ['/', 'oneshot', None],
        #dmod_cutlo = ['<', 'oneshot', "Cut low"],
        #dmod_cuthi = ['>', 'oneshot', "Cut high"],
        dmod_cutall = ['.', 'oneshot', None],
        dmod_rotate = ['r', 'oneshot', None],
        dmod_freepan = ['q', 'oneshot', None],

        # KEYBOARD
        kp_zoom_in = ['+', '='],
        kp_zoom_out = ['-', '_'],
        kp_zoom = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '0'],
        kp_zoom_inv = ['!', '@', '#', '$', '%', '^', '&', '*', '(', ')'],
        kp_zoom_fit = ['backquote'],
        kp_autozoom_on = ['doublequote'],
        kp_autozoom_override = ['singlequote'],
        kp_dist = ['s'],
        kp_dist_reset = ['S'],
        kp_pan_set = ['p'],
        kp_center = ['c'],
        kp_cut_255 = ['A'],
        kp_cut_auto = ['a'],
        kp_autocuts_on = [':'],
        kp_autocuts_override = [';'],
        kp_cmap_restore = ['?'],
        kp_cmap_reset = [],
        kp_imap_reset = [],
        kp_flip_x = ['[', '{'],
        kp_flip_y = [']', '}'],
        kp_swap_xy = ['backslash', '|'],
        kp_rotate_reset = ['R'],
        kp_rotate_inc90 = ['e'],
        kp_rotate_dec90 = ['E'],
        kp_orient_lh = ['o'],
        kp_orient_rh = ['O'],
        kp_poly_add = ['x', 'draw+x'],
        kp_poly_del = ['z', 'draw+z'],
        kp_reset = ['escape'],

        # SCROLLING/WHEEL
        sc_pan = [],
        sc_pan_fine = [],
        sc_pan_coarse = [],
        sc_zoom = ['scroll'],
        sc_zoom_fine = ['shift+scroll'],
        sc_zoom_coarse = ['ctrl+scroll'],
        sc_contrast_fine = ['cutall+scroll'],
        sc_contrast_coarse = [],
        sc_dist = [],
        sc_cmap = ['cmapwarp+scroll'],
        sc_imap = [],

        # Acceleration/direction tuning for scroll-driven actions.
        scroll_pan_acceleration = 1.0,
        scroll_zoom_acceleration = 1.0,
        pan_reverse = False,
        zoom_scroll_reverse = False,

        # MOUSE/BUTTON
        ms_none = ['nobtn'],
        ms_cursor = ['left'],
        ms_wheel = [],
        ms_draw = ['draw+left', 'right'],
        ms_rotate = ['rotate+left'],
        ms_cmapwarp = ['cmapwarp+left', 'ctrl+right'],
        ms_cmaprest = ['ctrl+middle'],
        ms_pan = ['ctrl+left'],
        ms_freepan = ['freepan+left', 'middle'],
        ms_cutlo = ['cutlo+left'],
        ms_cuthi = ['cuthi+left'],
        ms_cutall = ['cutall+left'],
        ms_panset = ['shift+left'],

        # GESTURES (Qt version only)
        gs_pinch = [],
        pinch_actions = ['zoom'],
        pinch_zoom_acceleration = 1.0,
        pinch_rotate_acceleration = 1.0,
        )
    def window_map(self, fitsimage):
        # 'map' callback: ensure we start in the default (non-panning)
        # interaction mode whenever the window becomes visible.
        self.to_default_mode(fitsimage)
    def set_bindings(self, fitsimage):
        """Wire this binding set into `fitsimage`: clear its button and
        event maps and register callbacks for all configured events.
        """
        fitsimage.add_callback('map', self.window_map)
        bindmap = fitsimage.get_bindmap()
        bindmap.clear_button_map()
        bindmap.clear_event_map()
        # Set up bindings
        self.setup_settings_events(fitsimage, bindmap)
    def set_modifier(self, fitsimage, name, modtype='oneshot'):
        """Programmatically activate keyboard modifier `name` on the
        fitsimage's binding map (modtype: 'held', 'oneshot' or 'locked').
        """
        bindmap = fitsimage.get_bindmap()
        bindmap.set_modifier(name, modtype=modtype)
def parse_combo(self, combo):
modifier, trigger = None, combo
if '+' in combo:
if combo.endswith('+'):
if not combo.startswith('+'):
# special case: probably contains the keystroke '+'
idx = combo.index['+']
modifier, trigger = combo[:idx], combo[idx+1:]
else:
modifier, trigger = combo.split('+')
return (modifier, trigger)
    def setup_settings_events(self, fitsimage, bindmap):
        """Scan the settings dictionary and translate its entries into
        button/modifier mappings and event callbacks on `fitsimage`.

        Two passes: first buttons (btn_*) and modifiers (mod_*/dmod_*),
        then events (kp_*/ms_*/sc_*/gs_*), whose handler is the method
        on self with the same name as the setting, if one exists.
        """
        d = self.settings.getDict()
        if len(d) == 0:
            # settings group was empty -- fill it with the defaults first
            self.initialize_settings(self.settings)
            d = self.settings.getDict()
        # First scan settings for buttons and modifiers
        bindmap.clear_modifier_map()
        for name, value in d.items():
            if name.startswith('mod_'):
                modname = name[4:]
                for combo in value:
                    # NOTE: for now no chorded combinations
                    keyname = combo
                    bindmap.add_modifier(keyname, modname)
            elif name.startswith('btn_'):
                btnname = name[4:]
                bindmap.map_button(value, btnname)
            elif name.startswith('dmod_'):
                modname = name[5:]
                # custom modifiers carry [key, modtype, onscreen msg]
                keyname, modtype, msg = value
                bindmap.add_modifier(keyname, modname, modtype=modtype,
                                     msg=msg)
        # Add events
        for name, value in d.items():
            if len(name) <= 3:
                continue
            pfx = name[:3]
            if not pfx in ('kp_', 'ms_', 'sc_', 'gs_'):
                continue
            evname = name[3:]
            for combo in value:
                modifier, trigger = self.parse_combo(combo)
                bindmap.map_event(modifier, trigger, evname)
            # Register for this symbolic event if we have a handler for it
            try:
                cb_method = getattr(self, name)
            except AttributeError:
                self.logger.warn("No method found matching '%s'" % (name))
                cb_method = None
            if pfx == 'kp_':
                # keyboard event
                event = 'keydown-%s' % (evname)
                fitsimage.enable_callback(event)
                if cb_method:
                    fitsimage.add_callback(event, cb_method)
            elif pfx == 'ms_':
                # mouse/button event: one callback serves down/move/up
                for action in ('down', 'move', 'up'):
                    event = '%s-%s' % (evname, action)
                    fitsimage.enable_callback(event)
                    if cb_method:
                        fitsimage.add_callback(event, cb_method)
            elif pfx == 'sc_':
                # scrolling event
                event = '%s-scroll' % evname
                fitsimage.enable_callback(event)
                if cb_method:
                    fitsimage.add_callback(event, cb_method)
            elif pfx == 'gs_':
                # gesture event (Qt backends only)
                if evname == 'pinch':
                    fitsimage.set_callback('pinch', cb_method)
    def reset(self, fitsimage):
        """Reset transient interaction state: active modifier, panning
        mode and any onscreen message.
        """
        bindmap = fitsimage.get_bindmap()
        bindmap.reset_modifier(fitsimage)
        self.pan_stop(fitsimage)
        fitsimage.onscreen_message(None)
##### ENABLERS #####
# These methods are a quick way to enable or disable certain user
# interface features in a ImageView window
    def enable_pan(self, tf):
        """Enable the image to be panned interactively (True/False)."""
        # Flag consulted by the pan/freepan/panset callbacks.
        self.canpan = tf
    def enable_zoom(self, tf):
        """Enable the image to be zoomed interactively (True/False)."""
        # Flag consulted by the kp_zoom*/sc_zoom* callbacks.
        self.canzoom = tf
    def enable_cuts(self, tf):
        """Enable the cuts levels to be set interactively (True/False)."""
        # Flag consulted by the cut-level callbacks.
        self.cancut = tf
    def enable_cmap(self, tf):
        """Enable the color map to be warped interactively (True/False)."""
        # Flag consulted by the colormap warp/cycle/reset callbacks.
        self.cancmap = tf
    def enable_flip(self, tf):
        """Enable the image to be flipped interactively (True/False)."""
        # Flag consulted by the flip/swap-axis callbacks.
        self.canflip = tf
    def enable_rotate(self, tf):
        """Enable the image to be rotated interactively (True/False)."""
        # Flag consulted by the rotate/orient callbacks.
        self.canrotate = tf
def enable(self, **kwdargs):
"""
General enable function encompassing all user interface features.
Usage (e.g.):
fitsimage.enable(rotate=False, flip=True)
"""
for feat, value in kwdargs:
feat = feat.lower()
if not feat in self.features:
raise ValueError("'%s' is not a feature. Must be one of %s" % (
feat, str(self.features)))
attr = self.features[feat]
setattr(self, attr, bool(value))
def enable_all(self, tf):
for feat, attr in self.features.items():
setattr(self, attr, bool(tf))
##### Help methods #####
# Methods used by the callbacks to do actions.
    def get_new_pan(self, fitsimage, win_x, win_y, ptype=1):
        """Compute a new pan position (in data coordinates) from a window
        cursor position.

        ptype=1 is a "free pan" (cursor position maps proportionally onto
        the whole image); ptype=2 is a "drag pan" (pan offset follows the
        drag distance from the point captured by pan_set_origin).
        Returns (data_x, data_y).
        """
        if ptype == 1:
            # This is a "free pan", similar to dragging the "lens"
            # over the canvas.
            dat_wd, dat_ht = fitsimage.get_data_size()
            win_wd, win_ht = fitsimage.get_window_size()
            # clamp cursor to the window edges
            if (win_x >= win_wd):
                win_x = win_wd - 1
            if (win_y >= win_ht):
                win_y = win_ht - 1
            # Figure out data x,y based on percentage of X axis
            # and Y axis
            off_x, off_y = fitsimage.canvas2offset(win_x, win_y)
            max_x, max_y = fitsimage.canvas2offset(win_wd, win_ht)
            wd_x = abs(max_x) * 2.0
            ht_y = abs(max_y) * 2.0
            panx = (off_x + abs(max_x)) / float(wd_x)
            pany = (off_y + abs(max_y)) / float(ht_y)
            # Account for user preference
            if self.settings.get('pan_reverse', False):
                panx = 1.0 - panx
                pany = 1.0 - pany
            data_x, data_y = panx * dat_wd, pany * dat_ht
            return data_x, data_y
        elif ptype == 2:
            # This is a "drag pan", similar to dragging the canvas
            # under the "lens" or "viewport".
            if self._start_x == None:
                # user has not held the mouse button yet
                # return current pan values
                return (self._start_panx, self._start_pany)
            scale_x, scale_y = fitsimage.get_scale_xy()
            off_x, off_y = fitsimage.canvas2offset(win_x, win_y)
            # drag delta converted from canvas offset to data units
            delta_x = float(self._start_x - off_x) / scale_x
            delta_y = float(self._start_y - off_y) / scale_y
            data_x = self._start_panx + delta_x
            data_y = self._start_pany + delta_y
            return (data_x, data_y)
    def _panset(self, fitsimage, data_x, data_y, msg=True, redraw=True):
        """Set the pan position to (data_x, data_y), optionally flashing
        an onscreen confirmation message.
        """
        try:
            msg = self.settings.get('msg_panset', msg)
            if msg:
                fitsimage.onscreen_message("Pan position set", delay=0.4)
            res = fitsimage.panset_xy(data_x, data_y, redraw=redraw)
            return res
        except ImageView.ImageViewCoordsError as e:
            # coords are not within the data area
            # NOTE(review): 'ImageView' is not imported in this module, so
            # evaluating this except clause would itself raise NameError if
            # panset_xy ever raised -- confirm the intended exception type
            # and import it.
            pass
def get_direction(self, direction, rev=False):
"""
Translate a direction in compass degrees into 'up' or 'down'.
"""
if (direction < 90.0) or (direction > 270.0):
if not rev:
return 'up'
else:
return 'down'
elif (90.0 < direction < 270.0):
if not rev:
return 'down'
else:
return 'up'
else:
return 'none'
    def _tweak_colormap(self, fitsimage, x, y, mode):
        """Warp the color map from the cursor position: the Y coordinate
        controls the stretch (scale) factor and the X coordinate controls
        the shift.  `mode` is currently unused here.
        """
        win_wd, win_ht = fitsimage.get_window_size()
        # translate Y cursor position as a percentage of the window
        # height into a scaling factor
        y_pct = (win_ht - y) / float(win_ht)
        # I tried to mimic ds9's exponential scale feel along the Y-axis
        def exp_scale(i):
            return (1.0/(i**3))*0.0002 + (1.0/i)*0.085
        scale_pct = exp_scale(1.0 - y_pct)
        # translate X cursor position as a percentage of the window
        # width into a shifting factor
        shift_pct = x / float(win_wd) - 0.5
        fitsimage.scaleNshift_cmap(scale_pct, shift_pct)
def _cutlow_pct(self, fitsimage, pct, msg=True):
msg = self.settings.get('msg_cuts', msg)
image = fitsimage.get_image()
minval, maxval = image.get_minmax()
spread = maxval - minval
loval, hival = fitsimage.get_cut_levels()
loval = loval + (pct * spread)
if msg:
fitsimage.onscreen_message("Cut low: %.4f" % (loval),
redraw=False)
fitsimage.cut_levels(loval, hival, redraw=True)
def _cutlow_xy(self, fitsimage, x, y, msg=True):
msg = self.settings.get('msg_cuts', msg)
win_wd, win_ht = fitsimage.get_window_size()
pct = float(x) / float(win_wd)
image = fitsimage.get_image()
minval, maxval = image.get_minmax()
spread = maxval - minval
loval, hival = fitsimage.get_cut_levels()
loval = minval + (pct * spread)
if msg:
fitsimage.onscreen_message("Cut low: %.4f" % (loval),
redraw=False)
fitsimage.cut_levels(loval, hival, redraw=True)
def _cuthigh_pct(self, fitsimage, pct, msg=True):
msg = self.settings.get('msg_cuts', msg)
image = fitsimage.get_image()
minval, maxval = image.get_minmax()
spread = maxval - minval
loval, hival = fitsimage.get_cut_levels()
hival = hival - (pct * spread)
if msg:
fitsimage.onscreen_message("Cut high: %.4f" % (hival),
redraw=False)
fitsimage.cut_levels(loval, hival, redraw=True)
def _cuthigh_xy(self, fitsimage, x, y, msg=True):
msg = self.settings.get('msg_cuts', msg)
win_wd, win_ht = fitsimage.get_window_size()
pct = 1.0 - (float(x) / float(win_wd))
image = fitsimage.get_image()
minval, maxval = image.get_minmax()
spread = maxval - minval
loval, hival = fitsimage.get_cut_levels()
hival = maxval - (pct * spread)
if msg:
fitsimage.onscreen_message("Cut high: %.4f" % (hival),
redraw=False)
fitsimage.cut_levels(loval, hival, redraw=True)
    def _cutboth_xy(self, fitsimage, x, y, msg=True):
        """Set the low AND high cut levels simultaneously from the cursor
        position: X drives the high cut, Y drives the low cut.

        Relies on self._loval/self._hival having been captured at drag
        start (see ms_cutall).
        """
        msg = self.settings.get('msg_cuts', msg)
        win_wd, win_ht = fitsimage.get_window_size()
        xpct = 1.0 - (float(x) / float(win_wd))
        #ypct = 1.0 - (float(y) / float(win_ht))
        ypct = (float(win_ht - y) / float(win_ht))
        spread = self._hival - self._loval
        hival = self._hival - (xpct * spread)
        loval = self._loval + (ypct * spread)
        if msg:
            fitsimage.onscreen_message("Cut low: %.4f high: %.4f" % (
                loval, hival), redraw=False)
        fitsimage.cut_levels(loval, hival, redraw=True)
def _cut_pct(self, fitsimage, pct, msg=True):
msg = self.settings.get('msg_cuts', msg)
image = fitsimage.get_image()
minval, maxval = image.get_minmax()
spread = maxval - minval
loval, hival = fitsimage.get_cut_levels()
loval = loval + (pct * spread)
hival = hival - (pct * spread)
if msg:
fitsimage.onscreen_message("Cut low: %.4f high: %.4f" % (
loval, hival), delay=1.0, redraw=False)
fitsimage.cut_levels(loval, hival, redraw=True)
def _adjust_contrast(self, fitsimage, direction, pct, msg=True):
direction = self.get_direction(direction)
if direction == 'up':
self._cut_pct(fitsimage, pct, msg=msg)
elif direction == 'down':
self._cut_pct(fitsimage, -pct, msg=msg)
def _scale_image(self, fitsimage, direction, factor, msg=True):
msg = self.settings.get('msg_zoom', msg)
rev = self.settings.get('zoom_scroll_reverse', False)
scale_x, scale_y = fitsimage.get_scale_xy()
direction = self.get_direction(direction, rev=rev)
if direction == 'up':
mult = 1.0 + factor
elif direction == 'down':
mult = 1.0 - factor
scale_x, scale_y = scale_x * mult, scale_y * mult
fitsimage.scale_to(scale_x, scale_y)
if msg:
fitsimage.onscreen_message(fitsimage.get_scale_text(),
delay=0.4)
def _cycle_dist(self, fitsimage, msg, direction='down'):
if self.cancmap:
msg = self.settings.get('msg_dist', msg)
rgbmap = fitsimage.get_rgbmap()
algs = rgbmap.get_hash_algorithms()
algname = rgbmap.get_hash_algorithm()
idx = algs.index(algname)
if direction == 'down':
idx = (idx + 1) % len(algs)
else:
idx = idx - 1
if idx < 0: idx = len(algs) - 1
algname = algs[idx]
rgbmap.set_hash_algorithm(algname)
if msg:
fitsimage.onscreen_message("Color dist: %s" % (algname),
delay=1.0)
def _reset_dist(self, fitsimage, msg):
if self.cancmap:
msg = self.settings.get('msg_dist', msg)
rgbmap = fitsimage.get_rgbmap()
algname = 'linear'
rgbmap.set_hash_algorithm(algname)
if msg:
fitsimage.onscreen_message("Color dist: %s" % (algname),
delay=1.0)
def _cycle_cmap(self, fitsimage, msg, direction='down'):
if self.cancmap:
msg = self.settings.get('msg_cmap', msg)
rgbmap = fitsimage.get_rgbmap()
cm = rgbmap.get_cmap()
cmapname = cm.name
cmapnames = cmap.get_names()
idx = cmapnames.index(cmapname)
if direction == 'down':
idx = (idx + 1) % len(cmapnames)
else:
idx = idx - 1
if idx < 0: idx = len(cmapnames) - 1
cmapname = cmapnames[idx]
rgbmap.set_cmap(cmap.get_cmap(cmapname))
if msg:
fitsimage.onscreen_message("Color map: %s" % (cmapname),
delay=1.0)
def _reset_cmap(self, fitsimage, msg):
if self.cancmap:
msg = self.settings.get('msg_cmap', msg)
rgbmap = fitsimage.get_rgbmap()
# default
cmapname = 'ramp'
rgbmap.set_cmap(cmap.get_cmap(cmapname))
if msg:
fitsimage.onscreen_message("Color map: %s" % (cmapname),
delay=1.0)
def _cycle_imap(self, fitsimage, msg, direction='down'):
if self.cancmap:
msg = self.settings.get('msg_imap', msg)
rgbmap = fitsimage.get_rgbmap()
im = rgbmap.get_imap()
imapname = im.name
imapnames = imap.get_names()
idx = imapnames.index(imapname)
if direction == 'down':
idx = (idx + 1) % len(imapnames)
else:
idx = idx - 1
if idx < 0: idx = len(imapnames) - 1
imapname = imapnames[idx]
rgbmap.set_imap(imap.get_imap(imapname))
if msg:
fitsimage.onscreen_message("Intensity map: %s" % (imapname),
delay=1.0)
def _reset_imap(self, fitsimage, msg):
if self.cancmap:
msg = self.settings.get('msg_imap', msg)
rgbmap = fitsimage.get_rgbmap()
# default
imapname = 'ramp'
rgbmap.set_imap(imap.get_imap(imapname))
if msg:
fitsimage.onscreen_message("Intensity map: %s" % (imapname),
delay=1.0)
def _rotate_xy(self, fitsimage, x, y, msg=True):
msg = self.settings.get('msg_rotate', msg)
win_wd, win_ht = fitsimage.get_window_size()
pct = float(x) / float(win_wd)
deg = 360.0 * pct
if msg:
fitsimage.onscreen_message("Rotate: %.2f" % (deg),
redraw=False)
fitsimage.rotate(deg)
def _rotate_inc(self, fitsimage, inc_deg, msg=True):
msg = self.settings.get('msg_rotate_inc', msg)
cur_rot_deg = fitsimage.get_rotation()
rot_deg = math.fmod(cur_rot_deg + inc_deg, 360.0)
fitsimage.rotate(rot_deg)
if msg:
fitsimage.onscreen_message("Rotate Inc: (%.2f) %.2f" % (
inc_deg, rot_deg), delay=1.0)
def _orient(self, fitsimage, righthand=False, msg=True):
msg = self.settings.get('msg_orient', msg)
image = fitsimage.get_image()
(x, y, xn, yn, xe, ye) = image.calc_compass_center()
degn = math.degrees(math.atan2(xn - x, yn - y))
self.logger.info("degn=%f xe=%f ye=%f" % (
degn, xe, ye))
# rotate east point also by degn
xe2, ye2 = trcalc.rotate_pt(xe, ye, degn, xoff=x, yoff=y)
dege = math.degrees(math.atan2(xe2 - x, ye2 - y))
self.logger.info("dege=%f xe2=%f ye2=%f" % (
dege, xe2, ye2))
# if right-hand image, flip it to make left hand
xflip = righthand
if dege > 0.0:
xflip = not xflip
if xflip:
degn = - degn
fitsimage.transform(xflip, False, False)
fitsimage.rotate(degn)
if msg:
fitsimage.onscreen_message("Orient: rot=%.2f flipx=%s" % (
degn, str(xflip)), delay=1.0)
def to_default_mode(self, fitsimage):
self._ispanning = False
fitsimage.switch_cursor('pick')
def pan_start(self, fitsimage, ptype=1):
# If already panning then ignore multiple keystrokes
if self._ispanning:
return
self._pantype = ptype
fitsimage.switch_cursor('pan')
self._ispanning = True
def pan_set_origin(self, fitsimage, win_x, win_y, data_x, data_y):
self._start_x, self._start_y = fitsimage.canvas2offset(win_x, win_y)
self._start_panx, self._start_pany = fitsimage.get_pan()
def pan_stop(self, fitsimage):
self._ispanning = False
self._start_x = None
self._pantype = 1
self.to_default_mode(fitsimage)
def restore_colormap(self, fitsimage, msg=True):
msg = self.settings.get('msg_cmap', msg)
rgbmap = fitsimage.get_rgbmap()
rgbmap.reset_sarr()
if msg:
fitsimage.onscreen_message("Restored color map", delay=0.5)
return True
##### KEYBOARD ACTION CALLBACKS #####
def kp_pan_set(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canpan:
self._panset(fitsimage, data_x, data_y, redraw=True,
msg=msg)
return True
def kp_center(self, fitsimage, keyname, data_x, data_y):
if self.canpan:
fitsimage.center_image()
return True
def kp_zoom_out(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canzoom:
msg = self.settings.get('msg_zoom', msg)
fitsimage.zoom_out()
if msg:
fitsimage.onscreen_message(fitsimage.get_scale_text(),
delay=1.0)
return True
def kp_zoom_in(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canzoom:
msg = self.settings.get('msg_zoom', msg)
fitsimage.zoom_in()
if msg:
fitsimage.onscreen_message(fitsimage.get_scale_text(),
delay=1.0)
return True
def kp_zoom(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canzoom:
msg = self.settings.get('msg_zoom', msg)
keylist = self.settings.get('kp_zoom')
zoomval = (keylist.index(keyname) + 1)
fitsimage.zoom_to(zoomval)
if msg:
fitsimage.onscreen_message(fitsimage.get_scale_text(),
delay=1.0)
return True
def kp_zoom_inv(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canzoom:
msg = self.settings.get('msg_zoom', msg)
keylist = self.settings.get('kp_zoom_inv')
zoomval = - (keylist.index(keyname) + 1)
fitsimage.zoom_to(zoomval)
if msg:
fitsimage.onscreen_message(fitsimage.get_scale_text(),
delay=1.0)
return True
def kp_zoom_fit(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canzoom:
msg = self.settings.get('msg_zoom', msg)
fitsimage.zoom_fit()
if msg:
fitsimage.onscreen_message(fitsimage.get_scale_text(),
delay=1.0)
return True
def kp_autozoom_on(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canzoom:
msg = self.settings.get('msg_zoom', msg)
fitsimage.enable_autozoom('on')
if msg:
fitsimage.onscreen_message('Autozoom On', delay=1.0)
return True
def kp_autozoom_override(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canzoom:
msg = self.settings.get('msg_zoom', msg)
fitsimage.enable_autozoom('override')
if msg:
fitsimage.onscreen_message('Autozoom Override', delay=1.0)
return True
def kp_cut_255(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.cancut:
msg = self.settings.get('msg_cuts', msg)
fitsimage.cut_levels(0.0, 255.0, no_reset=True)
return True
def kp_cut_auto(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.cancut:
msg = self.settings.get('msg_cuts', msg)
if msg:
fitsimage.onscreen_message("Auto cut levels", delay=1.0)
fitsimage.auto_levels()
return True
def kp_autocuts_on(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.cancut:
msg = self.settings.get('msg_cuts', msg)
fitsimage.enable_autocuts('on')
if msg:
fitsimage.onscreen_message('Autocuts On', delay=1.0)
return True
def kp_autocuts_override(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.cancut:
msg = self.settings.get('msg_cuts', msg)
fitsimage.enable_autocuts('override')
if msg:
fitsimage.onscreen_message('Autocuts Override', delay=1.0)
return True
def kp_cmap_restore(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.cancmap:
msg = self.settings.get('msg_cmap', msg)
self.restore_colormap(fitsimage, msg=msg)
return True
def kp_flip_x(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canflip:
msg = self.settings.get('msg_transform', msg)
flipX, flipY, swapXY = fitsimage.get_transforms()
if keyname == '[':
flipx = not flipX
else:
flipx = False
fitsimage.transform(flipx, flipY, swapXY)
if msg:
fitsimage.onscreen_message("Flip X=%s" % flipx, delay=1.0)
return True
def kp_flip_y(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canflip:
msg = self.settings.get('msg_transform', msg)
flipX, flipY, swapXY = fitsimage.get_transforms()
if keyname == ']':
flipy = not flipY
else:
flipy = False
fitsimage.transform(flipX, flipy, swapXY)
if msg:
fitsimage.onscreen_message("Flip Y=%s" % flipy, delay=1.0)
return True
def kp_swap_xy(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canflip:
msg = self.settings.get('msg_transform', msg)
flipX, flipY, swapXY = fitsimage.get_transforms()
if keyname == 'backslash':
swapxy = not swapXY
else:
swapxy = False
fitsimage.transform(flipX, flipY, swapxy)
if msg:
fitsimage.onscreen_message("Swap XY=%s" % swapxy, delay=1.0)
return True
def kp_dist(self, fitsimage, keyname, data_x, data_y, msg=True):
self._cycle_dist(fitsimage, msg)
return True
def kp_dist_reset(self, fitsimage, keyname, data_x, data_y, msg=True):
self._reset_dist(fitsimage, msg)
return True
def kp_cmap_reset(self, fitsimage, keyname, data_x, data_y, msg=True):
self._reset_cmap(fitsimage, msg)
return True
def kp_imap_reset(self, fitsimage, keyname, data_x, data_y, msg=True):
self._reset_imap(fitsimage, msg)
return True
def kp_rotate_reset(self, fitsimage, keyname, data_x, data_y):
if self.canrotate:
fitsimage.rotate(0.0)
return True
def kp_rotate_inc90(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canrotate:
self._rotate_inc(fitsimage, 90.0, msg=msg)
return True
def kp_rotate_dec90(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canrotate:
self._rotate_inc(fitsimage, -90.0, msg=msg)
return True
def kp_orient_lh(self, fitsimage, keyname, data_x, data_y, msg=True):
if self.canrotate:
self._orient(fitsimage, righthand=False, msg=msg)
return True
def kp_orient_rh(self, fitsimage, keyname, data_x, data_y,
msg=True):
if self.canrotate:
self._orient(fitsimage, righthand=True, msg=msg)
return True
def kp_reset(self, fitsimage, keyname, data_x, data_y):
self.reset(fitsimage)
return True
##### MOUSE ACTION CALLBACKS #####
# def ms_none(self, fitsimage, action, data_x, data_y):
# return False
# def ms_cursor(self, fitsimage, action, data_x, data_y):
# return False
# def ms_wheel(self, fitsimage, action, data_x, data_y):
# return False
# def ms_draw(self, fitsimage, action, data_x, data_y):
# return False
def ms_rotate(self, fitsimage, action, data_x, data_y, msg=True):
"""Rotate the image by dragging the cursor left or right.
"""
if not self.canrotate:
return True
msg = self.settings.get('msg_rotate', msg)
x, y = fitsimage.get_last_win_xy()
if action == 'move':
self._rotate_xy(fitsimage, x, y)
elif action == 'down':
if msg:
fitsimage.onscreen_message("Rotate (drag mouse L-R)",
delay=1.0)
self._start_x = x
else:
fitsimage.onscreen_message(None)
return True
def ms_cmapwarp(self, fitsimage, action, data_x, data_y, msg=True):
"""Shift the colormap by dragging the cursor left or right.
Stretch the colormap by dragging the cursor up or down.
"""
if not self.cancmap:
return True
msg = self.settings.get('msg_cmap', msg)
x, y = fitsimage.get_last_win_xy()
if not fitsimage._originUpper:
y = fitsimage._imgwin_ht - y
if action == 'move':
self._tweak_colormap(fitsimage, x, y, 'preview')
elif action == 'down':
self._start_x, self._start_y = x, y
if msg:
fitsimage.onscreen_message("Shift and stretch colormap (drag mouse)",
delay=1.0)
else:
fitsimage.onscreen_message(None)
return True
def ms_cmaprest(self, fitsimage, action, data_x, data_y, msg=True):
"""An interactive way to restore the colormap settings after
a warp operation.
"""
if self.cancmap and (action == 'down'):
self.restore_colormap(fitsimage, msg=msg)
return True
def ms_pan(self, fitsimage, action, data_x, data_y):
"""A 'drag' or proportional pan, where the image is panned by
'dragging the canvas' up or down. The amount of the pan is
proportionate to the length of the drag.
"""
if not self.canpan:
return True
x, y = fitsimage.get_last_win_xy()
if action == 'move':
data_x, data_y = self.get_new_pan(fitsimage, x, y,
ptype=self._pantype)
fitsimage.panset_xy(data_x, data_y, redraw=True)
elif action == 'down':
self.pan_set_origin(fitsimage, x, y, data_x, data_y)
self.pan_start(fitsimage, ptype=2)
else:
self.pan_stop(fitsimage)
return True
def ms_freepan(self, fitsimage, action, data_x, data_y):
"""A 'free' pan, where the image is panned by dragging the cursor
towards the area you want to see in the image. The entire image is
pannable by dragging towards each corner of the window.
"""
if not self.canpan:
return True
x, y = fitsimage.get_last_win_xy()
if action == 'move':
data_x, data_y = self.get_new_pan(fitsimage, x, y,
ptype=self._pantype)
fitsimage.panset_xy(data_x, data_y, redraw=True)
elif action == 'down':
self.pan_start(fitsimage, ptype=1)
else:
self.pan_stop(fitsimage)
return True
def ms_cutlo(self, fitsimage, action, data_x, data_y):
"""An interactive way to set the low cut level.
"""
if not self.cancut:
return True
x, y = fitsimage.get_last_win_xy()
if action == 'move':
self._cutlow_xy(fitsimage, x, y)
elif action == 'down':
self._start_x, self._start_y = x, y
self._loval, self._hival = fitsimage.get_cut_levels()
else:
fitsimage.onscreen_message(None)
return True
def ms_cuthi(self, fitsimage, action, data_x, data_y):
"""An interactive way to set the high cut level.
"""
if not self.cancut:
return True
x, y = fitsimage.get_last_win_xy()
if action == 'move':
self._cuthigh_xy(fitsimage, x, y)
elif action == 'down':
self._start_x, self._start_y = x, y
self._loval, self._hival = fitsimage.get_cut_levels()
else:
fitsimage.onscreen_message(None)
return True
def ms_cutall(self, fitsimage, action, data_x, data_y):
"""An interactive way to set the low AND high cut levels.
"""
if not self.cancut:
return True
x, y = fitsimage.get_last_win_xy()
if not fitsimage._originUpper:
y = fitsimage._imgwin_ht - y
if action == 'move':
self._cutboth_xy(fitsimage, x, y)
elif action == 'down':
self._start_x, self._start_y = x, y
image = fitsimage.get_image()
self._loval, self._hival = self.autocuts.calc_cut_levels(image)
else:
fitsimage.onscreen_message(None)
return True
def ms_panset(self, fitsimage, action, data_x, data_y,
msg=True):
"""An interactive way to set the pan position. The location
(data_x, data_y) will be centered in the window.
"""
if self.canpan and (action == 'down'):
self._panset(fitsimage, data_x, data_y, redraw=True,
msg=msg)
return True
##### SCROLL ACTION CALLBACKS #####
def sc_contrast_coarse(self, fitsimage, direction, amount, data_x, data_y,
msg=True):
"""Adjust contrast interactively by setting the low AND high cut
levels. This function adjusts it coarsely.
"""
if self.cancut:
self._adjust_contrast(fitsimage, direction, 0.01, msg=msg)
return True
def sc_contrast_fine(self, fitsimage, direction, amount, data_x, data_y,
msg=True):
"""Adjust contrast interactively by setting the low AND high cut
levels. This function adjusts it finely.
"""
if self.cancut:
self._adjust_contrast(fitsimage, direction, 0.001, msg=msg)
return True
def sc_zoom(self, fitsimage, direction, amount, data_x, data_y, msg=True):
"""Interactively zoom the image by scrolling motion.
This zooms by the zoom steps configured under Preferences.
"""
if self.canzoom:
msg = self.settings.get('msg_zoom', msg)
rev = self.settings.get('zoom_scroll_reverse', False)
direction = self.get_direction(direction, rev=rev)
if direction == 'up':
fitsimage.zoom_in()
elif direction == 'down':
fitsimage.zoom_out()
if msg:
fitsimage.onscreen_message(fitsimage.get_scale_text(),
delay=0.4)
return True
def sc_zoom_coarse(self, fitsimage, direction, amount, data_x, data_y,
msg=True):
"""Interactively zoom the image by scrolling motion.
This zooms by adjusting the scale in x and y coarsely.
"""
if self.canzoom:
zoom_accel = self.settings.get('scroll_zoom_acceleration', 1.0)
amount = zoom_accel * 0.20
self._scale_image(fitsimage, direction, amount, msg=msg)
return True
def sc_zoom_fine(self, fitsimage, direction, amount, data_x, data_y,
msg=True):
"""Interactively zoom the image by scrolling motion.
This zooms by adjusting the scale in x and y coarsely.
"""
if self.canzoom:
zoom_accel = self.settings.get('scroll_zoom_acceleration', 1.0)
amount = zoom_accel * 0.08
self._scale_image(fitsimage, direction, 0.08, msg=msg)
return True
def sc_pan(self, fitsimage, direction, amount, data_x, data_y, msg=True):
"""Interactively pan the image by scrolling motion.
"""
if not self.canpan:
return True
# User has "Pan Reverse" preference set?
rev = self.settings.get('pan_reverse', False)
if rev:
direction = math.fmod(direction + 180.0, 360.0)
pan_accel = self.settings.get('scroll_pan_acceleration', 1.0)
num_degrees = amount * pan_accel
ang_rad = math.radians(90.0 - direction)
# Calculate distance of pan amount, based on current scale
wd, ht = fitsimage.get_data_size()
# pageSize = min(wd, ht)
((x0, y0), (x1, y1), (x2, y2), (x3, y3)) = fitsimage.get_pan_rect()
page_size = min(abs(x2 - x0), abs(y2 - y0))
distance = (num_degrees / 360.0) * page_size
self.logger.debug("angle=%f ang_rad=%f distance=%f" % (
direction, ang_rad, distance))
# Calculate new pan position
pan_x, pan_y = fitsimage.get_pan()
new_x = pan_x + math.cos(ang_rad) * distance
new_y = pan_y + math.sin(ang_rad) * distance
# cap pan position
new_x = min(max(new_x, 0.0), wd)
new_y = min(max(new_y, 0.0), ht)
# Because pan position is reported +0.5
new_x, new_y = new_x - 0.5, new_y - 0.5
#print "data x,y=%f,%f new x, y=%f,%f" % (pan_x, pan_y, new_x, new_y)
fitsimage.panset_xy(new_x, new_y, redraw=True)
# For checking result
#pan_x, pan_y = fitsimage.get_pan()
#print "new pan x,y=%f, %f" % (pan_x, pan_y)
return True
def sc_pan_coarse(self, fitsimage, direction, amount, data_x, data_y,
msg=True):
amount = amount / 2.0
return self.sc_pan(fitsimage, direction, amount, data_x, data_y,
msg=msg)
def sc_pan_fine(self, fitsimage, direction, amount, data_x, data_y,
msg=True):
amount = amount / 5.0
return self.sc_pan(fitsimage, direction, amount, data_x, data_y,
msg=msg)
def sc_dist(self, fitsimage, direction, amount, data_x, data_y,
msg=True):
direction = self.get_direction(direction)
self._cycle_dist(fitsimage, msg, direction=direction)
return True
def sc_cmap(self, fitsimage, direction, amount, data_x, data_y,
msg=True):
direction = self.get_direction(direction)
self._cycle_cmap(fitsimage, msg, direction=direction)
return True
def sc_imap(self, fitsimage, direction, amount, data_x, data_y,
msg=True):
direction = self.get_direction(direction)
self._cycle_imap(fitsimage, msg, direction=direction)
return True
##### GESTURE ACTION CALLBACKS #####
    def gs_pinch(self, fitsimage, state, rot_deg, scale, msg=True):
        """Handle a pinch gesture (Qt backends): zoom and/or rotate
        depending on the 'pinch_actions' setting.

        On 'start' the current scale/rotation are captured; subsequent
        events apply `scale`/`rot_deg` relative to those start values.
        """
        pinch_actions = self.settings.get('pinch_actions', [])
        if state == 'start':
            self._start_scale_x, self._start_scale_y = fitsimage.get_scale_xy()
            self._start_rot = fitsimage.get_rotation()
        else:
            msg_str = None
            if self.canzoom and ('zoom' in pinch_actions):
                scale_accel = self.settings.get('pinch_zoom_acceleration', 1.0)
                scale = scale * scale_accel
                scale_x, scale_y = (self._start_scale_x * scale,
                                    self._start_scale_y * scale)
                fitsimage.scale_to(scale_x, scale_y, redraw=False)
                msg_str = fitsimage.get_scale_text()
                msg = self.settings.get('msg_zoom', True)
            if self.canrotate and ('rotate' in pinch_actions):
                # rotation is applied relative to the start rotation
                deg = self._start_rot - rot_deg
                rotate_accel = self.settings.get('pinch_rotate_acceleration', 1.0)
                deg = rotate_accel * deg
                fitsimage.rotate(deg)
                if msg_str == None:
                    msg_str = "Rotate: %.2f" % (deg)
                    msg = self.settings.get('msg_rotate', msg)
            if msg and (msg_str != None):
                fitsimage.onscreen_message(msg_str, delay=0.4)
        return True
def gs_pan(self, fitsimage, state, dx, dy):
if not self.canpan:
return True
x, y = fitsimage.get_last_win_xy()
if state == 'move':
data_x, data_y = self.get_new_pan(fitsimage, x, y,
ptype=self._pantype)
fitsimage.panset_xy(data_x, data_y, redraw=True)
elif state == 'start':
data_x, data_y = fitsimage.get_last_data_xy()
self.pan_set_origin(fitsimage, x, y, data_x, data_y)
self.pan_start(fitsimage, ptype=2)
else:
self.pan_stop(fitsimage)
return True
class BindingMapError(Exception):
    """Exception raised for errors in the event/button binding maps."""
    pass
class BindingMapper(Callback.Callbacks):
    """The BindingMapper class maps physical events (key presses, button
    clicks, mouse movement, etc) into logical events. By registering for
    logical events, plugins and other event handling code doesn't need to
    care about the physical controls bindings. The bindings can be changed
    and everything continues to work.
    """

    def __init__(self, logger, btnmap=None, modmap=None):
        """Set up default button and modifier maps.

        ``btnmap`` maps raw button codes to logical button names;
        ``modmap`` maps raw key names to modifier definitions.  Sensible
        defaults are installed for whichever is None.
        """
        Callback.Callbacks.__init__(self)

        self.logger = logger

        # For event mapping: (modifier, alias) -> Bunch(name=<logical event>)
        self.eventmap = {}

        # currently active keyboard modifier/mode (None if none active)
        self._kbdmod = None
        self._kbdmod_types = ('held', 'oneshot', 'locked')
        self._kbdmod_type = 'held'
        # set when a 'held' modifier is released while a button is still
        # pressed; the actual reset then happens on button release
        self._delayed_reset = False

        # Set up button mapping
        # fix: compare to None with identity, not equality (PEP 8)
        if btnmap is None:
            btnmap = {0x1: 'cursor', 0x2: 'wheel', 0x4: 'draw'}
        self.btnmap = btnmap
        # bitmask of currently pressed buttons
        self._button = 0

        # Set up modifier mapping
        if modmap is None:
            self.modmap = {}
            for keyname in ('shift_l', 'shift_r'):
                self.add_modifier(keyname, 'shift')
            for keyname in ('control_l', 'control_r'):
                self.add_modifier(keyname, 'ctrl')
            for keyname in ('meta_right',):
                self.add_modifier(keyname, 'draw')
        else:
            self.modmap = modmap

        # For callbacks
        for name in ('mode-set', ):
            self.enable_callback(name)

    def set_modifier_map(self, modmap):
        """Replace the entire modifier map."""
        self.modmap = modmap

    def clear_modifier_map(self):
        """Remove all modifier mappings."""
        self.modmap = {}

    def current_modifier(self):
        """Return the name of the active modifier, or None."""
        return self._kbdmod

    def get_modifiers(self):
        """Return the set of registered modifier names."""
        res = set([])
        for keyname, bnch in self.modmap.items():
            res.add(bnch.name)
        return res

    def add_modifier(self, keyname, modname, modtype='held', msg=None):
        """Map key *keyname* to modifier *modname*.

        ``modtype`` must be one of 'held', 'oneshot' or 'locked'; an
        optional onscreen *msg* is shown while the modifier is active.
        Raises ValueError for a bad ``modtype``.
        """
        # fix: was ``assert cond, ValueError(...)`` -- asserts are stripped
        # under ``python -O`` and would raise AssertionError, not ValueError
        if modtype not in self._kbdmod_types:
            raise ValueError("Bad modifier type '%s': must be one of %s" % (
                modtype, self._kbdmod_types))
        bnch = Bunch.Bunch(name=modname, type=modtype, msg=msg)
        self.modmap[keyname] = bnch
        # also index by modifier name for reverse lookup (see reset_modifier)
        self.modmap['mod_%s' % modname] = bnch

    def set_modifier(self, name, modtype='oneshot'):
        """Activate modifier *name* of type *modtype*; notify 'mode-set'.

        Raises ValueError for a bad ``modtype``.
        """
        # fix: same assert-misuse as in add_modifier
        if modtype not in self._kbdmod_types:
            raise ValueError("Bad modifier type '%s': must be one of %s" % (
                modtype, self._kbdmod_types))
        self._kbdmod = name
        self._kbdmod_type = modtype
        self.make_callback('mode-set', self._kbdmod, self._kbdmod_type)

    def reset_modifier(self, fitsimage):
        """Deactivate any active modifier and notify 'mode-set' callbacks."""
        try:
            bnch = self.modmap['mod_%s' % self._kbdmod]
        # fix: was a bare ``except:``; only a missing key is expected here
        except KeyError:
            bnch = None
        self._kbdmod = None
        self._kbdmod_type = 'held'
        self._delayed_reset = False
        # clear onscreen message, if any
        if (bnch is not None) and (bnch.msg is not None):
            fitsimage.onscreen_message(None)
        self.make_callback('mode-set', self._kbdmod, self._kbdmod_type)

    def clear_button_map(self):
        """Remove all button mappings."""
        self.btnmap = {}

    def map_button(self, btncode, alias):
        """For remapping the buttons to different names. 'btncode' is a
        fixed button code and 'alias' is a logical name.
        """
        self.btnmap[btncode] = alias

    def get_buttons(self):
        """Return the set of logical button names."""
        res = set([])
        for keyname, alias in self.btnmap.items():
            res.add(alias)
        return res

    def clear_event_map(self):
        """Remove all logical event mappings."""
        self.eventmap = {}

    def map_event(self, modifier, alias, eventname):
        """Map the (modifier, alias) combination to logical *eventname*."""
        self.eventmap[(modifier, alias)] = Bunch.Bunch(name=eventname)

    def register_for_events(self, fitsimage):
        """Subscribe to the physical events of *fitsimage* we translate."""
        # Add callbacks for interesting events
        fitsimage.add_callback('motion', self.window_motion)
        fitsimage.add_callback('button-press', self.window_button_press)
        fitsimage.add_callback('button-release', self.window_button_release)
        fitsimage.add_callback('key-press', self.window_key_press)
        fitsimage.add_callback('key-release', self.window_key_release)
        ## fitsimage.add_callback('drag-drop', self.window_drag_drop)
        fitsimage.add_callback('scroll', self.window_scroll)
        ## fitsimage.add_callback('map', self.window_map)
        ## fitsimage.add_callback('focus', self.window_focus)
        ## fitsimage.add_callback('enter', self.window_enter)
        ## fitsimage.add_callback('leave', self.window_leave)

    def window_map(self, fitsimage):
        pass

    def window_focus(self, fitsimage, hasFocus):
        return True

    def window_enter(self, fitsimage):
        return True

    def window_leave(self, fitsimage):
        return True

    def window_key_press(self, fitsimage, keyname):
        """Translate a raw key press into a 'keydown-*' logical event."""
        self.logger.debug("keyname=%s" % (keyname))
        # Is this a modifier key?
        if keyname in self.modmap:
            bnch = self.modmap[keyname]
            if self._kbdmod_type == 'locked':
                # pressing the locked modifier's key again unlocks it
                if bnch.name == self._kbdmod:
                    self.reset_modifier(fitsimage)
                return True
            if self._delayed_reset:
                if bnch.name == self._kbdmod:
                    self._delayed_reset = False
                return False
            # if there is not a modifier active now,
            # activate this one
            if self._kbdmod is None:
                self.set_modifier(bnch.name, bnch.type)
                if bnch.msg is not None:
                    fitsimage.onscreen_message(bnch.msg)
            return True

        try:
            # TEMP: hack to get around the issue of how keynames
            # are generated. This assumes standard modifiers are
            # mapped to names "shift" and "ctrl"
            if (self._kbdmod in ('shift', 'ctrl')) or (keyname == 'escape'):
                idx = (None, keyname)
            else:
                idx = (self._kbdmod, keyname)
            self.logger.debug("idx=%s" % (str(idx)))
            emap = self.eventmap[idx]
        except KeyError:
            return False

        cbname = 'keydown-%s' % (emap.name)
        last_x, last_y = fitsimage.get_last_data_xy()
        return fitsimage.make_callback(cbname, keyname, last_x, last_y)

    def window_key_release(self, fitsimage, keyname):
        """Translate a raw key release into a 'keyup-*' logical event."""
        self.logger.debug("keyname=%s" % (keyname))
        try:
            idx = (self._kbdmod, keyname)
            emap = self.eventmap[idx]
        except KeyError:
            emap = None

        # Is this a modifier key?
        if keyname in self.modmap:
            bnch = self.modmap[keyname]
            if self._kbdmod == bnch.name:
                # <-- the current modifier key is being released
                if bnch.type == 'held':
                    if self._button == 0:
                        # if no button is being held, then reset modifier
                        self.reset_modifier(fitsimage)
                    else:
                        # defer the reset until the button is released
                        self._delayed_reset = True
            return True

        # release modifier if this is a oneshot modifier
        ## if self._kbdmod_type == 'oneshot':
        ##     self.reset_modifier(fitsimage)

        if emap is None:
            return False

        cbname = 'keyup-%s' % (emap.name)
        last_x, last_y = fitsimage.get_last_data_xy()
        return fitsimage.make_callback(cbname, keyname, last_x, last_y)

    def window_button_press(self, fitsimage, btncode, data_x, data_y):
        """Translate a raw button press into a '<alias>-down' logical event."""
        self.logger.debug("x,y=%d,%d btncode=%s" % (data_x, data_y,
                                                    hex(btncode)))
        try:
            self._button |= btncode
            button = self.btnmap[btncode]
            idx = (self._kbdmod, button)
            self.logger.debug("Event map for %s" % (str(idx)))
            emap = self.eventmap[idx]
        except KeyError:
            #self.logger.warn("No button map binding for %s" % (str(btncode)))
            return False

        cbname = '%s-down' % (emap.name)
        self.logger.debug("making callback for %s (mod=%s)" % (
            cbname, self._kbdmod))
        return fitsimage.make_callback(cbname, 'down', data_x, data_y)

    def window_motion(self, fitsimage, btncode, data_x, data_y):
        """Translate pointer motion into a '<alias>-move' logical event."""
        try:
            button = self.btnmap[btncode]
            idx = (self._kbdmod, button)
            emap = self.eventmap[idx]
        except KeyError:
            return False

        cbname = '%s-move' % (emap.name)
        return fitsimage.make_callback(cbname, 'move', data_x, data_y)

    def window_button_release(self, fitsimage, btncode, data_x, data_y):
        """Translate a raw button release into a '<alias>-up' logical event."""
        self.logger.debug("x,y=%d,%d button=%s" % (data_x, data_y,
                                                   hex(btncode)))
        try:
            self._button &= ~btncode
            button = self.btnmap[btncode]
            idx = (self._kbdmod, button)
            # release modifier if this is a oneshot modifier
            if (self._kbdmod_type == 'oneshot') or (self._delayed_reset):
                self.reset_modifier(fitsimage)
            emap = self.eventmap[idx]
        except KeyError:
            #self.logger.warn("No button map binding for %s" % (str(btncode)))
            return False

        cbname = '%s-up' % (emap.name)
        return fitsimage.make_callback(cbname, 'up', data_x, data_y)

    def window_scroll(self, fitsimage, direction, amount, data_x, data_y):
        """Translate a scroll event into a '<event>-scroll' logical event."""
        try:
            idx = (self._kbdmod, 'scroll')
            emap = self.eventmap[idx]
        except KeyError:
            return False

        cbname = '%s-scroll' % (emap.name)
        return fitsimage.make_callback(cbname, direction, amount,
                                       data_x, data_y)
#END
|
|
import argparse
import os
import shlex
import sys
import lit.util
def parse_args():
    """Build lit's argument parser, parse argv + LIT_OPTS, validate options.

    Returns the argparse namespace with extra derived attributes:
    ``order`` ('random' / 'failing-first' / 'default') and ``shard``
    (``(run, total)`` tuple or None).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('test_paths',
            nargs='+',
            metavar="TEST_PATH",
            help='File or path to include in the test suite')
    parser.add_argument("--version",
            dest="show_version",
            help="Show version and exit",
            action="store_true")
    parser.add_argument("-j", "--threads", "--workers",
            dest="workers",
            metavar="N",
            help="Number of workers used for testing",
            type=_positive_int,
            default=lit.util.detectCPUs())
    parser.add_argument("--config-prefix",
            dest="configPrefix",
            metavar="NAME",
            help="Prefix for 'lit' config files")
    parser.add_argument("-D", "--param",
            dest="user_params",
            metavar="NAME=VAL",
            help="Add 'NAME' = 'VAL' to the user defined parameters",
            action="append",
            default=[])

    format_group = parser.add_argument_group("Output Format")
    # FIXME: I find these names very confusing, although I like the
    # functionality.
    format_group.add_argument("-q", "--quiet",
            help="Suppress no error output",
            action="store_true")
    format_group.add_argument("-s", "--succinct",
            help="Reduce amount of output",
            action="store_true")
    format_group.add_argument("-v", "--verbose",
            dest="showOutput",
            help="Show test output for failures",
            action="store_true")
    format_group.add_argument("-vv", "--echo-all-commands",
            dest="echoAllCommands",
            action="store_true",
            help="Echo all commands as they are executed to stdout. In case of "
                 "failure, last command shown will be the failing one.")
    format_group.add_argument("-a", "--show-all",
            dest="showAllOutput",
            help="Display all commandlines and output",
            action="store_true")
    format_group.add_argument("-o", "--output",
            dest="output_path",
            help="Write test results to the provided path",
            metavar="PATH")
    format_group.add_argument("--no-progress-bar",
            dest="useProgressBar",
            help="Do not use curses based progress bar",
            action="store_false")
    format_group.add_argument("--show-unsupported",
            help="Show unsupported tests",
            action="store_true")
    format_group.add_argument("--show-xfail",
            help="Show tests that were expected to fail",
            action="store_true")

    execution_group = parser.add_argument_group("Test Execution")
    execution_group.add_argument("--path",
            help="Additional paths to add to testing environment",
            action="append",
            default=[])
    execution_group.add_argument("--vg",
            dest="useValgrind",
            help="Run tests under valgrind",
            action="store_true")
    execution_group.add_argument("--vg-leak",
            dest="valgrindLeakCheck",
            help="Check for memory leaks under valgrind",
            action="store_true")
    execution_group.add_argument("--vg-arg",
            dest="valgrindArgs",
            metavar="ARG",
            help="Specify an extra argument for valgrind",
            action="append",
            default=[])
    execution_group.add_argument("--time-tests",
            dest="timeTests",
            help="Track elapsed wall time for each test",
            action="store_true")
    execution_group.add_argument("--no-execute",
            dest="noExecute",
            help="Don't execute any tests (assume PASS)",
            action="store_true")
    execution_group.add_argument("--xunit-xml-output",
            dest="xunit_output_file",
            help="Write XUnit-compatible XML test reports to the specified file")
    execution_group.add_argument("--timeout",
            dest="maxIndividualTestTime",
            help="Maximum time to spend running a single test (in seconds). "
                 "0 means no time limit. [Default: 0]",
            type=_non_negative_int)  # TODO(yln): --[no-]test-timeout, instead of 0 allowed
    execution_group.add_argument("--max-failures",
            help="Stop execution after the given number of failures.",
            type=_positive_int)
    execution_group.add_argument("--allow-empty-runs",
            help="Do not fail the run if all tests are filtered out",
            action="store_true")

    selection_group = parser.add_argument_group("Test Selection")
    selection_group.add_argument("--max-tests",
            metavar="N",
            help="Maximum number of tests to run",
            type=_positive_int)
    # NOTE: despite the similar names, --max-time (dest 'timeout') limits the
    # whole run, while --timeout (dest 'maxIndividualTestTime') limits one test.
    selection_group.add_argument("--max-time",  # TODO(yln): --timeout
            dest="timeout",
            metavar="N",
            help="Maximum time to spend testing (in seconds)",
            type=_positive_int)
    selection_group.add_argument("--shuffle",  # TODO(yln): --order=random
            help="Run tests in random order",  # default or 'by-path' (+ isEarlyTest())
            action="store_true")
    selection_group.add_argument("-i", "--incremental",  # TODO(yln): --order=failing-first
            help="Run modified and failing tests first (updates mtimes)",
            action="store_true")
    selection_group.add_argument("--filter",
            metavar="REGEX",
            type=_case_insensitive_regex,
            help="Only run tests with paths matching the given regular expression",
            default=os.environ.get("LIT_FILTER"))
    # argparse applies ``type`` to string defaults, so the env values below
    # are validated through _positive_int as well.
    selection_group.add_argument("--num-shards",  # TODO(yln): --shards N/M
            dest="numShards",
            metavar="M",
            help="Split testsuite into M pieces and only run one",
            type=_positive_int,
            default=os.environ.get("LIT_NUM_SHARDS"))
    selection_group.add_argument("--run-shard",
            dest="runShard",
            metavar="N",
            help="Run shard #N of the testsuite",
            type=_positive_int,
            default=os.environ.get("LIT_RUN_SHARD"))

    debug_group = parser.add_argument_group("Debug and Experimental Options")
    debug_group.add_argument("--debug",
            help="Enable debugging (for 'lit' development)",
            action="store_true")
    debug_group.add_argument("--show-suites",
            dest="showSuites",
            help="Show discovered test suites",
            action="store_true")
    debug_group.add_argument("--show-tests",
            dest="showTests",
            help="Show all discovered tests",
            action="store_true")

    # LIT is special: environment variables override command line arguments.
    env_args = shlex.split(os.environ.get("LIT_OPTS", ""))
    args = sys.argv[1:] + env_args
    opts = parser.parse_args(args)

    # Validate command line options
    if opts.echoAllCommands:
        opts.showOutput = True

    # TODO(python3): Could be enum
    if opts.shuffle:
        opts.order = 'random'
    elif opts.incremental:
        opts.order = 'failing-first'
    else:
        opts.order = 'default'

    if opts.numShards or opts.runShard:
        if not opts.numShards or not opts.runShard:
            parser.error("--num-shards and --run-shard must be used together")
        if opts.runShard > opts.numShards:
            parser.error("--run-shard must be between 1 and --num-shards (inclusive)")
        opts.shard = (opts.runShard, opts.numShards)
    else:
        opts.shard = None

    return opts
def _positive_int(arg):
    """Argparse type: parse *arg* as a strictly positive integer."""
    def is_positive(i):
        return i > 0
    return _int(arg, 'positive', is_positive)
def _non_negative_int(arg):
    """Argparse type: parse *arg* as an integer >= 0."""
    def is_non_negative(i):
        return i >= 0
    return _int(arg, 'non-negative', is_non_negative)
def _int(arg, kind, pred):
    """Parse *arg* as an int satisfying *pred*; raise an argparse type error otherwise."""
    desc = "requires {} integer, but found '{}'"
    try:
        value = int(arg)
    except ValueError:
        raise _error(desc, kind, arg)
    if pred(value):
        return value
    raise _error(desc, kind, arg)
def _case_insensitive_regex(arg):
    """Argparse type: compile *arg* as a case-insensitive regular expression."""
    import re
    try:
        return re.compile(arg, re.IGNORECASE)
    except re.error as exc:
        raise _error("invalid regular expression: '{}', {}", arg, exc)
def _error(desc, *args):
    """Return an ArgumentTypeError whose message is *desc* formatted with *args*."""
    return argparse.ArgumentTypeError(desc.format(*args))
|
|
from __future__ import unicode_literals
from django.contrib.auth import authenticate, get_user_model
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.db.models.manager import Manager
from django import forms
from django.utils.http import int_to_base36
from django.utils.translation import ugettext, ugettext_lazy as _
from mezzanine.accounts import (get_profile_model, get_profile_user_fieldname,
get_profile_for_user, ProfileNotConfigured)
from mezzanine.conf import settings
from mezzanine.core.forms import Html5Mixin
from mezzanine.utils.urls import slugify, unique_slug
User = get_user_model()

# Field names excluded from the signup/profile form, taken from settings.
_exclude_fields = tuple(getattr(settings,
                                "ACCOUNTS_PROFILE_FORM_EXCLUDE_FIELDS", ()))

# If a profile model has been configured with the ``AUTH_PROFILE_MODULE``
# setting, create a model form for it that will have its fields added to
# ``ProfileForm``.
try:
    class ProfileFieldsForm(forms.ModelForm):
        class Meta:
            model = get_profile_model()
            # Never expose the user FK nor the excluded fields.
            exclude = (get_profile_user_fieldname(),) + _exclude_fields
except ProfileNotConfigured:
    pass

# Evaluated at import time; ``_exclude_fields`` is consumed again later by
# ``ProfileForm.Meta``, so appending "username" here hides it from that form.
if settings.ACCOUNTS_NO_USERNAME:
    _exclude_fields += ("username",)
    username_label = _("Email address")
else:
    username_label = _("Username or email address")
class LoginForm(Html5Mixin, forms.Form):
    """
    Form with the two credential fields used to log a user in.
    """
    username = forms.CharField(label=username_label)
    password = forms.CharField(label=_("Password"),
                               widget=forms.PasswordInput(render_value=False))

    def clean(self):
        """
        Authenticate the supplied username/email and password, keeping
        the resulting user around so ``save()`` can return it.
        """
        credentials = {
            "username": self.cleaned_data.get("username"),
            "password": self.cleaned_data.get("password"),
        }
        self._user = authenticate(**credentials)
        if self._user is None:
            raise forms.ValidationError(
                ugettext("Invalid username/email and password"))
        if not self._user.is_active:
            raise forms.ValidationError(ugettext("Your account is inactive"))
        return self.cleaned_data

    def save(self):
        """
        Return the user authenticated in ``clean()`` (None if never run).
        """
        return getattr(self, "_user", None)
class ProfileForm(Html5Mixin, forms.ModelForm):
    """
    ModelForm for auth.User - used for signup and profile update.
    If a Profile model is defined via ``AUTH_PROFILE_MODULE``, its
    fields are injected into the form.
    """

    password1 = forms.CharField(label=_("Password"),
                                widget=forms.PasswordInput(render_value=False))
    password2 = forms.CharField(label=_("Password (again)"),
                                widget=forms.PasswordInput(render_value=False))

    class Meta:
        model = User
        fields = ("first_name", "last_name", "email", "username")
        exclude = _exclude_fields

    def __init__(self, *args, **kwargs):
        """Set up required/password fields and inject profile fields."""
        super(ProfileForm, self).__init__(*args, **kwargs)
        # No instance id means the form is being used for signup rather
        # than for updating an existing profile.
        self._signup = self.instance.id is None
        # NOTE(review): ``_meta.get_all_field_names`` was removed in modern
        # Django; this code assumes an older version -- TODO confirm target.
        user_fields = User._meta.get_all_field_names()
        try:
            self.fields["username"].help_text = ugettext(
                "Only letters, numbers, dashes or underscores please")
        except KeyError:
            # "username" may be excluded via settings (see module setup).
            pass
        for field in self.fields:
            # Make user fields required.
            if field in user_fields:
                self.fields[field].required = True
            # Disable auto-complete for password fields.
            # Password isn't required for profile update.
            if field.startswith("password"):
                self.fields[field].widget.attrs["autocomplete"] = "off"
                self.fields[field].widget.attrs.pop("required", "")
                if not self._signup:
                    self.fields[field].required = False
                    if field == "password1":
                        self.fields[field].help_text = ugettext(
                            "Leave blank unless you want "
                            "to change your password")
        # Add any profile fields to the form.
        try:
            profile_fields_form = self.get_profile_fields_form()
            profile_fields = profile_fields_form().fields
            self.fields.update(profile_fields)
            if not self._signup:
                user_profile = get_profile_for_user(self.instance)
                for field in profile_fields:
                    value = getattr(user_profile, field)
                    # Check for multiple initial values, i.e. a m2m field
                    if isinstance(value, Manager):
                        value = value.all()
                    self.initial[field] = value
        except ProfileNotConfigured:
            pass

    def clean_username(self):
        """
        Ensure the username doesn't exist or contain invalid chars.
        We limit it to slugifiable chars since it's used as the slug
        for the user's profile view.
        """
        username = self.cleaned_data.get("username")
        if username.lower() != slugify(username).lower():
            raise forms.ValidationError(
                ugettext("Username can only contain letters, numbers, dashes "
                         "or underscores."))
        lookup = {"username__iexact": username}
        try:
            # Exclude self so editing your own profile doesn't collide.
            User.objects.exclude(id=self.instance.id).get(**lookup)
        except User.DoesNotExist:
            return username
        raise forms.ValidationError(
            ugettext("This username is already registered"))

    def clean_password2(self):
        """
        Ensure the password fields are equal, and match the minimum
        length defined by ``ACCOUNTS_MIN_PASSWORD_LENGTH``.
        """
        password1 = self.cleaned_data.get("password1")
        password2 = self.cleaned_data.get("password2")
        if password1:
            errors = []
            if password1 != password2:
                errors.append(ugettext("Passwords do not match"))
            if len(password1) < settings.ACCOUNTS_MIN_PASSWORD_LENGTH:
                errors.append(
                    ugettext("Password must be at least %s characters") %
                    settings.ACCOUNTS_MIN_PASSWORD_LENGTH)
            if errors:
                # Attach errors to the first field so they render there.
                self._errors["password1"] = self.error_class(errors)
        return password2

    def clean_email(self):
        """
        Ensure the email address is not already registered.
        """
        email = self.cleaned_data.get("email")
        qs = User.objects.exclude(id=self.instance.id).filter(email=email)
        if len(qs) == 0:
            return email
        raise forms.ValidationError(
            ugettext("This email is already registered"))

    def save(self, *args, **kwargs):
        """
        Create the new user. If no username is supplied (may be hidden
        via ``ACCOUNTS_PROFILE_FORM_EXCLUDE_FIELDS`` or
        ``ACCOUNTS_NO_USERNAME``), we generate a unique username, so
        that if profile pages are enabled, we still have something to
        use as the profile's slug.
        """
        kwargs["commit"] = False
        user = super(ProfileForm, self).save(*args, **kwargs)
        try:
            self.cleaned_data["username"]
        except KeyError:
            if not self.instance.username:
                # Derive a username from the real name, falling back to the
                # local part of the email address.
                try:
                    username = ("%(first_name)s %(last_name)s" %
                                self.cleaned_data).strip()
                except KeyError:
                    username = ""
                if not username:
                    username = self.cleaned_data["email"].split("@")[0]
                qs = User.objects.exclude(id=self.instance.id)
                user.username = unique_slug(qs, "username", slugify(username))
        password = self.cleaned_data.get("password1")
        if password:
            user.set_password(password)
        elif self._signup:
            try:
                user.set_unusable_password()
            except AttributeError:
                # This could happen if using a custom user model that
                # doesn't inherit from Django's AbstractBaseUser.
                pass
        user.save()
        try:
            profile = get_profile_for_user(user)
            profile_form = self.get_profile_fields_form()
            profile_form(self.data, self.files, instance=profile).save()
        except ProfileNotConfigured:
            pass
        if self._signup:
            if (settings.ACCOUNTS_VERIFICATION_REQUIRED or
                    settings.ACCOUNTS_APPROVAL_REQUIRED):
                user.is_active = False
                user.save()
            else:
                # Log the new user straight in via the token backend.
                token = default_token_generator.make_token(user)
                user = authenticate(uidb36=int_to_base36(user.id),
                                    token=token,
                                    is_active=True)
        return user

    def get_profile_fields_form(self):
        # ``ProfileFieldsForm`` only exists when a profile model is
        # configured; a NameError means it isn't.
        try:
            return ProfileFieldsForm
        except NameError:
            raise ProfileNotConfigured
class PasswordResetForm(Html5Mixin, forms.Form):
    """
    Validates a username or email address so that a login token for
    changing the password can be sent to the matching active user.
    """
    username = forms.CharField(label=username_label)

    def clean(self):
        lookup_value = self.cleaned_data.get("username")
        by_username_or_email = (Q(username=lookup_value) |
                                Q(email=lookup_value))
        try:
            self._user = User.objects.get(by_username_or_email,
                                          is_active=True)
        except User.DoesNotExist:
            raise forms.ValidationError(
                ugettext("Invalid username/email"))
        return self.cleaned_data

    def save(self):
        """
        Return the user found in ``clean()`` - used for sending the
        login email (None if validation never ran).
        """
        return getattr(self, "_user", None)
|
|
import urlparse
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.http import Http404
from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.template import RequestContext
from django.utils.translation import ugettext as _
try:
from django.utils.timezone import now # Django 1.4 aware datetimes
except ImportError:
from datetime import datetime
now = datetime.now
from postman.fields import autocompleter_app
from postman.forms import WriteForm, AnonymousWriteForm, QuickReplyForm, FullReplyForm
from postman.models import Message, get_order_by
from postman.urls import OPTION_MESSAGES
from postman.utils import format_subject, format_body
##########
# Helpers
##########
def _get_referer(request):
    """Return the HTTP_REFERER as a scheme/host-less URL, or None if absent."""
    if 'HTTP_REFERER' not in request.META:
        return None
    parts = urlparse.urlsplit(request.META['HTTP_REFERER'])
    return urlparse.urlunsplit(('', '', parts.path, parts.query, parts.fragment))
########
# Views
########
def _folder(request, folder_name, view_name, option, template_name):
    """Render one of the message folders (code common to the folder views)."""
    kwargs = {}
    if option:
        kwargs['option'] = option
    order_by = get_order_by(request.GET)
    if order_by:
        kwargs['order_by'] = order_by
    folder_query = getattr(Message.objects, folder_name)
    msgs = folder_query(request.user, **kwargs)
    context = {
        'pm_messages': msgs,  # avoid 'messages', already used by contrib.messages
        'by_conversation': option is None,
        'by_message': option == OPTION_MESSAGES,
        'by_conversation_url': reverse(view_name),
        'by_message_url': reverse(view_name, args=[OPTION_MESSAGES]),
        'current_url': request.get_full_path(),
        'gets': request.GET,  # useful to postman_order_by template tag
    }
    return render_to_response(template_name, context,
                              context_instance=RequestContext(request))
@login_required
def inbox(request, option=None, template_name='postman/inbox.html'):
    """
    Display the list of received messages for the current user.
    Optional arguments:
        ``option``: OPTION_MESSAGES to view all messages; None (default)
            to view only the last message of each conversation
        ``template_name``: the name of the template to use
    """
    return _folder(request, 'inbox', 'postman_inbox',
                   option=option, template_name=template_name)
@login_required
def sent(request, option=None, template_name='postman/sent.html'):
    """
    Display the list of sent messages for the current user.
    Optional arguments: refer to inbox()
    """
    return _folder(request, 'sent', 'postman_sent',
                   option=option, template_name=template_name)
@login_required
def archives(request, option=None, template_name='postman/archives.html'):
    """
    Display the list of archived messages for the current user.
    Optional arguments: refer to inbox()
    """
    return _folder(request, 'archives', 'postman_archives',
                   option=option, template_name=template_name)
@login_required
def trash(request, option=None, template_name='postman/trash.html'):
    """
    Display the list of deleted messages for the current user.
    Optional arguments: refer to inbox()
    """
    return _folder(request, 'trash', 'postman_trash',
                   option=option, template_name=template_name)
def write(request, recipients=None, form_classes=(WriteForm, AnonymousWriteForm),
          autocomplete_channels=None, template_name='postman/write.html',
          success_url=None, user_filter=None, exchange_filter=None,
          max=None, auto_moderators=()):
    """
    Display a form to compose a message.
    Optional arguments:
        ``recipients``: a colon-separated list of usernames
        ``form_classes``: a 2-tuple of form classes
        ``autocomplete_channels``: a channel name or a 2-tuple of names
        ``template_name``: the name of the template to use
        ``success_url``: where to redirect to after a successful POST
        ``user_filter``: a filter for recipients
        ``exchange_filter``: a filter for exchanges between a sender and a recipient
        ``max``: an upper limit for the recipients number
        ``auto_moderators``: a list of auto-moderation functions
    """
    ## hack to allow use of ajax_select for multiple users:
    if request.method == 'POST':
        request.POST._mutable = True
        post_recipients = request.POST['recipients'].replace(',', '|')
        # This try/except handles the case when a user submits the form with
        # some error (missing subject, for instance) and then, after being
        # told about the error, corrects it and resubmits.  In that case the
        # posted recipients were already fixed and the hack is not necessary.
        try:
            post_ids = [int(n) for n in post_recipients.split('|') if n != u'']
            users = User.objects.filter(id__in=post_ids)
            request.POST['recipients'] = ','.join([user.username for user in users])
        except Exception:
            # was a bare ``except:`` -- keep best-effort behavior but stop
            # swallowing KeyboardInterrupt/SystemExit
            try:
                # handle the case when something like u'|u|s|e|r|1|0|' comes in
                # request.POST['recipients']
                request.POST['recipients'] = post_recipients.replace('|', '')
            except Exception:
                pass
        request.POST._mutable = False

    user = request.user
    # anonymous visitors get the second (anonymous) form class
    form_class = form_classes[0] if user.is_authenticated() else form_classes[1]
    if isinstance(autocomplete_channels, tuple) and len(autocomplete_channels) == 2:
        channel = autocomplete_channels[user.is_anonymous()]
    else:
        channel = autocomplete_channels
    next_url = _get_referer(request)
    if request.method == 'POST':
        form = form_class(request.POST, sender=user, channel=channel,
                          user_filter=user_filter, exchange_filter=exchange_filter,
                          max=max)
        if form.is_valid():
            is_successful = form.save(auto_moderators=auto_moderators)
            if is_successful:
                messages.success(request, _("Message successfully sent."), fail_silently=True)
            else:
                messages.warning(request, _("Message rejected for at least one recipient."), fail_silently=True)
            return redirect(request.GET.get('next', success_url or next_url or 'postman_inbox'))
    else:
        initial = dict(request.GET.items())  # allow optional initializations by query string
        if recipients:
            # order_by() is not mandatory, but: a) it doesn't hurt; b) it eases the test suite
            # and anyway the original ordering cannot be respected.
            usernames = list(User.objects.values_list('username', flat=True).filter(
                is_active=True,
                username__in=[r.strip() for r in recipients.split(':') if r and not r.isspace()],
            ).order_by('username'))
            if usernames:
                initial.update(recipients=', '.join(usernames))
        form = form_class(initial=initial, channel=channel)
    return render_to_response(template_name, {
        'form': form,
        'autocompleter_app': autocompleter_app,
        'next_url': request.GET.get('next', next_url),
    }, context_instance=RequestContext(request))
# Optionally require authentication for write() as well, making the
# AnonymousWriteForm branch unreachable.
if getattr(settings, 'POSTMAN_DISALLOW_ANONYMOUS', False):
    write = login_required(write)
@login_required
def reply(request, message_id, form_class=FullReplyForm,
          formatters=(format_subject, format_body), autocomplete_channel=None,
          template_name='postman/reply.html', success_url=None, user_filter=None,
          exchange_filter=None, max=None, auto_moderators=[]):
    """
    Display a form to compose a reply.
    Optional arguments:
        ``form_class``: the form class to use
        ``formatters``: a 2-tuple of functions to prefill the subject and body fields
        ``autocomplete_channel``: a channel name
        ``template_name``: the name of the template to use
        ``success_url``: where to redirect to after a successful POST
        ``user_filter``: a filter for recipients
        ``exchange_filter``: a filter for exchanges between a sender and a recipient
        ``max``: an upper limit for the recipients number
        ``auto_moderators``: a list of auto-moderation functions
    """
    user = request.user
    # 404 unless the user has visibility permission on the parent message.
    perms = Message.objects.perms(user)
    parent = get_object_or_404(Message, perms, pk=message_id)
    # prefilled subject/body quoted from the parent message
    initial = parent.quote(*formatters)
    next_url = _get_referer(request)
    if request.method == 'POST':
        post = request.POST.copy()
        if 'subject' not in post:  # case of the quick reply form
            post['subject'] = initial['subject']
        form = form_class(post, sender=user, recipient=parent.sender or parent.email,
                          channel=autocomplete_channel, user_filter=user_filter,
                          exchange_filter=exchange_filter, max=max)
        if form.is_valid():
            is_successful = form.save(parent=parent, auto_moderators=auto_moderators)
            if is_successful:
                messages.success(request, _("Message successfully sent."), fail_silently=True)
            else:
                messages.warning(request, _("Message rejected for at least one recipient."), fail_silently=True)
            return redirect(request.GET.get('next', success_url or next_url or 'postman_inbox'))
    else:
        initial.update(request.GET.items())  # allow overwriting of the defaults by query string
        form = form_class(initial=initial, channel=autocomplete_channel)
    return render_to_response(template_name, {
        'form': form,
        'recipient': parent.obfuscated_sender,
        'autocompleter_app': autocompleter_app,
        'next_url': request.GET.get('next', next_url),
    }, context_instance=RequestContext(request))
def _view(request, filter, form_class=QuickReplyForm, formatters=(format_subject,format_body),
          template_name='postman/view.html'):
    """
    Code common to the by-message and by-conversation views.
    Optional arguments:
        ``form_class``: the form class to use
        ``formatters``: a 2-tuple of functions to prefill the subject and body fields
        ``template_name``: the name of the template to use
    """
    user = request.user
    msgs = Message.objects.thread(user, filter)
    if msgs:
        Message.objects.set_read(user, filter)
        # are all messages archived ?
        # (for/else: the else branch runs only if the loop wasn't broken)
        for m in msgs:
            if not getattr(m, ('sender' if m.sender == user else 'recipient') + '_archived'):
                archived = False
                break
        else:
            archived = True
        # look for the more recent received message (and non-deleted to comply with the future perms() control), if any
        for m in reversed(msgs):
            if m.recipient == user and not m.recipient_deleted_at:
                received = m
                break
        else:
            received = None
        return render_to_response(template_name, {
            'pm_messages': msgs,
            'archived': archived,
            'reply_to_pk': received.pk if received else None,
            'form': form_class(initial=received.quote(*formatters)) if received else None,
            'next_url': request.GET.get('next', reverse('postman_inbox')),
        }, context_instance=RequestContext(request))
    # empty thread: either forged ids or no permission
    raise Http404
@login_required
def view(request, message_id, *args, **kwargs):
    """Display one specific message."""
    message_filter = Q(pk=message_id)
    return _view(request, message_filter, *args, **kwargs)
@login_required
def view_conversation(request, thread_id, *args, **kwargs):
    """Display a conversation."""
    thread_filter = Q(thread=thread_id)
    return _view(request, thread_filter, *args, **kwargs)
def _update(request, field_bit, success_msg, field_value=None, success_url=None):
    """
    Code common to the archive/delete/undelete actions.
    Arguments:
    ``field_bit``: a part of the name of the field to update
    ``success_msg``: the displayed text in case of success
    Optional arguments:
    ``field_value``: the value to set in the field
    ``success_url``: where to redirect to after a successful POST
    Raises Http404 on non-POST requests or when no row was touched.
    """
    # The action mutates messages, so only POST is accepted.
    if not request.method == 'POST':
        raise Http404
    next_url = _get_referer(request) or 'postman_inbox'
    pks = request.POST.getlist('pks')
    tpks = request.POST.getlist('tpks')
    if pks or tpks:
        user = request.user
        filter = Q(pk__in=pks) | Q(thread__in=tpks)
        # Update only the current user's side of each message:
        # 'recipient_<bit>' for received ones, 'sender_<bit>' for sent ones.
        recipient_rows = Message.objects.as_recipient(user, filter).update(**{'recipient_{0}'.format(field_bit): field_value})
        sender_rows = Message.objects.as_sender(user, filter).update(**{'sender_{0}'.format(field_bit): field_value})
        if not (recipient_rows or sender_rows):
            raise Http404 # abnormal enough, like forged ids
        messages.success(request, success_msg, fail_silently=True)
        return redirect(request.GET.get('next', success_url or next_url))
    else:
        messages.warning(request, _("Select at least one object."), fail_silently=True)
        return redirect(next_url)
@login_required
def archive(request, *args, **kwargs):
    """Flag the selected messages/conversations as archived."""
    success_msg = _("Messages or conversations successfully archived.")
    return _update(request, 'archived', success_msg, True, *args, **kwargs)
@login_required
def delete(request, *args, **kwargs):
    """Flag the selected messages/conversations as deleted (timestamped)."""
    success_msg = _("Messages or conversations successfully deleted.")
    return _update(request, 'deleted_at', success_msg, now(), *args, **kwargs)
@login_required
def undelete(request, *args, **kwargs):
    """Clear the deleted flag on the selected messages/conversations."""
    success_msg = _("Messages or conversations successfully recovered.")
    # No field_value given: _update's default of None clears 'deleted_at'.
    return _update(request, 'deleted_at', success_msg, *args, **kwargs)
def _mark_as(request, read=True, *args, **kwargs):
    """
    Code common to the mark-as-read and mark-as-unread actions.
    Optional arguments:
    ``read``: True to mark the selection as read, False to mark it as unread
    Raises Http404 on non-POST requests.
    """
    if not request.method == 'POST':
        raise Http404
    next_url = _get_referer(request) or 'postman_inbox'
    pks = request.POST.getlist('pks')
    tpks = request.POST.getlist('tpks')
    if pks or tpks:
        user = request.user
        filter_ = Q(pk__in=pks) | Q(thread__in=tpks)
        mgs_to_change = Message.objects.as_recipient(user, filter_)
        # A non-null read_at means "read"; clearing it marks the message unread.
        mgs_to_change.update(read_at=now() if read else None)
        return redirect(request.GET.get('next', next_url))
    else:
        messages.error(request, (u"Couldn't mark messages as unread. Sorry."))
        # Bug fix: request.GET.get('next') returns None when no 'next'
        # parameter is present, and redirect(None) raises; fall back to the
        # referer like the success branch does.
        return redirect(request.GET.get('next', next_url))
@login_required
def mark_as_read(request, *args, **kwargs):
    """Mark the selected messages/conversations as read."""
    response = _mark_as(request, True, *args, **kwargs)
    return response
@login_required
def mark_as_unread(request, *args, **kwargs):
    """Mark the selected messages/conversations as unread."""
    response = _mark_as(request, False, *args, **kwargs)
    return response
|
|
"""The tests for the device tracker component."""
# pylint: disable=protected-access,too-many-public-methods
import unittest
from unittest.mock import patch
from datetime import datetime, timedelta
import os
from blumate.loader import get_component
import blumate.util.dt as dt_util
from blumate.const import (
ATTR_ENTITY_ID, ATTR_ENTITY_PICTURE, ATTR_FRIENDLY_NAME, ATTR_HIDDEN,
STATE_HOME, STATE_NOT_HOME, CONF_PLATFORM)
import blumate.components.device_tracker as device_tracker
from tests.common import (
get_test_home_assistant, fire_time_changed, fire_service_discovered)
class TestComponentsDeviceTracker(unittest.TestCase):
    """Test the Device tracker."""
    def setUp(self): # pylint: disable=invalid-name
        """Setup things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        # Path of the known-devices YAML registry the component writes to.
        self.yaml_devices = self.hass.config.path(device_tracker.YAML_DEVICES)
    def tearDown(self): # pylint: disable=invalid-name
        """Stop everything that was started."""
        # Remove the registry written by the test, if any, then stop hass.
        try:
            os.remove(self.yaml_devices)
        except FileNotFoundError:
            pass
        self.hass.stop()
    def test_is_on(self):
        """Test is_on method."""
        entity_id = device_tracker.ENTITY_ID_FORMAT.format('test')
        self.hass.states.set(entity_id, STATE_HOME)
        self.assertTrue(device_tracker.is_on(self.hass, entity_id))
        self.hass.states.set(entity_id, STATE_NOT_HOME)
        self.assertFalse(device_tracker.is_on(self.hass, entity_id))
    def test_reading_yaml_config(self):
        """Test the rendering of the YAML configuration."""
        dev_id = 'test'
        device = device_tracker.Device(
            self.hass, timedelta(seconds=180), 0, True, dev_id,
            'AB:CD:EF:GH:IJ', 'Test name', 'http://test.picture', True)
        device_tracker.update_config(self.yaml_devices, dev_id, device)
        self.assertTrue(device_tracker.setup(self.hass, {}))
        # Round-trip: the device written above must load back identically.
        config = device_tracker.load_config(self.yaml_devices, self.hass,
                                            device.consider_home, 0)[0]
        self.assertEqual(device.dev_id, config.dev_id)
        self.assertEqual(device.track, config.track)
        self.assertEqual(device.mac, config.mac)
        self.assertEqual(device.config_picture, config.config_picture)
        self.assertEqual(device.away_hide, config.away_hide)
        self.assertEqual(device.consider_home, config.consider_home)
    def test_setup_without_yaml_file(self):
        """Test with no YAML file."""
        self.assertTrue(device_tracker.setup(self.hass, {}))
    def test_adding_unknown_device_to_config(self):
        """Test the adding of unknown devices to configuration file."""
        scanner = get_component('device_tracker.test').SCANNER
        scanner.reset()
        scanner.come_home('DEV1')
        self.assertTrue(device_tracker.setup(self.hass, {
            device_tracker.DOMAIN: {CONF_PLATFORM: 'test'}}))
        config = device_tracker.load_config(self.yaml_devices, self.hass,
                                            timedelta(seconds=0), 0)
        # Newly-seen device gets a lowercased dev_id and is tracked by default.
        assert len(config) == 1
        assert config[0].dev_id == 'dev1'
        assert config[0].track
    def test_discovery(self):
        """Test discovery."""
        scanner = get_component('device_tracker.test').SCANNER
        with patch.dict(device_tracker.DISCOVERY_PLATFORMS, {'test': 'test'}):
            with patch.object(scanner, 'scan_devices') as mock_scan:
                self.assertTrue(device_tracker.setup(self.hass, {
                    device_tracker.DOMAIN: {CONF_PLATFORM: 'test'}}))
                fire_service_discovered(self.hass, 'test', {})
                self.assertTrue(mock_scan.called)
    def test_update_stale(self):
        """Test stalled update."""
        scanner = get_component('device_tracker.test').SCANNER
        scanner.reset()
        scanner.come_home('DEV1')
        register_time = datetime(2015, 9, 15, 23, tzinfo=dt_util.UTC)
        scan_time = datetime(2015, 9, 15, 23, 1, tzinfo=dt_util.UTC)
        # First scan at register_time: device is home.
        with patch('blumate.components.device_tracker.dt_util.utcnow',
                   return_value=register_time):
            self.assertTrue(device_tracker.setup(self.hass, {
                'device_tracker': {
                    'platform': 'test',
                    'consider_home': 59,
                }}))
            self.assertEqual(STATE_HOME,
                             self.hass.states.get('device_tracker.dev1').state)
        scanner.leave_home('DEV1')
        # Second scan 60s later, past the 59s consider_home grace period.
        with patch('blumate.components.device_tracker.dt_util.utcnow',
                   return_value=scan_time):
            fire_time_changed(self.hass, scan_time)
            self.hass.pool.block_till_done()
            self.assertEqual(STATE_NOT_HOME,
                             self.hass.states.get('device_tracker.dev1').state)
    def test_entity_attributes(self):
        """Test the entity attributes."""
        dev_id = 'test_entity'
        entity_id = device_tracker.ENTITY_ID_FORMAT.format(dev_id)
        friendly_name = 'Paulus'
        picture = 'http://placehold.it/200x200'
        device = device_tracker.Device(
            self.hass, timedelta(seconds=180), 0, True, dev_id, None,
            friendly_name, picture, away_hide=True)
        device_tracker.update_config(self.yaml_devices, dev_id, device)
        self.assertTrue(device_tracker.setup(self.hass, {}))
        attrs = self.hass.states.get(entity_id).attributes
        self.assertEqual(friendly_name, attrs.get(ATTR_FRIENDLY_NAME))
        self.assertEqual(picture, attrs.get(ATTR_ENTITY_PICTURE))
    def test_device_hidden(self):
        """Test hidden devices."""
        dev_id = 'test_entity'
        entity_id = device_tracker.ENTITY_ID_FORMAT.format(dev_id)
        device = device_tracker.Device(
            self.hass, timedelta(seconds=180), 0, True, dev_id, None,
            away_hide=True)
        device_tracker.update_config(self.yaml_devices, dev_id, device)
        scanner = get_component('device_tracker.test').SCANNER
        scanner.reset()
        self.assertTrue(device_tracker.setup(self.hass, {
            device_tracker.DOMAIN: {CONF_PLATFORM: 'test'}}))
        # away_hide devices must expose the hidden attribute while not home.
        self.assertTrue(self.hass.states.get(entity_id)
                        .attributes.get(ATTR_HIDDEN))
    def test_group_all_devices(self):
        """Test grouping of devices."""
        dev_id = 'test_entity'
        entity_id = device_tracker.ENTITY_ID_FORMAT.format(dev_id)
        device = device_tracker.Device(
            self.hass, timedelta(seconds=180), 0, True, dev_id, None,
            away_hide=True)
        device_tracker.update_config(self.yaml_devices, dev_id, device)
        scanner = get_component('device_tracker.test').SCANNER
        scanner.reset()
        self.assertTrue(device_tracker.setup(self.hass, {
            device_tracker.DOMAIN: {CONF_PLATFORM: 'test'}}))
        state = self.hass.states.get(device_tracker.ENTITY_ID_ALL_DEVICES)
        self.assertIsNotNone(state)
        self.assertEqual(STATE_NOT_HOME, state.state)
        self.assertSequenceEqual((entity_id,),
                                 state.attributes.get(ATTR_ENTITY_ID))
    @patch('blumate.components.device_tracker.DeviceTracker.see')
    def test_see_service(self, mock_see):
        """Test the see service."""
        self.assertTrue(device_tracker.setup(self.hass, {}))
        mac = 'AB:CD:EF:GH'
        dev_id = 'some_device'
        host_name = 'example.com'
        location_name = 'Work'
        gps = [.3, .8]
        # The service call should be forwarded verbatim as keyword arguments.
        device_tracker.see(self.hass, mac, dev_id, host_name, location_name,
                           gps)
        self.hass.pool.block_till_done()
        mock_see.assert_called_once_with(
            mac=mac, dev_id=dev_id, host_name=host_name,
            location_name=location_name, gps=gps)
    def test_not_write_duplicate_yaml_keys(self):
        """Test that the device tracker will not generate invalid YAML."""
        self.assertTrue(device_tracker.setup(self.hass, {}))
        # Two devices with the same host name must still produce two entries.
        device_tracker.see(self.hass, 'mac_1', host_name='hello')
        device_tracker.see(self.hass, 'mac_2', host_name='hello')
        self.hass.pool.block_till_done()
        config = device_tracker.load_config(self.yaml_devices, self.hass,
                                            timedelta(seconds=0), 0)
        assert len(config) == 2
    def test_not_allow_invalid_dev_id(self):
        """Test that the device tracker will not allow invalid dev ids."""
        self.assertTrue(device_tracker.setup(self.hass, {}))
        # Hyphens are not valid in a slugified dev_id; entry must be dropped.
        device_tracker.see(self.hass, dev_id='hello-world')
        config = device_tracker.load_config(self.yaml_devices, self.hass,
                                            timedelta(seconds=0), 0)
        assert len(config) == 0
|
|
#!/usr/bin/env python
#
# Use the raw transactions API to spend PIVs received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a pivxd or pivx-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    # Round-trip a value with 8 decimal places through float/JSON and verify
    # the satoshi count survives intact.
    probe = Decimal("20000000.00000003")
    roundtrip = json.loads(json.dumps(float(probe)))
    if int(roundtrip * 1.0e8) != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
    """Return the default location of the pivx data directory"""
    system = platform.system()
    if system == "Darwin":
        return os.path.expanduser("~/Library/Application Support/PIVX/")
    if system == "Windows":
        return os.path.join(os.environ['APPDATA'], "PIVX")
    # Everything else (Linux, BSD, ...) uses a dot-directory in $HOME.
    return os.path.expanduser("~/.pivx")
def read_bitcoin_config(dbdir):
    """Read the pivx.conf file from dbdir, returns dictionary of settings"""
    # NOTE(review): 'ConfigParser' is the Python 2 module name; this function
    # (like the rest of this script) is Python-2-only as written.
    from ConfigParser import SafeConfigParser
    class FakeSecHead(object):
        # ConfigParser requires at least one [section] header, which pivx.conf
        # does not have; this wrapper injects a fake [all] section before the
        # real file contents.
        def __init__(self, fp):
            self.fp = fp
            self.sechead = '[all]\n'
        def readline(self):
            if self.sechead:
                # Emit the fake header exactly once, then fall through to
                # the underlying file on subsequent calls.
                try: return self.sechead
                finally: self.sechead = None
            else:
                s = self.fp.readline()
                # Strip trailing '#' comments, keeping the line terminator.
                if s.find('#') != -1:
                    s = s[0:s.find('#')].strip() +"\n"
                return s
    config_parser = SafeConfigParser()
    config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "pivx.conf"))))
    return dict(config_parser.items("all"))
def connect_JSON(config):
    """Connect to a pivx JSON-RPC server"""
    testnet = config.get('testnet', '0')
    testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
    if not 'rpcport' in config:
        config['rpcport'] = 51475 if testnet else 51473
    connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
    try:
        result = ServiceProxy(connect)
        # ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
        # but also make sure the pivxd we're talking to is/isn't testnet:
        if result.getmininginfo()['testnet'] != testnet:
            sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
            sys.exit(1)
        return result
    except Exception:
        # Bug fix: the previous bare "except:" also caught the SystemExit
        # raised by sys.exit(1) above, replacing the testnet-mismatch message
        # with a misleading "Error connecting" one. Exception does not catch
        # SystemExit, so the intended message now reaches the user.
        sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
        sys.exit(1)
def unlock_wallet(pivxd):
    """Ensure the wallet is unlocked; return True when it is usable.
    Unencrypted wallets ('unlocked_until' absent) are always usable. For an
    encrypted, locked wallet the user is prompted for the passphrase, which
    unlocks the wallet for 5 seconds; the return value reports whether the
    wallet is unlocked afterwards.
    """
    info = pivxd.getinfo()
    if 'unlocked_until' not in info:
        return True # wallet is not encrypted
    t = int(info['unlocked_until'])
    if t <= time.time():
        try:
            passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
            pivxd.walletpassphrase(passphrase, 5)
        except Exception:
            # Bug fix: the previous bare "except:" also swallowed
            # KeyboardInterrupt/SystemExit, making it impossible to break out
            # of the passphrase prompt with Ctrl-C.
            sys.stderr.write("Wrong passphrase\n")
    info = pivxd.getinfo()
    return int(info['unlocked_until']) > time.time()
def list_available(pivxd):
    """Group the wallet's unspent outputs by receiving address.
    Returns a dict mapping address to
    {"total": summed value, "outputs": [listunspent entries], "account": name}.
    """
    address_to_account = {}
    for info in pivxd.listreceivedbyaddress(0):
        address_to_account[info["address"]] = info["account"]
    address_summary = {}
    for output in pivxd.listunspent(0):
        # listunspent doesn't give addresses, so look up the funding tx:
        rawtx = pivxd.getrawtransaction(output['txid'], 1)
        vout = rawtx["vout"][output['vout']]
        pk = vout["scriptPubKey"]
        # This code only deals with ordinary pay-to-pivx-address
        # or pay-to-script-hash outputs right now; anything exotic is ignored.
        if pk["type"] not in ("pubkeyhash", "scripthash"):
            continue
        address = pk["addresses"][0]
        entry = address_summary.get(address)
        if entry is None:
            address_summary[address] = {
                "total" : vout["value"],
                "outputs" : [output],
                "account" : address_to_account.get(address, "")
                }
        else:
            entry["total"] += vout["value"]
            entry["outputs"].append(output)
    return address_summary
def select_coins(needed, inputs):
    """Greedily pick coins from inputs until 'needed' is covered.
    Returns (selected_outputs, change) where change may be negative when the
    inputs cannot cover the amount. Feel free to improve this; it is good
    enough for simple needs.
    """
    outputs = []
    have = Decimal("0.0")
    for coin in inputs:
        if have >= needed:
            break
        outputs.append({ "txid":coin["txid"], "vout":coin["vout"]})
        have += coin["amount"]
    return (outputs, have-needed)
def create_tx(pivxd, fromaddresses, toaddress, amount, fee):
    """Build and sign a raw transaction spending from the given addresses.
    Sends 'amount' to 'toaddress'; change above BASE_FEE goes back to the
    last of 'fromaddresses'. Exits the process when funds are insufficient
    or signing fails. Returns the signed transaction as a hex string.
    """
    all_coins = list_available(pivxd)
    total_available = Decimal("0.0")
    needed = amount+fee
    potential_inputs = []
    for addr in fromaddresses:
        if addr not in all_coins:
            continue
        potential_inputs.extend(all_coins[addr]["outputs"])
        total_available += all_coins[addr]["total"]
    if total_available < needed:
        sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
        sys.exit(1)
    #
    # Note:
    # Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
    # Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
    # Decimals, I'm casting amounts to float before sending them to pivxd.
    #
    outputs = { toaddress : float(amount) }
    (inputs, change_amount) = select_coins(needed, potential_inputs)
    if change_amount > BASE_FEE: # don't bother with zero or tiny change
        change_address = fromaddresses[-1]
        if change_address in outputs:
            # Sending to an address we also spend from: merge the amounts.
            outputs[change_address] += float(change_amount)
        else:
            outputs[change_address] = float(change_amount)
    rawtx = pivxd.createrawtransaction(inputs, outputs)
    signed_rawtx = pivxd.signrawtransaction(rawtx)
    if not signed_rawtx["complete"]:
        sys.stderr.write("signrawtransaction failed\n")
        sys.exit(1)
    txdata = signed_rawtx["hex"]
    return txdata
def compute_amount_in(pivxd, txinfo):
    """Sum the values of all inputs spent by txinfo (resolved via RPC)."""
    total = Decimal("0.0")
    for vin in txinfo['vin']:
        funding = pivxd.getrawtransaction(vin['txid'], 1)
        total += funding['vout'][vin['vout']]['value']
    return total
def compute_amount_out(txinfo):
    """Sum the values of all outputs of txinfo."""
    total = Decimal("0.0")
    for vout in txinfo['vout']:
        total += vout['value']
    return total
def sanity_test_fee(pivxd, txdata_hex, max_fee):
    """Refuse to relay a transaction whose implied fee looks unreasonable.
    The fee is the difference between total inputs and total outputs.
    Exits the process with an error message when the fee exceeds max_fee,
    or when a large or tiny-amount transaction carries no fee.
    """
    class FeeError(RuntimeError):
        pass
    try:
        txinfo = pivxd.decoderawtransaction(txdata_hex)
        total_in = compute_amount_in(pivxd, txinfo)
        total_out = compute_amount_out(txinfo)
        # Bug fix: 'fee' was referenced below but never defined, raising a
        # NameError instead of performing the intended checks.
        fee = total_in - total_out
        if fee > max_fee:
            raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee))
        tx_size = len(txdata_hex)/2
        kb = tx_size/1000 # integer division rounds down
        if kb > 1 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
        if total_in < 0.01 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee, tiny-amount transaction")
        # Exercise for the reader: compute transaction priority, and
        # warn if this is a very-low-priority transaction
    except FeeError as err:
        sys.stderr.write((str(err)+"\n"))
        sys.exit(1)
def main():
    """Command-line entry point: list funds or build and send a transaction.
    With no --amount, prints the available funds per address. Otherwise
    builds, fee-checks and (unless --dry_run) broadcasts a transaction.
    """
    import optparse
    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--from", dest="fromaddresses", default=None,
                      help="addresses to get PIVs from")
    parser.add_option("--to", dest="to", default=None,
                      help="address to get send PIVs to")
    parser.add_option("--amount", dest="amount", default=None,
                      help="amount to send")
    parser.add_option("--fee", dest="fee", default="0.0",
                      help="fee to include")
    parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
                      help="location of pivx.conf file with RPC username/password (default: %default)")
    parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
                      help="Use the test network")
    parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
                      help="Don't broadcast the transaction, just create and print the transaction data")
    (options, args) = parser.parse_args()
    check_json_precision()
    config = read_bitcoin_config(options.datadir)
    if options.testnet: config['testnet'] = True
    pivxd = connect_JSON(config)
    if options.amount is None:
        # List mode: no amount given, just show available funds per address.
        # NOTE(review): .iteritems() makes this Python-2-only.
        address_summary = list_available(pivxd)
        for address,info in address_summary.iteritems():
            n_transactions = len(info['outputs'])
            if n_transactions > 1:
                print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
            else:
                print("%s %.8f %s"%(address, info['total'], info['account']))
    else:
        # Send mode: build, sanity-check and (optionally) broadcast.
        fee = Decimal(options.fee)
        amount = Decimal(options.amount)
        while unlock_wallet(pivxd) == False:
            pass # Keep asking for passphrase until they get it right
        txdata = create_tx(pivxd, options.fromaddresses.split(","), options.to, amount, fee)
        # Reject anything whose implied fee exceeds 1% of the sent amount.
        sanity_test_fee(pivxd, txdata, amount*Decimal("0.01"))
        if options.dry_run:
            print(txdata)
        else:
            txid = pivxd.sendrawtransaction(txdata)
            print(txid)
if __name__ == '__main__':
main()
|
|
"""Script to check the configuration file."""
import argparse
import logging
import os
from collections import OrderedDict, namedtuple
from glob import glob
from typing import Dict, List, Sequence
from unittest.mock import patch
import attr
import voluptuous as vol
from homeassistant import bootstrap, core, loader
from homeassistant.config import (
get_default_config_dir, CONF_CORE, CORE_CONFIG_SCHEMA,
CONF_PACKAGES, merge_packages_config, _format_config_error,
find_config_file, load_yaml_config_file,
extract_domain_configs, config_per_platform)
from homeassistant.util import yaml
from homeassistant.exceptions import HomeAssistantError
REQUIREMENTS = ('colorlog==4.0.2',)
_LOGGER = logging.getLogger(__name__)
# pylint: disable=protected-access
MOCKS = {
'load': ("homeassistant.util.yaml.load_yaml", yaml.load_yaml),
'load*': ("homeassistant.config.load_yaml", yaml.load_yaml),
'secrets': ("homeassistant.util.yaml.secret_yaml", yaml.secret_yaml),
}
SILENCE = (
'homeassistant.scripts.check_config.yaml.clear_secret_cache',
)
PATCHES = {}
C_HEAD = 'bold'
ERROR_STR = 'General Errors'
def color(the_color, *args, reset=None):
    """Color helper."""
    from colorlog.escape_codes import escape_codes, parse_colors
    try:
        if not args:
            assert reset is None, "You cannot reset if nothing being printed"
            return parse_colors(the_color)
        text = ' '.join(args)
        suffix = escape_codes[reset or 'reset']
        return parse_colors(the_color) + text + suffix
    except KeyError as k:
        raise ValueError("Invalid color {} in {}".format(str(k), the_color))
def run(script_args: List) -> int:
    """Handle ensure config commandline script."""
    parser = argparse.ArgumentParser(
        description="Check Home Assistant configuration.")
    parser.add_argument(
        '--script', choices=['check_config'])
    parser.add_argument(
        '-c', '--config',
        default=get_default_config_dir(),
        help="Directory that contains the Home Assistant configuration")
    parser.add_argument(
        '-i', '--info', nargs='?',
        default=None, const='all',
        help="Show a portion of the config")
    parser.add_argument(
        '-f', '--files',
        action='store_true',
        help="Show used configuration files")
    parser.add_argument(
        '-s', '--secrets',
        action='store_true',
        help="Show secret information")
    args, unknown = parser.parse_known_args()
    if unknown:
        print(color('red', "Unknown arguments:", ', '.join(unknown)))
    config_dir = os.path.join(os.getcwd(), args.config)
    print(color('bold', "Testing configuration at", config_dir))
    # Run the actual validation; everything below only renders the result.
    res = check(config_dir, args.secrets)
    domain_info = []
    if args.info:
        domain_info = args.info.split(',')
    if args.files:
        # Show every YAML file under the config dir, flagging unused ones.
        print(color(C_HEAD, 'yaml files'), '(used /',
              color('red', 'not used') + ')')
        deps = os.path.join(config_dir, 'deps')
        yaml_files = [f for f in glob(os.path.join(config_dir, '**/*.yaml'),
                                      recursive=True)
                      if not f.startswith(deps)]
        for yfn in sorted(yaml_files):
            the_color = '' if yfn in res['yaml_files'] else 'red'
            print(color(the_color, '-', yfn))
    if res['except']:
        print(color('bold_white', 'Failed config'))
        for domain, config in res['except'].items():
            # Failing domains are also shown in the info section below.
            domain_info.append(domain)
            print(' ', color('bold_red', domain + ':'),
                  color('red', '', reset='red'))
            dump_dict(config, reset='red')
            print(color('reset'))
    if domain_info:
        if 'all' in domain_info:
            print(color('bold_white', 'Successful config (all)'))
            for domain, config in res['components'].items():
                print(' ', color(C_HEAD, domain + ':'))
                dump_dict(config)
        else:
            print(color('bold_white', 'Successful config (partial)'))
            for domain in domain_info:
                if domain == ERROR_STR:
                    continue
                print(' ', color(C_HEAD, domain + ':'))
                dump_dict(res['components'].get(domain, None))
    if args.secrets:
        # Map each secret key to the file that defines it, flagging duplicates.
        flatsecret = {}
        for sfn, sdict in res['secret_cache'].items():
            sss = []
            for skey in sdict:
                if skey in flatsecret:
                    _LOGGER.error('Duplicated secrets in files %s and %s',
                                  flatsecret[skey], sfn)
                flatsecret[skey] = sfn
                sss.append(color('green', skey) if skey in res['secrets']
                           else skey)
            print(color(C_HEAD, 'Secrets from', sfn + ':'), ', '.join(sss))
        print(color(C_HEAD, 'Used Secrets:'))
        for skey, sval in res['secrets'].items():
            if sval is None:
                print(' -', skey + ':', color('red', "not found"))
                continue
            print(' -', skey + ':', sval, color('cyan', '[from:', flatsecret
                                                .get(skey, 'keyring') + ']'))
    # Exit status: number of failing domains (0 means the config is valid).
    return len(res['except'])
def check(config_dir, secrets=False):
    """Perform a check by mocking hass load functions."""
    logging.getLogger('homeassistant.loader').setLevel(logging.CRITICAL)
    res = {
        'yaml_files': OrderedDict(),  # yaml_files loaded
        'secrets': OrderedDict(),  # secret cache and secrets loaded
        'except': OrderedDict(),  # exceptions raised (with config)
        'components': None,  # successful components
        'secret_cache': None,
    }
    # pylint: disable=possibly-unused-variable
    def mock_load(filename):
        """Mock hass.util.load_yaml to save config file names."""
        res['yaml_files'][filename] = True
        return MOCKS['load'][1](filename)
    # pylint: disable=possibly-unused-variable
    def mock_secrets(ldr, node):
        """Mock _get_secrets."""
        # Record every secret lookup; None marks a secret that was not found.
        try:
            val = MOCKS['secrets'][1](ldr, node)
        except HomeAssistantError:
            val = None
        res['secrets'][node.value] = val
        return val
    # Patches to skip functions
    for sil in SILENCE:
        PATCHES[sil] = patch(sil)
    # Patches with local mock functions
    for key, val in MOCKS.items():
        if not secrets and key == 'secrets':
            continue
        # The * in the key is removed to find the mock_function (side_effect)
        # This allows us to use one side_effect to patch multiple locations
        mock_function = locals()['mock_' + key.replace('*', '')]
        PATCHES[key] = patch(val[0], side_effect=mock_function)
    # Start all patches
    for pat in PATCHES.values():
        pat.start()
    if secrets:
        # Ensure !secrets point to the patched function
        yaml.yaml.SafeLoader.add_constructor('!secret', yaml.secret_yaml)
    try:
        hass = core.HomeAssistant()
        hass.config.config_dir = config_dir
        res['components'] = hass.loop.run_until_complete(
            check_ha_config_file(hass))
        res['secret_cache'] = OrderedDict(yaml.__SECRET_CACHE)
        # Group validation errors (and their config fragments) by domain.
        for err in res['components'].errors:
            domain = err.domain or ERROR_STR
            res['except'].setdefault(domain, []).append(err.message)
            if err.config:
                res['except'].setdefault(domain, []).append(err.config)
    except Exception as err:  # pylint: disable=broad-except
        _LOGGER.exception("BURB")
        print(color('red', 'Fatal error while loading config:'), str(err))
        res['except'].setdefault(ERROR_STR, []).append(str(err))
    finally:
        # Stop all patches
        for pat in PATCHES.values():
            pat.stop()
        if secrets:
            # Ensure !secrets point to the original function
            yaml.yaml.SafeLoader.add_constructor('!secret', yaml.secret_yaml)
        bootstrap.clear_secret_cache()
    return res
def line_info(obj, **kwargs):
    """Display line config source."""
    # Objects loaded by the annotating YAML loader carry their origin.
    if not hasattr(obj, '__config_file__'):
        return '?'
    location = "[source {}:{}]".format(obj.__config_file__, obj.__line__ or '?')
    return color('cyan', location, **kwargs)
def dump_dict(layer, indent_count=3, listi=False, **kwargs):
    """Display a dict.
    A friendly version of print yaml.yaml.dump(config).
    """
    def sort_dict_key(val):
        """Return the dict key for sorting."""
        # 'platform' sorts first so each platform block starts with it.
        key = str(val[0]).lower()
        return '0' if key == 'platform' else key
    indent_str = indent_count * ' '
    if listi or isinstance(layer, list):
        # List items get a '-' marker in place of the last indent space.
        indent_str = indent_str[:-1] + '-'
    if isinstance(layer, Dict):
        for key, value in sorted(layer.items(), key=sort_dict_key):
            if isinstance(value, (dict, list)):
                print(indent_str, str(key) + ':', line_info(value, **kwargs))
                dump_dict(value, indent_count + 2)
            else:
                print(indent_str, str(key) + ':', value)
            # Only the first key of a list item carries the '-' marker.
            indent_str = indent_count * ' '
    if isinstance(layer, Sequence):
        for i in layer:
            if isinstance(i, dict):
                dump_dict(i, indent_count + 2, True)
            else:
                print(' ', indent_str, i)
# Lightweight record for a single validation error: the human-readable
# message, the owning domain (None for general errors), and the offending
# config fragment (None when not applicable).
CheckConfigError = namedtuple(
    'CheckConfigError', "message domain config")
@attr.s
class HomeAssistantConfig(OrderedDict):
    """Configuration result with errors attribute."""
    # Accumulated CheckConfigError records; the mapping itself holds the
    # validated config, keyed by domain.
    errors = attr.ib(default=attr.Factory(list))
    def add_error(self, message, domain=None, config=None):
        """Add a single error."""
        self.errors.append(CheckConfigError(str(message), domain, config))
        # Return self so callers can write `return result.add_error(...)`.
        return self
async def check_ha_config_file(hass):
    """Check if Home Assistant configuration file is valid."""
    config_dir = hass.config.config_dir
    result = HomeAssistantConfig()
    def _pack_error(package, component, config, message):
        """Handle errors from packages: _log_pkg_error."""
        message = "Package {} setup failed. Component {} {}".format(
            package, component, message)
        domain = 'homeassistant.packages.{}.{}'.format(package, component)
        pack_config = core_config[CONF_PACKAGES].get(package, config)
        result.add_error(message, domain, pack_config)
    def _comp_error(ex, domain, config):
        """Handle errors from components: async_log_exception."""
        result.add_error(
            _format_config_error(ex, domain, config), domain, config)
    # Load configuration.yaml
    try:
        config_path = await hass.async_add_executor_job(
            find_config_file, config_dir)
        if not config_path:
            return result.add_error("File configuration.yaml not found.")
        config = await hass.async_add_executor_job(
            load_yaml_config_file, config_path)
    except HomeAssistantError as err:
        return result.add_error(
            "Error loading {}: {}".format(config_path, err))
    finally:
        yaml.clear_secret_cache()
    # Extract and validate core [homeassistant] config
    try:
        core_config = config.pop(CONF_CORE, {})
        core_config = CORE_CONFIG_SCHEMA(core_config)
        result[CONF_CORE] = core_config
    except vol.Invalid as err:
        result.add_error(err, CONF_CORE, core_config)
        core_config = {}
    # Merge packages
    await merge_packages_config(
        hass, config, core_config.get(CONF_PACKAGES, {}), _pack_error)
    core_config.pop(CONF_PACKAGES, None)
    # Filter out repeating config sections
    # ("domain" and "domain 2" both belong to component "domain").
    components = set(key.split(' ')[0] for key in config.keys())
    # Process and validate config
    for domain in components:
        try:
            integration = await loader.async_get_integration(hass, domain)
        except loader.IntegrationNotFound:
            result.add_error("Integration not found: {}".format(domain))
            continue
        try:
            component = integration.get_component()
        except ImportError:
            result.add_error("Component not found: {}".format(domain))
            continue
        if hasattr(component, 'CONFIG_SCHEMA'):
            try:
                config = component.CONFIG_SCHEMA(config)
                result[domain] = config[domain]
            except vol.Invalid as ex:
                _comp_error(ex, domain, config)
                continue
        component_platform_schema = getattr(
            component, 'PLATFORM_SCHEMA_BASE',
            getattr(component, 'PLATFORM_SCHEMA', None))
        if component_platform_schema is None:
            continue
        platforms = []
        for p_name, p_config in config_per_platform(config, domain):
            # Validate component specific platform schema
            try:
                p_validated = component_platform_schema(  # type: ignore
                    p_config)
            except vol.Invalid as ex:
                _comp_error(ex, domain, config)
                continue
            # Not all platform components follow same pattern for platforms
            # So if p_name is None we are not going to validate platform
            # (the automation component is one of them)
            if p_name is None:
                platforms.append(p_validated)
                continue
            try:
                p_integration = await loader.async_get_integration(hass,
                                                                   p_name)
            except loader.IntegrationNotFound:
                result.add_error(
                    "Integration {} not found when trying to verify its {} "
                    "platform.".format(p_name, domain))
                continue
            try:
                platform = p_integration.get_platform(domain)
            except ImportError:
                result.add_error(
                    "Platform not found: {}.{}".format(domain, p_name))
                continue
            # Validate platform specific schema
            if hasattr(platform, 'PLATFORM_SCHEMA'):
                try:
                    p_validated = platform.PLATFORM_SCHEMA(p_validated)
                except vol.Invalid as ex:
                    _comp_error(
                        ex, '{}.{}'.format(domain, p_name), p_validated)
                    continue
            platforms.append(p_validated)
        # Remove config for current component and add validated config back in.
        for filter_comp in extract_domain_configs(config, domain):
            del config[filter_comp]
        result[domain] = platforms
    return result
|
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import serialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class RatePlanList(ListResource):
""" """
    def __init__(self, version):
        """
        Initialize the RatePlanList
        :param Version version: Version that contains the resource
        :returns: twilio.rest.wireless.v1.rate_plan.RatePlanList
        :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanList
        """
        super(RatePlanList, self).__init__(version)
        # Path Solution
        # RatePlans is a top-level resource, so there are no path parameters.
        self._solution = {}
        self._uri = '/RatePlans'.format(**self._solution)
def stream(self, limit=None, page_size=None):
"""
Streams RatePlanInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.wireless.v1.rate_plan.RatePlanInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, limit=None, page_size=None):
"""
Lists RatePlanInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.wireless.v1.rate_plan.RatePlanInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
         page_size=values.unset):
    """
    Retrieve a single page of RatePlanInstance records from the API.
    The request is executed immediately.

    :param str page_token: PageToken provided by the API
    :param int page_number: Page Number, this value is simply for client state
    :param int page_size: Number of records to return, defaults to 50

    :returns: Page of RatePlanInstance
    :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanPage
    """
    query = values.of({
        'PageToken': page_token,
        'Page': page_number,
        'PageSize': page_size,
    })

    api_response = self._version.page(
        'GET',
        self._uri,
        params=query,
    )

    return RatePlanPage(self._version, api_response, self._solution)
def get_page(self, target_url):
    """
    Retrieve a specific page of RatePlanInstance records from the API.
    The request is executed immediately against the given absolute URL.

    :param str target_url: API-generated URL for the requested results page

    :returns: Page of RatePlanInstance
    :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanPage
    """
    # target_url is absolute, so go through the domain-level request helper
    # rather than the version-relative page() method.
    api_response = self._version.domain.twilio.request('GET', target_url)

    return RatePlanPage(self._version, api_response, self._solution)
def create(self, unique_name=values.unset, friendly_name=values.unset,
           data_enabled=values.unset, data_limit=values.unset,
           data_metering=values.unset, messaging_enabled=values.unset,
           voice_enabled=values.unset, national_roaming_enabled=values.unset,
           international_roaming=values.unset,
           national_roaming_data_limit=values.unset,
           international_roaming_data_limit=values.unset):
    """
    Create a new RatePlanInstance

    :param unicode unique_name: An application-defined string that uniquely identifies the resource
    :param unicode friendly_name: A string to describe the resource
    :param bool data_enabled: Whether SIMs can use GPRS/3G/4G/LTE data connectivity
    :param unicode data_limit: The total data usage in Megabytes that the Network allows during one month on the home network
    :param unicode data_metering: The model used to meter data usage
    :param bool messaging_enabled: Whether SIMs can make, send, and receive SMS using Commands
    :param bool voice_enabled: Whether SIMs can make and receive voice calls
    :param bool national_roaming_enabled: Whether SIMs can roam on networks other than the home network in the United States
    :param unicode international_roaming: The services that SIMs capable of using GPRS/3G/4G/LTE data connectivity can use outside of the United States
    :param unicode national_roaming_data_limit: The total data usage in Megabytes that the Network allows during one month on non-home networks in the United States
    :param unicode international_roaming_data_limit: The total data usage (download and upload combined) in Megabytes that the Network allows during one month when roaming outside the United States

    :returns: Newly created RatePlanInstance
    :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
    """
    # Map python-level keyword arguments onto the API's form fields;
    # values.of() drops any parameter left at values.unset.
    form_data = values.of({
        'UniqueName': unique_name,
        'FriendlyName': friendly_name,
        'DataEnabled': data_enabled,
        'DataLimit': data_limit,
        'DataMetering': data_metering,
        'MessagingEnabled': messaging_enabled,
        'VoiceEnabled': voice_enabled,
        'NationalRoamingEnabled': national_roaming_enabled,
        'InternationalRoaming': serialize.map(international_roaming, lambda e: e),
        'NationalRoamingDataLimit': national_roaming_data_limit,
        'InternationalRoamingDataLimit': international_roaming_data_limit,
    })

    new_payload = self._version.create(
        'POST',
        self._uri,
        data=form_data,
    )

    return RatePlanInstance(self._version, new_payload)
def get(self, sid):
    """
    Constructs a RatePlanContext for a specific resource.

    :param sid: The SID that identifies the resource to fetch

    :returns: twilio.rest.wireless.v1.rate_plan.RatePlanContext
    :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanContext
    """
    # No network call here; the context fetches lazily on demand.
    context = RatePlanContext(self._version, sid=sid)
    return context
def __call__(self, sid):
    """
    Constructs a RatePlanContext; shorthand allowing ``rate_plans(sid)``.

    :param sid: The SID that identifies the resource to fetch

    :returns: twilio.rest.wireless.v1.rate_plan.RatePlanContext
    :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanContext
    """
    # Calling the list object is equivalent to .get(sid).
    context = RatePlanContext(self._version, sid=sid)
    return context
def __repr__(self):
    """
    Provide a friendly representation.

    The list resource carries no per-instance state worth printing,
    so the representation is a fixed string.

    :returns: Machine friendly representation
    :rtype: str
    """
    return '<Twilio.Wireless.V1.RatePlanList>'
class RatePlanPage(Page):
    """A single page of RatePlan results returned by the API."""

    def __init__(self, version, response, solution):
        """
        Initialize the RatePlanPage

        :param Version version: Version that contains the resource
        :param Response response: Response from the API
        :param dict solution: Path solution carried over from the list resource

        :returns: twilio.rest.wireless.v1.rate_plan.RatePlanPage
        :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanPage
        """
        super(RatePlanPage, self).__init__(version, response)

        # Path Solution
        self._solution = solution

    def get_instance(self, payload):
        """
        Build an instance of RatePlanInstance from a raw API payload.

        :param dict payload: Payload response from the API

        :returns: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
        :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
        """
        return RatePlanInstance(self._version, payload)

    def __repr__(self):
        """
        Provide a friendly representation.

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Wireless.V1.RatePlanPage>'
class RatePlanContext(InstanceContext):
    """Context for fetch/update/delete operations on one RatePlan, keyed by SID."""

    def __init__(self, version, sid):
        """
        Initialize the RatePlanContext

        :param Version version: Version that contains the resource
        :param sid: The SID that identifies the resource to fetch

        :returns: twilio.rest.wireless.v1.rate_plan.RatePlanContext
        :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanContext
        """
        super(RatePlanContext, self).__init__(version)

        # Path Solution
        self._solution = {'sid': sid}
        self._uri = '/RatePlans/{sid}'.format(**self._solution)

    def fetch(self):
        """
        Fetch a RatePlanInstance

        :returns: Fetched RatePlanInstance
        :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
        """
        fetched = self._version.fetch(
            'GET',
            self._uri,
            params=values.of({}),
        )

        return RatePlanInstance(self._version, fetched, sid=self._solution['sid'])

    def update(self, unique_name=values.unset, friendly_name=values.unset):
        """
        Update the RatePlanInstance

        :param unicode unique_name: An application-defined string that uniquely identifies the resource
        :param unicode friendly_name: A string to describe the resource

        :returns: Updated RatePlanInstance
        :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
        """
        form_data = values.of({'UniqueName': unique_name, 'FriendlyName': friendly_name})

        updated = self._version.update(
            'POST',
            self._uri,
            data=form_data,
        )

        return RatePlanInstance(self._version, updated, sid=self._solution['sid'])

    def delete(self):
        """
        Deletes the RatePlanInstance

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._version.delete('delete', self._uri)

    def __repr__(self):
        """
        Provide a friendly representation.

        :returns: Machine friendly representation
        :rtype: str
        """
        details = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Wireless.V1.RatePlanContext {}>'.format(details)
class RatePlanInstance(InstanceResource):
    """A RatePlan resource: the marshaled API payload plus proxy operations."""

    def __init__(self, version, payload, sid=None):
        """
        Initialize the RatePlanInstance

        :param Version version: Version that contains the resource
        :param dict payload: JSON payload returned by the API for this resource
        :param sid: SID of the resource; defaults to the 'sid' found in payload

        :returns: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
        :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
        """
        super(RatePlanInstance, self).__init__(version)

        # Marshaled Properties
        # Numeric limits and timestamps are deserialized; everything else is
        # kept exactly as returned by the API.
        self._properties = {
            'sid': payload.get('sid'),
            'unique_name': payload.get('unique_name'),
            'account_sid': payload.get('account_sid'),
            'friendly_name': payload.get('friendly_name'),
            'data_enabled': payload.get('data_enabled'),
            'data_metering': payload.get('data_metering'),
            'data_limit': deserialize.integer(payload.get('data_limit')),
            'messaging_enabled': payload.get('messaging_enabled'),
            'voice_enabled': payload.get('voice_enabled'),
            'national_roaming_enabled': payload.get('national_roaming_enabled'),
            'national_roaming_data_limit': deserialize.integer(payload.get('national_roaming_data_limit')),
            'international_roaming': payload.get('international_roaming'),
            'international_roaming_data_limit': deserialize.integer(payload.get('international_roaming_data_limit')),
            'date_created': deserialize.iso8601_datetime(payload.get('date_created')),
            'date_updated': deserialize.iso8601_datetime(payload.get('date_updated')),
            'url': payload.get('url'),
        }

        # Context
        # Built lazily by _proxy; sid falls back to the payload's sid when the
        # caller did not supply one explicitly.
        self._context = None
        self._solution = {'sid': sid or self._properties['sid'], }

    @property
    def _proxy(self):
        """
        Generate an instance context for the instance, the context is capable of
        performing various actions.  All instance actions are proxied to the context

        :returns: RatePlanContext for this RatePlanInstance
        :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanContext
        """
        if self._context is None:
            self._context = RatePlanContext(self._version, sid=self._solution['sid'], )
        return self._context

    @property
    def sid(self):
        """
        :returns: The unique string that identifies the resource
        :rtype: unicode
        """
        return self._properties['sid']

    @property
    def unique_name(self):
        """
        :returns: An application-defined string that uniquely identifies the resource
        :rtype: unicode
        """
        return self._properties['unique_name']

    @property
    def account_sid(self):
        """
        :returns: The SID of the Account that created the resource
        :rtype: unicode
        """
        return self._properties['account_sid']

    @property
    def friendly_name(self):
        """
        :returns: The string that you assigned to describe the resource
        :rtype: unicode
        """
        return self._properties['friendly_name']

    @property
    def data_enabled(self):
        """
        :returns: Whether SIMs can use GPRS/3G/4G/LTE data connectivity
        :rtype: bool
        """
        return self._properties['data_enabled']

    @property
    def data_metering(self):
        """
        :returns: The model used to meter data usage
        :rtype: unicode
        """
        return self._properties['data_metering']

    @property
    def data_limit(self):
        """
        :returns: The total data usage in Megabytes that the Network allows during one month on the home network
        :rtype: unicode
        """
        return self._properties['data_limit']

    @property
    def messaging_enabled(self):
        """
        :returns: Whether SIMs can make, send, and receive SMS using Commands
        :rtype: bool
        """
        return self._properties['messaging_enabled']

    @property
    def voice_enabled(self):
        """
        :returns: Whether SIMs can make and receive voice calls
        :rtype: bool
        """
        return self._properties['voice_enabled']

    @property
    def national_roaming_enabled(self):
        """
        :returns: Whether SIMs can roam on networks other than the home network in the United States
        :rtype: bool
        """
        return self._properties['national_roaming_enabled']

    @property
    def national_roaming_data_limit(self):
        """
        :returns: The total data usage in Megabytes that the Network allows during one month on non-home networks in the United States
        :rtype: unicode
        """
        return self._properties['national_roaming_data_limit']

    @property
    def international_roaming(self):
        """
        :returns: The services that SIMs capable of using GPRS/3G/4G/LTE data connectivity can use outside of the United States
        :rtype: unicode
        """
        return self._properties['international_roaming']

    @property
    def international_roaming_data_limit(self):
        """
        :returns: The total data usage (download and upload combined) in Megabytes that the Network allows during one month when roaming outside the United States
        :rtype: unicode
        """
        return self._properties['international_roaming_data_limit']

    @property
    def date_created(self):
        """
        :returns: The date when the resource was created, given as GMT in ISO 8601 format
        :rtype: datetime
        """
        return self._properties['date_created']

    @property
    def date_updated(self):
        """
        :returns: The date when the resource was last updated, given as GMT in ISO 8601 format
        :rtype: datetime
        """
        return self._properties['date_updated']

    @property
    def url(self):
        """
        :returns: The absolute URL of the resource
        :rtype: unicode
        """
        return self._properties['url']

    def fetch(self):
        """
        Fetch a RatePlanInstance

        :returns: Fetched RatePlanInstance
        :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
        """
        return self._proxy.fetch()

    def update(self, unique_name=values.unset, friendly_name=values.unset):
        """
        Update the RatePlanInstance

        :param unicode unique_name: An application-defined string that uniquely identifies the resource
        :param unicode friendly_name: A string to describe the resource

        :returns: Updated RatePlanInstance
        :rtype: twilio.rest.wireless.v1.rate_plan.RatePlanInstance
        """
        return self._proxy.update(unique_name=unique_name, friendly_name=friendly_name, )

    def delete(self):
        """
        Deletes the RatePlanInstance

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._proxy.delete()

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Wireless.V1.RatePlanInstance {}>'.format(context)
|
|
# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import errno
import multiprocessing
import re
import os
import signal
import sys
import subprocess
from color import Coloring
from command import Command, MirrorSafeCommand
import platform_utils
# git subcommands for which it is safe to inject --color when forall wraps
# their output in a pipe (see the project-header logic in Forall.Execute).
_CAN_COLOR = [
    'branch',
    'diff',
    'grep',
    'log',
]
class ForallColoring(Coloring):
    """Coloring scheme for 'repo forall' output (bold project headers)."""
    def __init__(self, config):
        Coloring.__init__(self, config, 'forall')
        # Printer used for the "project <path>/" heading lines.
        self.project = self.printer('project', attr='bold')
class Forall(Command, MirrorSafeCommand):
    common = False
    helpSummary = "Run a shell command in each project"
    # NOTE: fixed a stray trailing double-quote at the end of the second
    # usage line; it leaked into the rendered help text.
    helpUsage = """
%prog [<project>...] -c <command> [<arg>...]
%prog -r str1 [str2] ... -c <command> [<arg>...]
"""
    helpDescription = """
Executes the same shell command in each project.

The -r option allows running the command only on projects matching
regex or wildcard expression.

# Output Formatting

The -p option causes '%prog' to bind pipes to the command's stdin,
stdout and stderr streams, and pipe all output into a continuous
stream that is displayed in a single pager session. Project headings
are inserted before the output of each command is displayed. If the
command produces no output in a project, no heading is displayed.

The formatting convention used by -p is very suitable for some
types of searching, e.g. `repo forall -p -c git log -SFoo` will
print all commits that add or remove references to Foo.

The -v option causes '%prog' to display stderr messages if a
command produces output only on stderr. Normally the -p option
causes command output to be suppressed until the command produces
at least one byte of output on stdout.

# Environment

pwd is the project's working directory. If the current client is
a mirror client, then pwd is the Git repository.

REPO_PROJECT is set to the unique name of the project.

REPO_PATH is the path relative to the root of the client.

REPO_REMOTE is the name of the remote system from the manifest.

REPO_LREV is the name of the revision from the manifest, translated
to a local tracking branch. If you need to pass the manifest
revision to a locally executed git command, use REPO_LREV.

REPO_RREV is the name of the revision from the manifest, exactly
as written in the manifest.

REPO_COUNT is the total number of projects being iterated.

REPO_I is the current (1-based) iteration count. Can be used in
conjunction with REPO_COUNT to add a simple progress indicator to your
command.

REPO__* are any extra environment variables, specified by the
"annotation" element under any project element. This can be useful
for differentiating trees based on user-specific criteria, or simply
annotating tree details.

shell positional arguments ($1, $2, .., $#) are set to any arguments
following <command>.

Example: to list projects:

  %prog -c 'echo $REPO_PROJECT'

Notice that $REPO_PROJECT is quoted to ensure it is expanded in
the context of running <command> instead of in the calling shell.

Unless -p is used, stdin, stdout, stderr are inherited from the
terminal and are not redirected.

If -e is used, when a command exits unsuccessfully, '%prog' will abort
without iterating through the remaining projects.
"""

    def _Options(self, p):
        """Register forall's command line options on option parser |p|."""
        def cmd(option, opt_str, value, parser):
            # Greedily consume every remaining argument so that flags after
            # -c are passed to the wrapped command, not parsed by repo.
            setattr(parser.values, option.dest, list(parser.rargs))
            while parser.rargs:
                del parser.rargs[0]
        p.add_option('-r', '--regex',
                     dest='regex', action='store_true',
                     help="Execute the command only on projects matching regex or wildcard expression")
        p.add_option('-i', '--inverse-regex',
                     dest='inverse_regex', action='store_true',
                     help="Execute the command only on projects not matching regex or "
                          "wildcard expression")
        p.add_option('-g', '--groups',
                     dest='groups',
                     help="Execute the command only on projects matching the specified groups")
        p.add_option('-c', '--command',
                     help='Command (and arguments) to execute',
                     dest='command',
                     action='callback',
                     callback=cmd)
        p.add_option('-e', '--abort-on-errors',
                     dest='abort_on_errors', action='store_true',
                     help='Abort if a command exits unsuccessfully')
        p.add_option('--ignore-missing', action='store_true',
                     help='Silently skip & do not exit non-zero due missing '
                          'checkouts')

        g = p.add_option_group('Output')
        g.add_option('-p',
                     dest='project_header', action='store_true',
                     help='Show project headers before output')
        g.add_option('-v', '--verbose',
                     dest='verbose', action='store_true',
                     help='Show command error messages')
        g.add_option('-j', '--jobs',
                     dest='jobs', action='store', type='int', default=1,
                     help='number of commands to execute simultaneously')

    def WantPager(self, opt):
        """Page output only for single-job runs with project headers."""
        return opt.project_header and opt.jobs == 1

    def _SerializeProject(self, project):
        """ Serialize a project._GitGetByExec instance.

        project._GitGetByExec is not pickle-able. Instead of trying to pass it
        around between processes, make a dict ourselves containing only the
        attributes that we need.
        """
        if not self.manifest.IsMirror:
            lrev = project.GetRevisionId()
        else:
            lrev = None
        return {
            'name': project.name,
            'relpath': project.relpath,
            'remote_name': project.remote.name,
            'lrev': lrev,
            'rrev': project.revisionExpr,
            'annotations': dict((a.name, a.value) for a in project.annotations),
            'gitdir': project.gitdir,
            'worktree': project.worktree,
            'upstream': project.upstream,
            'dest_branch': project.dest_branch,
        }

    def ValidateOptions(self, opt, args):
        """A command is mandatory; show usage when -c was not supplied."""
        if not opt.command:
            self.Usage()

    def Execute(self, opt, args):
        """Run the user command in every selected project, in parallel."""
        cmd = [opt.command[0]]

        # Use the shell only when the command name contains characters a
        # plain exec could misinterpret.
        shell = True
        if re.compile(r'^[a-z0-9A-Z_/\.-]+$').match(cmd[0]):
            shell = False

        if shell:
            # The duplicated first word becomes $0 of the shell invocation,
            # so user arguments start at $1 as documented.
            cmd.append(cmd[0])
        cmd.extend(opt.command[1:])

        if opt.project_header \
                and not shell \
                and cmd[0] == 'git':
            # If this is a direct git command that can enable colorized
            # output and the user prefers coloring, add --color into the
            # command line because we are going to wrap the command into
            # a pipe and git won't know coloring should activate.
            #
            for cn in cmd[1:]:
                if not cn.startswith('-'):
                    break
            else:
                cn = None
            if cn and cn in _CAN_COLOR:
                class ColorCmd(Coloring):
                    def __init__(self, config, cmd):
                        Coloring.__init__(self, config, cmd)
                if ColorCmd(self.manifest.manifestProject.config, cn).is_on:
                    cmd.insert(cmd.index(cn) + 1, '--color')

        mirror = self.manifest.IsMirror
        rc = 0

        # Honor a smart-sync override manifest if one is present.
        smart_sync_manifest_name = "smart_sync_override.xml"
        smart_sync_manifest_path = os.path.join(
            self.manifest.manifestProject.worktree, smart_sync_manifest_name)

        if os.path.isfile(smart_sync_manifest_path):
            self.manifest.Override(smart_sync_manifest_path)

        if opt.regex:
            projects = self.FindProjects(args)
        elif opt.inverse_regex:
            projects = self.FindProjects(args, inverse=True)
        else:
            projects = self.GetProjects(args, groups=opt.groups)

        os.environ['REPO_COUNT'] = str(len(projects))

        pool = multiprocessing.Pool(opt.jobs, InitWorker)
        try:
            config = self.manifest.manifestProject.config
            results_it = pool.imap(
                DoWorkWrapper,
                self.ProjectArgs(projects, mirror, opt, cmd, shell, config))
            pool.close()
            for r in results_it:
                rc = rc or r
                if r != 0 and opt.abort_on_errors:
                    raise Exception('Aborting due to previous error')
        except (KeyboardInterrupt, WorkerKeyboardInterrupt):
            # Catch KeyboardInterrupt raised inside and outside of workers
            print('Interrupted - terminating the pool')
            pool.terminate()
            rc = rc or errno.EINTR
        except Exception as e:
            # Catch any other exceptions raised
            print('Got an error, terminating the pool: %s: %s' %
                  (type(e).__name__, e),
                  file=sys.stderr)
            pool.terminate()
            rc = rc or getattr(e, 'errno', 1)
        finally:
            pool.join()
        if rc != 0:
            sys.exit(rc)

    def ProjectArgs(self, projects, mirror, opt, cmd, shell, config):
        """Yield per-project argument lists consumed by DoWorkWrapper."""
        for cnt, p in enumerate(projects):
            try:
                project = self._SerializeProject(p)
            except Exception as e:
                print('Project list error on project %s: %s: %s' %
                      (p.name, type(e).__name__, e),
                      file=sys.stderr)
                return
            except KeyboardInterrupt:
                print('Project list interrupted',
                      file=sys.stderr)
                return
            yield [mirror, opt, cmd, shell, cnt, config, project]
class WorkerKeyboardInterrupt(Exception):
    """ Keyboard interrupt exception for worker processes.

    Raised instead of KeyboardInterrupt inside pool workers (see
    DoWorkWrapper) so the interrupt propagates to the parent as a normal
    Exception without flooding the console with worker tracebacks.
    """
    pass
def InitWorker():
    """Pool-worker initializer: ignore SIGINT so only the parent handles it."""
    signal.signal(signal.SIGINT, signal.SIG_IGN)
def DoWorkWrapper(args):
    """ A wrapper around the DoWork() method.

    Catch the KeyboardInterrupt exceptions here and re-raise them as a
    different, ``Exception``-based exception to stop it flooding the console
    with stacktraces and making the parent hang indefinitely.
    """
    # The serialized project dict is the last element; everything before it
    # is positional arguments for DoWork.
    project = args[-1]
    try:
        return DoWork(project, *args[:-1])
    except KeyboardInterrupt:
        print('%s: Worker interrupted' % project['name'])
        raise WorkerKeyboardInterrupt()
def DoWork(project, mirror, opt, cmd, shell, cnt, config):
    """Run |cmd| for one serialized |project|; return the command's exit code.

    Executed inside a pool worker. |project| is the dict built by
    Forall._SerializeProject; |cnt| is the 0-based iteration index used for
    REPO_I and for spacing between project headers.
    """
    env = os.environ.copy()

    def setenv(name, val):
        # Environment values must be strings; map None to ''.
        if val is None:
            val = ''
        env[name] = val

    setenv('REPO_PROJECT', project['name'])
    setenv('REPO_PATH', project['relpath'])
    setenv('REPO_REMOTE', project['remote_name'])
    setenv('REPO_LREV', project['lrev'])
    setenv('REPO_RREV', project['rrev'])
    setenv('REPO_UPSTREAM', project['upstream'])
    setenv('REPO_DEST_BRANCH', project['dest_branch'])
    setenv('REPO_I', str(cnt + 1))
    for name in project['annotations']:
        setenv("REPO__%s" % (name), project['annotations'][name])

    if mirror:
        # Mirror clients have no worktree; run inside the bare repository.
        setenv('GIT_DIR', project['gitdir'])
        cwd = project['gitdir']
    else:
        cwd = project['worktree']

    if not os.path.exists(cwd):
        # Allow the user to silently ignore missing checkouts so they can run on
        # partial checkouts (good for infra recovery tools).
        if opt.ignore_missing:
            return 0
        if ((opt.project_header and opt.verbose)
                or not opt.project_header):
            print('skipping %s/' % project['relpath'], file=sys.stderr)
        return 1

    if opt.project_header:
        # Capture all three streams so output can be re-framed per project.
        stdin = subprocess.PIPE
        stdout = subprocess.PIPE
        stderr = subprocess.PIPE
    else:
        stdin = None
        stdout = None
        stderr = None

    p = subprocess.Popen(cmd,
                         cwd=cwd,
                         shell=shell,
                         env=env,
                         stdin=stdin,
                         stdout=stdout,
                         stderr=stderr)

    if opt.project_header:
        out = ForallColoring(config)
        out.redirect(sys.stdout)
        empty = True
        errbuf = ''

        p.stdin.close()
        s_in = platform_utils.FileDescriptorStreams.create()
        s_in.add(p.stdout, sys.stdout, 'stdout')
        s_in.add(p.stderr, sys.stderr, 'stderr')

        # Multiplex the child's stdout/stderr; the project header is printed
        # lazily, only once the command produces its first byte of output.
        while not s_in.is_done:
            in_ready = s_in.select()
            for s in in_ready:
                buf = s.read().decode()
                if not buf:
                    # EOF on this stream.
                    s_in.remove(s)
                    s.close()
                    continue

                if not opt.verbose:
                    # Without -v, stderr is buffered and only flushed after
                    # the header is emitted (i.e. once stdout produced data).
                    if s.std_name == 'stderr':
                        errbuf += buf
                        continue

                if empty and out:
                    # First output from this project: print the header.
                    if not cnt == 0:
                        out.nl()

                    if mirror:
                        project_header_path = project['name']
                    else:
                        project_header_path = project['relpath']
                    out.project('project %s/', project_header_path)
                    out.nl()
                    out.flush()
                    if errbuf:
                        sys.stderr.write(errbuf)
                        sys.stderr.flush()
                        errbuf = ''
                    empty = False

                s.dest.write(buf)
                s.dest.flush()

    r = p.wait()
    return r
|
|
from __future__ import division, print_function
from functools import partial
from itertools import product
import numpy as np
import scipy.sparse as sp
from sklearn.datasets import make_multilabel_classification
from sklearn.preprocessing import LabelBinarizer
from sklearn.utils.multiclass import type_of_target
from sklearn.utils.validation import check_random_state
from sklearn.utils import shuffle
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_not_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import ignore_warnings
from sklearn.metrics import accuracy_score
from sklearn.metrics import average_precision_score
from sklearn.metrics import brier_score_loss
from sklearn.metrics import cohen_kappa_score
from sklearn.metrics import confusion_matrix
from sklearn.metrics import coverage_error
from sklearn.metrics import explained_variance_score
from sklearn.metrics import f1_score
from sklearn.metrics import fbeta_score
from sklearn.metrics import hamming_loss
from sklearn.metrics import hinge_loss
from sklearn.metrics import jaccard_similarity_score
from sklearn.metrics import label_ranking_average_precision_score
from sklearn.metrics import label_ranking_loss
from sklearn.metrics import log_loss
from sklearn.metrics import matthews_corrcoef
from sklearn.metrics import mean_absolute_error
from sklearn.metrics import mean_squared_error
from sklearn.metrics import median_absolute_error
from sklearn.metrics import precision_score
from sklearn.metrics import r2_score
from sklearn.metrics import recall_score
from sklearn.metrics import roc_auc_score
from sklearn.metrics import zero_one_loss
# TODO Curves are currently not covered by invariance tests
# from sklearn.metrics import precision_recall_curve
# from sklearn.metrics import roc_curve
from sklearn.metrics.base import _average_binary_score
# Note toward developers about metric testing
# -------------------------------------------
# It is often possible to write one general test for several metrics:
#
# - invariance properties, e.g. invariance to sample order
# - common behavior for an argument, e.g. the "normalize" with value True
# will return the mean of the metrics and with value False will return
# the sum of the metrics.
#
# In order to improve the overall metric testing, it is a good idea to write
# first a specific test for the given metric and then add a general test for
# all metrics that have the same behavior.
#
# Two types of data structures are used in order to implement this system:
# dictionaries of metrics and lists of metrics with common properties.
#
# Dictionaries of metrics
# ------------------------
# The goal of having those dictionaries is to have an easy way to call a
# particular metric and associate a name to each function:
#
# - REGRESSION_METRICS: all regression metrics.
# - CLASSIFICATION_METRICS: all classification metrics
# which compare a ground truth and the estimated targets as returned by a
# classifier.
# - THRESHOLDED_METRICS: all classification metrics which
# compare a ground truth and a score, e.g. estimated probabilities or
# decision function (format might vary)
#
# Those dictionaries will be used to test systematically some invariance
# properties, e.g. invariance toward several input layout.
#
# Name -> callable registries; names are the keys used throughout the
# generic invariance tests below.
REGRESSION_METRICS = {
    "mean_absolute_error": mean_absolute_error,
    "mean_squared_error": mean_squared_error,
    "median_absolute_error": median_absolute_error,
    "explained_variance_score": explained_variance_score,
    "r2_score": r2_score,
}

CLASSIFICATION_METRICS = {
    "accuracy_score": accuracy_score,
    "unnormalized_accuracy_score": partial(accuracy_score, normalize=False),
    "confusion_matrix": confusion_matrix,
    "hamming_loss": hamming_loss,
    "jaccard_similarity_score": jaccard_similarity_score,
    "unnormalized_jaccard_similarity_score":
    partial(jaccard_similarity_score, normalize=False),
    "zero_one_loss": zero_one_loss,
    "unnormalized_zero_one_loss": partial(zero_one_loss, normalize=False),

    # These are needed to test averaging
    "precision_score": precision_score,
    "recall_score": recall_score,
    "f1_score": f1_score,
    "f2_score": partial(fbeta_score, beta=2),
    "f0.5_score": partial(fbeta_score, beta=0.5),
    "matthews_corrcoef_score": matthews_corrcoef,

    "weighted_f0.5_score": partial(fbeta_score, average="weighted", beta=0.5),
    "weighted_f1_score": partial(f1_score, average="weighted"),
    "weighted_f2_score": partial(fbeta_score, average="weighted", beta=2),
    "weighted_precision_score": partial(precision_score, average="weighted"),
    "weighted_recall_score": partial(recall_score, average="weighted"),

    "micro_f0.5_score": partial(fbeta_score, average="micro", beta=0.5),
    "micro_f1_score": partial(f1_score, average="micro"),
    "micro_f2_score": partial(fbeta_score, average="micro", beta=2),
    "micro_precision_score": partial(precision_score, average="micro"),
    "micro_recall_score": partial(recall_score, average="micro"),

    "macro_f0.5_score": partial(fbeta_score, average="macro", beta=0.5),
    "macro_f1_score": partial(f1_score, average="macro"),
    "macro_f2_score": partial(fbeta_score, average="macro", beta=2),
    "macro_precision_score": partial(precision_score, average="macro"),
    "macro_recall_score": partial(recall_score, average="macro"),

    "samples_f0.5_score": partial(fbeta_score, average="samples", beta=0.5),
    "samples_f1_score": partial(f1_score, average="samples"),
    "samples_f2_score": partial(fbeta_score, average="samples", beta=2),
    "samples_precision_score": partial(precision_score, average="samples"),
    "samples_recall_score": partial(recall_score, average="samples"),

    "cohen_kappa_score": cohen_kappa_score,
}

THRESHOLDED_METRICS = {
    "coverage_error": coverage_error,
    "label_ranking_loss": label_ranking_loss,
    "log_loss": log_loss,
    "unnormalized_log_loss": partial(log_loss, normalize=False),

    "hinge_loss": hinge_loss,

    "brier_score_loss": brier_score_loss,

    "roc_auc_score": roc_auc_score,
    "weighted_roc_auc": partial(roc_auc_score, average="weighted"),
    "samples_roc_auc": partial(roc_auc_score, average="samples"),
    "micro_roc_auc": partial(roc_auc_score, average="micro"),
    "macro_roc_auc": partial(roc_auc_score, average="macro"),

    "average_precision_score": average_precision_score,
    "weighted_average_precision_score":
    partial(average_precision_score, average="weighted"),
    "samples_average_precision_score":
    partial(average_precision_score, average="samples"),
    "micro_average_precision_score":
    partial(average_precision_score, average="micro"),
    "macro_average_precision_score":
    partial(average_precision_score, average="macro"),
    "label_ranking_average_precision_score":
    label_ranking_average_precision_score,
}

# Union of the three registries above; update order means a duplicated name
# would resolve to the classification/regression entry last applied.
ALL_METRICS = dict()
ALL_METRICS.update(THRESHOLDED_METRICS)
ALL_METRICS.update(CLASSIFICATION_METRICS)
ALL_METRICS.update(REGRESSION_METRICS)

# Lists of metrics with common properties
# ---------------------------------------
# Lists of metrics with common properties are used to test systematically some
# functionalities and invariance, e.g. SYMMETRIC_METRICS lists all metrics that
# are symmetric with respect to their input argument y_true and y_pred.
#
# When you add a new metric or functionality, check if a general test
# is already written.

# Metric undefined with "binary" or "multiclass" input
METRIC_UNDEFINED_MULTICLASS = [
    "samples_f0.5_score", "samples_f1_score", "samples_f2_score",
    "samples_precision_score", "samples_recall_score",

    # Those metrics don't support multiclass outputs
    "average_precision_score", "weighted_average_precision_score",
    "micro_average_precision_score", "macro_average_precision_score",
    "samples_average_precision_score",

    "label_ranking_average_precision_score",

    "roc_auc_score", "micro_roc_auc", "weighted_roc_auc",
    "macro_roc_auc", "samples_roc_auc",

    "coverage_error",

    "brier_score_loss",

    "label_ranking_loss",
]

# Metrics with an "average" argument
METRICS_WITH_AVERAGING = [
    "precision_score", "recall_score", "f1_score", "f2_score", "f0.5_score"
]

# Threshold-based metrics with an "average" argument
THRESHOLDED_METRICS_WITH_AVERAGING = [
    "roc_auc_score", "average_precision_score",
]

# Metrics with a "pos_label" argument
METRICS_WITH_POS_LABEL = [
    "roc_curve",

    "brier_score_loss",

    "precision_score", "recall_score", "f1_score", "f2_score", "f0.5_score",

    # pos_label support deprecated; to be removed in 0.18:
    "weighted_f0.5_score", "weighted_f1_score", "weighted_f2_score",
    "weighted_precision_score", "weighted_recall_score",

    "micro_f0.5_score", "micro_f1_score", "micro_f2_score",
    "micro_precision_score", "micro_recall_score",

    "macro_f0.5_score", "macro_f1_score", "macro_f2_score",
    "macro_precision_score", "macro_recall_score",
]

# Metrics with a "labels" argument
# TODO: Handle multi_class metrics that has a labels argument as well as a
# decision function argument. e.g hinge_loss
METRICS_WITH_LABELS = [
    "confusion_matrix",

    "precision_score", "recall_score", "f1_score", "f2_score", "f0.5_score",

    "weighted_f0.5_score", "weighted_f1_score", "weighted_f2_score",
    "weighted_precision_score", "weighted_recall_score",

    "micro_f0.5_score", "micro_f1_score", "micro_f2_score",
    "micro_precision_score", "micro_recall_score",

    "macro_f0.5_score", "macro_f1_score", "macro_f2_score",
    "macro_precision_score", "macro_recall_score",

    "cohen_kappa_score",
]

# Metrics with a "normalize" option
METRICS_WITH_NORMALIZE_OPTION = [
    "accuracy_score",
    "jaccard_similarity_score",
    "zero_one_loss",
]

# Threshold-based metrics with "multilabel-indicator" format support
THRESHOLDED_MULTILABEL_METRICS = [
    "log_loss",
    "unnormalized_log_loss",

    "roc_auc_score", "weighted_roc_auc", "samples_roc_auc",
    "micro_roc_auc", "macro_roc_auc",

    "average_precision_score", "weighted_average_precision_score",
    "samples_average_precision_score", "micro_average_precision_score",
    "macro_average_precision_score",

    "coverage_error", "label_ranking_loss",
]

# Classification metrics with "multilabel-indicator" format
MULTILABELS_METRICS = [
    "accuracy_score", "unnormalized_accuracy_score",
    "hamming_loss",
    "jaccard_similarity_score", "unnormalized_jaccard_similarity_score",
    "zero_one_loss", "unnormalized_zero_one_loss",

    "precision_score", "recall_score", "f1_score", "f2_score", "f0.5_score",

    "weighted_f0.5_score", "weighted_f1_score", "weighted_f2_score",
    "weighted_precision_score", "weighted_recall_score",

    "micro_f0.5_score", "micro_f1_score", "micro_f2_score",
    "micro_precision_score", "micro_recall_score",

    "macro_f0.5_score", "macro_f1_score", "macro_f2_score",
    "macro_precision_score", "macro_recall_score",

    "samples_f0.5_score", "samples_f1_score", "samples_f2_score",
    "samples_precision_score", "samples_recall_score",
]

# Regression metrics with "multioutput-continuous" format support
MULTIOUTPUT_METRICS = [
    "mean_absolute_error", "mean_squared_error", "r2_score",
    "explained_variance_score"
]

# Symmetric with respect to their input arguments y_true and y_pred
# metric(y_true, y_pred) == metric(y_pred, y_true).
SYMMETRIC_METRICS = [
    "accuracy_score", "unnormalized_accuracy_score",
    "hamming_loss",
    "jaccard_similarity_score", "unnormalized_jaccard_similarity_score",
    "zero_one_loss", "unnormalized_zero_one_loss",

    "f1_score", "weighted_f1_score", "micro_f1_score", "macro_f1_score",

    "matthews_corrcoef_score", "mean_absolute_error", "mean_squared_error",
    "median_absolute_error",

    "cohen_kappa_score",
]

# Asymmetric with respect to their input arguments y_true and y_pred
# metric(y_true, y_pred) != metric(y_pred, y_true).
NOT_SYMMETRIC_METRICS = [
    "explained_variance_score",
    "r2_score",
    "confusion_matrix",

    "precision_score", "recall_score", "f2_score", "f0.5_score",

    "weighted_f0.5_score", "weighted_f2_score", "weighted_precision_score",
    "weighted_recall_score",

    "micro_f0.5_score", "micro_f2_score", "micro_precision_score",
    "micro_recall_score",

    "macro_f0.5_score", "macro_f2_score", "macro_precision_score",
    "macro_recall_score", "log_loss", "hinge_loss"
]

# No Sample weight support
METRICS_WITHOUT_SAMPLE_WEIGHT = [
    "cohen_kappa_score",
    "confusion_matrix",
    "hamming_loss",
    "matthews_corrcoef_score",
    "median_absolute_error",
]
@ignore_warnings
def test_symmetry():
    """Check the recorded symmetry of every score and loss function."""
    rng = check_random_state(0)
    y_true = rng.randint(0, 2, size=(20, ))
    y_pred = rng.randint(0, 2, size=(20, ))
    # Every metric must be recorded in exactly one symmetry category.
    covered = set(SYMMETRIC_METRICS).union(NOT_SYMMETRIC_METRICS,
                                           THRESHOLDED_METRICS,
                                           METRIC_UNDEFINED_MULTICLASS)
    assert_equal(covered, set(ALL_METRICS))
    overlap = set(SYMMETRIC_METRICS).intersection(set(NOT_SYMMETRIC_METRICS))
    assert_equal(overlap, set([]))
    # Swapping the arguments must not change a symmetric metric...
    for name in SYMMETRIC_METRICS:
        metric_fn = ALL_METRICS[name]
        assert_almost_equal(metric_fn(y_true, y_pred),
                            metric_fn(y_pred, y_true),
                            err_msg="%s is not symmetric" % name)
    # ... and must change an asymmetric one for at least one entry.
    for name in NOT_SYMMETRIC_METRICS:
        metric_fn = ALL_METRICS[name]
        assert_true(np.any(metric_fn(y_true, y_pred) !=
                           metric_fn(y_pred, y_true)),
                    msg="%s seems to be symmetric" % name)
@ignore_warnings
def test_sample_order_invariance():
    """Jointly shuffling the samples must not change any metric value."""
    rng = check_random_state(0)
    y_true = rng.randint(0, 2, size=(20, ))
    y_pred = rng.randint(0, 2, size=(20, ))
    y_true_shuffle, y_pred_shuffle = shuffle(y_true, y_pred, random_state=0)
    for name, metric in ALL_METRICS.items():
        # Metrics undefined in this setting are exercised elsewhere.
        if name not in METRIC_UNDEFINED_MULTICLASS:
            assert_almost_equal(metric(y_true, y_pred),
                                metric(y_true_shuffle, y_pred_shuffle),
                                err_msg="%s is not sample order invariant"
                                        % name)
@ignore_warnings
def test_sample_order_invariance_multilabel_and_multioutput():
    """Shuffling samples jointly must not change multilabel/multioutput scores."""
    random_state = check_random_state(0)
    # Generate some data
    y_true = random_state.randint(0, 2, size=(20, 25))
    y_pred = random_state.randint(0, 2, size=(20, 25))
    y_score = random_state.normal(size=y_true.shape)
    # All three arrays are shuffled with the same permutation.
    y_true_shuffle, y_pred_shuffle, y_score_shuffle = shuffle(y_true,
                                                              y_pred,
                                                              y_score,
                                                              random_state=0)
    for name in MULTILABELS_METRICS:
        metric = ALL_METRICS[name]
        assert_almost_equal(metric(y_true, y_pred),
                            metric(y_true_shuffle, y_pred_shuffle),
                            err_msg="%s is not sample order invariant"
                                    % name)
    # Thresholded metrics consume continuous scores, not hard predictions.
    for name in THRESHOLDED_MULTILABEL_METRICS:
        metric = ALL_METRICS[name]
        assert_almost_equal(metric(y_true, y_score),
                            metric(y_true_shuffle, y_score_shuffle),
                            err_msg="%s is not sample order invariant"
                                    % name)
    # Multioutput regression metrics are checked with both continuous
    # scores and binary predictions as the second argument.
    for name in MULTIOUTPUT_METRICS:
        metric = ALL_METRICS[name]
        assert_almost_equal(metric(y_true, y_score),
                            metric(y_true_shuffle, y_score_shuffle),
                            err_msg="%s is not sample order invariant"
                                    % name)
        assert_almost_equal(metric(y_true, y_pred),
                            metric(y_true_shuffle, y_pred_shuffle),
                            err_msg="%s is not sample order invariant"
                                    % name)
@ignore_warnings
def test_format_invariance_with_1d_vectors():
    """Lists, 1d arrays and column arrays must all give the same score.

    Row vectors, however, are ambiguous (they look like multilabel /
    multioutput data) and mixing them with 1d representations must raise.
    """
    random_state = check_random_state(0)
    y1 = random_state.randint(0, 2, size=(20, ))
    y2 = random_state.randint(0, 2, size=(20, ))
    y1_list = list(y1)
    y2_list = list(y2)
    y1_1d, y2_1d = np.array(y1), np.array(y2)
    assert_equal(y1_1d.ndim, 1)
    assert_equal(y2_1d.ndim, 1)
    y1_column = np.reshape(y1_1d, (-1, 1))
    y2_column = np.reshape(y2_1d, (-1, 1))
    y1_row = np.reshape(y1_1d, (1, -1))
    y2_row = np.reshape(y2_1d, (1, -1))
    for name, metric in ALL_METRICS.items():
        if name in METRIC_UNDEFINED_MULTICLASS:
            continue
        # The 1d/1d score is the reference value for every other format.
        measure = metric(y1, y2)
        assert_almost_equal(metric(y1_list, y2_list), measure,
                            err_msg="%s is not representation invariant "
                                    "with list" % name)
        assert_almost_equal(metric(y1_1d, y2_1d), measure,
                            err_msg="%s is not representation invariant "
                                    "with np-array-1d" % name)
        assert_almost_equal(metric(y1_column, y2_column), measure,
                            err_msg="%s is not representation invariant "
                                    "with np-array-column" % name)
        # Mix format support
        assert_almost_equal(metric(y1_1d, y2_list), measure,
                            err_msg="%s is not representation invariant "
                                    "with mix np-array-1d and list" % name)
        assert_almost_equal(metric(y1_list, y2_1d), measure,
                            err_msg="%s is not representation invariant "
                                    "with mix np-array-1d and list" % name)
        assert_almost_equal(metric(y1_1d, y2_column), measure,
                            err_msg="%s is not representation invariant "
                                    "with mix np-array-1d and np-array-column"
                                    % name)
        assert_almost_equal(metric(y1_column, y2_1d), measure,
                            err_msg="%s is not representation invariant "
                                    "with mix np-array-1d and np-array-column"
                                    % name)
        assert_almost_equal(metric(y1_list, y2_column), measure,
                            err_msg="%s is not representation invariant "
                                    "with mix list and np-array-column"
                                    % name)
        assert_almost_equal(metric(y1_column, y2_list), measure,
                            err_msg="%s is not representation invariant "
                                    "with mix list and np-array-column"
                                    % name)
        # These mix representations aren't allowed
        assert_raises(ValueError, metric, y1_1d, y2_row)
        assert_raises(ValueError, metric, y1_row, y2_1d)
        assert_raises(ValueError, metric, y1_list, y2_row)
        assert_raises(ValueError, metric, y1_row, y2_list)
        assert_raises(ValueError, metric, y1_column, y2_row)
        assert_raises(ValueError, metric, y1_row, y2_column)
        # NB: We do not test for y1_row, y2_row as these may be
        # interpreted as multilabel or multioutput data.
        if (name not in (MULTIOUTPUT_METRICS + THRESHOLDED_MULTILABEL_METRICS +
                         MULTILABELS_METRICS)):
            assert_raises(ValueError, metric, y1_row, y2_row)
@ignore_warnings
def test_invariance_string_vs_numbers_labels():
    # Ensure that classification metrics with string labels
    # give the same result as with the corresponding integer labels.
    random_state = check_random_state(0)
    y1 = random_state.randint(0, 2, size=(20, ))
    y2 = random_state.randint(0, 2, size=(20, ))
    # Map 0 -> "eggs", 1 -> "spam" via fancy indexing.
    y1_str = np.array(["eggs", "spam"])[y1]
    y2_str = np.array(["eggs", "spam"])[y2]
    pos_label_str = "spam"
    labels_str = ["eggs", "spam"]
    for name, metric in CLASSIFICATION_METRICS.items():
        if name in METRIC_UNDEFINED_MULTICLASS:
            continue
        measure_with_number = metric(y1, y2)
        # Ugly, but handle case with a pos_label and label
        metric_str = metric
        if name in METRICS_WITH_POS_LABEL:
            metric_str = partial(metric_str, pos_label=pos_label_str)
        measure_with_str = metric_str(y1_str, y2_str)
        assert_array_equal(measure_with_number, measure_with_str,
                           err_msg="{0} failed string vs number invariance "
                                   "test".format(name))
        # Also check object-dtype string arrays.
        measure_with_strobj = metric_str(y1_str.astype('O'),
                                         y2_str.astype('O'))
        assert_array_equal(measure_with_number, measure_with_strobj,
                           err_msg="{0} failed string object vs number "
                                   "invariance test".format(name))
        if name in METRICS_WITH_LABELS:
            metric_str = partial(metric_str, labels=labels_str)
            measure_with_str = metric_str(y1_str, y2_str)
            assert_array_equal(measure_with_number, measure_with_str,
                               err_msg="{0} failed string vs number "
                                       "invariance test".format(name))
            measure_with_strobj = metric_str(y1_str.astype('O'),
                                             y2_str.astype('O'))
            assert_array_equal(measure_with_number, measure_with_strobj,
                               err_msg="{0} failed string vs number "
                                       "invariance test".format(name))
    # Thresholded metrics: only y_true may be strings (y2 stays numeric).
    for name, metric in THRESHOLDED_METRICS.items():
        if name in ("log_loss", "hinge_loss", "unnormalized_log_loss",
                    "brier_score_loss"):
            # Ugly, but handle case with a pos_label and label
            metric_str = metric
            if name in METRICS_WITH_POS_LABEL:
                metric_str = partial(metric_str, pos_label=pos_label_str)
            measure_with_number = metric(y1, y2)
            measure_with_str = metric_str(y1_str, y2)
            assert_array_equal(measure_with_number, measure_with_str,
                               err_msg="{0} failed string vs number "
                                       "invariance test".format(name))
            measure_with_strobj = metric(y1_str.astype('O'), y2)
            assert_array_equal(measure_with_number, measure_with_strobj,
                               err_msg="{0} failed string object vs number "
                                       "invariance test".format(name))
        else:
            # TODO those metrics doesn't support string label yet
            assert_raises(ValueError, metric, y1_str, y2)
            assert_raises(ValueError, metric, y1_str.astype('O'), y2)
@ignore_warnings
def check_single_sample(name):
    """Smoke-test metric *name* on every single-sample label combination.

    Non-regression check: scoring a single sample matters for
    leave-one-out cross validation; score functions that formerly called
    np.squeeze would turn a size-1 array into a 0-d array (!).
    """
    metric = ALL_METRICS[name]
    # Success is simply the absence of an exception.
    for true_label in [0, 1]:
        for pred_label in [0, 1]:
            metric([true_label], [pred_label])
@ignore_warnings
def check_single_sample_multioutput(name):
    """Smoke-test metric *name* on single-sample, two-output arrays."""
    metric = ALL_METRICS[name]
    # Every 0/1 combination for a single (1, 2)-shaped sample pair.
    for row_true in product([0, 1], repeat=2):
        for row_pred in product([0, 1], repeat=2):
            metric(np.array([row_true]), np.array([row_pred]))
def test_single_sample():
    """Yield per-metric single-sample smoke tests (nose generator)."""
    for name in ALL_METRICS:
        # Thresholded metrics and metrics undefined for multiclass data
        # are not always defined with one sample; skip them here.
        if name in METRIC_UNDEFINED_MULTICLASS or name in THRESHOLDED_METRICS:
            continue
        yield check_single_sample, name
    for name in MULTIOUTPUT_METRICS + MULTILABELS_METRICS:
        yield check_single_sample_multioutput, name
def test_multioutput_number_of_output_differ():
    """A column-count mismatch between y_true and y_pred must raise."""
    y_true = np.array([[1, 0, 0, 1], [0, 1, 1, 1], [1, 1, 0, 1]])
    y_pred = np.array([[0, 0], [1, 0], [0, 0]])
    for name in MULTIOUTPUT_METRICS:
        assert_raises(ValueError, ALL_METRICS[name], y_true, y_pred)
def test_multioutput_regression_invariance_to_dimension_shuffling():
    """Permuting output columns must leave multioutput scores unchanged."""
    rng = check_random_state(0)
    y_true = rng.uniform(0, 2, size=(20, 5))
    y_pred = rng.uniform(0, 2, size=(20, 5))
    for name in MULTIOUTPUT_METRICS:
        metric = ALL_METRICS[name]
        expected = metric(y_true, y_pred)
        # Apply the same column permutation to both arrays, three times.
        for _ in range(3):
            perm = rng.permutation(y_true.shape[1])
            assert_almost_equal(metric(y_true[:, perm], y_pred[:, perm]),
                                expected,
                                err_msg="%s is not dimension shuffling "
                                        "invariant" % name)
@ignore_warnings
def test_multilabel_representation_invariance():
    """Dense and sparse indicator formats must give identical scores."""
    # Generate some data
    n_classes = 4
    n_samples = 50
    _, y1 = make_multilabel_classification(n_features=1, n_classes=n_classes,
                                           random_state=0, n_samples=n_samples,
                                           allow_unlabeled=True)
    _, y2 = make_multilabel_classification(n_features=1, n_classes=n_classes,
                                           random_state=1, n_samples=n_samples,
                                           allow_unlabeled=True)
    # To make sure at least one empty label is present
    # NOTE(review): if y1/y2 are ndarrays this "+=" is an element-wise
    # addition of zeros (a no-op) rather than appending an all-zero row;
    # presumably the latter was intended — confirm against the
    # make_multilabel_classification return type in this version.
    y1 += [0]*n_classes
    y2 += [0]*n_classes
    y1_sparse_indicator = sp.coo_matrix(y1)
    y2_sparse_indicator = sp.coo_matrix(y2)
    for name in MULTILABELS_METRICS:
        metric = ALL_METRICS[name]
        # XXX cruel hack to work with partial functions
        if isinstance(metric, partial):
            metric.__module__ = 'tmp'
            metric.__name__ = name
        measure = metric(y1, y2)
        # Check representation invariance
        assert_almost_equal(metric(y1_sparse_indicator,
                                   y2_sparse_indicator),
                            measure,
                            err_msg="%s failed representation invariance "
                                    "between dense and sparse indicator "
                                    "formats." % name)
def test_raise_value_error_multilabel_sequences():
    """The legacy multilabel-sequence format must be rejected."""
    sequence_inputs = [
        [[0, 1]],
        [[1], [2], [0, 1]],
        [(), (2), (0, 1)],
        [[]],
        [()],
        np.array([[], [1, 2]], dtype='object'),
    ]
    for name in MULTILABELS_METRICS:
        metric = ALL_METRICS[name]
        for seq in sequence_inputs:
            assert_raises(ValueError, metric, seq, seq)
def test_normalize_option_binary_classification(n_samples=20):
    """normalize=False must equal normalize=True scaled by n_samples (binary)."""
    rng = check_random_state(0)
    y_true = rng.randint(0, 2, size=(n_samples, ))
    y_pred = rng.randint(0, 2, size=(n_samples, ))
    for name in METRICS_WITH_NORMALIZE_OPTION:
        metric = ALL_METRICS[name]
        normalized = metric(y_true, y_pred, normalize=True)
        # A zero score would make the ratio check below vacuous.
        assert_greater(normalized, 0,
                       msg="We failed to test correctly the normalize option")
        unnormalized = metric(y_true, y_pred, normalize=False)
        assert_almost_equal(unnormalized / n_samples, normalized)
def test_normalize_option_multiclasss_classification():
    """normalize=False must equal normalize=True times n_samples (multiclass)."""
    # NOTE(review): the "multiclasss" typo is kept — the test is discovered
    # by name, so renaming would change the public interface.
    # Test in the multiclass case
    random_state = check_random_state(0)
    y_true = random_state.randint(0, 4, size=(20, ))
    y_pred = random_state.randint(0, 4, size=(20, ))
    n_samples = y_true.shape[0]
    for name in METRICS_WITH_NORMALIZE_OPTION:
        metrics = ALL_METRICS[name]
        measure = metrics(y_true, y_pred, normalize=True)
        # A zero score would make the ratio check below vacuous.
        assert_greater(measure, 0,
                       msg="We failed to test correctly the normalize option")
        assert_almost_equal(metrics(y_true, y_pred, normalize=False)
                            / n_samples, measure)
def test_normalize_option_multilabel_classification():
    """normalize=False must equal normalize=True times n_samples (multilabel)."""
    # Test in the multilabel case
    n_classes = 4
    n_samples = 100
    # for both random_state 0 and 1, y_true and y_pred has at least one
    # unlabelled entry
    _, y_true = make_multilabel_classification(n_features=1,
                                               n_classes=n_classes,
                                               random_state=0,
                                               allow_unlabeled=True,
                                               n_samples=n_samples)
    _, y_pred = make_multilabel_classification(n_features=1,
                                               n_classes=n_classes,
                                               random_state=1,
                                               allow_unlabeled=True,
                                               n_samples=n_samples)
    # To make sure at least one empty label is present
    # NOTE(review): if y_true/y_pred are ndarrays this "+=" is an
    # element-wise addition of zeros (a no-op) rather than appending an
    # all-zero row; presumably the latter was intended — confirm.
    y_true += [0]*n_classes
    y_pred += [0]*n_classes
    for name in METRICS_WITH_NORMALIZE_OPTION:
        metrics = ALL_METRICS[name]
        measure = metrics(y_true, y_pred, normalize=True)
        # A zero score would make the ratio check below vacuous.
        assert_greater(measure, 0,
                       msg="We failed to test correctly the normalize option")
        assert_almost_equal(metrics(y_true, y_pred, normalize=False)
                            / n_samples, measure,
                            err_msg="Failed with %s" % name)
@ignore_warnings
def _check_averaging(metric, y_true, y_pred, y_true_binarize, y_pred_binarize,
                     is_multilabel):
    """Check every ``average`` option of ``metric`` against a manual computation.

    The per-label scores obtained from the binarized columns serve as the
    reference for micro/macro/weighted/samples averaging.
    """
    n_samples, n_classes = y_true_binarize.shape
    # No averaging: one score per label column.
    label_measure = metric(y_true, y_pred, average=None)
    assert_array_almost_equal(label_measure,
                              [metric(y_true_binarize[:, i],
                                      y_pred_binarize[:, i])
                               for i in range(n_classes)])
    # Micro measure: global computation on the flattened indicator arrays.
    micro_measure = metric(y_true, y_pred, average="micro")
    assert_almost_equal(micro_measure, metric(y_true_binarize.ravel(),
                                              y_pred_binarize.ravel()))
    # Macro measure: unweighted mean of the per-label scores.
    macro_measure = metric(y_true, y_pred, average="macro")
    assert_almost_equal(macro_measure, np.mean(label_measure))
    # Weighted measure: per-label scores weighted by label support
    # (number of true occurrences per label).
    weights = np.sum(y_true_binarize, axis=0, dtype=int)
    if np.sum(weights) != 0:
        weighted_measure = metric(y_true, y_pred, average="weighted")
        assert_almost_equal(weighted_measure, np.average(label_measure,
                                                         weights=weights))
    else:
        # With zero total support the weighted average is defined as 0.
        weighted_measure = metric(y_true, y_pred, average="weighted")
        assert_almost_equal(weighted_measure, 0)
    # Sample measure (multilabel only): mean of per-sample (row) scores.
    if is_multilabel:
        sample_measure = metric(y_true, y_pred, average="samples")
        assert_almost_equal(sample_measure,
                            np.mean([metric(y_true_binarize[i],
                                            y_pred_binarize[i])
                                     for i in range(n_samples)]))
    # Unrecognised averaging options must be rejected.
    assert_raises(ValueError, metric, y_true, y_pred, average="unknown")
    assert_raises(ValueError, metric, y_true, y_pred, average="garbage")
def check_averaging(name, y_true, y_true_binarize, y_pred, y_pred_binarize,
                    y_score):
    """Run the averaging checks appropriate for metric ``name``."""
    is_multilabel = type_of_target(y_true).startswith("multilabel")
    metric = ALL_METRICS[name]
    if name in METRICS_WITH_AVERAGING:
        # Ordinary metrics compare hard predictions.
        _check_averaging(metric, y_true, y_pred, y_true_binarize,
                         y_pred_binarize, is_multilabel)
    elif name in THRESHOLDED_METRICS_WITH_AVERAGING:
        # Thresholded metrics take continuous scores in place of both the
        # predictions and their binarized form.
        _check_averaging(metric, y_true, y_score, y_true_binarize,
                         y_score, is_multilabel)
    else:
        raise ValueError("Metric is not recorded as having an average option")
def test_averaging_multiclass(n_samples=50, n_classes=3):
    """Yield averaging checks on random multiclass data (nose generator)."""
    rng = check_random_state(0)
    y_true = rng.randint(0, n_classes, size=(n_samples, ))
    y_pred = rng.randint(0, n_classes, size=(n_samples, ))
    y_score = rng.uniform(size=(n_samples, n_classes))
    # Binarize with the label set seen in y_true.
    lb = LabelBinarizer().fit(y_true)
    shared_args = (y_true, lb.transform(y_true), y_pred,
                   lb.transform(y_pred), y_score)
    for name in METRICS_WITH_AVERAGING:
        yield (check_averaging, name) + shared_args
def test_averaging_multilabel(n_classes=5, n_samples=40):
    """Yield averaging checks on multilabel indicator data (nose generator)."""
    _, y = make_multilabel_classification(n_features=1, n_classes=n_classes,
                                          random_state=5, n_samples=n_samples,
                                          allow_unlabeled=False)
    y_true, y_pred = y[:20], y[20:]
    y_score = check_random_state(0).normal(size=(20, n_classes))
    # Indicator format doubles as its own binarized representation.
    shared_args = (y_true, y_true, y_pred, y_pred, y_score)
    for name in METRICS_WITH_AVERAGING + THRESHOLDED_METRICS_WITH_AVERAGING:
        yield (check_averaging, name) + shared_args
def test_averaging_multilabel_all_zeroes():
    """Averaging must behave when no sample has any label (zero support)."""
    y_true = np.zeros((20, 3))
    y_pred = np.zeros((20, 3))
    y_score = np.zeros((20, 3))
    # Indicator format doubles as its own binarized representation.
    y_true_binarize = y_true
    y_pred_binarize = y_pred
    for name in METRICS_WITH_AVERAGING:
        yield (check_averaging, name, y_true, y_true_binarize, y_pred,
               y_pred_binarize, y_score)
    # Test _average_binary_score for weight.sum() == 0
    binary_metric = (lambda y_true, y_score, average="macro":
                     _average_binary_score(
                         precision_score, y_true, y_score, average))
    _check_averaging(binary_metric, y_true, y_pred, y_true_binarize,
                     y_pred_binarize, is_multilabel=True)
def test_averaging_multilabel_all_ones():
    """Averaging must behave when every label of every sample is set."""
    y_true = np.ones((20, 3))
    y_pred = np.ones((20, 3))
    y_score = np.ones((20, 3))
    # Indicator format doubles as its own binarized representation.
    shared_args = (y_true, y_true, y_pred, y_pred, y_score)
    for name in METRICS_WITH_AVERAGING:
        yield (check_averaging, name) + shared_args
@ignore_warnings
def check_sample_weight_invariance(name, metric, y1, y2):
    """Check the ``sample_weight`` contract of ``metric`` on targets y1/y2.

    Verifies that: unit weights match no weights; non-uniform weights change
    the score; list and array weights agree; integer weights equal sample
    repetition; zero weights equal sample removal; (for normalized metrics)
    the score is invariant under rescaling of the weights; and a length
    mismatch raises.
    """
    rng = np.random.RandomState(0)
    sample_weight = rng.randint(1, 10, size=len(y1))
    # check that unit weights gives the same score as no weight
    unweighted_score = metric(y1, y2, sample_weight=None)
    assert_almost_equal(
        unweighted_score,
        metric(y1, y2, sample_weight=np.ones(shape=len(y1))),
        err_msg="For %s sample_weight=None is not equivalent to "
                "sample_weight=ones" % name)
    # check that the weighted and unweighted scores are unequal
    weighted_score = metric(y1, y2, sample_weight=sample_weight)
    assert_not_equal(
        unweighted_score, weighted_score,
        msg="Unweighted and weighted scores are unexpectedly "
            "equal (%f) for %s" % (weighted_score, name))
    # check that sample_weight can be a list
    weighted_score_list = metric(y1, y2,
                                 sample_weight=sample_weight.tolist())
    assert_almost_equal(
        weighted_score, weighted_score_list,
        err_msg="Weighted scores for array and list sample_weight input are "
                "not equal (%f != %f) for %s" % (
                    weighted_score, weighted_score_list, name))
    # check that integer weights is the same as repeated samples
    repeat_weighted_score = metric(
        np.repeat(y1, sample_weight, axis=0),
        np.repeat(y2, sample_weight, axis=0), sample_weight=None)
    assert_almost_equal(
        weighted_score, repeat_weighted_score,
        err_msg="Weighting %s is not equal to repeating samples" % name)
    # check that ignoring a fraction of the samples is equivalent to setting
    # the corresponding weights to zero
    sample_weight_subset = sample_weight[1::2]
    sample_weight_zeroed = np.copy(sample_weight)
    sample_weight_zeroed[::2] = 0
    y1_subset = y1[1::2]
    y2_subset = y2[1::2]
    weighted_score_subset = metric(y1_subset, y2_subset,
                                   sample_weight=sample_weight_subset)
    weighted_score_zeroed = metric(y1, y2,
                                   sample_weight=sample_weight_zeroed)
    assert_almost_equal(
        weighted_score_subset, weighted_score_zeroed,
        err_msg=("Zeroing weights does not give the same result as "
                 "removing the corresponding samples (%f != %f) for %s" %
                 (weighted_score_zeroed, weighted_score_subset, name)))
    # Unnormalized metrics scale with the total weight, so the rescaling
    # invariance only applies to the normalized ones.
    if not name.startswith('unnormalized'):
        # check that the score is invariant under scaling of the weights by a
        # common factor
        for scaling in [2, 0.3]:
            assert_almost_equal(
                weighted_score,
                metric(y1, y2, sample_weight=sample_weight * scaling),
                err_msg="%s sample_weight is not invariant "
                        "under scaling" % name)
    # Check that if sample_weight.shape[0] != y_true.shape[0], it raised an
    # error
    assert_raises(Exception, metric, y1, y2,
                  sample_weight=np.hstack([sample_weight, sample_weight]))
def test_sample_weight_invariance(n_samples=50):
    """Yield sample-weight invariance checks for every weighted metric.

    Covers binary, multiclass and multilabel-indicator targets. Metrics
    without ``sample_weight`` support (METRICS_WITHOUT_SAMPLE_WEIGHT) are
    skipped, as are metrics undefined for the target type.
    """
    # Binary output.
    # (The original code created `random_state` twice in a row here with
    # the same seed; the redundant statement has been removed.)
    random_state = check_random_state(0)
    y_true = random_state.randint(0, 2, size=(n_samples, ))
    y_pred = random_state.randint(0, 2, size=(n_samples, ))
    y_score = random_state.random_sample(size=(n_samples,))
    for name in ALL_METRICS:
        if (name in METRICS_WITHOUT_SAMPLE_WEIGHT or
                name in METRIC_UNDEFINED_MULTICLASS):
            continue
        metric = ALL_METRICS[name]
        # Thresholded metrics consume scores; the rest consume predictions.
        if name in THRESHOLDED_METRICS:
            yield check_sample_weight_invariance, name, metric, y_true, y_score
        else:
            yield check_sample_weight_invariance, name, metric, y_true, y_pred
    # Multiclass.
    random_state = check_random_state(0)
    y_true = random_state.randint(0, 5, size=(n_samples, ))
    y_pred = random_state.randint(0, 5, size=(n_samples, ))
    y_score = random_state.random_sample(size=(n_samples, 5))
    for name in ALL_METRICS:
        if (name in METRICS_WITHOUT_SAMPLE_WEIGHT or
                name in METRIC_UNDEFINED_MULTICLASS):
            continue
        metric = ALL_METRICS[name]
        if name in THRESHOLDED_METRICS:
            yield check_sample_weight_invariance, name, metric, y_true, y_score
        else:
            yield check_sample_weight_invariance, name, metric, y_true, y_pred
    # Multilabel indicator.
    _, ya = make_multilabel_classification(n_features=1, n_classes=20,
                                           random_state=0, n_samples=100,
                                           allow_unlabeled=False)
    _, yb = make_multilabel_classification(n_features=1, n_classes=20,
                                           random_state=1, n_samples=100,
                                           allow_unlabeled=False)
    y_true = np.vstack([ya, yb])
    y_pred = np.vstack([ya, ya])
    y_score = random_state.randint(1, 4, size=y_true.shape)
    for name in (MULTILABELS_METRICS + THRESHOLDED_MULTILABEL_METRICS +
                 MULTIOUTPUT_METRICS):
        if name in METRICS_WITHOUT_SAMPLE_WEIGHT:
            continue
        metric = ALL_METRICS[name]
        if name in THRESHOLDED_METRICS:
            yield (check_sample_weight_invariance, name, metric, y_true,
                   y_score)
        else:
            yield (check_sample_weight_invariance, name, metric, y_true,
                   y_pred)
def test_no_averaging_labels():
    """Test the ``labels`` argument when not using averaging.

    In both the multi-class and multi-label cases, passing ``labels`` with
    ``average=None`` must return the per-label scores reordered according
    to the given label order.
    """
    y_true_multilabel = np.array([[1, 1, 0, 0], [1, 1, 0, 0]])
    y_pred_multilabel = np.array([[0, 0, 1, 1], [0, 1, 1, 0]])
    y_true_multiclass = np.array([0, 1, 2])
    y_pred_multiclass = np.array([0, 2, 3])
    labels = np.array([3, 0, 1, 2])
    _, inverse_labels = np.unique(labels, return_inverse=True)
    for name in METRICS_WITH_AVERAGING:
        for y_true, y_pred in [[y_true_multiclass, y_pred_multiclass],
                               [y_true_multilabel, y_pred_multilabel]]:
            # Skip the multilabel (2-d) pair for metrics that do not
            # support multilabel input.
            # BUG FIX: the previous guard used ``y_pred.shape[1] > 0``,
            # which raises IndexError for the 1-d multiclass arrays;
            # ``ndim > 1`` is the correct "is indicator data" test.
            if name not in MULTILABELS_METRICS and y_pred.ndim > 1:
                continue
            metric = ALL_METRICS[name]
            # Scores computed with an explicit label order must equal the
            # default-order scores permuted by that label order.
            score_labels = metric(y_true, y_pred, labels=labels, average=None)
            score = metric(y_true, y_pred, average=None)
            assert_array_equal(score_labels, score[inverse_labels])
|
|
import re
from typing import Dict, List, Optional
from django.db.models import BooleanField, Case, IntegerField, Q, Value, When
from django.shortcuts import reverse
from drf_yasg import openapi
from drf_yasg.utils import swagger_auto_schema
from rest_framework import exceptions, permissions, throttling, viewsets
from rest_framework.decorators import action
from rest_framework.request import Request
from rest_framework.response import Response
from accounts.quotas import AccountQuotas
from jobs.api.helpers import redirect_to_job
from jobs.models import Job
from manager.api.helpers import (
HtmxCreateMixin,
HtmxDestroyMixin,
HtmxListMixin,
HtmxRetrieveMixin,
HtmxUpdateMixin,
filter_from_ident,
)
from projects.api.serializers import (
ProjectAgentCreateSerializer,
ProjectAgentSerializer,
ProjectAgentUpdateSerializer,
ProjectAuthorUpdateSerializer,
ProjectCreateSerializer,
ProjectDestroySerializer,
ProjectListSerializer,
ProjectRetrieveSerializer,
ProjectUpdateSerializer,
)
from projects.models.projects import Project, ProjectAgent, ProjectRole
from projects.models.sources import Source
from users.models import AnonUser, User, get_projects
def get_project(
    identifiers: Dict[str, str], user: User, roles: Optional[List[ProjectRole]] = None,
):
    """
    Get a project for the user, optionally requiring one or more roles.

    :param identifiers: URL kwargs; must contain "project" and may contain "account".
    :param user: The user requesting the project.
    :param roles: If given, the user's role on the project must be one of these.

    Like GitHub, raises a `NotFound` exception if the user does not have permission
    to avoid leaking the existence of a private project.
    """
    # Renamed from `filter` to avoid shadowing the `filter` builtin.
    criteria = filter_from_ident(identifiers["project"])
    account = identifiers.get("account")
    if account:
        criteria.update(**filter_from_ident(account, prefix="account"))
    elif "id" not in criteria:
        # No account and no numeric project id: assume a temporary project.
        criteria.update({"account__name": "temp"})
    try:
        project = get_projects(user).get(**criteria)
    except Project.DoesNotExist:
        raise exceptions.NotFound
    else:
        # Role mismatch is also reported as NotFound (not PermissionDenied)
        # so that private projects are not leaked.
        if roles and project.role not in [role.name for role in roles]:
            raise exceptions.NotFound
        return project
class ProjectsCreateAnonThrottle(throttling.AnonRateThrottle):
    """
    Throttle for temporary project creation by anonymous users.
    """
    # DRF rate string: at most 10 creations per anonymous client per day.
    rate = "10/day"
class ProjectsViewSet(
    HtmxListMixin,
    HtmxCreateMixin,
    HtmxRetrieveMixin,
    HtmxUpdateMixin,
    HtmxDestroyMixin,
    viewsets.GenericViewSet,
):
    """
    A view set for projects.
    Provides basic CRUD views for projects.
    """
    # Settings consumed by the Htmx* mixins / DRF generic machinery.
    lookup_url_kwarg = "project"
    object_name = "project"
    queryset_name = "projects"
    def get_permissions(self):
        """
        Get the permissions that the current action requires.
        Override defaults so that `create`, `list` and `retrieve` do not require
        authentication (although other restrictions do apply for anon users).
        """
        if self.action in ["create", "list", "retrieve"]:
            return [permissions.AllowAny()]
        return [permissions.IsAuthenticated()]
    def get_throttles(self):
        """
        Get the throttles to apply to the current request.
        Anonymous project creation is rate limited; all other requests use
        the default throttles.
        """
        if self.action == "create" and self.request.user.is_anonymous:
            return [ProjectsCreateAnonThrottle()]
        return super().get_throttles()
    def get_queryset(self):
        """
        Get the set of projects that the user has access to and which meet filter criteria.
        Does not return temporary projects.
        TODO: Currently this ignores an authenticated user's access to
        projects inherited from membership of a team.
        """
        queryset = get_projects(self.request.user).select_related("account")
        # Filter by owning account (integer id).
        account = self.request.GET.get("account")
        if account:
            queryset = queryset.filter(account_id=account)
        # Filter by the current user's role; supports a comma separated
        # list e.g. "editor,owner", the special value "member" (any role),
        # and a trailing "+" meaning "this role or above" e.g. "author+".
        role = self.request.GET.get("role")
        if self.request.user.is_authenticated and role:
            roles = re.split(r"\s*,\s*", role)
            q = Q()
            for part in roles:
                match = re.match(r"([a-zA-Z]+)(\+)?", part)
                if match:
                    role_name, and_above = match.groups()
                    if role_name.lower() == "member":
                        q |= Q(role__isnull=False)
                    else:
                        try:
                            project_role = ProjectRole.from_string(role_name)
                        except ValueError as exc:
                            raise exceptions.ValidationError({"role": str(exc)})
                        else:
                            if and_above:
                                q |= Q(
                                    role__in=[
                                        role.name
                                        for role in ProjectRole.and_above(project_role)
                                    ]
                                )
                            else:
                                q |= Q(role=project_role.name)
                else:
                    raise exceptions.ValidationError(
                        {"role": "Invalid role specification {}".format(part)}
                    )
            queryset = queryset.filter(q)
        # Filter by visibility; anything other than false/no/0 means public.
        public = self.request.GET.get("public")
        if public:
            if public.lower() in ["false", "no", "0"]:
                queryset = queryset.filter(public=False)
            else:
                queryset = queryset.filter(public=True)
        # Filter by the address of one of the project's sources.
        source = self.request.GET.get("source")
        if source:
            try:
                query = Source.query_from_address(source, prefix="sources")
            except ValueError as exc:
                raise exceptions.ValidationError({"source": str(exc)})
            else:
                queryset = queryset.filter(query)
        # Free-text search over name / title / description.
        search = self.request.GET.get("search")
        if search:
            queryset = queryset.filter(
                Q(name__icontains=search)
                | Q(title__icontains=search)
                | Q(description__icontains=search)
            )
        # Ordering favoring those that the user has a role
        # on, has an image set, has a description set, etc
        return (
            queryset.filter(temporary=False)
            .annotate(
                # Higher role => higher rank; anonymous users get rank 0.
                role_rank=Case(
                    When(role=ProjectRole.OWNER.name, then=Value(6)),
                    When(role=ProjectRole.MANAGER.name, then=Value(5)),
                    When(role=ProjectRole.AUTHOR.name, then=Value(4)),
                    When(role=ProjectRole.EDITOR.name, then=Value(3)),
                    When(role=ProjectRole.REVIEWER.name, then=Value(2)),
                    When(role=ProjectRole.READER.name, then=Value(1)),
                    When(role__isnull=True, then=Value(0)),
                    output_field=IntegerField(),
                )
                if self.request.user.is_authenticated
                else Value(0, output_field=IntegerField()),
                has_image=Case(
                    When(image_file__isnull=False, then=Value(True)),
                    default=Value(False),
                    output_field=BooleanField(),
                ),
                # Use regex filter here to exclude nulls, blanks and very short strings
                has_title=Case(
                    When(title__regex=r"^.{1,}$", then=Value(True)),
                    default=Value(False),
                    output_field=BooleanField(),
                ),
                has_description=Case(
                    When(description__regex=r"^.{1,}$", then=Value(True)),
                    default=Value(False),
                    output_field=BooleanField(),
                ),
            )
            .order_by(
                "-featured",
                "-has_image",
                "-has_title",
                "-has_description",
                "-role_rank",
                "-created",
            )
        )
    def get_object(self):
        """
        Get the project.
        Read access control is done in the `get_project` function.
        For `partial-update` and `destroy` does an additional
        check that the user is a project AUTHOR, MANAGER or OWNER.
        Note that there is a specific `partial_update` serializer for AUTHORs
        which limits which fields of a project they can modify.
        For "temp" account projects, ensure that the project was accessed
        with a key (API) or by its name (UI). This prevents access to a
        temporary project by guessing its integer id. e.g. `/temp/2123`
        Because temporary objects do not have any users with roles,
        anyone with their name can modify or delete them.
        """
        # Cache the project on the view instance across repeated calls.
        if hasattr(self, "project"):
            return self.project
        project = get_project(self.kwargs, self.request.user)
        if project.account.name == "temp":
            if self.request.GET.get(
                "key"
            ) == project.key or "name" in filter_from_ident(self.kwargs["project"]):
                return project
            else:
                raise exceptions.NotFound
        # Write actions require a sufficient role on the project.
        if (
            self.action == "partial_update"
            and project.role
            not in [
                ProjectRole.AUTHOR.name,
                ProjectRole.MANAGER.name,
                ProjectRole.OWNER.name,
            ]
        ) or (self.action == "destroy" and project.role != ProjectRole.OWNER.name):
            raise exceptions.PermissionDenied
        self.project = project
        return self.project
    def get_serializer_class(self):
        """Get the serializer class for the current action."""
        if self.action == "partial_update":
            # AUTHORs get a restricted serializer; skip the object lookup
            # when generating schema docs (swagger_fake_view).
            if (
                not getattr(self, "swagger_fake_view", False)
                and self.get_object().role == ProjectRole.AUTHOR.name
            ):
                return ProjectAuthorUpdateSerializer
            else:
                return ProjectUpdateSerializer
        return {
            "list": ProjectListSerializer,
            "create": ProjectCreateSerializer,
            "retrieve": ProjectRetrieveSerializer,
            "destroy": ProjectDestroySerializer,
        }.get(self.action, ProjectListSerializer)
    def get_response_context(self, *args, **kwargs):
        """
        Override to add account and account quota usage to the template context.
        """
        context = super().get_response_context(*args, **kwargs)
        project = kwargs.get("instance")
        if project:
            account = project.account
            context["account"] = account
            context[
                "account_project_private_usage"
            ] = AccountQuotas.PROJECTS_PRIVATE.usage(account)
        return context
    def get_success_url(self, serializer):
        """
        Get the URL to use in the Location header when an action is successful.
        For `create`, redirects to the "main" page for the project.
        """
        if self.action in ["create", "partial_update"]:
            project = serializer.instance
            return reverse(
                "ui-projects-retrieve", args=[project.account.name, project.name]
            )
        else:
            return None
    # Most of the following views serve simply to provide docstrings
    # from which API documentation is generated.
    @swagger_auto_schema(
        manual_parameters=[
            openapi.Parameter(
                "account",
                openapi.IN_QUERY,
                description="The integer of the id of the account that the project belongs to.",
                type=openapi.TYPE_INTEGER,
            ),
            openapi.Parameter(
                "role",
                openapi.IN_QUERY,
                description="The role that the currently authenticated user has on the project "
                'e.g. "editor", "owner" (for any role, use "member")',
                type=openapi.TYPE_STRING,
            ),
            openapi.Parameter(
                "public",
                openapi.IN_QUERY,
                description="Whether or not the project is public.",
                type=openapi.TYPE_BOOLEAN,
            ),
            openapi.Parameter(
                "search",
                openapi.IN_QUERY,
                description="A string to search for in the project `name`, `title` or `description`.",
                type=openapi.TYPE_STRING,
            ),
            openapi.Parameter(
                "source",
                openapi.IN_QUERY,
                description="The address of a project source e.g. `github://<org>/<repo>`, `gdoc://<id>`.",
                type=openapi.TYPE_STRING,
            ),
        ]
    )
    def list(self, request: Request, *args, **kwargs) -> Response:
        """
        List projects.
        Returns a list of projects that are accessible to the user, including those that are
        public and those that the user is a member of (i.e. has a project role for).
        The returned list can be filtered using query parameters, `account`, `role`, `public`,
        `search`, `source`. The `role` filter applies to the currently authenticated user, and
        as such has no effect for unauthenticated requests. Roles can be specified as a
        comma separated list e.g. `role=author,manager,owner` or using to the `+` operator
        to indicate the minimum required role e.g. `role=author+` (equivalent to the previous
        example).
        For example, to list all projects for which the authenticated user is a member and which
        uses a particular Google Doc as a source:
            GET /projects?role=member&source=gdoc://1BW6MubIyDirCGW9Wq-tSqCma8pioxBI6VpeLyXn5mZA
        """
        return super().list(request, *args, **kwargs)
    def create(self, request: Request, *args, **kwargs) -> Response:
        """
        Create a project.
        Receives details of the project.
        Returns details of the new project.
        """
        return super().create(request, *args, **kwargs)
    def retrieve(self, request: Request, *args, **kwargs) -> Response:
        """
        Retrieve a project.
        Returns details of the project.
        """
        return super().retrieve(request, *args, **kwargs)
    def partial_update(self, request: Request, *args, **kwargs) -> Response:
        """
        Update a project.
        Receives details of the project.
        Returns updated details of the project.
        """
        return super().partial_update(request, *args, **kwargs)
    def destroy(self, request: Request, *args, **kwargs) -> Response:
        """
        Destroy a project.
        Returns an empty response on success.
        """
        return super().destroy(request, *args, **kwargs)
    @swagger_auto_schema(responses={302: "Redirect to job"})
    @action(detail=True, methods=["POST"])
    def pull(self, request: Request, *args, **kwargs) -> Response:
        """
        Pull the project.
        Creates a pull job and redirects to it.
        """
        project = self.get_object()
        job = project.pull(request.user)
        job.dispatch()
        return redirect_to_job(job, accepts_html=self.accepts_html())
    @swagger_auto_schema(responses={302: "Redirect to job"})
    @action(detail=True, methods=["post"])
    def session(self, request: Request, *args, **kwargs) -> Response:
        """
        Get a session for the project.
        If the user has already created, or is connected to,
        a `session` job for this project, and that job is still running,
        then returns that job. Otherwise, creates a new session.
        """
        project = self.get_object()
        try:
            # Look up the most recent active, non-snapshot session job
            # belonging to this user (or this anonymous visitor).
            job = Job.objects.filter(
                project=project,
                snapshot__isnull=True,
                is_active=True,
                **(
                    {"users": request.user}
                    if request.user.is_authenticated
                    else {"anon_users__id": AnonUser.get_id(request)}
                ),
            ).order_by("-created")[0]
        except IndexError:
            # No existing session job: create and dispatch a new one.
            job = project.session(request)
            job.dispatch()
        return redirect_to_job(job)
class ProjectsAgentsViewSet(
    HtmxListMixin,
    HtmxCreateMixin,
    HtmxRetrieveMixin,
    HtmxUpdateMixin,
    HtmxDestroyMixin,
    viewsets.GenericViewSet,
):
    """
    A view set for projects agents (users or teams).

    Provides basic CRUD views for project agents. Uses
    `ProjectsViewSet.get_object` so that the role of the user for the
    project (including any role inherited from the account) is available.
    """

    lookup_url_kwarg = "agent"
    object_name = "agent"
    queryset_name = "agents"

    def get_project(self) -> Project:
        """Get the project and check that the user has permission to the perform action."""
        project = ProjectsViewSet.init(
            self.action, self.request, self.args, self.kwargs
        ).get_object()
        # No role at all means no access whatsoever.
        if project.role is None:
            raise exceptions.PermissionDenied
        # Mutating actions additionally require a manager/owner role.
        is_mutation = self.action in ["create", "partial_update", "destroy"]
        is_privileged = project.role in [
            ProjectRole.MANAGER.name,
            ProjectRole.OWNER.name,
        ]
        if is_mutation and not is_privileged:
            raise exceptions.PermissionDenied
        return project

    def get_queryset(self):
        """Get project agents."""
        return ProjectAgent.objects.filter(project=self.get_project())

    def get_object(self) -> ProjectAgent:
        """Get a project agent."""
        matches = self.get_queryset().filter(id=self.kwargs["agent"])
        try:
            return matches[0]
        except IndexError:
            raise exceptions.NotFound

    def get_serializer_class(self):
        """Get the serializer class for the current action."""
        if self.action == "create":
            # Call `get_project` to perform permission check
            # Skip when doing API Schema generation
            # (permission check should probably go elsewhere)
            if not getattr(self, "swagger_fake_view", False):
                self.get_project()
            return ProjectAgentCreateSerializer
        if self.action == "partial_update":
            return ProjectAgentUpdateSerializer
        if self.action == "destroy":
            return None
        return ProjectAgentSerializer

    def get_response_context(self, *args, **kwargs):
        """Override to provide additional context when rendering templates."""
        extra = {
            "queryset": self.get_queryset(),
            "project": self.get_project(),
        }
        return super().get_response_context(*args, **extra, **kwargs)
|
|
import os
import sublime
from sublime_plugin import WindowCommand, TextCommand
from ..git_command import GitCommand
from ...common import util
# Extra help appended to both help-text variants below.
# NOTE(review): these help strings double as runtime delimiters -- the commit
# view commands split the buffer's text on the chosen help text to recover the
# commit message -- so editing them changes behaviour, not just documentation.
COMMIT_HELP_TEXT_EXTRA = """##
## You may also reference or close a GitHub issue with this commit. To do so,
## type `#` followed by the `tab` key. You will be shown a list of issues
## related to the current repo. You may also type `owner/repo#` plus the `tab`
## key to reference an issue in a different GitHub repo.
"""

# Help text shown when `commit_on_close` is enabled (closing the window commits).
COMMIT_HELP_TEXT_ALT = """
## To make a commit, type your commit message and close the window. To cancel
## the commit, delete the commit message and close the window. To sign off on
## the commit, press {key}-S.
""".format(key=util.super_key) + COMMIT_HELP_TEXT_EXTRA

# Default help text: committing is done explicitly with {key}-ENTER.
COMMIT_HELP_TEXT = """
## To make a commit, type your commit message and press {key}-ENTER. To cancel
## the commit, close the window. To sign off on the commit, press {key}-S.
""".format(key=util.super_key) + COMMIT_HELP_TEXT_EXTRA

# Template for the sign-off line appended by GsCommitViewSignCommand.
COMMIT_SIGN_TEXT = """
Signed-off-by: {name} <{email}>
"""

# Tab title for the commit view; formatted with the repo directory's basename.
COMMIT_TITLE = "COMMIT: {}"
class GsCommitCommand(WindowCommand, GitCommand):

    """
    Display a transient window to capture the user's desired commit message.

    If the user is amending the previous commit, pre-populate the commit
    message area with the previous commit message.
    """

    def run(self, **kwargs):
        # All work happens off the UI thread.
        sublime.set_timeout_async(lambda: self.run_async(**kwargs), 0)

    def run_async(self, repo_path=None, include_unstaged=False, amend=False):
        repo_path = repo_path or self.repo_path
        savvy_settings = sublime.load_settings("GitSavvy.sublime-settings")
        commit_on_close = savvy_settings.get("commit_on_close")

        view = self.window.new_file()
        view.set_name(COMMIT_TITLE.format(os.path.basename(repo_path)))

        # Record the commit context on the view so the follow-up commands
        # (initialize/commit/sign/close) can read it back.
        settings = view.settings()
        for key, value in (
            ("git_savvy.get_long_text_view", True),
            ("git_savvy.commit_view", True),
            ("git_savvy.commit_view.include_unstaged", include_unstaged),
            ("git_savvy.commit_view.amend", amend),
            ("git_savvy.repo_path", repo_path),
            ("git_savvy.commit_on_close", commit_on_close),
        ):
            settings.set(key, value)

        if savvy_settings.get("use_syntax_for_commit_editmsg"):
            view.set_syntax_file(util.file.get_syntax_for_file("COMMIT_EDITMSG"))
        else:
            view.set_syntax_file("Packages/GitSavvy/syntax/make_commit.sublime-syntax")

        view.run_command("gs_handle_vintageous")

        # Scratch views don't prompt about unsaved changes on close.
        if commit_on_close or not savvy_settings.get("prompt_on_abort_commit"):
            view.set_scratch(True)

        view.run_command("gs_commit_initialize_view")
class GsCommitInitializeViewCommand(TextCommand, GitCommand):

    """
    Fill the view with the commit view help message, and optionally
    the previous commit message if amending.
    """

    def run(self, edit):
        merge_msg_path = os.path.join(self.repo_path, ".git", "MERGE_MSG")
        savvy_settings = sublime.load_settings("GitSavvy.sublime-settings")
        # Pick the help blurb matching the configured commit workflow.
        help_text = (COMMIT_HELP_TEXT_ALT
                     if savvy_settings.get("commit_on_close")
                     else COMMIT_HELP_TEXT)
        # Stored on the view so the commit/sign/close commands can split the
        # buffer's text on it to recover the message portion.
        self.view.settings().set("git_savvy.commit_view.help_text", help_text)

        include_unstaged = self.view.settings().get("git_savvy.commit_view.include_unstaged", False)
        option_amend = self.view.settings().get("git_savvy.commit_view.amend")

        if option_amend:
            # Amending: pre-populate with the previous commit's message.
            last_commit_message = self.git("log", "-1", "--pretty=%B").strip()
            initial_text = last_commit_message + help_text
        elif os.path.exists(merge_msg_path):
            # Merge in progress: pre-populate with git's prepared MERGE_MSG.
            with util.file.safe_open(merge_msg_path, "r") as f:
                initial_text = f.read() + help_text
        else:
            initial_text = help_text

        # Optional repo-local extra help (defaults to a ".commit_help" file).
        commit_help_extra_file = savvy_settings.get("commit_help_extra_file") or ".commit_help"
        commit_help_extra_path = os.path.join(self.repo_path, commit_help_extra_file)
        if os.path.exists(commit_help_extra_path):
            with util.file.safe_open(commit_help_extra_path, "r", encoding="utf-8") as f:
                initial_text += f.read()

        # Build the `git diff` invocation; argument order matters to git.
        git_args = [
            "diff",
            "--no-color"
        ]

        show_commit_diff = savvy_settings.get("show_commit_diff")
        # for backward compatibility, check also if show_commit_diff is True
        if show_commit_diff is True or show_commit_diff == "full":
            git_args.append("--patch")

        show_diffstat = savvy_settings.get("show_diffstat")
        if show_commit_diff == "stat" or (show_commit_diff == "full" and show_diffstat):
            git_args.append("--stat")

        if not include_unstaged:
            # Only the staged changes will be committed.
            git_args.append("--cached")
        if option_amend:
            # Diff against the parent of HEAD, since HEAD itself is being amended.
            git_args.append("HEAD^")
        elif include_unstaged:
            git_args.append("HEAD")

        # Append the diff below the help text (falsy show_commit_diff => none).
        initial_text += self.git(*git_args) if show_commit_diff else ''
        self.view.run_command("gs_replace_view_text", {
            "text": initial_text,
            "nuke_cursors": True
        })
class GsCommitViewDoCommitCommand(TextCommand, GitCommand):

    """
    Take the text of the current view (minus the help message text) and
    make a commit using the text for the commit message.
    """

    def run(self, edit, message=None):
        # Defer the actual commit to the async worker thread.
        sublime.set_timeout_async(lambda: self.run_async(commit_message=message), 0)

    def run_async(self, commit_message=None):
        settings = self.view.settings()

        if commit_message is None:
            # Everything before the stored help text is the message proper.
            full_text = self.view.substr(sublime.Region(0, self.view.size()))
            help_text = settings.get("git_savvy.commit_view.help_text")
            commit_message = full_text.split(help_text)[0]

        show_panel_overrides = \
            sublime.load_settings("GitSavvy.sublime-settings").get("show_panel_for")

        # None entries are placeholders for omitted flags.
        flags = [
            "commit",
            None if "commit" in show_panel_overrides else "-q",
            "-a" if settings.get("git_savvy.commit_view.include_unstaged") else None,
            "--amend" if settings.get("git_savvy.commit_view.amend") else None,
            "-F",
            "-",
        ]
        self.git(*flags, stdin=commit_message)

        # ensure view is not already closed (i.e.: when "commit_on_close" enabled)
        if settings.get("git_savvy.commit_view") and self.view.window():
            self.view.window().focus_view(self.view)
            self.view.set_scratch(True)  # ignore dirty on actual commit
            self.view.window().run_command("close_file")
        else:
            sublime.set_timeout_async(
                lambda: util.view.refresh_gitsavvy(sublime.active_window().active_view()))
class GsCommitViewSignCommand(TextCommand, GitCommand):

    """
    Sign off on the commit with full name and email.
    """

    def run(self, edit):
        help_text = self.view.settings().get("git_savvy.commit_view.help_text")
        buffer_text = self.view.substr(sublime.Region(0, self.view.size()))
        segments = buffer_text.split(help_text)

        name = self.git("config", "user.name").strip()
        email = self.git("config", "user.email").strip()

        # Append the sign-off to the message portion (before the help text).
        segments[0] += COMMIT_SIGN_TEXT.format(name=name, email=email)

        self.view.run_command("gs_replace_view_text", {
            "text": help_text.join(segments),
            "nuke_cursors": True
        })
class GsCommitViewCloseCommand(TextCommand, GitCommand):

    """
    Perform commit action on commit view close if `commit_on_close` setting
    is enabled.
    """

    def run(self, edit):
        savvy_settings = sublime.load_settings("GitSavvy.sublime-settings")
        if not savvy_settings.get("commit_on_close"):
            return

        view_text = self.view.substr(sublime.Region(0, self.view.size()))
        help_text = self.view.settings().get("git_savvy.commit_view.help_text")
        if help_text in view_text:
            message_txt = view_text.split(help_text)[0].strip()
        else:
            message_txt = ""

        # An empty message means the user cancelled the commit.
        if message_txt:
            self.view.run_command("gs_commit_view_do_commit", {"message": message_txt})
|
|
import sys
sys.path = ['.'] + sys.path
from test.test_support import verbose, run_unittest
import re
from re import Scanner
import sys, os, traceback
from weakref import proxy
# Misc tests from Tim Peters' re.doc
# WARNING: Don't change details in these tests if you don't know
# what you're doing. Some of these tests were carefully modeled to
# cover most of the code.
import unittest
class ReTests(unittest.TestCase):
    # NOTE(review): this test module targets Python 2 (it uses `unicode`,
    # `cPickle` and the `sre` module); the expectations below are pinned to
    # that version of the `re` engine.

    def test_weakref(self):
        # Compiled patterns must support weak references.
        s = 'QabbbcR'
        x = re.compile('ab+c')
        y = proxy(x)
        self.assertEqual(x.findall('QabbbcR'), y.findall('QabbbcR'))

    def test_search_star_plus(self):
        # search() scans for the first location; match() anchors at the start.
        self.assertEqual(re.search('x*', 'axx').span(0), (0, 0))
        self.assertEqual(re.search('x*', 'axx').span(), (0, 0))
        self.assertEqual(re.search('x+', 'axx').span(0), (1, 3))
        self.assertEqual(re.search('x+', 'axx').span(), (1, 3))
        self.assertEqual(re.search('x', 'aaa'), None)
        self.assertEqual(re.match('a*', 'xxx').span(0), (0, 0))
        self.assertEqual(re.match('a*', 'xxx').span(), (0, 0))
        self.assertEqual(re.match('x*', 'xxxa').span(0), (0, 3))
        self.assertEqual(re.match('x*', 'xxxa').span(), (0, 3))
        self.assertEqual(re.match('a+', 'xxx'), None)

    def bump_num(self, matchobj):
        # Helper for test_basic_re_sub: replace a matched number with number + 1.
        int_value = int(matchobj.group(0))
        return str(int_value + 1)

    def test_basic_re_sub(self):
        self.assertEqual(re.sub("(?i)b+", "x", "bbbb BBBB"), 'x x')
        self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y'),
                         '9.3 -3 24x100y')
        self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y', 3),
                         '9.3 -3 23x99y')

        # Callable replacements are taken literally; string templates expand escapes.
        self.assertEqual(re.sub('.', lambda m: r"\n", 'x'), '\\n')
        self.assertEqual(re.sub('.', r"\n", 'x'), '\n')

        s = r"\1\1"
        self.assertEqual(re.sub('(.)', s, 'x'), 'xx')
        self.assertEqual(re.sub('(.)', re.escape(s), 'x'), s)
        self.assertEqual(re.sub('(.)', lambda m: s, 'x'), s)

        # \g<name> and \g<number> group references in the template.
        self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<a>', 'xx'), 'xxxx')
        self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<1>', 'xx'), 'xxxx')
        self.assertEqual(re.sub('(?P<unk>x)', '\g<unk>\g<unk>', 'xx'), 'xxxx')
        self.assertEqual(re.sub('(?P<unk>x)', '\g<1>\g<1>', 'xx'), 'xxxx')

        # Known escapes are expanded; unknown ones are passed through verbatim.
        self.assertEqual(re.sub('a',r'\t\n\v\r\f\a\b\B\Z\a\A\w\W\s\S\d\D','a'),
                         '\t\n\v\r\f\a\b\\B\\Z\a\\A\\w\\W\\s\\S\\d\\D')
        self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'), '\t\n\v\r\f\a')
        self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'),
                         (chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7)))

        self.assertEqual(re.sub('^\s*', 'X', 'test'), 'Xtest')
    def test_bug_449964(self):
        # fails for group followed by other escape
        self.assertEqual(re.sub(r'(?P<unk>x)', '\g<1>\g<1>\\b', 'xx'),
                         'xx\bxx\b')

    def test_bug_449000(self):
        # Test for sub() on escaped characters
        self.assertEqual(re.sub(r'\r\n', r'\n', 'abc\r\ndef\r\n'),
                         'abc\ndef\n')
        self.assertEqual(re.sub('\r\n', r'\n', 'abc\r\ndef\r\n'),
                         'abc\ndef\n')
        self.assertEqual(re.sub(r'\r\n', '\n', 'abc\r\ndef\r\n'),
                         'abc\ndef\n')
        self.assertEqual(re.sub('\r\n', '\n', 'abc\r\ndef\r\n'),
                         'abc\ndef\n')

    def test_bug_1140(self):
        # re.sub(x, y, u'') should return u'', not '', and
        # re.sub(x, y, '') should return '', not u''.
        # Also:
        # re.sub(x, y, unicode(x)) should return unicode(y), and
        # re.sub(x, y, str(x)) should return
        #     str(y) if isinstance(y, str) else unicode(y).
        # NOTE(review): Python 2 only -- `unicode` does not exist in Python 3.
        for x in 'x', u'x':
            for y in 'y', u'y':
                z = re.sub(x, y, u'')
                self.assertEqual(z, u'')
                self.assertEqual(type(z), unicode)
                #
                z = re.sub(x, y, '')
                self.assertEqual(z, '')
                self.assertEqual(type(z), str)
                #
                z = re.sub(x, y, unicode(x))
                self.assertEqual(z, y)
                self.assertEqual(type(z), unicode)
                #
                z = re.sub(x, y, str(x))
                self.assertEqual(z, y)
                self.assertEqual(type(z), type(y))

    def test_bug_1661(self):
        # Verify that flags do not get silently ignored with compiled patterns
        pattern = re.compile('.')
        self.assertRaises(ValueError, re.match, pattern, 'A', re.I)
        self.assertRaises(ValueError, re.search, pattern, 'A', re.I)
        self.assertRaises(ValueError, re.findall, pattern, 'A', re.I)
        self.assertRaises(ValueError, re.compile, pattern, re.I)

    def test_bug_3629(self):
        # A regex that triggered a bug in the sre-code validator
        re.compile("(?P<quote>)(?(quote))")
    def test_sub_template_numeric_escape(self):
        # bug 776311 and friends
        # Octal escapes in replacement templates take at most three digits;
        # a leading zero or three digits means octal, otherwise it is a
        # group reference.
        self.assertEqual(re.sub('x', r'\0', 'x'), '\0')
        self.assertEqual(re.sub('x', r'\000', 'x'), '\000')
        self.assertEqual(re.sub('x', r'\001', 'x'), '\001')
        self.assertEqual(re.sub('x', r'\008', 'x'), '\0' + '8')
        self.assertEqual(re.sub('x', r'\009', 'x'), '\0' + '9')
        self.assertEqual(re.sub('x', r'\111', 'x'), '\111')
        self.assertEqual(re.sub('x', r'\117', 'x'), '\117')

        self.assertEqual(re.sub('x', r'\1111', 'x'), '\1111')
        self.assertEqual(re.sub('x', r'\1111', 'x'), '\111' + '1')

        self.assertEqual(re.sub('x', r'\00', 'x'), '\x00')
        self.assertEqual(re.sub('x', r'\07', 'x'), '\x07')
        self.assertEqual(re.sub('x', r'\08', 'x'), '\0' + '8')
        self.assertEqual(re.sub('x', r'\09', 'x'), '\0' + '9')
        self.assertEqual(re.sub('x', r'\0a', 'x'), '\0' + 'a')

        # NOTE(review): values > 0o377 truncate to 8 bits in this re version.
        self.assertEqual(re.sub('x', r'\400', 'x'), '\0')
        self.assertEqual(re.sub('x', r'\777', 'x'), '\377')

        # References to nonexistent groups raise re.error.
        self.assertRaises(re.error, re.sub, 'x', r'\1', 'x')
        self.assertRaises(re.error, re.sub, 'x', r'\8', 'x')
        self.assertRaises(re.error, re.sub, 'x', r'\9', 'x')
        self.assertRaises(re.error, re.sub, 'x', r'\11', 'x')
        self.assertRaises(re.error, re.sub, 'x', r'\18', 'x')
        self.assertRaises(re.error, re.sub, 'x', r'\1a', 'x')
        self.assertRaises(re.error, re.sub, 'x', r'\90', 'x')
        self.assertRaises(re.error, re.sub, 'x', r'\99', 'x')
        self.assertRaises(re.error, re.sub, 'x', r'\118', 'x') # r'\11' + '8'
        self.assertRaises(re.error, re.sub, 'x', r'\11a', 'x')
        self.assertRaises(re.error, re.sub, 'x', r'\181', 'x') # r'\18' + '1'
        self.assertRaises(re.error, re.sub, 'x', r'\800', 'x') # r'\80' + '0'

        # in python2.3 (etc), these loop endlessly in sre_parser.py
        self.assertEqual(re.sub('(((((((((((x)))))))))))', r'\11', 'x'), 'x')
        self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\118', 'xyz'),
                         'xz8')
        self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\11a', 'xyz'),
                         'xza')
    def test_qualified_re_sub(self):
        # The fourth (count) argument limits the number of substitutions.
        self.assertEqual(re.sub('a', 'b', 'aaaaa'), 'bbbbb')
        self.assertEqual(re.sub('a', 'b', 'aaaaa', 1), 'baaaa')

    def test_bug_114660(self):
        # Whitespace runs between non-space characters collapse to one space.
        self.assertEqual(re.sub(r'(\S)\s+(\S)', r'\1 \2', 'hello there'),
                         'hello there')

    def test_bug_462270(self):
        # Test for empty sub() behaviour, see SF bug #462270
        self.assertEqual(re.sub('x*', '-', 'abxd'), '-a-b-d-')
        self.assertEqual(re.sub('x+', '-', 'abxd'), 'ab-d')

    def test_symbolic_refs(self):
        # Malformed or unknown \g<...> references in the template must raise.
        self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a', 'xx')
        self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<', 'xx')
        self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g', 'xx')
        self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a a>', 'xx')
        self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<1a1>', 'xx')
        self.assertRaises(IndexError, re.sub, '(?P<a>x)', '\g<ab>', 'xx')
        self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\g<b>', 'xx')
        self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\\2', 'xx')
        self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<-1>', 'xx')

    def test_re_subn(self):
        # subn() returns (new_string, number_of_subs_made).
        self.assertEqual(re.subn("(?i)b+", "x", "bbbb BBBB"), ('x x', 2))
        self.assertEqual(re.subn("b+", "x", "bbbb BBBB"), ('x BBBB', 1))
        self.assertEqual(re.subn("b+", "x", "xyz"), ('xyz', 0))
        self.assertEqual(re.subn("b*", "x", "xyz"), ('xxxyxzx', 4))
        self.assertEqual(re.subn("b*", "x", "xyz", 2), ('xxxyz', 2))

    def test_re_split(self):
        # NOTE(review): expectations for zero-width matches follow pre-3.7
        # split semantics (empty matches do not split).
        self.assertEqual(re.split(":", ":a:b::c"), ['', 'a', 'b', '', 'c'])
        self.assertEqual(re.split(":*", ":a:b::c"), ['', 'a', 'b', 'c'])
        self.assertEqual(re.split("(:*)", ":a:b::c"),
                         ['', ':', 'a', ':', 'b', '::', 'c'])
        self.assertEqual(re.split("(?::*)", ":a:b::c"), ['', 'a', 'b', 'c'])
        self.assertEqual(re.split("(:)*", ":a:b::c"),
                         ['', ':', 'a', ':', 'b', ':', 'c'])
        self.assertEqual(re.split("([b:]+)", ":a:b::c"),
                         ['', ':', 'a', ':b::', 'c'])
        self.assertEqual(re.split("(b)|(:+)", ":a:b::c"),
                         ['', None, ':', 'a', None, ':', '', 'b', None, '',
                          None, '::', 'c'])
        self.assertEqual(re.split("(?:b)|(?::+)", ":a:b::c"),
                         ['', 'a', '', '', 'c'])

    def test_qualified_re_split(self):
        # The maxsplit argument limits the number of splits.
        self.assertEqual(re.split(":", ":a:b::c", 2), ['', 'a', 'b::c'])
        self.assertEqual(re.split(':', 'a:b:c:d', 2), ['a', 'b', 'c:d'])
        self.assertEqual(re.split("(:)", ":a:b::c", 2),
                         ['', ':', 'a', ':', 'b::c'])
        self.assertEqual(re.split("(:*)", ":a:b::c", 2),
                         ['', ':', 'a', ':', 'b::c'])

    def test_re_findall(self):
        self.assertEqual(re.findall(":+", "abc"), [])
        self.assertEqual(re.findall(":+", "a:b::c:::d"), [":", "::", ":::"])
        self.assertEqual(re.findall("(:+)", "a:b::c:::d"), [":", "::", ":::"])
        self.assertEqual(re.findall("(:)(:*)", "a:b::c:::d"), [(":", ""),
                                                               (":", ":"),
                                                               (":", "::")])

    def test_bug_117612(self):
        # Unmatched inner groups yield '' (not None) in findall tuples.
        self.assertEqual(re.findall(r"(a|(b))", "aba"),
                         [("a", ""),("b", "b"),("a", "")])
    def test_re_match(self):
        self.assertEqual(re.match('a', 'a').groups(), ())
        self.assertEqual(re.match('(a)', 'a').groups(), ('a',))
        self.assertEqual(re.match(r'(a)', 'a').group(0), 'a')
        self.assertEqual(re.match(r'(a)', 'a').group(1), 'a')
        self.assertEqual(re.match(r'(a)', 'a').group(1, 1), ('a', 'a'))

        pat = re.compile('((a)|(b))(c)?')
        self.assertEqual(pat.match('a').groups(), ('a', 'a', None, None))
        self.assertEqual(pat.match('b').groups(), ('b', None, 'b', None))
        self.assertEqual(pat.match('ac').groups(), ('a', 'a', None, 'c'))
        self.assertEqual(pat.match('bc').groups(), ('b', None, 'b', 'c'))
        # groups(default) substitutes `default` for unmatched groups.
        self.assertEqual(pat.match('bc').groups(""), ('b', "", 'b', 'c'))

        # A single group
        m = re.match('(a)', 'a')
        self.assertEqual(m.group(0), 'a')
        self.assertEqual(m.group(0), 'a')
        self.assertEqual(m.group(1), 'a')
        self.assertEqual(m.group(1, 1), ('a', 'a'))

        # group() accepts names and numbers interchangeably.
        pat = re.compile('(?:(?P<a1>a)|(?P<b2>b))(?P<c3>c)?')
        self.assertEqual(pat.match('a').group(1, 2, 3), ('a', None, None))
        self.assertEqual(pat.match('b').group('a1', 'b2', 'c3'),
                         (None, 'b', None))
        self.assertEqual(pat.match('ac').group(1, 'b2', 3), ('a', None, 'c'))

    def test_re_groupref_exists(self):
        # (?(group)yes|no) conditional patterns.
        self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a)').groups(),
                         ('(', 'a'))
        self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a').groups(),
                         (None, 'a'))
        self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a)'), None)
        self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a'), None)
        self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'ab').groups(),
                         ('a', 'b'))
        self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'cd').groups(),
                         (None, 'd'))
        self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'cd').groups(),
                         (None, 'd'))
        self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'a').groups(),
                         ('a', ''))

        # Tests for bug #1177831: exercise groups other than the first group
        p = re.compile('(?P<g1>a)(?P<g2>b)?((?(g2)c|d))')
        self.assertEqual(p.match('abc').groups(),
                         ('a', 'b', 'c'))
        self.assertEqual(p.match('ad').groups(),
                         ('a', None, 'd'))
        self.assertEqual(p.match('abd'), None)
        self.assertEqual(p.match('ac'), None)

    def test_re_groupref(self):
        # \1 backreferences inside the pattern itself.
        self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a|').groups(),
                         ('|', 'a'))
        self.assertEqual(re.match(r'^(\|)?([^()]+)\1?$', 'a').groups(),
                         (None, 'a'))
        self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', 'a|'), None)
        self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a'), None)
        self.assertEqual(re.match(r'^(?:(a)|c)(\1)$', 'aa').groups(),
                         ('a', 'a'))
        self.assertEqual(re.match(r'^(?:(a)|c)(\1)?$', 'c').groups(),
                         (None, None))

    def test_groupdict(self):
        # groupdict() maps group names to matched substrings.
        self.assertEqual(re.match('(?P<first>first) (?P<second>second)',
                                  'first second').groupdict(),
                         {'first':'first', 'second':'second'})

    def test_expand(self):
        # Match.expand() performs template substitution like sub() does.
        self.assertEqual(re.match("(?P<first>first) (?P<second>second)",
                                  "first second")
                                  .expand(r"\2 \1 \g<second> \g<first>"),
                         "second first second first")
    def test_repeat_minmax(self):
        self.assertEqual(re.match("^(\w){1}$", "abc"), None)
        self.assertEqual(re.match("^(\w){1}?$", "abc"), None)
        self.assertEqual(re.match("^(\w){1,2}$", "abc"), None)
        self.assertEqual(re.match("^(\w){1,2}?$", "abc"), None)

        # A repeated group captures the *last* repetition.
        self.assertEqual(re.match("^(\w){3}$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){1,3}$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){1,4}$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){3}?$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){1,3}?$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){1,4}?$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")

        self.assertEqual(re.match("^x{1}$", "xxx"), None)
        self.assertEqual(re.match("^x{1}?$", "xxx"), None)
        self.assertEqual(re.match("^x{1,2}$", "xxx"), None)
        self.assertEqual(re.match("^x{1,2}?$", "xxx"), None)

        self.assertNotEqual(re.match("^x{3}$", "xxx"), None)
        self.assertNotEqual(re.match("^x{1,3}$", "xxx"), None)
        self.assertNotEqual(re.match("^x{1,4}$", "xxx"), None)
        self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
        self.assertNotEqual(re.match("^x{3}?$", "xxx"), None)
        self.assertNotEqual(re.match("^x{1,3}?$", "xxx"), None)
        self.assertNotEqual(re.match("^x{1,4}?$", "xxx"), None)
        self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)

        # "{}" is not a repeat qualifier; it matches literally.
        self.assertEqual(re.match("^x{}$", "xxx"), None)
        self.assertNotEqual(re.match("^x{}$", "x{}"), None)

    def test_getattr(self):
        # Match-object attributes: pos, endpos, string, regs, re.
        self.assertEqual(re.match("(a)", "a").pos, 0)
        self.assertEqual(re.match("(a)", "a").endpos, 1)
        self.assertEqual(re.match("(a)", "a").string, "a")
        self.assertEqual(re.match("(a)", "a").regs, ((0, 1), (0, 1)))
        self.assertNotEqual(re.match("(a)", "a").re, None)

    def test_special_escapes(self):
        # \b/\B word boundaries, \A/\Z anchors, and the \d\D\w\W\s\S classes,
        # with and without the LOCALE and UNICODE flags.
        self.assertEqual(re.search(r"\b(b.)\b",
                                   "abcd abc bcd bx").group(1), "bx")
        self.assertEqual(re.search(r"\B(b.)\B",
                                   "abc bcd bc abxd").group(1), "bx")
        self.assertEqual(re.search(r"\b(b.)\b",
                                   "abcd abc bcd bx", re.LOCALE).group(1), "bx")
        self.assertEqual(re.search(r"\B(b.)\B",
                                   "abc bcd bc abxd", re.LOCALE).group(1), "bx")
        self.assertEqual(re.search(r"\b(b.)\b",
                                   "abcd abc bcd bx", re.UNICODE).group(1), "bx")
        self.assertEqual(re.search(r"\B(b.)\B",
                                   "abc bcd bc abxd", re.UNICODE).group(1), "bx")
        self.assertEqual(re.search(r"^abc$", "\nabc\n", re.M).group(0), "abc")
        self.assertEqual(re.search(r"^\Aabc\Z$", "abc", re.M).group(0), "abc")
        self.assertEqual(re.search(r"^\Aabc\Z$", "\nabc\n", re.M), None)
        self.assertEqual(re.search(r"\b(b.)\b",
                                   u"abcd abc bcd bx").group(1), "bx")
        self.assertEqual(re.search(r"\B(b.)\B",
                                   u"abc bcd bc abxd").group(1), "bx")
        self.assertEqual(re.search(r"^abc$", u"\nabc\n", re.M).group(0), "abc")
        self.assertEqual(re.search(r"^\Aabc\Z$", u"abc", re.M).group(0), "abc")
        self.assertEqual(re.search(r"^\Aabc\Z$", u"\nabc\n", re.M), None)
        self.assertEqual(re.search(r"\d\D\w\W\s\S",
                                   "1aa! a").group(0), "1aa! a")
        self.assertEqual(re.search(r"\d\D\w\W\s\S",
                                   "1aa! a", re.LOCALE).group(0), "1aa! a")
        self.assertEqual(re.search(r"\d\D\w\W\s\S",
                                   "1aa! a", re.UNICODE).group(0), "1aa! a")
    def test_bigcharset(self):
        # Character classes containing non-Latin-1 members exercise the
        # engine's bigcharset opcode.
        self.assertEqual(re.match(u"([\u2222\u2223])",
                                  u"\u2222").group(1), u"\u2222")
        self.assertEqual(re.match(u"([\u2222\u2223])",
                                  u"\u2222", re.UNICODE).group(1), u"\u2222")

    def test_anyall(self):
        # re.DOTALL makes "." match newlines too.
        self.assertEqual(re.match("a.b", "a\nb", re.DOTALL).group(0),
                         "a\nb")
        self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0),
                         "a\n\nb")

    def test_non_consuming(self):
        # Lookahead (?=...) and negative lookahead (?!...) consume no input.
        self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a")
        self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a")
        self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a")
        self.assertEqual(re.match("(a(?=\s[abc]*))", "a bc").group(1), "a")
        self.assertEqual(re.match(r"(a)(?=\s\1)", "a a").group(1), "a")
        self.assertEqual(re.match(r"(a)(?=\s\1*)", "a aa").group(1), "a")
        self.assertEqual(re.match(r"(a)(?=\s(abc|a))", "a a").group(1), "a")
        self.assertEqual(re.match(r"(a(?!\s[^a]))", "a a").group(1), "a")
        self.assertEqual(re.match(r"(a(?!\s[abc]))", "a d").group(1), "a")
        self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a")
        self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a")

    def test_ignore_case(self):
        self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
        self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC")
        self.assertEqual(re.match(r"(a\s[^a])", "a b", re.I).group(1), "a b")
        self.assertEqual(re.match(r"(a\s[^a]*)", "a bb", re.I).group(1), "a bb")
        self.assertEqual(re.match(r"(a\s[abc])", "a b", re.I).group(1), "a b")
        self.assertEqual(re.match(r"(a\s[abc]*)", "a bb", re.I).group(1), "a bb")
        self.assertEqual(re.match(r"((a)\s\2)", "a a", re.I).group(1), "a a")
        self.assertEqual(re.match(r"((a)\s\2*)", "a aa", re.I).group(1), "a aa")
        self.assertEqual(re.match(r"((a)\s(abc|a))", "a a", re.I).group(1), "a a")
        self.assertEqual(re.match(r"((a)\s(abc|a)*)", "a aa", re.I).group(1), "a aa")

    def test_category(self):
        self.assertEqual(re.match(r"(\s)", " ").group(1), " ")

    def test_getlower(self):
        # _sre.getlower is the engine's internal case-folding primitive.
        import _sre
        self.assertEqual(_sre.getlower(ord('A'), 0), ord('a'))
        self.assertEqual(_sre.getlower(ord('A'), re.LOCALE), ord('a'))
        self.assertEqual(_sre.getlower(ord('A'), re.UNICODE), ord('a'))

        self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
        self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC")

    def test_not_literal(self):
        self.assertEqual(re.search("\s([^a])", " b").group(1), "b")
        self.assertEqual(re.search("\s([^a]*)", " bb").group(1), "bb")

    def test_search_coverage(self):
        self.assertEqual(re.search("\s(b)", " b").group(1), "b")
        self.assertEqual(re.search("a\s", "a ").group(0), "a ")

    def test_re_escape(self):
        # re.escape must make every byte 0-255 match itself literally.
        p=""
        for i in range(0, 256):
            p = p + chr(i)
            self.assertEqual(re.match(re.escape(chr(i)), chr(i)) is not None,
                             True)
            self.assertEqual(re.match(re.escape(chr(i)), chr(i)).span(), (0,1))
        pat=re.compile(re.escape(p))
        self.assertEqual(pat.match(p) is not None, True)
        self.assertEqual(pat.match(p).span(), (0,256))

    def test_pickling(self):
        # Compiled patterns must survive pickling with both implementations.
        # NOTE(review): `cPickle` and the `sre` module are Python 2 only.
        import pickle
        self.pickle_test(pickle)
        import cPickle
        self.pickle_test(cPickle)
        # old pickles expect the _compile() reconstructor in sre module
        import warnings
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", "The sre module is deprecated",
                                    DeprecationWarning)
            from sre import _compile

    def pickle_test(self, pickle):
        # Helper for test_pickling: round-trip a pattern through `pickle`.
        oldpat = re.compile('a(?:b|(c|e){1,2}?|d)+?(.)')
        s = pickle.dumps(oldpat)
        newpat = pickle.loads(s)
        self.assertEqual(oldpat, newpat)

    def test_constants(self):
        # Short flag aliases must equal their long forms.
        self.assertEqual(re.I, re.IGNORECASE)
        self.assertEqual(re.L, re.LOCALE)
        self.assertEqual(re.M, re.MULTILINE)
        self.assertEqual(re.S, re.DOTALL)
        self.assertEqual(re.X, re.VERBOSE)

    def test_flags(self):
        for flag in [re.I, re.M, re.X, re.S, re.L]:
            self.assertNotEqual(re.compile('^pattern$', flag), None)
    def test_sre_character_literals(self):
        # Octal (\ooo) and hex (\xhh) escapes in patterns.
        for i in [0, 8, 16, 32, 64, 127, 128, 255]:
            self.assertNotEqual(re.match(r"\%03o" % i, chr(i)), None)
            self.assertNotEqual(re.match(r"\%03o0" % i, chr(i)+"0"), None)
            self.assertNotEqual(re.match(r"\%03o8" % i, chr(i)+"8"), None)
            self.assertNotEqual(re.match(r"\x%02x" % i, chr(i)), None)
            self.assertNotEqual(re.match(r"\x%02x0" % i, chr(i)+"0"), None)
            self.assertNotEqual(re.match(r"\x%02xz" % i, chr(i)+"z"), None)
        # \911 parses as group reference 91 followed by '1' -> invalid group.
        self.assertRaises(re.error, re.match, "\911", "")

    def test_sre_character_class_literals(self):
        # Same escapes, but inside [...] character classes.
        for i in [0, 8, 16, 32, 64, 127, 128, 255]:
            self.assertNotEqual(re.match(r"[\%03o]" % i, chr(i)), None)
            self.assertNotEqual(re.match(r"[\%03o0]" % i, chr(i)), None)
            self.assertNotEqual(re.match(r"[\%03o8]" % i, chr(i)), None)
            self.assertNotEqual(re.match(r"[\x%02x]" % i, chr(i)), None)
            self.assertNotEqual(re.match(r"[\x%02x0]" % i, chr(i)), None)
            self.assertNotEqual(re.match(r"[\x%02xz]" % i, chr(i)), None)
        self.assertRaises(re.error, re.match, "[\911]", "")

    def test_bug_113254(self):
        # Unmatched groups report -1 positions.
        self.assertEqual(re.match(r'(a)|(b)', 'b').start(1), -1)
        self.assertEqual(re.match(r'(a)|(b)', 'b').end(1), -1)
        self.assertEqual(re.match(r'(a)|(b)', 'b').span(1), (-1, -1))

    def test_bug_527371(self):
        # bug described in patches 527371/672491
        self.assertEqual(re.match(r'(a)?a','a').lastindex, None)
        self.assertEqual(re.match(r'(a)(b)?b','ab').lastindex, 1)
        self.assertEqual(re.match(r'(?P<a>a)(?P<b>b)?b','ab').lastgroup, 'a')
        self.assertEqual(re.match("(?P<a>a(b))", "ab").lastgroup, 'a')
        self.assertEqual(re.match("((a))", "a").lastindex, 1)

    def test_bug_545855(self):
        # bug 545855 -- This pattern failed to cause a compile error as it
        # should, instead provoking a TypeError.
        self.assertRaises(re.error, re.compile, 'foo[a-')

    def test_bug_418626(self):
        # bugs 418626 at al. -- Testing Greg Chapman's addition of op code
        # SRE_OP_MIN_REPEAT_ONE for eliminating recursion on simple uses of
        # pattern '*?' on a long string.
        self.assertEqual(re.match('.*?c', 10000*'ab'+'cd').end(0), 20001)
        self.assertEqual(re.match('.*?cd', 5000*'ab'+'c'+5000*'ab'+'cde').end(0),
                         20003)
        self.assertEqual(re.match('.*?cd', 20000*'abc'+'de').end(0), 60001)
        # non-simple '*?' still used to hit the recursion limit, before the
        # non-recursive scheme was implemented.
        self.assertEqual(re.search('(a|b)*?c', 10000*'ab'+'cd').end(0), 20001)

    def test_bug_612074(self):
        # re.escape on a non-ASCII character must produce a compilable class.
        pat=u"["+re.escape(u"\u2039")+u"]"
        self.assertEqual(re.compile(pat) and 1, 1)

    def test_stack_overflow(self):
        # nasty cases that used to overflow the straightforward recursive
        # implementation of repeated groups.
        self.assertEqual(re.match('(x)*', 50000*'x').group(1), 'x')
        self.assertEqual(re.match('(x)*y', 50000*'x'+'y').group(1), 'x')
        self.assertEqual(re.match('(x)*?y', 50000*'x'+'y').group(1), 'x')

    def test_scanner(self):
        # re.Scanner tokenizes input using (pattern, action) pairs.
        def s_ident(scanner, token): return token
        def s_operator(scanner, token): return "op%s" % token
        def s_float(scanner, token): return float(token)
        def s_int(scanner, token): return int(token)

        scanner = Scanner([
            (r"[a-zA-Z_]\w*", s_ident),
            (r"\d+\.\d*", s_float),
            (r"\d+", s_int),
            (r"=|\+|-|\*|/", s_operator),
            (r"\s+", None),
            ])

        self.assertNotEqual(scanner.scanner.scanner("").pattern, None)

        self.assertEqual(scanner.scan("sum = 3*foo + 312.50 + bar"),
                         (['sum', 'op=', 3, 'op*', 'foo', 'op+', 312.5,
                           'op+', 'bar'], ''))
# Regression test for bug #448951 (similar to #429357 but with a single-char
# match): optional groups around ':' must report (None, None) when the
# optional part is absent, for plain, '?' and '*' quantifiers alike.
def test_bug_448951(self):
# bug 448951 (similar to 429357, but with single char match)
# (Also test greedy matches.)
for op in '','?','*':
self.assertEqual(re.match(r'((.%s):)?z'%op, 'z').groups(),
(None, None))
self.assertEqual(re.match(r'((.%s):)?z'%op, 'a:z').groups(),
('a:', 'a'))
# Regression test for bug #725106: capturing groups inside alternatives that
# are themselves repeated must retain the value from the last iteration in
# which the group participated (greedy and non-greedy variants).
def test_bug_725106(self):
# capturing groups in alternatives in repeats
self.assertEqual(re.match('^((a)|b)*', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*', 'abc').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)|b)*?c', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*?d', 'abcd').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*?c', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*?c', 'abc').groups(),
('b', None))
# Regression test for bug #725149: the engine must restore mark_stack_base
# before restoring marks, so groups inside lookahead/lookbehind assertions
# report None when the assertion body did not contribute to the match.
def test_bug_725149(self):
# mark_stack_base restoring before restoring marks
self.assertEqual(re.match('(a)(?:(?=(b)*)c)*', 'abb').groups(),
('a', None))
self.assertEqual(re.match('(a)((?!(b)*))*', 'abb').groups(),
('a', None, None))
# Regression test for bug #764548: re.compile() must accept str/unicode
# subclasses, not just the exact built-in types. Skipped (early return) on
# interpreters built without unicode support.
def test_bug_764548(self):
# bug 764548, re.compile() barfs on str/unicode subclasses
try:
unicode
except NameError:
return # no problem if we have no unicode
class my_unicode(unicode): pass
pat = re.compile(my_unicode("abc"))
self.assertEqual(pat.match("xyz"), None)
# finditer() must yield non-overlapping matches left to right; here the three
# runs of ':' of increasing length.
def test_finditer(self):
iter = re.finditer(r":+", "a:b::c:::d")
self.assertEqual([item.group(0) for item in iter],
[":", "::", ":::"])
# Regression test for bug #926075: compiling the same source text as str and
# as unicode must yield two distinct pattern objects (separate cache entries).
# Skipped (early return) on interpreters built without unicode support.
def test_bug_926075(self):
try:
unicode
except NameError:
return # no problem if we have no unicode
self.assert_(re.compile('bug_926075') is not
re.compile(eval("u'bug_926075'")))
def test_bug_931848(self):
    # Regression test for bug #931848: splitting on a character class of
    # unicode full-stop variants (U+002E, U+3002, U+FF0E, U+FF61) must work.
    # The pattern is built via eval() so this module still parses on
    # interpreters built without unicode literals.
    try:
        unicode
    except NameError:
        # BUG FIX: this used to `pass` and fall through, so on a build
        # without unicode the eval() below would still blow up.  Return
        # instead, matching the other unicode-guarded tests in this class.
        return  # no problem if we have no unicode
    pattern = eval('u"[\u002E\u3002\uFF0E\uFF61]"')
    self.assertEqual(re.compile(pattern).split("a.b.c"),
                     ['a','b','c'])
# Regression test for bug #581080: finditer/scanner must advance past a
# single-character match and then stop cleanly (StopIteration / None).
def test_bug_581080(self):
iter = re.finditer(r"\s", "a b")
self.assertEqual(iter.next().span(), (1,2))
self.assertRaises(StopIteration, iter.next)
scanner = re.compile(r"\s").scanner("a b")
self.assertEqual(scanner.search().span(), (1, 2))
self.assertEqual(scanner.search(), None)
# Regression test for bug #817234: a pattern that can match empty (".*") must
# yield the full match and then one empty match at the end, then stop --
# not loop forever on the empty match.
def test_bug_817234(self):
iter = re.finditer(r".*", "asdf")
self.assertEqual(iter.next().span(), (0, 4))
self.assertEqual(iter.next().span(), (4, 4))
self.assertRaises(StopIteration, iter.next)
# Regression test for SF bug #1647541: matching against an empty array.array
# buffer (for every typecode) must behave like matching an empty string.
def test_empty_array(self):
# SF buf 1647541
import array
for typecode in 'cbBuhHiIlLfd':
a = array.array(typecode)
self.assertEqual(re.compile("bla").match(a), None)
self.assertEqual(re.compile("").match(a).groups(), ())
# Regression test for bug #1700: inline flags '(?i)'/'(?iu)' must apply to
# non-ASCII case folding exactly like the re.I | re.U flag arguments do.
def test_inline_flags(self):
# Bug #1700
upper_char = unichr(0x1ea0) # Latin Capital Letter A with Dot Bellow
lower_char = unichr(0x1ea1) # Latin Small Letter A with Dot Bellow
# Flags passed as compile() arguments.
p = re.compile(upper_char, re.I | re.U)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile(lower_char, re.I | re.U)
q = p.match(upper_char)
self.assertNotEqual(q, None)
# Inline '(?i)' combined with an explicit re.U argument.
p = re.compile('(?i)' + upper_char, re.U)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile('(?i)' + lower_char, re.U)
q = p.match(upper_char)
self.assertNotEqual(q, None)
# Both flags inline.
p = re.compile('(?iu)' + upper_char)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile('(?iu)' + lower_char)
q = p.match(upper_char)
self.assertNotEqual(q, None)
def test_dollar_matches_twice(self):
"$ matches the end of string, and just before the terminating \n"
# Without MULTILINE, '$' anchors only at the very end (and just before a
# final newline) -- so 'a\nb\n' gets two substitutions, not three.
pattern = re.compile('$')
self.assertEqual(pattern.sub('#', 'a\nb\n'), 'a\nb#\n#')
self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a\nb\nc#')
self.assertEqual(pattern.sub('#', '\n'), '#\n#')
# With MULTILINE, '$' also anchors before every interior newline.
pattern = re.compile('$', re.MULTILINE)
self.assertEqual(pattern.sub('#', 'a\nb\n' ), 'a#\nb#\n#' )
self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a#\nb#\nc#')
self.assertEqual(pattern.sub('#', '\n'), '#\n#')
# Data-driven test driver: runs every tuple from test.re_tests against the
# re module.  Each tuple is either (pattern, string, outcome) or
# (pattern, string, outcome, repl_expression, expected) where outcome is one
# of SUCCEED / FAIL / SYNTAX_ERROR.  Failures are reported by printing, not
# by raising, so one bad case does not abort the run.
def run_re_tests():
from test.re_tests import benchmarks, tests, SUCCEED, FAIL, SYNTAX_ERROR
if verbose:
print 'Running re_tests test suite'
else:
# To save time, only run the first and last 10 tests
#tests = tests[:10] + tests[-10:]
pass
for t in tests:
sys.stdout.flush()
pattern = s = outcome = repl = expected = None
if len(t) == 5:
pattern, s, outcome, repl, expected = t
elif len(t) == 3:
pattern, s, outcome = t
else:
raise ValueError, ('Test tuples should have 3 or 5 fields', t)
# Phase 1: compile the pattern; a compile failure is only OK when the
# test case expects SYNTAX_ERROR.
try:
obj = re.compile(pattern)
except re.error:
if outcome == SYNTAX_ERROR: pass # Expected a syntax error
else:
print '=== Syntax error:', t
except KeyboardInterrupt: raise KeyboardInterrupt
except:
print '*** Unexpected error ***', t
if verbose:
traceback.print_exc(file=sys.stdout)
else:
# Phase 2: the pattern compiled; run the search and check the
# outcome against the expectation.
try:
result = obj.search(s)
except re.error, msg:
print '=== Unexpected exception', t, repr(msg)
if outcome == SYNTAX_ERROR:
# This should have been a syntax error; forget it.
pass
elif outcome == FAIL:
if result is None: pass # No match, as expected
else: print '=== Succeeded incorrectly', t
elif outcome == SUCCEED:
if result is not None:
# Matched, as expected, so now we compute the
# result string and compare it to our expected result.
start, end = result.span(0)
vardict={'found': result.group(0),
'groups': result.group(),
'flags': result.re.flags}
# Expose numbered groups as g1..g99 for the repl expression;
# missing groups become the string "None"/"Error" so string
# concatenation in the repl expression cannot blow up.
for i in range(1, 100):
try:
gi = result.group(i)
# Special hack because else the string concat fails:
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict['g%d' % i] = gi
# Same for named groups, keyed by their names.
for i in result.re.groupindex.keys():
try:
gi = result.group(i)
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict[i] = gi
repl = eval(repl, vardict)
if repl != expected:
print '=== grouping error', t,
print repr(repl) + ' should be ' + repr(expected)
else:
print '=== Failed incorrectly', t
# Try the match on a unicode string, and check that it
# still succeeds.
try:
result = obj.search(unicode(s, "latin-1"))
if result is None:
print '=== Fails on unicode match', t
except NameError:
continue # 1.5.2
except TypeError:
continue # unicode test case
# Try the match on a unicode pattern, and check that it
# still succeeds.
obj=re.compile(unicode(pattern, "latin-1"))
result = obj.search(s)
if result is None:
print '=== Fails on unicode pattern match', t
# Try the match with the search area limited to the extent
# of the match and see if it still succeeds. \B will
# break (because it won't match at the end or start of a
# string), so we'll ignore patterns that feature it.
if pattern[:2] != '\\B' and pattern[-2:] != '\\B' \
and result is not None:
obj = re.compile(pattern)
result = obj.search(s, result.start(0), result.end(0) + 1)
if result is None:
print '=== Failed on range-limited match', t
# Try the match with IGNORECASE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.IGNORECASE)
result = obj.search(s)
if result is None:
print '=== Fails on case-insensitive match', t
# Try the match with LOCALE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.LOCALE)
result = obj.search(s)
if result is None:
print '=== Fails on locale-sensitive match', t
# Try the match with UNICODE locale enabled, and check
# that it still succeeds.
obj = re.compile(pattern, re.UNICODE)
result = obj.search(s)
if result is None:
print '=== Fails on unicode-sensitive match', t
# Entry point: run the unittest-based ReTests suite first, then the
# data-driven suite from test.re_tests.
def test_main():
run_unittest(ReTests)
run_re_tests()
if __name__ == "__main__":
test_main()
|
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This auth module is intended to allow OpenStack client-tools to select from a
variety of authentication strategies, including NoAuth (the default), and
Keystone (an identity management system).
> auth_plugin = AuthPlugin(creds)
> auth_plugin.authenticate()
> auth_plugin.auth_token
abcdefg
> auth_plugin.management_url
http://service_endpoint/
"""
import httplib2
from keystoneclient import service_catalog as ks_service_catalog
from oslo_log import log as logging
from oslo_serialization import jsonutils
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
import six.moves.urllib.parse as urlparse
from glance.common import exception
from glance.i18n import _
LOG = logging.getLogger(__name__)
class BaseStrategy(object):
    """Abstract base for authentication strategies.

    A concrete strategy fills in ``auth_token`` and (optionally)
    ``management_url`` as a side effect of a successful ``authenticate()``
    call; both start out as ``None``.
    """

    def __init__(self):
        # No token until a subclass's authenticate() succeeds.
        self.auth_token = None
        # TODO(sirp): Should expose selecting public/internal/admin URL.
        self.management_url = None

    def authenticate(self):
        """Perform the authentication handshake.  Subclasses must override."""
        raise NotImplementedError

    @property
    def is_authenticated(self):
        """True when a usable token is held.  Subclasses must override."""
        raise NotImplementedError

    @property
    def strategy(self):
        """Short name of this strategy (e.g. 'noauth').  Subclasses override."""
        raise NotImplementedError
class NoAuthStrategy(BaseStrategy):
    """Degenerate strategy that performs no authentication at all."""

    def authenticate(self):
        """No handshake needed; noauth is always considered authenticated."""
        return None

    @property
    def is_authenticated(self):
        # There is nothing that could fail, so we are always "authenticated".
        return True

    @property
    def strategy(self):
        return 'noauth'
# Strategy that authenticates against a Keystone identity service, speaking
# either the legacy header-based v1 protocol or the JSON-body v2.0 protocol,
# and following up to MAX_REDIRECTS redirections between endpoints.
class KeystoneStrategy(BaseStrategy):
# Upper bound on redirect-and-retry cycles in authenticate(); guards
# against a redirection loop between misconfigured endpoints.
MAX_REDIRECTS = 10
def __init__(self, creds, insecure=False, configure_via_auth=True):
# creds: dict with 'username', 'password', 'auth_url', 'strategy' and,
# for v2.0, 'tenant' (see check_auth_params).
# insecure: skip SSL certificate validation when True.
# configure_via_auth: when True, also record the service endpoint
# (management_url) from the auth response.
self.creds = creds
self.insecure = insecure
self.configure_via_auth = configure_via_auth
super(KeystoneStrategy, self).__init__()
def check_auth_params(self):
# Validate the credential dict up front so authenticate() can assume
# the required keys exist.  Raises MissingCredentialError or
# BadAuthStrategy on problems.
# Ensure that supplied credential parameters are as required
for required in ('username', 'password', 'auth_url',
'strategy'):
if self.creds.get(required) is None:
raise exception.MissingCredentialError(required=required)
if self.creds['strategy'] != 'keystone':
raise exception.BadAuthStrategy(expected='keystone',
received=self.creds['strategy'])
# For v2.0 also check tenant is present
if self.creds['auth_url'].rstrip('/').endswith('v2.0'):
if self.creds.get("tenant") is None:
raise exception.MissingCredentialError(required='tenant')
def authenticate(self):
"""Authenticate with the Keystone service.
There are a few scenarios to consider here:
1. Which version of Keystone are we using? v1 which uses headers to
pass the credentials, or v2 which uses a JSON encoded request body?
2. Keystone may respond back with a redirection using a 305 status
code.
3. We may attempt a v1 auth when v2 is what's called for. In this
case, we rewrite the url to contain /v2.0/ and retry using the v2
protocol.
"""
def _authenticate(auth_url):
# Dispatch one auth attempt to the protocol implied by the URL.
# If OS_AUTH_URL is missing a trailing slash add one
if not auth_url.endswith('/'):
auth_url += '/'
token_url = urlparse.urljoin(auth_url, "tokens")
# 1. Check Keystone version
is_v2 = auth_url.rstrip('/').endswith('v2.0')
if is_v2:
self._v2_auth(token_url)
else:
self._v1_auth(token_url)
self.check_auth_params()
auth_url = self.creds['auth_url']
# Retry loop: each iteration either succeeds (break), follows a
# redirect, or rewrites the URL to v2.0 and tries again.
for redirect_iter in range(self.MAX_REDIRECTS):
try:
_authenticate(auth_url)
except exception.AuthorizationRedirect as e:
# 2. Keystone may redirect us
auth_url = e.url
except exception.AuthorizationFailure:
# 3. In some configurations nova makes redirection to
# v2.0 keystone endpoint. Also, new location does not
# contain real endpoint, only hostname and port.
if 'v2.0' not in auth_url:
auth_url = urlparse.urljoin(auth_url, 'v2.0/')
else:
# If we successfully auth'd, then memorize the correct auth_url
# for future use.
self.creds['auth_url'] = auth_url
break
else:
# Guard against a redirection loop
raise exception.MaxRedirectsExceeded(redirects=self.MAX_REDIRECTS)
def _v1_auth(self, token_url):
# Legacy v1 protocol: credentials travel as X-Auth-* request headers;
# the token (and optionally the management URL) come back as response
# headers.
creds = self.creds
headers = {
'X-Auth-User': creds['username'],
'X-Auth-Key': creds['password']
}
tenant = creds.get('tenant')
if tenant:
headers['X-Auth-Tenant'] = tenant
resp, resp_body = self._do_request(token_url, 'GET', headers=headers)
def _management_url(self, resp):
# Pick the first management-URL response header that is present;
# if none is, re-raise the last KeyError.
for url_header in ('x-image-management-url',
'x-server-management-url',
'x-glance'):
try:
return resp[url_header]
except KeyError as e:
not_found = e
raise not_found
if resp.status in (200, 204):
try:
if self.configure_via_auth:
self.management_url = _management_url(self, resp)
self.auth_token = resp['x-auth-token']
except KeyError:
raise exception.AuthorizationFailure()
elif resp.status == 305:
raise exception.AuthorizationRedirect(uri=resp['location'])
elif resp.status == 400:
raise exception.AuthBadRequest(url=token_url)
elif resp.status == 401:
raise exception.NotAuthenticated()
elif resp.status == 404:
raise exception.AuthUrlNotFound(url=token_url)
else:
raise Exception(_('Unexpected response: %s') % resp.status)
def _v2_auth(self, token_url):
# v2.0 protocol: credentials are POSTed as a JSON body; the token and
# service catalog come back in the JSON response.
creds = self.creds
creds = {
"auth": {
"tenantName": creds['tenant'],
"passwordCredentials": {
"username": creds['username'],
"password": creds['password']
}
}
}
headers = {'Content-Type': 'application/json'}
req_body = jsonutils.dumps(creds)
resp, resp_body = self._do_request(
token_url, 'POST', headers=headers, body=req_body)
if resp.status == 200:
resp_auth = jsonutils.loads(resp_body)['access']
creds_region = self.creds.get('region')
if self.configure_via_auth:
# Resolve the image-service endpoint from the catalog.
endpoint = get_endpoint(resp_auth['serviceCatalog'],
endpoint_region=creds_region)
self.management_url = endpoint
self.auth_token = resp_auth['token']['id']
elif resp.status == 305:
# NOTE(review): v1 raises AuthorizationRedirect here while v2 raises
# RedirectException -- presumably intentional, but worth confirming.
raise exception.RedirectException(resp['location'])
elif resp.status == 400:
raise exception.AuthBadRequest(url=token_url)
elif resp.status == 401:
raise exception.NotAuthenticated()
elif resp.status == 404:
raise exception.AuthUrlNotFound(url=token_url)
else:
raise Exception(_('Unexpected response: %s') % resp.status)
@property
def is_authenticated(self):
# A token is only set after a successful _v1_auth/_v2_auth.
return self.auth_token is not None
@property
def strategy(self):
return 'keystone'
def _do_request(self, url, method, headers=None, body=None):
# Thin wrapper over httplib2: errors are mapped to status codes rather
# than raised, and SSL validation honours self.insecure.
headers = headers or {}
conn = httplib2.Http()
conn.force_exception_to_status_code = True
conn.disable_ssl_certificate_validation = self.insecure
headers['User-Agent'] = 'glance-client'
resp, resp_body = conn.request(url, method, headers=headers, body=body)
return resp, resp_body
def get_plugin_from_strategy(strategy, creds=None, insecure=False,
                             configure_via_auth=True):
    """Return an auth-plugin instance for the named strategy.

    :param strategy: 'noauth' or 'keystone'
    :param creds: credential dict, forwarded to KeystoneStrategy
    :param insecure: skip SSL certificate validation when True
    :param configure_via_auth: forwarded to KeystoneStrategy
    :raises Exception: for any unrecognized strategy name
    """
    if strategy == 'noauth':
        return NoAuthStrategy()
    if strategy == 'keystone':
        return KeystoneStrategy(creds, insecure,
                                configure_via_auth=configure_via_auth)
    raise Exception(_("Unknown auth strategy '%s'") % strategy)
def get_endpoint(service_catalog, service_type='image', endpoint_region=None,
                 endpoint_type='publicURL'):
    """
    Select an endpoint from the service catalog

    We search the full service catalog for services
    matching both type and region. If the client
    supplied no region then any 'image' endpoint
    is considered a match. There must be one -- and
    only one -- successful match in the catalog,
    otherwise we will raise an exception.

    :raises exception.NoServiceEndpoint: if no endpoint matches
    :raises exception.RegionAmbiguity: if more than one endpoint matches
    """
    endpoints = ks_service_catalog.ServiceCatalogV2(
        {'serviceCatalog': service_catalog}
    ).get_urls(service_type=service_type,
               region_name=endpoint_region,
               endpoint_type=endpoint_type)
    # BUG FIX: get_urls() may signal "nothing found" either as None or as an
    # empty sequence.  The old `if endpoints is None` check let an empty
    # sequence fall through to the RegionAmbiguity branch, reporting a bogus
    # ambiguity instead of a missing endpoint.  `not endpoints` covers both.
    if not endpoints:
        raise exception.NoServiceEndpoint()
    elif len(endpoints) == 1:
        return endpoints[0]
    else:
        raise exception.RegionAmbiguity(region=endpoint_region)
|
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 16 15:42:56 2015
@author: ibackus
"""
import warnings
import pynbody as pb
SimArray = pb.array.SimArray
import numpy as np
from diskpy.utils import match_units
from diskpy.pdmath import binned_mean
def Q(snapshot, molecular_mass = 2.0, bins=100, use_velocity=False, \
use_omega=True):
"""Calculates the Toomre Q as a function of r, assuming radial temperature
profile and kappa ~= omega
Parameters
----------
snapshot : tipsy snapshot
molecular_mass : float
Mean molecular mass (for sound speed). Default = 2.0
bins : int or array
Either the number of bins or the bin edges
use_velocity : Bool
Determines whether to use the particles' velocities to calculate orbital
velocity. Useful if the circular orbital velocities are set in the
snapshot.
use_omega : Bool
Default=True. Use omega as a proxy for kappa to reduce noise
Returns
-------
Q : array
Toomre Q as a function of r
r_edges : array
Radial bin edges
"""
# Physical constants (pynbody unit-aware SimArrays: Boltzmann constant
# and gravitational constant).
kB = SimArray([1.0],'k')
G = SimArray([1.0],'G')
# Calculate surface density
sig, r_edges = sigma(snapshot, bins)
# Calculate sound speed
m = match_units(molecular_mass,'m_p')[0]
c_s_all = np.sqrt(kB*snapshot.g['temp']/m)
# Bin/average sound speed
dummy, c_s, dummy2 = binned_mean(snapshot.g['rxy'], c_s_all, binedges=r_edges)
if use_omega:
# Calculate keplerian angular velocity (as a proxy for the epicyclic
# frequency, which is a noisy calculation)
if use_velocity:
# Calculate directly from particle's velocity
dummy, omega, dummy2 = binned_mean(snapshot.g['rxy'], \
snapshot.g['vt']/snapshot.g['rxy'], binedges=r_edges)
else:
# Estimate, from forces, using pynbody
p = pb.analysis.profile.Profile(snapshot, bins=r_edges)
omega = p['omega']
kappa_calc = omega
else:
if use_velocity:
# Calculate directly from particle's velocities
kappa_calc, dummy = kappa(snapshot, r_edges)
else:
# Estimate, from forces, using pynbody
p = pb.analysis.profile.Profile(snapshot, bins=r_edges)
kappa_calc = p['kappa']
# Toomre Q = kappa * c_s / (pi * G * Sigma); .in_units('1') asserts the
# result is dimensionless.
return (kappa_calc*c_s/(np.pi*G*sig)).in_units('1'), r_edges
def Qeff(snapshot, molecular_mass=2.0, bins=100, use_velocity=False,
         use_omega=True, alpha=0.18, beta=2.2):
    """Estimates the effective Toomre Q as a function of r, defined as:

    .. math:: Q_{eff} = \\beta Q (h/R)^{\\alpha}

    See Q and h for the estimates of Q and h

    Parameters
    ----------
    snapshot : tipsy snapshot
    molecular_mass : float
        Mean molecular mass (for sound speed). Default = 2.0
    bins : int or array
        Either the number of bins or the bin edges
    use_velocity : Bool
        Determines whether to use the particles' velocities to calculate
        orbital velocity. Useful if the circular orbital velocities are set
        in the snapshot.
    use_omega : Bool
        Default=True. Use omega as a proxy for kappa to reduce noise
    alpha : float
        Powerlaw for height dependence
    beta : float
        Normalization such that disks fragment for Qeff = 1

    Returns
    -------
    Qeff : array
        Effective Toomre Q as a function of r
    r_edges : array
        Radial bin edges
    """
    Qcalc, r_edges = Q(snapshot, molecular_mass, bins, use_velocity, use_omega)
    # Scale height binned on the same radial edges; positions are already
    # centered appropriately, so don't re-center on the star.
    dummy, h = height(snapshot, r_edges, center_on_star=False)
    # Bin centers, for the aspect ratio h/R.
    r = (r_edges[1:] + r_edges[0:-1]) / 2.
    # BUG FIX: the Q factor was previously dropped (Qcalc was computed but
    # never used), contradicting the documented formula
    # Qeff = beta * Q * (h/R)**alpha.
    Qeff = beta * Qcalc * ((h / r).in_units('1'))**alpha
    return Qeff, r_edges
def kappa(f, bins=100):
"""Estimate the epicyclic frequency from velocity
Parameters
----------
f : TipsySnap
`f` is a Simulation snapshot
bins : int or array-like
Either the number of bins to use or the bin edges
Returns
-------
kappa : SimArray
epicyclic frequency
r_edges : SimArray
binedges used
"""
# Require regular spacing of bins
if not isinstance(bins, int):
# [[i]] indexing keeps the result a length-1 (unit-carrying) array
# rather than a scalar.
dr = bins[[1]] - bins[[0]]
eps = np.finfo(bins.dtype).eps
# NOTE(review): this only checks spacing <= dr + tolerance, not >=, so
# shrinking bins would pass silently -- presumably good enough as a
# sanity warning; confirm if strict uniformity is required.
if not np.all(bins[1:] - bins[0:-1] <= dr + 1000*eps):
warnings.warn('Bins not uniformly spaced')
# Cylindrical radius and tangential velocity of the gas particles.
r = f.g['rxy']
v = f.g['vt']
# Bin-averaged v and r*v (specific angular momentum) on common edges.
r_edges, v_mean, dummy = binned_mean(r, v, bins=bins, ret_bin_edges=True)
dummy, rv_mean, dummy2 = binned_mean(r, r*v, bins=r_edges)
r_cent = (r_edges[1:] + r_edges[0:-1])/2
dr = r_edges[[1]] - r_edges[[0]]
# d(r*v)/dr via finite differences; relies on the uniform spacing
# checked above.
drv_dr = np.gradient(rv_mean, dr)
# Epicyclic frequency: kappa^2 = (2*v/r^2) * d(r*v)/dr
kappa = np.sqrt(2*v_mean*drv_dr)/r_cent
return kappa, r_edges
def height(snapshot, bins=100, center_on_star=True):
"""
Calculates the characteristic height (h) of a flared disk as a function
of cylindrical radius (r).
Parameters
----------
snapshot : TipsySnap
Simulation snapshot for a flared disk
bins : int or array_like
Specifies the bins to use. If int, specifies the number of bins. If
array_like, specifies the bin edges
center_on_star : bool
If true (DEFAULT), cylindrical r is calculated relative to the star
Returns
-------
r_edges : SimArray
Radial bin edges used for calculating h. Length N+1
h : SimArray
Height as a function of r, calculated as the RMS of z over a bin.
Length N
"""
# Center on star
# NOTE: this temporarily mutates snapshot['pos'] in place; the shift is
# undone before returning.
if center_on_star:
star_pos = snapshot.s['pos'].copy()
snapshot['pos'] -= star_pos
else:
# Zero shift with the right shape/units so the += below is a no-op.
star_pos = 0.0*snapshot.s['pos']
# Calculate height
r = snapshot.g['rxy']
z2 = snapshot.g['z']**2
# RMS of z per radial bin: h = sqrt(<z^2>)
r_edges, z2_mean, err = binned_mean(r, z2, bins=bins, ret_bin_edges=True)
h = np.sqrt(z2_mean)
# Add star_pos back to snapshot
snapshot['pos'] += star_pos
return r_edges, h
def sigma(snapshot, bins=100, cmFlag=True):
    """Calculates surface density vs r (relative to the center of mass)

    Parameters
    ----------
    snapshot : tipsy snapshot
    bins : int, list, array...
        (optional) Either the number of bins to use or the binedges to use
    cmFlag : bool
        (optional) Calculate relative to the center of mass

    Returns
    -------
    sigma : SimArray
        Surface density as a function of r
    r_bins : SimArray
        Radial bin edges
    """
    if cmFlag:
        # Begin by subtracting off the center of mass position.
        # BUG FIX: sum over the particle axis only (axis 0) so cm is a
        # 3-vector; a bare .sum() collapses the (N,3) product to a scalar
        # and subtracts the same number from x, y and z.
        cm = ((snapshot['mass'][:, None] * snapshot['pos']).sum(0)
              / snapshot['mass'].sum())
        snapshot['pos'] -= cm
    r = snapshot.g['rxy']
    # particle mass ([[0]] keeps a length-1 unit-carrying array; assumes all
    # gas particles share one mass)
    m_gas = snapshot.gas['mass'][[0]]
    # Count particles per radial annulus.
    N, r_bins = np.histogram(r, bins=bins)
    r_bins = match_units(r_bins, r.units)[0]
    r_center = (r_bins[1:] + r_bins[0:-1]) / 2
    dr = r_bins[[1]] - r_bins[[0]]
    # Surface density: annulus mass over annulus area (2*pi*r*dr).
    sig = N * m_gas / (2 * np.pi * r_center * dr)
    if cmFlag:
        # Add star position back to positions (undo the in-place shift).
        snapshot['pos'] += cm
    return sig, r_bins
|
|
from graphserver.core import Graph, TripBoard, HeadwayBoard, HeadwayAlight, Crossing, TripAlight, Timezone, Street, Link, ElapseTime
from optparse import OptionParser
from graphserver.graphdb import GraphDatabase
from graphserver.ext.gtfs.gtfsdb import GTFSDatabase, parse_gtfs_date
import sys
import pytz
from tools import service_calendar_from_timezone
import datetime
def cons(ary):
    """Yield consecutive overlapping pairs (ary[i], ary[i+1]) of a sequence.

    Sequences shorter than two elements yield nothing.
    """
    for left, right in zip(ary, ary[1:]):
        yield (left, right)
# Compiles a GTFS database into graphserver edges: scheduled trips (board /
# alight / crossing edges per trip-pattern stop), headway-based trips, and
# station transfers.  Edge tuples are yielded as (from_label, to_label, edge).
class GTFSGraphCompiler:
def __init__(self, gtfsdb, agency_namespace, agency_id=None, reporter=None):
# gtfsdb: GTFSDatabase to read trips/stop_times/frequencies/transfers.
# agency_namespace: prefix baked into generated vertex labels.
# reporter: optional file-like object for progress messages.
self.gtfsdb = gtfsdb
self.agency_namespace = agency_namespace
self.reporter = reporter
# get graphserver.core.Timezone and graphserver.core.ServiceCalendars from gtfsdb for agency with given agency_id
timezone_name = gtfsdb.agency_timezone_name(agency_id)
self.tz = Timezone.generate( timezone_name )
if reporter: reporter.write( "constructing service calendar for timezone '%s'\n"%timezone_name )
self.sc = service_calendar_from_timezone(gtfsdb, timezone_name )
def bundle_to_boardalight_edges(self, bundle, service_id):
"""takes a bundle and yields a bunch of edges"""
stop_time_bundles = bundle.stop_time_bundles(service_id)
n_trips = len(bundle.trip_ids)
# If there's less than two stations on this trip bundle, the trip bundle doesn't actually span two places
if len(stop_time_bundles)<2:
return
# If there are no stop_times in a bundle on this service day, there is nothing to load
if n_trips==0:
return
# NOTE(review): the "%d trips" figure below is len(stop_time_bundles[0])
# (stop_times at the first stop), not n_trips -- presumably equivalent
# for a well-formed bundle, but worth confirming.
if self.reporter: self.reporter.write( "inserting %d trips with %d stop_time bundles on service_id '%s'\n"%(len(stop_time_bundles[0]),len(stop_time_bundles),service_id) )
#add board edges
# For each pattern stop except the last: if the trip dwells there
# (arrival != departure), model the stop as an arrive-vertex plus a
# depart-vertex joined by a Crossing carrying per-trip dwell times;
# otherwise a single vertex suffices.  A TripBoard edge connects the
# station vertex ("sta-<stop_id>") to the boarding vertex.
for i, stop_time_bundle in enumerate(stop_time_bundles[:-1]):
trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled = stop_time_bundle[0]
if arrival_time != departure_time:
patternstop_vx_name = "psv-%s-%03d-%03d-%s-depart"%(self.agency_namespace,bundle.pattern.pattern_id,i,service_id)
# construct the board/alight/dwell triangle for this patternstop
patternstop_arrival_vx_name = "psv-%s-%03d-%03d-%s-arrive"%(self.agency_namespace,bundle.pattern.pattern_id,i,service_id)
dwell_crossing = Crossing()
for trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled in stop_time_bundle:
dwell_crossing.add_crossing_time( trip_id, departure_time-arrival_time )
yield (patternstop_arrival_vx_name,
patternstop_vx_name,
dwell_crossing)
else:
patternstop_vx_name = "psv-%s-%03d-%03d-%s"%(self.agency_namespace,bundle.pattern.pattern_id,i,service_id)
b = TripBoard(service_id, self.sc, self.tz, 0)
for trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled in stop_time_bundle:
b.add_boarding( trip_id, departure_time, stop_sequence )
yield ( "sta-%s"%stop_id, patternstop_vx_name, b )
#add alight edges
# For each pattern stop except the first: a TripAlight edge connects
# the (arrive-)vertex back to the station vertex.
for i, stop_time_bundle in enumerate(stop_time_bundles[1:]):
trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled = stop_time_bundle[0]
if arrival_time != departure_time:
patternstop_vx_name = "psv-%s-%03d-%03d-%s-arrive"%(self.agency_namespace,bundle.pattern.pattern_id,i+1,service_id)
else:
patternstop_vx_name = "psv-%s-%03d-%03d-%s"%(self.agency_namespace,bundle.pattern.pattern_id,i+1,service_id)
al = TripAlight(service_id, self.sc, self.tz, 0)
for trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled in stop_time_bundle:
# NOTE(review): alightings ascii-encode trip_id but boardings
# above do not -- presumably both accept either form; confirm.
al.add_alighting( trip_id.encode('ascii'), arrival_time, stop_sequence )
yield ( patternstop_vx_name, "sta-%s"%stop_id, al )
# add crossing edges
# Between consecutive pattern stops, a Crossing edge carries each
# trip's travel time from departure at stop i to arrival at stop i+1.
for i, (from_stop_time_bundle, to_stop_time_bundle) in enumerate(cons(stop_time_bundles)):
trip_id, from_arrival_time, from_departure_time, stop_id, stop_sequence, stop_dist_traveled = from_stop_time_bundle[0]
trip_id, to_arrival_time, to_departure_time, stop_id, stop_sequence, stop_dist_traveled = to_stop_time_bundle[0]
if from_arrival_time!=from_departure_time:
from_patternstop_vx_name = "psv-%s-%03d-%03d-%s-depart"%(self.agency_namespace,bundle.pattern.pattern_id,i,service_id)
else:
from_patternstop_vx_name = "psv-%s-%03d-%03d-%s"%(self.agency_namespace,bundle.pattern.pattern_id,i,service_id)
if to_arrival_time!=to_departure_time:
to_patternstop_vx_name = "psv-%s-%03d-%03d-%s-arrive"%(self.agency_namespace,bundle.pattern.pattern_id,i+1,service_id)
else:
to_patternstop_vx_name = "psv-%s-%03d-%03d-%s"%(self.agency_namespace,bundle.pattern.pattern_id,i+1,service_id)
crossing = Crossing()
for i in range( len( from_stop_time_bundle ) ):
trip_id, from_arrival_time, from_departure_time, stop_id, stop_sequence, stop_dist_traveled = from_stop_time_bundle[i]
trip_id, to_arrival_time, to_departure_time, stop_id, stop_sequence, stop_dist_traveled = to_stop_time_bundle[i]
crossing.add_crossing_time( trip_id, (to_arrival_time-from_departure_time) )
yield ( from_patternstop_vx_name,
to_patternstop_vx_name,
crossing )
def gtfsdb_to_scheduled_edges(self, maxtrips=None, service_ids=None):
# Yield all board/alight/crossing edges for every trip bundle and
# (optionally filtered) service_id combination.
# compile trip bundles from gtfsdb
if self.reporter: self.reporter.write( "Compiling trip bundles...\n" )
bundles = self.gtfsdb.compile_trip_bundles(maxtrips=maxtrips, reporter=self.reporter)
# load bundles to graph
if self.reporter: self.reporter.write( "Loading trip bundles into graph...\n" )
n_bundles = len(bundles)
for i, bundle in enumerate(bundles):
if self.reporter: self.reporter.write( "%d/%d loading %s\n"%(i+1, n_bundles, bundle) )
for service_id in [x.encode("ascii") for x in self.gtfsdb.service_ids()]:
if service_ids is not None and service_id not in service_ids:
continue
for fromv_label, tov_label, edge in self.bundle_to_boardalight_edges(bundle, service_id):
yield fromv_label, tov_label, edge
def gtfsdb_to_headway_edges( self, maxtrips=None ):
# Yield HeadwayBoard/HeadwayAlight/Crossing edges for frequency-based
# (headway) trips from the GTFS 'frequencies' table.
# NOTE(review): maxtrips is accepted but never used here -- presumably
# kept for signature symmetry with gtfsdb_to_scheduled_edges.
# load headways
if self.reporter: self.reporter.write( "Loading headways trips to graph...\n" )
for trip_id, start_time, end_time, headway_secs in self.gtfsdb.execute( "SELECT * FROM frequencies" ):
service_id = list(self.gtfsdb.execute( "SELECT service_id FROM trips WHERE trip_id=?", (trip_id,) ))[0][0]
service_id = service_id.encode('utf-8')
hb = HeadwayBoard( service_id, self.sc, self.tz, 0, trip_id.encode('utf-8'), start_time, end_time, headway_secs )
ha = HeadwayAlight( service_id, self.sc, self.tz, 0, trip_id.encode('utf-8'), start_time, end_time, headway_secs )
stoptimes = list(self.gtfsdb.execute( "SELECT * FROM stop_times WHERE trip_id=? ORDER BY stop_sequence", (trip_id,)) )
#add board edges
for trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled in stoptimes[:-1]:
yield ( "sta-%s"%stop_id, "hwv-%s-%s-%s"%(self.agency_namespace,stop_id, trip_id), hb )
#add alight edges
for trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled in stoptimes[1:]:
yield ( "hwv-%s-%s-%s"%(self.agency_namespace,stop_id, trip_id), "sta-%s"%stop_id, ha )
#add crossing edges
for (trip_id1, arrival_time1, departure_time1, stop_id1, stop_sequence1, stop_dist_traveled1), (trip_id2, arrival_time2, departure_time2, stop_id2, stop_sequence2,stop_dist_traveled2) in cons(stoptimes):
cr = Crossing()
cr.add_crossing_time( trip_id1, (arrival_time2-departure_time1) )
yield ( "hwv-%s-%s-%s"%(self.agency_namespace,stop_id1, trip_id1), "hwv-%s-%s-%s"%(self.agency_namespace,stop_id2, trip_id2), cr )
def gtfsdb_to_transfer_edges( self ):
# Yield bidirectional Link/ElapseTime edges for the GTFS 'transfers'
# table, deduplicating symmetric pairs.
# load transfers
if self.reporter: self.reporter.write( "Loading transfers to graph...\n" )
# keep track to avoid redundancies
# this assumes that transfer relationships are bi-directional.
# TODO this implementation is also incomplete - it's theoretically possible that
# a transfers.txt table could contain "A,A,3,", which would mean you can't transfer
# at A.
seen = set([])
for stop_id1, stop_id2, conn_type, min_transfer_time in self.gtfsdb.execute( "SELECT * FROM transfers" ):
s1 = "sta-%s"%stop_id1
s2 = "sta-%s"%stop_id2
# TODO - what is the semantics of this? see note above
if s1 == s2:
continue
# Canonical key so (A,B) and (B,A) are treated as one relationship.
key = ".".join(sorted([s1,s2]))
if key not in seen:
seen.add(key)
else:
continue
assert conn_type == None or type(conn_type) == int
if conn_type in (0, None): # This is a recommended transfer point between two routes
if min_transfer_time in ("", None):
yield (s1, s2, Link())
yield (s2, s1, Link())
else:
yield (s1, s2, ElapseTime(int(min_transfer_time)))
yield (s2, s1, ElapseTime(int(min_transfer_time)))
elif conn_type == 1: # This is a timed transfer point between two routes
yield (s1, s2, Link())
yield (s2, s1, Link())
elif conn_type == 2: # This transfer requires a minimum amount of time
yield (s1, s2, ElapseTime(int(min_transfer_time)))
yield (s2, s1, ElapseTime(int(min_transfer_time)))
elif conn_type == 3: # Transfers are not possible between routes at this location.
print "WARNING: Support for no-transfer (transfers.txt transfer_type=3) not implemented."
def gtfsdb_to_edges( self, maxtrips=None, service_ids=None ):
# Convenience generator chaining scheduled, headway, and transfer edges.
for edge_tuple in self.gtfsdb_to_scheduled_edges(maxtrips, service_ids=service_ids):
yield edge_tuple
for edge_tuple in self.gtfsdb_to_headway_edges(maxtrips):
yield edge_tuple
for edge_tuple in self.gtfsdb_to_transfer_edges():
yield edge_tuple
# Compile a GTFS database into edges and persist them into a GraphDatabase,
# creating each vertex exactly once.  When sample_date (YYYYMMDD) is given,
# only service periods operating on that date are imported.
def gdb_load_gtfsdb(gdb, agency_namespace, gtfsdb, cursor, agency_id=None, maxtrips=None, sample_date=None, reporter=sys.stdout):
# determine which service periods run on the given day, if a day is given
if sample_date is not None:
sample_date = datetime.date( *parse_gtfs_date( sample_date ) )
acceptable_service_ids = gtfsdb.service_periods( sample_date )
print "Importing only service periods operating on %s: %s"%(sample_date, acceptable_service_ids)
else:
# None means "no filtering": every service_id is loaded.
acceptable_service_ids = None
compiler = GTFSGraphCompiler( gtfsdb, agency_namespace, agency_id, reporter )
# v_added tracks vertex labels already inserted, so each is added once.
v_added = set([])
for fromv_label, tov_label, edge in compiler.gtfsdb_to_edges( maxtrips, service_ids=acceptable_service_ids ):
if fromv_label not in v_added:
gdb.add_vertex( fromv_label, cursor )
v_added.add(fromv_label)
if tov_label not in v_added:
gdb.add_vertex( tov_label, cursor )
v_added.add(tov_label)
gdb.add_edge( fromv_label, tov_label, edge, cursor )
def graph_load_gtfsdb( agency_namespace, gtfsdb, agency_id=None, maxtrips=None, reporter=sys.stdout ):
    """Compile a GTFS database into an in-memory graphserver Graph.

    Unlike gdb_load_gtfsdb, nothing is persisted; vertices may be added more
    than once (Graph.add_vertex tolerates duplicates).
    """
    compiler = GTFSGraphCompiler(gtfsdb, agency_namespace, agency_id, reporter)
    graph = Graph()
    for from_label, to_label, payload in compiler.gtfsdb_to_edges(maxtrips):
        graph.add_vertex(from_label)
        graph.add_vertex(to_label)
        graph.add_edge(from_label, to_label, payload)
    return graph
def main():
usage = """usage: python gdb_import_gtfs.py [options] <graphdb_filename> <gtfsdb_filename> [<agency_id>]"""
parser = OptionParser(usage=usage)
parser.add_option("-n", "--namespace", dest="namespace", default="0",
help="agency namespace")
parser.add_option("-m", "--maxtrips", dest="maxtrips", default=None, help="maximum number of trips to load")
parser.add_option("-d", "--date", dest="sample_date", default=None, help="only load transit running on a given day. YYYYMMDD" )
(options, args) = parser.parse_args()
if len(args) != 2:
parser.print_help()
exit(-1)
graphdb_filename = args[0]
gtfsdb_filename = args[1]
agency_id = args[2] if len(args)==3 else None
print "importing from gtfsdb '%s' into graphdb '%s'"%(gtfsdb_filename, graphdb_filename)
gtfsdb = GTFSDatabase( gtfsdb_filename )
gdb = GraphDatabase( graphdb_filename, overwrite=False )
maxtrips = int(options.maxtrips) if options.maxtrips else None
gdb_load_gtfsdb( gdb, options.namespace, gtfsdb, gdb.get_cursor(), agency_id, maxtrips=maxtrips, sample_date=options.sample_date)
gdb.commit()
print "done"
# Script entry point: run the importer when executed directly.
if __name__ == '__main__':
    main()
|
|
import base64
import datetime
import decimal
try:
import simplejson as json
except ImportError:
import json # noqa
import wsme.tests.protocol
from wsme.utils import parse_isodatetime, parse_isodate, parse_isotime
from wsme.types import isarray, isdict, isusertype
import six
if six.PY3:
from urllib.parse import urlencode
else:
from urllib import urlencode # noqa
def encode_arg(value):
    """Encode a call argument into JSON-compatible primitives.

    ``value`` may be either a bare value or a ``(value, datatype)`` tuple; in
    the bare case the datatype is inferred from the value's Python type.
    Containers are encoded recursively; date/time, binary, bytes and Decimal
    values are converted to their wire (string) representation. Anything else
    is returned unchanged.
    """
    if isinstance(value, tuple):
        value, datatype = value
    else:
        datatype = type(value)
    if isinstance(datatype, list):
        # A one-element list datatype means "array of that element type".
        value = [encode_arg((item, datatype[0])) for item in value]
    elif isinstance(datatype, dict):
        # A one-entry dict datatype means "mapping of key type to value type".
        key_type, value_type = list(datatype.items())[0]
        value = dict((
            (encode_arg((key, key_type)),
             encode_arg((value, value_type)))
            for key, value in value.items()
        ))
    elif datatype in (datetime.date, datetime.time, datetime.datetime):
        value = value.isoformat()
    elif datatype == wsme.types.binary:
        # NOTE(review): base64.encodestring was removed in Python 3.9;
        # base64.encodebytes is the modern equivalent — confirm which
        # runtimes this test-suite still has to support before changing.
        value = base64.encodestring(value).decode('ascii')
    elif datatype == wsme.types.bytes:
        value = value.decode('ascii')
    elif datatype == decimal.Decimal:
        value = str(value)
    return value
def decode_result(value, datatype):
    """Decode a JSON-level result into the Python value ``datatype`` describes.

    Containers are decoded recursively; date/time, binary, bytes and Decimal
    values are parsed from their wire (string) form. ``None`` always passes
    through unchanged.
    """
    if value is None:
        return None
    if datatype == wsme.types.binary:
        # NOTE(review): base64.decodestring was removed in Python 3.9
        # (base64.decodebytes is the replacement) — confirm supported runtimes.
        value = base64.decodestring(value.encode('ascii'))
        return value
    if isusertype(datatype):
        # User types are decoded through their underlying base type.
        datatype = datatype.basetype
    if isinstance(datatype, list):
        # A one-element list datatype means "array of that element type".
        value = [decode_result(item, datatype[0]) for item in value]
    elif isarray(datatype):
        value = [decode_result(item, datatype.item_type) for item in value]
    elif isinstance(datatype, dict):
        # A one-entry dict datatype means "mapping of key type to value type".
        key_type, value_type = list(datatype.items())[0]
        value = dict((
            (decode_result(key, key_type),
             decode_result(value, value_type))
            for key, value in value.items()
        ))
    elif isdict(datatype):
        key_type, value_type = datatype.key_type, datatype.value_type
        value = dict((
            (decode_result(key, key_type),
             decode_result(value, value_type))
            for key, value in value.items()
        ))
    elif datatype == datetime.time:
        value = parse_isotime(value)
    elif datatype == datetime.date:
        value = parse_isodate(value)
    elif datatype == datetime.datetime:
        value = parse_isodatetime(value)
    elif hasattr(datatype, '_wsme_attributes'):
        # Structured (complex) type: decode each declared attribute in place.
        for attr in datatype._wsme_attributes:
            if attr.key not in value:
                continue
            value[attr.key] = decode_result(value[attr.key], attr.datatype)
    elif datatype == decimal.Decimal:
        value = decimal.Decimal(value)
    elif datatype == wsme.types.bytes:
        value = value.encode('ascii')
    elif datatype is not None and type(value) != datatype:
        # Fall back to coercing through the datatype's constructor.
        value = datatype(value)
    return value
class TestExtDirectProtocol(wsme.tests.protocol.ProtocolTestCase):
    """Runs the shared WSME protocol test-suite against the ExtDirect protocol."""

    protocol = 'extdirect'
    protocol_options = {
        'namespace': 'MyNS.api',
        'nsfolder': 'app'
    }

    def call(self, fname, _rt=None, _no_result_decode=False, _accept=None,
             **kw):
        """Invoke ``fname`` through the ExtDirect router and decode the result.

        Args:
            fname: slash-separated path of the exposed function.
            _rt: expected return datatype, used by decode_result.
            _no_result_decode: if True, return the raw response object.
            _accept: optional value for the Accept header.
            kw: arguments for the remote call; each value may be a bare value
                or a ``(value, datatype)`` tuple (see encode_arg).
        """
        path = fname.split('/')
        try:
            func, funcdef, args = self.root._lookup_function(path)
            arguments = funcdef.arguments
        except Exception:
            # Unknown function: send the call anyway with no arguments so the
            # server-side error path is exercised.
            arguments = []
        # Split the path into ExtDirect namespace / action / method parts.
        if len(path) == 1:
            ns, action, fname = '', '', path[0]
        elif len(path) == 2:
            ns, action, fname = '', path[0], path[1]
        else:
            ns, action, fname = '.'.join(path[:-2]), path[-2], path[-1]
        print(kw)
        # Named-parameter notation: a single dict of encoded arguments.
        args = [
            dict(
                (arg.name, encode_arg(kw[arg.name]))
                for arg in arguments if arg.name in kw
            )
        ]
        print("args =", args)
        data = json.dumps({
            'type': 'rpc',
            'tid': 0,
            'action': action,
            'method': fname,
            'data': args,
        })
        print(data)
        headers = {'Content-Type': 'application/json'}
        if _accept:
            headers['Accept'] = _accept
        res = self.app.post('/extdirect/router/%s' % ns, data, headers=headers,
                            expect_errors=True)
        print(res.body)
        if _no_result_decode:
            return res
        data = json.loads(res.text)
        if data['type'] == 'rpc':
            r = data['result']
            return decode_result(r, _rt)
        elif data['type'] == 'exception':
            # Server-side failures come back as "faultcode: faultstring".
            faultcode, faultstring = data['message'].split(': ', 1)
            debuginfo = data.get('where')
            raise wsme.tests.protocol.CallException(
                faultcode, faultstring, debuginfo)

    def test_api_alias(self):
        """The generated api.js is exposed under the configured nsfolder."""
        assert self.root._get_protocol('extdirect').api_alias == '/app/api.js'

    def test_get_api(self):
        """GET on the api.js descriptor returns a non-empty body."""
        res = self.app.get('/app/api.js')
        print(res.body)
        assert res.body

    def test_positional(self):
        """Calls using positional (array) parameter notation."""
        self.root._get_protocol('extdirect').default_params_notation = \
            'positional'
        data = json.dumps({
            'type': 'rpc',
            'tid': 0,
            'action': 'misc',
            'method': 'multiply',
            'data': [2, 5],
        })
        headers = {'Content-Type': 'application/json'}
        res = self.app.post('/extdirect/router', data, headers=headers)
        print(res.body)
        data = json.loads(res.text)
        assert data['type'] == 'rpc'
        r = data['result']
        assert r == 10

    def test_batchcall(self):
        """A JSON array of requests yields one response per request, by tid."""
        data = json.dumps([{
            'type': 'rpc',
            'tid': 1,
            'action': 'argtypes',
            'method': 'setdate',
            'data': [{'value': '2011-04-06'}],
        }, {
            'type': 'rpc',
            'tid': 2,
            'action': 'returntypes',
            'method': 'getbytes',
            'data': []
        }])
        print(data)
        headers = {'Content-Type': 'application/json'}
        res = self.app.post('/extdirect/router', data, headers=headers)
        print(res.body)
        rdata = json.loads(res.text)
        assert len(rdata) == 2
        assert rdata[0]['tid'] == 1
        assert rdata[0]['result'] == '2011-04-06'
        assert rdata[1]['tid'] == 2
        assert rdata[1]['result'] == 'astring'

    def test_form_call(self):
        """Form-encoded submits (ext* fields) are routed like JSON calls."""
        params = {
            'value[0].inner.aint': 54,
            'value[1].inner.aint': 55,
            'extType': 'rpc',
            'extTID': 1,
            'extAction': 'argtypes',
            'extMethod': 'setnestedarray',
        }
        body = urlencode(params)
        r = self.app.post(
            '/extdirect/router',
            body,
            headers={'Content-Type': 'application/x-www-form-urlencoded'}
        )
        print(r)
        assert json.loads(r.text) == {
            "tid": "1",
            "action": "argtypes",
            "type": "rpc",
            "method": "setnestedarray",
            "result": [{
                "inner": {
                    "aint": 54
                }
            }, {
                "inner": {
                    "aint": 55
                }
            }]
        }
|
|
""" Wrappers for result objects and iterators """
from abc import ABC, abstractmethod, abstractproperty
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
ItemsView,
Iterable,
Iterator,
KeysView,
List,
Optional,
Tuple,
Union,
ValuesView,
overload,
)
from .constants import MAX_GET_BATCH, ReturnCapacityType
from .types import (
Dynamizer,
DynamoObject,
EncodedDynamoObject,
ExpressionAttributeNamesType,
)
if TYPE_CHECKING:
from .connection import DynamoDBConnection
def add_dicts(d1, d2):
    """Merge two dicts of addable values"""
    # None acts as the identity element on either side.
    if d1 is None:
        return d2
    if d2 is None:
        return d1
    merged = {}
    for key in set(d1) | set(d2):
        left, right = d1.get(key), d2.get(key)
        if left is None:
            merged[key] = right
        elif right is None:
            merged[key] = left
        else:
            merged[key] = left + right
    return merged
class Count(int):
    """Wrapper for response to query with Select=COUNT"""

    # Number of matched items (also the integer value of this object).
    count: int
    # Number of items examined by the query/scan.
    scanned_count: int
    # Capacity consumed by the request, when reported.
    consumed_capacity: Optional["Capacity"]

    def __new__(
        cls,
        count: int,
        scanned_count: int,
        consumed_capacity: Optional["Capacity"] = None,
    ) -> "Count":
        instance = super().__new__(cls, count)
        instance.count = count
        instance.scanned_count = scanned_count
        instance.consumed_capacity = consumed_capacity
        return instance

    @classmethod
    def from_response(cls, response: Dict[str, Any]) -> "Count":
        """Factory method"""
        matched = response["Count"]
        examined = response["ScannedCount"]
        capacity = response.get("consumed_capacity")
        return cls(matched, examined, capacity)

    def __add__(self, other):
        if other is None:
            return self
        if not isinstance(other, Count):
            # Plain numbers add against the matched count only.
            return self.count + other
        if self.consumed_capacity is None:
            merged_capacity = other.consumed_capacity
        else:
            merged_capacity = self.consumed_capacity + other.consumed_capacity
        return Count(
            self.count + other.count,
            self.scanned_count + other.scanned_count,
            merged_capacity,
        )

    def __radd__(self, other):
        return self + other

    def __repr__(self):
        return "Count(%d)" % self
class Capacity(object):
    """Wrapper for the read/write capacity of a table or index"""

    def __init__(self, read: float, write: float):
        self._read = read
        self._write = write

    @property
    def read(self) -> float:
        """The read capacity"""
        return self._read

    @property
    def write(self) -> float:
        """The write capacity"""
        return self._write

    @classmethod
    def from_response(
        cls, response: Dict[str, Any], is_read: Optional[bool]
    ) -> "Capacity":
        """Build a Capacity from a Dynamo response fragment.

        When the fragment only reports an aggregate ``CapacityUnits``,
        ``is_read`` decides whether it counts as read or write capacity.
        """
        read = response.get("ReadCapacityUnits")
        if read is None:
            read = response["CapacityUnits"] if is_read else 0
        write = response.get("WriteCapacityUnits")
        if write is None:
            write = 0 if is_read else response["CapacityUnits"]
        return cls(read, write)

    def __getitem__(self, key):
        # Allow dict-style access: cap["read"], cap["write"].
        return getattr(self, key)

    def __contains__(self, key):
        return key in ["read", "write"]

    def __hash__(self):
        # BUG FIX: the original returned self._read + self._write, which is a
        # float for fractional capacities — __hash__ must return an int, and
        # hash() raises TypeError otherwise. Hash the value pair instead.
        return hash((self._read, self._write))

    def __eq__(self, other):
        # Capacities also compare equal to (read, write) tuples.
        if isinstance(other, tuple):
            return self.read == other[0] and self.write == other[1]
        return self.read == getattr(other, "read", None) and self.write == getattr(
            other, "write", None
        )

    def __ne__(self, other):
        return not self.__eq__(other)

    def __add__(self, other):
        if isinstance(other, tuple):
            return Capacity(self.read + other[0], self.write + other[1])
        return Capacity(self.read + other.read, self.write + other.write)

    def __radd__(self, other):
        return self.__add__(other)

    def __str__(self):
        pieces = []
        if self.read:
            pieces.append("R:{0:.1f}".format(self.read))
        if self.write:
            pieces.append("W:{0:.1f}".format(self.write))
        if not pieces:
            return "0"
        return " ".join(pieces)
class ConsumedCapacity(object):
    """Record of the consumed capacity of a request"""

    def __init__(
        self,
        tablename: str,
        total: Capacity,
        table_capacity: Optional[Capacity] = None,
        local_index_capacity: Optional[Dict[str, Capacity]] = None,
        global_index_capacity: Optional[Dict[str, Capacity]] = None,
    ):
        # Name of the table the capacity was consumed against.
        self.tablename = tablename
        # Aggregate capacity across the table and all of its indexes.
        self.total = total
        # Per-component breakdowns; None when Dynamo did not report them.
        self.table_capacity = table_capacity
        self.local_index_capacity = local_index_capacity
        self.global_index_capacity = global_index_capacity

    @classmethod
    def build_indexes(
        cls, response: Dict[str, Dict[str, Any]], key: str, is_read: Optional[bool]
    ) -> Optional[Dict[str, Capacity]]:
        """Construct index capacity map from a request fragment"""
        if key not in response:
            return None
        indexes = {}
        # NOTE(review): the loop variable shadows the ``key`` parameter; safe
        # here because the items() view is created before the first rebind and
        # the parameter is not used again.
        for key, val in response[key].items():
            indexes[key] = Capacity.from_response(val, is_read)
        return indexes

    @classmethod
    def from_response(
        cls, response: Dict[str, Any], is_read: Optional[bool] = None
    ) -> "ConsumedCapacity":
        """Factory method for ConsumedCapacity from a response object"""
        kwargs = {
            "tablename": response["TableName"],
            "total": Capacity.from_response(response, is_read),
        }
        local = cls.build_indexes(response, "LocalSecondaryIndexes", is_read)
        kwargs["local_index_capacity"] = local
        gindex = cls.build_indexes(response, "GlobalSecondaryIndexes", is_read)
        kwargs["global_index_capacity"] = gindex
        if "Table" in response:
            kwargs["table_capacity"] = Capacity.from_response(
                response["Table"], is_read
            )
        return cls(**kwargs)

    def __hash__(self):
        # Relies on Capacity being hashable.
        return hash(self.tablename) + hash(self.total)

    def __eq__(self, other):
        # Field-by-field comparison; getattr default makes non-ConsumedCapacity
        # objects compare unequal rather than raising.
        properties = [
            "tablename",
            "total",
            "table_capacity",
            "local_index_capacity",
            "global_index_capacity",
        ]
        for prop in properties:
            if getattr(self, prop) != getattr(other, prop, None):
                return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    def __radd__(self, other):
        # Supports sum() and ``None + capacity`` by delegating to __add__.
        return self.__add__(other)

    def __add__(self, other):
        # Handle identity cases when added to empty values
        if other is None:
            return self
        if self.tablename != other.tablename:
            raise TypeError("Cannot add capacities from different tables")
        kwargs = {
            "total": self.total + other.total,
        }
        if self.table_capacity is not None:
            # NOTE(review): assumes other.table_capacity is also set whenever
            # ours is — confirm both operands come from the same request shape.
            kwargs["table_capacity"] = self.table_capacity + other.table_capacity
        kwargs["local_index_capacity"] = add_dicts(
            self.local_index_capacity, other.local_index_capacity
        )
        kwargs["global_index_capacity"] = add_dicts(
            self.global_index_capacity, other.global_index_capacity
        )
        return ConsumedCapacity(self.tablename, **kwargs)

    def __str__(self):
        lines = []
        # Truthiness check skips None breakdowns; Capacity instances define no
        # __bool__/__len__, so non-None values are always included.
        if self.table_capacity:
            lines.append("Table: %s" % self.table_capacity)
        if self.local_index_capacity:
            for name, cap in self.local_index_capacity.items():
                lines.append("Local index '%s': %s" % (name, cap))
        if self.global_index_capacity:
            for name, cap in self.global_index_capacity.items():
                lines.append("Global index '%s': %s" % (name, cap))
        lines.append("Total: %s" % self.total)
        return "\n".join(lines)
class PagedIterator(ABC):
    """An iterator that iterates over paged results from Dynamo"""

    def __init__(self):
        # Iterator over the current page; created lazily on first __next__.
        self.iterator = None

    # IMPROVEMENT: abc.abstractproperty has been deprecated since Python 3.3;
    # the documented replacement is stacking @property over @abstractmethod.
    @property
    @abstractmethod
    def can_fetch_more(self) -> bool:  # pragma: no cover
        """Return True if more results can be fetched from the server"""
        raise NotImplementedError

    @abstractmethod
    def _fetch(self) -> Iterator:  # pragma: no cover
        """Fetch additional results from the server and return an iterator"""
        raise NotImplementedError

    def __iter__(self):
        return self

    def __next__(self):
        if self.iterator is None:
            self.iterator = self._fetch()
        while True:
            try:
                return next(self.iterator)
            except StopIteration:
                # Current page exhausted: pull the next page, or give up.
                if self.can_fetch_more:
                    self.iterator = self._fetch()
                else:
                    raise
class ResultSet(PagedIterator):
    """Iterator that pages results from Dynamo"""

    def __init__(
        self,
        connection: "DynamoDBConnection",
        limit: "Limit",
        *args: Any,
        **kwargs: Any
    ):
        super(ResultSet, self).__init__()
        self.connection = connection
        # The limit will be mutated, so copy it and leave the original intact
        self.limit = limit.copy()
        # args/kwargs are replayed on every page fetch; kwargs is mutated
        # between fetches to carry the pagination token.
        self.args = args
        self.kwargs = kwargs
        self.last_evaluated_key: Optional[dict] = None
        self.consumed_capacity: Optional[ConsumedCapacity] = None

    @property
    def can_fetch_more(self) -> bool:
        """True if there are more results on the server"""
        return self.last_evaluated_key is not None and not self.limit.complete

    def _fetch(self) -> Iterator:
        """Fetch more results from Dynamo"""
        self.limit.set_request_args(self.kwargs)
        data = self.connection.call(*self.args, **self.kwargs)
        self.limit.post_fetch(data)
        self.last_evaluated_key = data.get("LastEvaluatedKey")
        if self.last_evaluated_key is None:
            self.kwargs.pop("ExclusiveStartKey", None)
        else:
            # Resume the next page where this one left off.
            self.kwargs["ExclusiveStartKey"] = self.last_evaluated_key
        if "consumed_capacity" in data:
            # None += ConsumedCapacity works via ConsumedCapacity.__radd__.
            self.consumed_capacity += data["consumed_capacity"]
        for raw_item in data["Items"]:
            item = self.connection.dynamizer.decode_keys(raw_item)
            # The limit filter decides which decoded items are yielded.
            if self.limit.accept(item):
                yield item

    def __next__(self) -> DynamoObject:  # pylint: disable=W0235
        return super().__next__()
class GetResultSet(PagedIterator):
    """Iterator that pages the results of a BatchGetItem"""

    def __init__(
        self,
        connection: "DynamoDBConnection",
        keymap: Dict[str, Iterable[DynamoObject]],
        consistent: bool = False,
        attributes: Optional[str] = None,
        alias: Optional[ExpressionAttributeNamesType] = None,
        return_capacity: Optional[ReturnCapacityType] = None,
    ):
        super(GetResultSet, self).__init__()
        self.connection = connection
        # Per-table iterators of not-yet-requested keys.
        self.keymap: Dict[str, Iterator[DynamoObject]] = {
            t: iter(keys) for t, keys in keymap.items()
        }
        self.consistent = consistent
        self.attributes = attributes
        self.alias = alias
        self.return_capacity = return_capacity
        # Encoded keys queued for the next batch request (including keys the
        # service returned as unprocessed).
        self._pending_keys: Dict[str, List[EncodedDynamoObject]] = {}
        # Consecutive-throttle counter driving exponential backoff.
        self._attempt = 0
        self.consumed_capacity: Optional[Dict[str, ConsumedCapacity]] = None
        self._cached_dict: Optional[Dict[str, List[DynamoObject]]] = None
        # Guards against mixing iteration with asdict().
        self._started_iterator = False

    @property
    def can_fetch_more(self) -> bool:
        """True while any keys remain unrequested or unprocessed."""
        return bool(self.keymap) or bool(self._pending_keys)

    def build_kwargs(self):
        """Construct the kwargs to pass to batch_get_item"""
        num_pending = sum([len(v) for v in self._pending_keys.values()])
        if num_pending < MAX_GET_BATCH:
            # Top up the pending set from the key iterators, per table, until
            # the batch-size cap is reached.
            tablenames_to_remove = []
            for tablename, key_iter in self.keymap.items():
                for key in key_iter:
                    pending_keys = self._pending_keys.setdefault(tablename, [])
                    pending_keys.append(self.connection.dynamizer.encode_keys(key))
                    num_pending += 1
                    if num_pending == MAX_GET_BATCH:
                        break
                else:
                    # for-else: iterator exhausted without hitting the cap, so
                    # this table has no more keys to queue.
                    tablenames_to_remove.append(tablename)
                if num_pending == MAX_GET_BATCH:
                    break
            for tablename in tablenames_to_remove:
                self.keymap.pop(tablename, None)
        if not self._pending_keys:
            # Nothing left to request.
            return None
        request_items = {}
        for tablename, keys in self._pending_keys.items():
            query: Dict[str, Any] = {"ConsistentRead": self.consistent}
            if self.attributes:
                query["ProjectionExpression"] = self.attributes
            if self.alias:
                query["ExpressionAttributeNames"] = self.alias
            query["Keys"] = keys
            request_items[tablename] = query
        # Clear pending keys; unprocessed ones are re-queued after the call.
        self._pending_keys = {}
        return {
            "RequestItems": request_items,
            "ReturnConsumedCapacity": self.return_capacity,
        }

    def _fetch(self) -> Iterator:
        """Fetch a set of items from their keys"""
        kwargs = self.build_kwargs()
        if kwargs is None:
            return iter([])
        data = self.connection.call("batch_get_item", **kwargs)
        if "UnprocessedKeys" in data:
            # Re-queue anything the service did not process this round.
            for tablename, items in data["UnprocessedKeys"].items():
                keys = self._pending_keys.setdefault(tablename, [])
                keys.extend(items["Keys"])
            # Getting UnprocessedKeys indicates that we are exceeding our
            # throughput. So sleep for a bit.
            self._attempt += 1
            self.connection.exponential_sleep(self._attempt)
        else:
            # No UnprocessedKeys means our request rate is fine, so we can
            # reset the attempt number.
            self._attempt = 0
        if "consumed_capacity" in data:
            # Merge per-table capacities into the running totals
            # (cap + None works via ConsumedCapacity.__add__).
            self.consumed_capacity = self.consumed_capacity or {}
            for cap in data["consumed_capacity"]:
                self.consumed_capacity[
                    cap.tablename
                ] = cap + self.consumed_capacity.get(cap.tablename)
        for tablename, items in data["Responses"].items():
            for item in items:
                yield tablename, item

    def __getitem__(self, key: str) -> List[DynamoObject]:
        """Dict-style access to the fetched items for one table."""
        return self.asdict()[key]

    def items(self) -> ItemsView[str, List[DynamoObject]]:
        return self.asdict().items()

    def keys(self) -> KeysView[str]:
        return self.asdict().keys()

    def values(self) -> ValuesView[List[DynamoObject]]:
        return self.asdict().values()

    def __next__(self) -> Tuple[str, DynamoObject]:
        # Mark iteration as started so asdict() can refuse to mix modes.
        self._started_iterator = True
        tablename, result = super().__next__()
        return tablename, self.connection.dynamizer.decode_keys(result)

    def asdict(self) -> Dict[str, List[DynamoObject]]:
        """Fetch everything and return the items grouped by table name."""
        if self._cached_dict is None:
            if self._started_iterator:
                raise ValueError(
                    "Cannot use asdict if also using GetResultSet as an iterator"
                )
            self._cached_dict = {}
            for tablename, item in self:
                items = self._cached_dict.setdefault(tablename, [])
                items.append(item)
        return self._cached_dict
class SingleTableGetResultSet(object):
    """Adapter over a GetResultSet that yields bare items for one table."""

    def __init__(self, result_set: GetResultSet):
        self.result_set = result_set

    @property
    def consumed_capacity(self) -> Optional[ConsumedCapacity]:
        """Getter for consumed_capacity"""
        capacity_by_table = self.result_set.consumed_capacity
        if capacity_by_table is None:
            return None
        # Single-table usage: the map holds exactly one entry of interest.
        return next(iter(capacity_by_table.values()))

    def __iter__(self):
        return self

    def __next__(self) -> DynamoObject:
        _, item = next(self.result_set)
        return item
class TableResultSet(PagedIterator):
    """Iterator that pages table names from ListTables"""

    def __init__(self, connection: "DynamoDBConnection", limit: Optional[int] = None):
        super().__init__()
        self.connection = connection
        # Remaining number of names to fetch; None means unbounded.
        self.limit = limit
        self.last_evaluated_table_name: Optional[str] = None

    @property
    def can_fetch_more(self) -> bool:
        # No continuation token means the listing is complete.
        if self.last_evaluated_table_name is None:
            return False
        return self.limit is None or self.limit > 0

    def _fetch(self) -> Iterator:
        # The API caps each page at 100 names.
        kwargs: Dict[str, Any] = {
            "Limit": 100 if self.limit is None else min(self.limit, 100)
        }
        if self.last_evaluated_table_name is not None:
            kwargs["ExclusiveStartTableName"] = self.last_evaluated_table_name
        response = self.connection.call("list_tables", **kwargs)
        self.last_evaluated_table_name = response.get("LastEvaluatedTableName")
        names = response["TableNames"]
        if self.limit is not None:
            self.limit -= len(names)
        return iter(names)

    def __next__(self) -> str:
        return super().__next__()
class Result(dict):
    """
    A wrapper for an item returned from Dynamo

    Attributes
    ----------
    consumed_capacity : :class:`~dynamo3.result.ConsumedCapacity`, optional
        Consumed capacity on the table
    exists : bool
        False if the result is empty (i.e. no result was returned from dynamo)

    """

    def __init__(self, dynamizer: Dynamizer, response: Dict[str, Any], item_key: str):
        super().__init__()
        # An absent item key means Dynamo returned no item at all.
        self.exists = item_key in response
        raw_item = response.get(item_key, {})
        for field_name, encoded_value in raw_item.items():
            self[field_name] = dynamizer.decode(encoded_value)
        self.consumed_capacity: Optional[ConsumedCapacity] = response.get(
            "consumed_capacity"
        )

    def __repr__(self):
        return "Result({0})".format(super().__repr__())
class Limit(object):
    """
    Query/scan limit configuration.

    Parameters
    ----------
    scan_limit : int, optional
        Maximum number of items for DynamoDB to scan — not necessarily the
        number of items returned.
    item_limit : int, optional
        Maximum number of items to return; fetching continues until it is
        reached or results run out. See also ``strict``.
    min_scan_limit : int, optional
        Floor for the request ``Limit`` when only ``item_limit`` is set, so a
        dwindling item_limit does not cause many tiny fetches. (default 20)
    strict : bool, optional
        If True, never yield more than ``item_limit`` items; if False, finish
        returning the current page once the limit is hit. (default False)
    filter : callable, optional
        Predicate on an item dict; only items that pass are counted towards
        ``item_limit`` and yielded.
    """

    def __init__(
        self,
        scan_limit: Optional[int] = None,
        item_limit: Optional[int] = None,
        min_scan_limit: int = 20,
        strict: bool = False,
        filter: Callable[[DynamoObject], bool] = lambda x: True,
    ):
        self.scan_limit = scan_limit
        # With no explicit item limit, mirror the scan limit (possibly None).
        self.item_limit = scan_limit if item_limit is None else item_limit
        self.min_scan_limit = min_scan_limit
        self.strict = strict
        self.filter = filter

    def copy(self) -> "Limit":
        """Return a copy of the limit"""
        return Limit(
            scan_limit=self.scan_limit,
            item_limit=self.item_limit,
            min_scan_limit=self.min_scan_limit,
            strict=self.strict,
            filter=self.filter,
        )

    def set_request_args(self, args: Dict[str, Any]) -> None:
        """Set (or clear) the Limit parameter in the request args"""
        request_limit = self.scan_limit
        if request_limit is None and self.item_limit is not None:
            request_limit = max(self.item_limit, self.min_scan_limit)
        if request_limit is None:
            args.pop("Limit", None)
        else:
            args["Limit"] = request_limit

    @property
    def complete(self) -> bool:
        """Return True if the limit has been reached"""
        # None never equals 0, so unset limits are never "reached".
        return self.scan_limit == 0 or self.item_limit == 0

    def post_fetch(self, response: Dict[str, Any]) -> None:
        """Called after a fetch. Updates the ScannedCount"""
        if self.scan_limit is not None:
            self.scan_limit -= response["ScannedCount"]

    def accept(self, item: DynamoObject) -> bool:
        """Apply the filter and item_limit, and return True to accept"""
        keep = self.filter(item)
        if keep and self.item_limit is not None:
            if self.item_limit > 0:
                self.item_limit -= 1
            elif self.strict:
                # Limit exhausted: strict mode drops the rest of the page.
                return False
        return keep
class TransactionGet(object):
    """Lazily-evaluated transactional batch get (one transact_get_items call)."""

    def __init__(
        self,
        connection: "DynamoDBConnection",
        return_capacity: Optional[ReturnCapacityType] = None,
    ):
        self._connection = connection
        self._return_capacity = return_capacity
        # Memoized result of the single transact_get_items call.
        self._cached_list: Optional[List[DynamoObject]] = None
        self.consumed_capacity: Optional[Dict[str, ConsumedCapacity]] = None
        # Queued requests: (tablename, key, projection attributes, name alias).
        self._items: List[
            Tuple[
                str,
                DynamoObject,
                Optional[Union[str, Iterable[str]]],
                Optional[ExpressionAttributeNamesType],
            ]
        ] = []

    def add_key(
        self,
        tablename: str,
        key: DynamoObject,
        attributes: Optional[Union[str, Iterable[str]]] = None,
        alias: Optional[ExpressionAttributeNamesType] = None,
    ) -> None:
        """Queue one item key to be fetched in the transaction."""
        self._items.append((tablename, key, attributes, alias))

    def __iter__(self):
        return iter(self.aslist())

    @overload
    def __getitem__(self, index: int) -> DynamoObject:
        ...

    @overload
    def __getitem__(self, index: slice) -> List[DynamoObject]:
        ...

    def __getitem__(
        self, index: Union[int, slice]
    ) -> Union[DynamoObject, List[DynamoObject]]:
        return self.aslist()[index]

    def __len__(self):
        return len(self.aslist())

    def _fetch(self) -> List[DynamoObject]:
        """Perform the transact_get_items call and decode the responses."""
        # Short-circuit: already fetched, or nothing was queued.
        if self._cached_list is not None or not self._items:
            return self._cached_list or []
        transact_items = []
        for (tablename, key, attributes, alias) in self._items:
            item = {
                "TableName": tablename,
                "Key": self._connection.dynamizer.encode_keys(key),
            }
            if attributes:
                # Accept either a pre-built expression string or an iterable
                # of attribute names.
                if not isinstance(attributes, str):
                    attributes = ", ".join(attributes)
                item["ProjectionExpression"] = attributes
            if alias is not None:
                item["ExpressionAttributeNames"] = alias
            transact_items.append({"Get": item})
        kwargs: Dict[str, Any] = {"TransactItems": transact_items}
        if self._return_capacity is not None:
            kwargs["ReturnConsumedCapacity"] = self._return_capacity
        response = self._connection.call("transact_get_items", **kwargs)
        if "consumed_capacity" in response:
            # Merge per-table capacities into the running totals
            # (cap + None works via ConsumedCapacity.__add__).
            self.consumed_capacity = self.consumed_capacity or {}
            for cap in response["consumed_capacity"]:
                self.consumed_capacity[
                    cap.tablename
                ] = cap + self.consumed_capacity.get(cap.tablename)
        decoded = []
        for response_item in response["Responses"]:
            decoded.append(
                self._connection.dynamizer.decode_keys(response_item["Item"])
            )
        return decoded

    def aslist(self) -> List[DynamoObject]:
        """Return all fetched items, performing the fetch on first use."""
        if self._cached_list is None:
            self._cached_list = self._fetch()
        return self._cached_list
|
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A module that handles deferred execution of callables via the task queue.
Tasks consist of a callable and arguments to pass to it. The callable and its
arguments are serialized and put on the task queue, which deserializes and
executes them. The following callables can be used as tasks:
1) Functions defined in the top level of a module
2) Classes defined in the top level of a module
3) Instances of classes in (2) that implement __call__
4) Instance methods of objects of classes in (2)
5) Class methods of classes in (2)
6) Built-in functions
7) Built-in methods
The following callables can NOT be used as tasks:
1) Nested functions or closures
2) Nested classes or objects of them
3) Lambda functions
4) Static methods
The arguments to the callable, and the object (in the case of method or object
calls) must all be pickleable.
If you want your tasks to execute reliably, don't use mutable global variables;
they are not serialized with the task and may not be the same when your task
executes as they were when it was enqueued (in fact, they will almost certainly
be different).
If your app relies on manipulating the import path, make sure that the function
you are deferring is defined in a module that can be found without import path
manipulation. Alternately, you can include deferred.TaskHandler in your own
webapp application instead of using the easy-install method detailed below.
When you create a deferred task using deferred.defer, the task is serialized,
and an attempt is made to add it directly to the task queue. If the task is too
big (larger than about 10 kilobytes when serialized), a datastore entry will be
created for the task, and a new task will be enqueued, which will fetch the
original task from the datastore and execute it. This is much less efficient
than the direct execution model, so it's a good idea to minimize the size of
your tasks when possible.
In order for tasks to be processed, you need to set up the handler. Add the
following to your app.yaml handlers section:
handlers:
- url: /_ah/queue/deferred
script: $PYTHON_LIB/google/appengine/ext/deferred/handler.py
login: admin
By default, the deferred module uses the URL above, and the default queue.
Example usage:
def do_something_later(key, amount):
entity = MyModel.get(key)
entity.total += amount
entity.put()
# Use default URL and queue name, no task name, execute ASAP.
deferred.defer(do_something_later, my_key, 20)
# Providing non-default task queue arguments
deferred.defer(do_something_later, my_key, 20, _queue="foo", _countdown=60)
"""
import logging
import os
import pickle
import types
if os.environ.get("APPENGINE_RUNTIME") == "python27":
from google.appengine.api import taskqueue
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
else:
from google.appengine.api import taskqueue
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
_DEFAULT_LOG_LEVEL = logging.INFO
_TASKQUEUE_HEADERS = {"Content-Type": "application/octet-stream"}
_DEFAULT_URL = "/_ah/queue/deferred"
_DEFAULT_QUEUE = "default"
class Error(Exception):
    """Base class for exceptions in this module."""
    # Root of the deferred exception hierarchy: catching Error covers both
    # PermanentTaskFailure and SingularTaskFailure.
class PermanentTaskFailure(Error):
    """Indicates that a task failed, and will never succeed."""
    # Raised by run() when the payload cannot be unpickled and by
    # run_from_datastore() when the stored task entity is missing.
class SingularTaskFailure(Error):
    """Indicates that a task failed once."""
    # NOTE(review): by contrast with PermanentTaskFailure this appears to mark
    # a transient failure; the handler's retry behavior is not visible here.
def set_log_level(log_level):
    """Sets the log level deferred will log to in normal circumstances.

    Args:
        log_level: one of logging log levels, e.g. logging.DEBUG, logging.INFO, etc.
    """
    # Overwrites the module-level default read when logging task execution.
    global _DEFAULT_LOG_LEVEL
    _DEFAULT_LOG_LEVEL = log_level
def run(data):
    """Unpickles and executes a task.

    Args:
        data: A pickled tuple of (function, args, kwargs) to execute.

    Returns:
        The return value of the function invocation.
    """
    try:
        func, args, kwds = pickle.loads(data)
    except Exception, e:
        # Any unpickling failure is unrecoverable — retrying the same payload
        # would fail identically — so mark the task permanently failed.
        raise PermanentTaskFailure(e)
    else:
        # Only unpickling is guarded: exceptions raised by the task callable
        # itself propagate to the caller unchanged.
        return func(*args, **kwds)
class _DeferredTaskEntity(db.Model):
    """Datastore representation of a deferred task.

    This is used in cases when the deferred task is too big to be included as
    payload with the task queue entry.
    """
    # Pickled (callable, args, kwargs) payload, consumed by run().
    data = db.BlobProperty(required=True)
def run_from_datastore(key):
    """Retrieves a task from the datastore and executes it.

    Args:
        key: The datastore key of a _DeferredTaskEntity storing the task.

    Returns:
        The return value of the function invocation.
    """
    entity = _DeferredTaskEntity.get(key)
    if not entity:
        # The stored task is gone; it can never be executed.
        raise PermanentTaskFailure()
    try:
        ret = run(entity.data)
        entity.delete()
        # BUG FIX: the docstring promises the invocation's return value, but
        # the original function fell off the end and returned None.
        return ret
    except PermanentTaskFailure:
        # Clean up the stored payload before propagating the permanent failure.
        entity.delete()
        raise
def invoke_member(obj, membername, *args, **kwargs):
    """Looks up a member on an object, then calls it with the given arguments.

    Args:
        obj: The object to operate on.
        membername: The name of the member to retrieve from obj.
        args: Positional arguments to pass to the method.
        kwargs: Keyword arguments to pass to the method.

    Returns:
        The return value of the method invocation.
    """
    member = getattr(obj, membername)
    return member(*args, **kwargs)
def _curry_callable(obj, *args, **kwargs):
  """Takes a callable and arguments and returns a task queue tuple.

  The returned tuple consists of (callable, args, kwargs), and can be pickled
  and unpickled safely.

  Args:
    obj: The callable to curry. See the module docstring for restrictions.
    args: Positional arguments to call the callable with.
    kwargs: Keyword arguments to call the callable with.

  Returns:
    A tuple consisting of (callable, args, kwargs) that can be evaluated by
    run() with equivalent effect of executing the function directly.

  Raises:
    ValueError: If the passed in object is not of a valid callable type.
  """
  # NOTE: this relies on Python 2-only constructs (im_self/im_func,
  # types.ClassType, types.UnboundMethodType, types.ObjectType) and cannot
  # run unchanged on Python 3.
  if isinstance(obj, types.MethodType):
    # Bound methods are not picklable; store the instance and method name
    # and re-resolve them via invoke_member at execution time.
    return (invoke_member, (obj.im_self, obj.im_func.__name__) + args, kwargs)
  elif isinstance(obj, types.BuiltinMethodType):
    if not obj.__self__:
      # Unbound built-in methods pickle like plain functions.
      return (obj, args, kwargs)
    else:
      return (invoke_member, (obj.__self__, obj.__name__) + args, kwargs)
  elif isinstance(obj, types.ObjectType) and hasattr(obj, "__call__"):
    # Arbitrary instances that define __call__.
    return (obj, args, kwargs)
  elif isinstance(obj, (types.FunctionType, types.BuiltinFunctionType,
                        types.ClassType, types.UnboundMethodType)):
    return (obj, args, kwargs)
  else:
    raise ValueError("obj must be callable")
def serialize(obj, *args, **kwargs):
  """Serializes a callable into a format recognized by the deferred executor.

  Args:
    obj: The callable to serialize. See module docstring for restrictions.
    args: Positional arguments to call the callable with.
    kwargs: Keyword arguments to call the callable with.

  Returns:
    A serialized representation of the callable.
  """
  task_tuple = _curry_callable(obj, *args, **kwargs)
  return pickle.dumps(task_tuple, protocol=pickle.HIGHEST_PROTOCOL)
def defer(obj, *args, **kwargs):
  """Defers a callable for execution later.

  The default deferred URL of /_ah/queue/deferred will be used unless an
  alternate URL is explicitly specified. If you want to use the default URL for
  a queue, specify _url=None. If you specify a different URL, you will need to
  install the handler on that URL (see the module docstring for details).

  Args:
    obj: The callable to execute. See module docstring for restrictions.
    _countdown, _eta, _headers, _name, _target, _transactional, _url,
        _retry_options, _queue: Passed through to the task queue - see the
        task queue documentation for details.
    args: Positional arguments to call the callable with.
    kwargs: Any other keyword arguments are passed through to the callable.

  Returns:
    A taskqueue.Task object which represents an enqueued callable.
  """
  # Underscore-prefixed kwargs are reserved task options; pop them off so
  # only the callable's own keyword arguments remain in kwargs.
  taskargs = dict((x, kwargs.pop(("_%s" % x), None))
                  for x in ("countdown", "eta", "name", "target",
                            "retry_options"))
  taskargs["url"] = kwargs.pop("_url", _DEFAULT_URL)
  transactional = kwargs.pop("_transactional", False)
  taskargs["headers"] = dict(_TASKQUEUE_HEADERS)
  taskargs["headers"].update(kwargs.pop("_headers", {}))
  queue = kwargs.pop("_queue", _DEFAULT_QUEUE)
  pickled = serialize(obj, *args, **kwargs)
  try:
    task = taskqueue.Task(payload=pickled, **taskargs)
    return task.add(queue, transactional=transactional)
  except taskqueue.TaskTooLargeError:
    # Payload exceeds the task queue size limit: park it in the datastore
    # and enqueue a small task that fetches and runs it from there.
    key = _DeferredTaskEntity(data=pickled).put()
    pickled = serialize(run_from_datastore, str(key))
    task = taskqueue.Task(payload=pickled, **taskargs)
    return task.add(queue)
class TaskHandler(webapp.RequestHandler):
  """A webapp handler class that processes deferred invocations."""

  def run_from_request(self):
    """Default behavior for POST requests to deferred handler."""
    if "X-AppEngine-TaskName" not in self.request.headers:
      # Only the task queue service sets this header; its absence means the
      # request was forged.
      logging.error("Detected an attempted XSRF attack. The header "
                    '"X-AppEngine-Taskname" was not set.')
      self.response.set_status(403)
      return
    in_prod = (
        not self.request.environ.get("SERVER_SOFTWARE").startswith("Devel"))
    if in_prod and self.request.environ.get("REMOTE_ADDR") != "0.1.0.2":
      # In production, genuine task queue requests always come from this
      # special loopback address.
      logging.error("Detected an attempted XSRF attack. This request did "
                    "not originate from Task Queue.")
      self.response.set_status(403)
      return
    headers = ["%s:%s" % (k, v) for k, v in self.request.headers.items()
               if k.lower().startswith("x-appengine-")]
    logging.log(_DEFAULT_LOG_LEVEL, ", ".join(headers))
    run(self.request.body)

  def post(self):
    """Executes the deferred task, mapping failures onto HTTP statuses."""
    try:
      self.run_from_request()
    except SingularTaskFailure:
      # 408 forces the task queue to retry the task, like a timeout would.
      logging.debug("Failure executing task, task retry forced")
      self.response.set_status(408)
      return
    except PermanentTaskFailure:
      # Bug fix: the original used the Python 2-only "except X, e" syntax
      # and never used the bound exception. Log and swallow (200) so the
      # task queue does not retry a task that can never succeed.
      logging.exception("Permanent failure attempting to execute task")
# WSGI application that routes every path to the deferred TaskHandler.
application = webapp.WSGIApplication([(".*", TaskHandler)])
def main():
  """CGI entry point for the legacy deferred handler mount.

  Warns about the deprecated mounting style when on the dev server, then
  runs the WSGI application.
  """
  # Use .get() so a missing SERVER_SOFTWARE variable (e.g. outside a real
  # server environment) does not raise KeyError; logging.warning replaces
  # the deprecated logging.warn alias.
  if os.environ.get("SERVER_SOFTWARE", "").startswith("Devel"):
    logging.warning("You are using deferred in a deprecated fashion. Please change"
                    " the request handler path for /_ah/queue/deferred in app.yaml"
                    " to $PYTHON_LIB/google/appengine/ext/deferred/handler.py to"
                    " avoid encountering import errors.")
  run_wsgi_app(application)
# Allow running the deferred handler module directly as a CGI script.
if __name__ == "__main__":
  main()
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Model Analyzer.
Analyze model, including shape, params, time, memory, structure, etc.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.core.profiler import tfprof_options_pb2
from tensorflow.core.profiler import tfprof_output_pb2
from tensorflow.python import pywrap_tensorflow as print_mdl
from tensorflow.python.framework import errors
from tensorflow.python.profiler import tfprof_logger
# Sentinel defaults: profile()/advise() substitute the module-level option
# dicts when the caller passes nothing.
_DEFAULT_PROFILE_OPTIONS = 0
_DEFAULT_ADVISE_OPTIONS = 0
# pylint: disable=bad-whitespace
# pylint: disable=bad-continuation
# options examples for profiling API.
#
# Show the parameter statistics of trainable variables.
TRAINABLE_VARS_PARAMS_STAT_OPTIONS = {
    'max_depth': 10000,
    'min_bytes': 0,
    'min_micros': 0,
    'min_params': 0,
    'min_float_ops': 0,
    'order_by': 'name',
    'account_type_regexes': [tfprof_logger.TRAINABLE_VARIABLES],
    'start_name_regexes': ['.*'],
    'trim_name_regexes': [],
    'show_name_regexes': ['.*'],
    'hide_name_regexes': [],
    'account_displayed_op_only': True,
    'select': ['params'],
    'output': 'stdout',
    'dump_to_file': ''
}
# Show the number float operations.
FLOAT_OPS_OPTIONS = {
    'max_depth': 10000,
    'min_bytes': 0,
    'min_micros': 0,
    'min_params': 0,
    # Hide nodes that contribute no floating point operations.
    'min_float_ops': 1,
    'order_by': 'float_ops',
    'account_type_regexes': ['.*'],
    'start_name_regexes': ['.*'],
    'trim_name_regexes': [],
    'show_name_regexes': ['.*'],
    'hide_name_regexes': [],
    'account_displayed_op_only': True,
    'select': ['float_ops'],
    'output': 'stdout',
    'dump_to_file': ''
}
# Show number of parameters on parameter server 0.
# It is recommended to provide`run_meta` argument
# to have complete device placement info.
PRINT_PARAMS_ON_DEVICE = {
    'max_depth': 1,
    'min_bytes': 0,
    'min_micros': 0,
    'min_params': 0,
    'min_float_ops': 0,
    'order_by': 'name',
    # Only account ops placed on parameter server task 0.
    'account_type_regexes': ['.*ps.*task:0.*'],
    'start_name_regexes': ['.*'],
    'trim_name_regexes': [],
    'show_name_regexes': ['.*'],
    'hide_name_regexes': [],
    'account_displayed_op_only': False,
    'select': ['device', 'params'],
    'output': 'stdout',
    'dump_to_file': ''
}
# Show the timing stats and memory demands.
PRINT_ALL_TIMING_MEMORY = {
    'max_depth': 10000,
    'min_bytes': 1,  # Only >=1
    'min_micros': 1,  # Only >=1
    'min_params': 0,
    'min_float_ops': 0,
    'order_by': 'name',
    'account_type_regexes': ['.*'],
    'start_name_regexes': ['.*'],
    'trim_name_regexes': [],
    'show_name_regexes': ['.*'],
    'hide_name_regexes': [],
    'account_displayed_op_only': True,
    'select': ['micros', 'bytes'],
    'output': 'stdout',
    'dump_to_file': ''
}
# The following options are for 'advise' cmd.
# Show all advice.
ALL_ADVICE = {
    'ExpensiveOperationChecker': {},
    'AcceleratorUtilizationChecker': {},
    'JobChecker': {},  # Only available internally.
    'OperationChecker': {},
}
# pylint: enable=bad-whitespace
# pylint: enable=bad-continuation
def _build_options(options):
  """Build tfprof.OptionsProto from an options dictionary.

  Args:
    options: A dictionary of options.

  Returns:
    tfprof.OptionsProto.
  """
  opts = tfprof_options_pb2.OptionsProto()
  # Scalar options, with the same defaults the command-line tool uses.
  opts.max_depth = options.get('max_depth', 10)
  opts.min_bytes = options.get('min_bytes', 0)
  opts.min_micros = options.get('min_micros', 0)
  opts.min_params = options.get('min_params', 0)
  opts.min_float_ops = options.get('min_float_ops', 0)
  opts.min_occurrence = options.get('min_occurrence', 0)
  opts.step = options.get('step', -1)
  opts.order_by = options.get('order_by', 'name')
  # Repeated regex options are copied list-by-list into the proto.
  for list_field in ('account_type_regexes', 'start_name_regexes',
                     'trim_name_regexes', 'show_name_regexes',
                     'hide_name_regexes'):
    getattr(opts, list_field).extend(options.get(list_field, []))
  opts.account_displayed_op_only = options.get('account_displayed_op_only',
                                               False)
  opts.select.extend(options.get('select', []))
  opts.output = options.get('output', 'stdout')
  opts.dump_to_file = options.get('dump_to_file', '')
  return opts
def _build_advisor_options(options):
  """Build tfprof.AdvisorOptionsProto from an options dictionary.

  Args:
    options: A dictionary of options. See ALL_ADVICE example.

  Returns:
    tfprof.AdvisorOptionsProto.
  """
  opts = tfprof_options_pb2.AdvisorOptionsProto()
  if options is None:
    return opts
  # Each key names a checker; its value is that checker's option mapping.
  for checker_name, checker_options in six.iteritems(options):
    option_pb = tfprof_options_pb2.AdvisorOptionsProto.CheckerOption()
    for opt_key, opt_value in six.iteritems(checker_options):
      option_pb[opt_key] = opt_value
    opts.checkers[checker_name].MergeFrom(option_pb)
  return opts
class Profiler(object):
  """TensorFlow multi-step profiler.

  https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/README.md

  Typical use case:
    # Currently we are only allowed to create 1 profiler per process.
    profiler = Profiler(sess.graph)

    for i in xrange(total_steps):
      if i % 10000 == 0:
        run_meta = tf.RunMetadata()
        _ = sess.run(...,
                     options=tf.RunOptions(
                         trace_level=tf.RunOptions.FULL_TRACE),
                     run_metadata=run_meta)
        profiler.add_step(i, run_meta)

        # Profile the parameters of your model.
        profiler.profile_name_scope(options=TRAINABLE_VARS_PARAMS_STAT_OPTIONS)

        # Or profile the timing of your model operations.
        opts = PRINT_ALL_TIMING_MEMORY.copy()
        opts['order_by'] = 'micros'
        opts['select'] = ['micros', 'occurrence']
        opts['max_depth'] = 20
        profiler.profile_operations(options=opts)

        # Or you can generate a timeline:
        opts = PRINT_ALL_TIMING_MEMORY.copy()
        opts['output'] = 'timeline:outfile=' + filename
        opts['step'] = i
        profiler.profile_graph(options=opts)
      else:
        _ = sess.run(...)

    # Auto detect problems and generate advice.
    profiler.advise(model_analyzer.ALL_ADVICE)
  """

  def __init__(self, graph, op_log=None):
    """Constructor.

    Args:
      graph: tf.Graph.
      op_log: optional. tensorflow::tfprof::OpLog proto. Used to define
          extra op types.
    """
    self._graph = graph
    # pylint: disable=protected-access
    op_log = tfprof_logger._merge_default_with_oplog(
        self._graph, op_log=op_log)
    # pylint: enable=protected-access
    # Registers graph and op log with the native profiler; only one native
    # profiler may exist per process.
    print_mdl.NewProfiler(
        self._graph.as_graph_def(add_shapes=True).SerializeToString(),
        op_log.SerializeToString())

  def __del__(self):
    # Release the native profiler created in __init__.
    print_mdl.DeleteProfiler()

  def add_step(self, step, run_meta):
    """Add statistics of a step.

    Args:
      step: A step uint64 used to identify the RunMetadata. Must be different
          across different AddStep() calls.
      run_meta: RunMetadata proto that contains statistics of a session run.
    """
    # pylint: disable=protected-access
    op_log = tfprof_logger._merge_default_with_oplog(
        self._graph, run_meta=run_meta, add_trace=False,
        add_trainable_var=False)
    # pylint: enable=protected-access
    print_mdl.AddStep(
        step, run_meta.SerializeToString(), op_log.SerializeToString())

  def profile_python(self, options):
    """Profile the statistics of the Python codes.

    By default, it shows the call stack from root. To avoid
    redundant output, you may use options to filter as below
      options['show_name_regexes'] = ['.*my_code.py.*']

    Args:
      options: A dict of options. See core/profiler/g3doc/options.md.

    Returns:
      a TFMultiGraphNodeProto that records the results.
    """
    opts = _build_options(options)
    tfprof_node = tfprof_output_pb2.TFMultiGraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.Profile('code'.encode('utf-8'), opts.SerializeToString()))
    return tfprof_node

  def profile_operations(self, options):
    """Profile the statistics of the Operation types (e.g. MatMul, Conv2D).

    Args:
      options: A dict of options. See core/profiler/g3doc/options.md.

    Returns:
      a TFMultiGraphNodeProto that records the results.
    """
    opts = _build_options(options)
    tfprof_node = tfprof_output_pb2.TFMultiGraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.Profile('op'.encode('utf-8'), opts.SerializeToString()))
    return tfprof_node

  def profile_name_scope(self, options):
    """Profile the statistics of graph nodes, organized by name scope.

    Args:
      options: A dict of options. See core/profiler/g3doc/options.md.

    Returns:
      a TFGraphNodeProto that records the results.
    """
    opts = _build_options(options)
    tfprof_node = tfprof_output_pb2.TFGraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.Profile('scope'.encode('utf-8'), opts.SerializeToString()))
    return tfprof_node

  def profile_graph(self, options):
    """Profile the statistics of graph nodes, organized by dataflow graph.

    Args:
      options: A dict of options. See core/profiler/g3doc/options.md.

    Returns:
      a TFGraphNodeProto that records the results.
    """
    opts = _build_options(options)
    tfprof_node = tfprof_output_pb2.TFGraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.Profile('graph'.encode('utf-8'), opts.SerializeToString()))
    return tfprof_node

  def advise(self, options):
    """Automatically detect problems and generate reports.

    Args:
      options: A dict of options. See ALL_ADVICE example above.

    Returns:
      A Advise proto that contains the reports from all checkers.
    """
    advise_pb = tfprof_output_pb2.AdviceProto()
    opts = _build_advisor_options(options)
    advise_pb.ParseFromString(
        print_mdl.Profile('advise'.encode('utf-8'), opts.SerializeToString()))
    return advise_pb
def profile(graph,
            run_meta=None,
            op_log=None,
            cmd='scope',
            options=_DEFAULT_PROFILE_OPTIONS):
  """Print model statistics.

  https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/README.md

  Args:
    graph: tf.Graph.
    run_meta: tensorflow::RunMetadata proto. When provided, also shows valid
        timing and memory information when 'select' option contains
        'micros' and 'bytes'.
    op_log: tensorflow::tfprof::OpLog proto. users can use this proto to
        group together ops and use a op_type to select the group.
    cmd: string. Either 'op', 'scope', 'graph', 'code'.
        'op' view organize outputs using operation type. (e.g. MatMul)
        'scope' view organize outputs using graph node name scope.
        'graph' view organize outputs using graph node inputs/outputs.
        'code' view organize outputs using Python call stack.
    options: A dict of options. See core/profiler/g3doc/options.md.

  Returns:
    If cmd is 'scope' or 'graph', returns TFGraphNodeProto proto.
    If cmd is 'op' or 'code', returns TFMultiGraphNodeProto proto.
    Side effect: stdout/file/timeline.json depending on options['output']
  """
  if options == _DEFAULT_PROFILE_OPTIONS:
    # Copy so caller mutations never leak into the module-level defaults.
    options = TRAINABLE_VARS_PARAMS_STAT_OPTIONS.copy()
  # pylint: disable=protected-access
  op_log = tfprof_logger._merge_default_with_oplog(
      graph, op_log, run_meta, add_trace=cmd == 'code')
  # pylint: enable=protected-access
  opts = _build_options(options)
  run_meta_str = run_meta.SerializeToString() if run_meta else b''
  # 'code'/'op' views aggregate multiple graph nodes per entry, so they use
  # the multi-graph proto; 'graph'/'scope' map one node per entry.
  if cmd == 'code' or cmd == 'op':
    tfprof_node = tfprof_output_pb2.TFMultiGraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.PrintModelAnalysis(
            graph.as_graph_def(add_shapes=True).SerializeToString(),
            run_meta_str,
            op_log.SerializeToString(),
            cmd.encode('utf-8'),
            opts.SerializeToString()))
  elif cmd == 'graph' or cmd == 'scope':
    tfprof_node = tfprof_output_pb2.TFGraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.PrintModelAnalysis(
            graph.as_graph_def(add_shapes=True).SerializeToString(),
            run_meta_str,
            op_log.SerializeToString(),
            cmd.encode('utf-8'),
            opts.SerializeToString()))
  else:
    raise errors.InvalidArgumentError(
        None, None, 'unknown cmd: %s\n' % cmd)
  return tfprof_node
def advise(graph, run_meta=None, options=_DEFAULT_ADVISE_OPTIONS):
  """Auto profile and advise.

  Builds profiles and automatically check anomalies of various
  aspects. For more details:
  https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/README.md

  Args:
    graph: tf.Graph.
    run_meta: tensorflow::RunMetadata proto. Allows auto-profile
        time and memory.
    options: see ALL_ADVICE example above.

  Returns:
    Returns AdviceProto proto
  """
  if options == _DEFAULT_ADVISE_OPTIONS:
    # Copy so caller mutations never leak into the module-level defaults.
    options = ALL_ADVICE.copy()
  # pylint: disable=protected-access
  op_log = tfprof_logger._merge_default_with_oplog(
      graph, None, run_meta, add_trace=True)
  # pylint: enable=protected-access
  run_meta_str = run_meta.SerializeToString() if run_meta else b''
  opts = _build_advisor_options(options)
  ret = tfprof_output_pb2.AdviceProto()
  ret.ParseFromString(
      print_mdl.PrintModelAnalysis(
          graph.as_graph_def(add_shapes=True).SerializeToString(),
          run_meta_str,
          op_log.SerializeToString(),
          'advise'.encode('utf-8'),
          opts.SerializeToString()))
  return ret
|
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
from contextlib import closing
import copy
import functools
import sys
import threading
import datetime
from io import BytesIO
from tornado.escape import utf8
from tornado import gen
from tornado.httpclient import HTTPRequest, HTTPResponse, _RequestProxy, HTTPError, HTTPClient
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.iostream import IOStream
from tornado.log import gen_log
from tornado import netutil
from tornado.stack_context import ExceptionStackContext, NullContext
from tornado.testing import AsyncHTTPTestCase, bind_unused_port, gen_test, ExpectLog
from tornado.test.util import unittest, skipOnTravis
from tornado.util import u
from tornado.web import Application, RequestHandler, url
from tornado.httputil import format_timestamp, HTTPHeaders
class HelloWorldHandler(RequestHandler):
    def get(self):
        # Greet the optional ?name= argument, defaulting to "world".
        greeting_target = self.get_argument("name", "world")
        self.set_header("Content-Type", "text/plain")
        self.finish("Hello %s!" % greeting_target)
class PostHandler(RequestHandler):
    def post(self):
        # Echo both required form arguments back to the client.
        arg1 = self.get_argument("arg1")
        arg2 = self.get_argument("arg2")
        self.finish("Post arg1: %s, arg2: %s" % (arg1, arg2))
class PutHandler(RequestHandler):
    def put(self):
        # Echo the PUT payload with a prefix so tests can assert on it.
        payload = self.request.body
        self.write("Put body: ")
        self.write(payload)
class RedirectHandler(RequestHandler):
    def prepare(self):
        # Redirect responses may legally carry bodies; write one first.
        self.write('redirects can have bodies too')
        target = self.get_argument("url")
        status_code = int(self.get_argument("status", "302"))
        self.redirect(target, status=status_code)
class ChunkHandler(RequestHandler):
    @gen.coroutine
    def get(self):
        # First chunk, flushed immediately so it travels on its own.
        self.write("asdf")
        self.flush()
        # Pause briefly so the two chunks are sent and received separately.
        yield gen.sleep(0.01)
        self.write("qwer")
class AuthHandler(RequestHandler):
    def get(self):
        # Echo the Authorization header so tests can assert on its value.
        auth_header = self.request.headers["Authorization"]
        self.finish(auth_header)
class CountdownHandler(RequestHandler):
    def get(self, count):
        remaining = int(count)
        if remaining <= 0:
            self.write("Zero")
            return
        # Redirect to /countdown/<remaining-1> until we hit zero.
        self.redirect(self.reverse_url("countdown", remaining - 1))
class EchoPostHandler(RequestHandler):
    def post(self):
        # Reflect the raw request body back to the client unchanged.
        self.write(self.request.body)
class UserAgentHandler(RequestHandler):
    def get(self):
        # Report the client's User-Agent, or a sentinel when absent.
        agent = self.request.headers.get('User-Agent', 'User agent not set')
        self.write(agent)
class ContentLength304Handler(RequestHandler):
    def get(self):
        # A 304 that (non-conformingly) carries a Content-Length header.
        self.set_status(304)
        self.set_header('Content-Length', 42)

    def _clear_headers_for_304(self):
        # Tornado normally strips entity headers from 304 responses; make
        # this a no-op to simulate servers that include them anyway.
        pass
class PatchHandler(RequestHandler):
    def patch(self):
        """Echo the PATCH payload so tests can verify it was preserved."""
        self.write(self.request.body)
class AllMethodsHandler(RequestHandler):
    # Advertise a nonstandard verb alongside the usual HTTP methods.
    SUPPORTED_METHODS = RequestHandler.SUPPORTED_METHODS + ('OTHER',)

    def method(self):
        # Reply with whichever HTTP method the request used.
        self.write(self.request.method)

    # Every supported verb shares the same implementation.
    get = post = put = delete = options = patch = other = method
# These tests end up getting run redundantly: once here with the default
# HTTPClient implementation, and then again in each implementation's own
# test suite.
class HTTPClientCommonTestCase(AsyncHTTPTestCase):
def get_app(self):
return Application([
url("/hello", HelloWorldHandler),
url("/post", PostHandler),
url("/put", PutHandler),
url("/redirect", RedirectHandler),
url("/chunk", ChunkHandler),
url("/auth", AuthHandler),
url("/countdown/([0-9]+)", CountdownHandler, name="countdown"),
url("/echopost", EchoPostHandler),
url("/user_agent", UserAgentHandler),
url("/304_with_content_length", ContentLength304Handler),
url("/all_methods", AllMethodsHandler),
url('/patch', PatchHandler),
], gzip=True)
def test_patch_receives_payload(self):
body = b"some patch data"
response = self.fetch("/patch", method='PATCH', body=body)
self.assertEqual(response.code, 200)
self.assertEqual(response.body, body)
@skipOnTravis
def test_hello_world(self):
response = self.fetch("/hello")
self.assertEqual(response.code, 200)
self.assertEqual(response.headers["Content-Type"], "text/plain")
self.assertEqual(response.body, b"Hello world!")
self.assertEqual(int(response.request_time), 0)
response = self.fetch("/hello?name=Ben")
self.assertEqual(response.body, b"Hello Ben!")
def test_streaming_callback(self):
# streaming_callback is also tested in test_chunked
chunks = []
response = self.fetch("/hello",
streaming_callback=chunks.append)
# with streaming_callback, data goes to the callback and not response.body
self.assertEqual(chunks, [b"Hello world!"])
self.assertFalse(response.body)
def test_post(self):
response = self.fetch("/post", method="POST",
body="arg1=foo&arg2=bar")
self.assertEqual(response.code, 200)
self.assertEqual(response.body, b"Post arg1: foo, arg2: bar")
def test_chunked(self):
response = self.fetch("/chunk")
self.assertEqual(response.body, b"asdfqwer")
chunks = []
response = self.fetch("/chunk",
streaming_callback=chunks.append)
self.assertEqual(chunks, [b"asdf", b"qwer"])
self.assertFalse(response.body)
def test_chunked_close(self):
# test case in which chunks spread read-callback processing
# over several ioloop iterations, but the connection is already closed.
sock, port = bind_unused_port()
with closing(sock):
def write_response(stream, request_data):
if b"HTTP/1." not in request_data:
self.skipTest("requires HTTP/1.x")
stream.write(b"""\
HTTP/1.1 200 OK
Transfer-Encoding: chunked
1
1
1
2
0
""".replace(b"\n", b"\r\n"), callback=stream.close)
def accept_callback(conn, address):
# fake an HTTP server using chunked encoding where the final chunks
# and connection close all happen at once
stream = IOStream(conn, io_loop=self.io_loop)
stream.read_until(b"\r\n\r\n",
functools.partial(write_response, stream))
netutil.add_accept_handler(sock, accept_callback, self.io_loop)
self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop)
resp = self.wait()
resp.rethrow()
self.assertEqual(resp.body, b"12")
self.io_loop.remove_handler(sock.fileno())
def test_streaming_stack_context(self):
chunks = []
exc_info = []
def error_handler(typ, value, tb):
exc_info.append((typ, value, tb))
return True
def streaming_cb(chunk):
chunks.append(chunk)
if chunk == b'qwer':
1 / 0
with ExceptionStackContext(error_handler):
self.fetch('/chunk', streaming_callback=streaming_cb)
self.assertEqual(chunks, [b'asdf', b'qwer'])
self.assertEqual(1, len(exc_info))
self.assertIs(exc_info[0][0], ZeroDivisionError)
def test_basic_auth(self):
self.assertEqual(self.fetch("/auth", auth_username="Aladdin",
auth_password="open sesame").body,
b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
def test_basic_auth_explicit_mode(self):
self.assertEqual(self.fetch("/auth", auth_username="Aladdin",
auth_password="open sesame",
auth_mode="basic").body,
b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
def test_unsupported_auth_mode(self):
# curl and simple clients handle errors a bit differently; the
# important thing is that they don't fall back to basic auth
# on an unknown mode.
with ExpectLog(gen_log, "uncaught exception", required=False):
with self.assertRaises((ValueError, HTTPError)):
response = self.fetch("/auth", auth_username="Aladdin",
auth_password="open sesame",
auth_mode="asdf")
response.rethrow()
def test_follow_redirect(self):
response = self.fetch("/countdown/2", follow_redirects=False)
self.assertEqual(302, response.code)
self.assertTrue(response.headers["Location"].endswith("/countdown/1"))
response = self.fetch("/countdown/2")
self.assertEqual(200, response.code)
self.assertTrue(response.effective_url.endswith("/countdown/0"))
self.assertEqual(b"Zero", response.body)
def test_credentials_in_url(self):
url = self.get_url("/auth").replace("http://", "http://me:secret@")
self.http_client.fetch(url, self.stop)
response = self.wait()
self.assertEqual(b"Basic " + base64.b64encode(b"me:secret"),
response.body)
def test_body_encoding(self):
unicode_body = u("\xe9")
byte_body = binascii.a2b_hex(b"e9")
# unicode string in body gets converted to utf8
response = self.fetch("/echopost", method="POST", body=unicode_body,
headers={"Content-Type": "application/blah"})
self.assertEqual(response.headers["Content-Length"], "2")
self.assertEqual(response.body, utf8(unicode_body))
# byte strings pass through directly
response = self.fetch("/echopost", method="POST",
body=byte_body,
headers={"Content-Type": "application/blah"})
self.assertEqual(response.headers["Content-Length"], "1")
self.assertEqual(response.body, byte_body)
# Mixing unicode in headers and byte string bodies shouldn't
# break anything
response = self.fetch("/echopost", method="POST", body=byte_body,
headers={"Content-Type": "application/blah"},
user_agent=u("foo"))
self.assertEqual(response.headers["Content-Length"], "1")
self.assertEqual(response.body, byte_body)
def test_types(self):
response = self.fetch("/hello")
self.assertEqual(type(response.body), bytes)
self.assertEqual(type(response.headers["Content-Type"]), str)
self.assertEqual(type(response.code), int)
self.assertEqual(type(response.effective_url), str)
def test_header_callback(self):
first_line = []
headers = {}
chunks = []
def header_callback(header_line):
if header_line.startswith('HTTP/1.1 101'):
# Upgrading to HTTP/2
pass
elif header_line.startswith('HTTP/'):
first_line.append(header_line)
elif header_line != '\r\n':
k, v = header_line.split(':', 1)
headers[k.lower()] = v.strip()
def streaming_callback(chunk):
# All header callbacks are run before any streaming callbacks,
# so the header data is available to process the data as it
# comes in.
self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
chunks.append(chunk)
self.fetch('/chunk', header_callback=header_callback,
streaming_callback=streaming_callback)
self.assertEqual(len(first_line), 1, first_line)
self.assertRegexpMatches(first_line[0], 'HTTP/[0-9]\\.[0-9] 200.*\r\n')
self.assertEqual(chunks, [b'asdf', b'qwer'])
def test_header_callback_stack_context(self):
exc_info = []
def error_handler(typ, value, tb):
exc_info.append((typ, value, tb))
return True
def header_callback(header_line):
if header_line.lower().startswith('content-type:'):
1 / 0
with ExceptionStackContext(error_handler):
self.fetch('/chunk', header_callback=header_callback)
self.assertEqual(len(exc_info), 1)
self.assertIs(exc_info[0][0], ZeroDivisionError)
def test_configure_defaults(self):
defaults = dict(user_agent='TestDefaultUserAgent', allow_ipv6=False)
# Construct a new instance of the configured client class
client = self.http_client.__class__(self.io_loop, force_instance=True,
defaults=defaults)
try:
client.fetch(self.get_url('/user_agent'), callback=self.stop)
response = self.wait()
self.assertEqual(response.body, b'TestDefaultUserAgent')
finally:
client.close()
def test_header_types(self):
# Header values may be passed as character or utf8 byte strings,
# in a plain dictionary or an HTTPHeaders object.
# Keys must always be the native str type.
# All combinations should have the same results on the wire.
for value in [u("MyUserAgent"), b"MyUserAgent"]:
for container in [dict, HTTPHeaders]:
headers = container()
headers['User-Agent'] = value
resp = self.fetch('/user_agent', headers=headers)
self.assertEqual(
resp.body, b"MyUserAgent",
"response=%r, value=%r, container=%r" %
(resp.body, value, container))
def test_multi_line_headers(self):
# Multi-line http headers are rare but rfc-allowed
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2
sock, port = bind_unused_port()
with closing(sock):
def write_response(stream, request_data):
stream.write(b"""\
HTTP/1.1 200 OK
X-XSS-Protection: 1;
\tmode=block
""".replace(b"\n", b"\r\n"), callback=stream.close)
def accept_callback(conn, address):
stream = IOStream(conn, io_loop=self.io_loop)
stream.read_until(b"\r\n\r\n",
functools.partial(write_response, stream))
netutil.add_accept_handler(sock, accept_callback, self.io_loop)
self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop)
resp = self.wait()
resp.rethrow()
self.assertEqual(resp.headers['X-XSS-Protection'], "1; mode=block")
self.io_loop.remove_handler(sock.fileno())
def test_304_with_content_length(self):
# According to the spec 304 responses SHOULD NOT include
# Content-Length or other entity headers, but some servers do it
# anyway.
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
response = self.fetch('/304_with_content_length')
self.assertEqual(response.code, 304)
self.assertEqual(response.headers['Content-Length'], '42')
def test_final_callback_stack_context(self):
# The final callback should be run outside of the httpclient's
# stack_context. We want to ensure that there is not stack_context
# between the user's callback and the IOLoop, so monkey-patch
# IOLoop.handle_callback_exception and disable the test harness's
# context with a NullContext.
# Note that this does not apply to secondary callbacks (header
# and streaming_callback), as errors there must be seen as errors
# by the http client so it can clean up the connection.
exc_info = []
def handle_callback_exception(callback):
exc_info.append(sys.exc_info())
self.stop()
self.io_loop.handle_callback_exception = handle_callback_exception
with NullContext():
self.http_client.fetch(self.get_url('/hello'),
lambda response: 1 / 0)
self.wait()
self.assertEqual(exc_info[0][0], ZeroDivisionError)
@gen_test
def test_future_interface(self):
response = yield self.http_client.fetch(self.get_url('/hello'))
self.assertEqual(response.body, b'Hello world!')
@gen_test
def test_future_http_error(self):
with self.assertRaises(HTTPError) as context:
yield self.http_client.fetch(self.get_url('/notfound'))
self.assertEqual(context.exception.code, 404)
self.assertEqual(context.exception.response.code, 404)
@gen_test
def test_future_http_error_no_raise(self):
response = yield self.http_client.fetch(self.get_url('/notfound'), raise_error=False)
self.assertEqual(response.code, 404)
@gen_test
def test_reuse_request_from_response(self):
# The response.request attribute should be an HTTPRequest, not
# a _RequestProxy.
# This test uses self.http_client.fetch because self.fetch calls
# self.get_url on the input unconditionally.
url = self.get_url('/hello')
response = yield self.http_client.fetch(url)
self.assertEqual(response.request.url, url)
self.assertTrue(isinstance(response.request, HTTPRequest))
response2 = yield self.http_client.fetch(response.request)
self.assertEqual(response2.body, b'Hello world!')
def test_all_methods(self):
for method in ['GET', 'DELETE', 'OPTIONS']:
response = self.fetch('/all_methods', method=method)
self.assertEqual(response.body, utf8(method))
for method in ['POST', 'PUT', 'PATCH']:
response = self.fetch('/all_methods', method=method, body=b'')
self.assertEqual(response.body, utf8(method))
response = self.fetch('/all_methods', method='HEAD')
self.assertEqual(response.body, b'')
response = self.fetch('/all_methods', method='OTHER',
allow_nonstandard_methods=True)
self.assertEqual(response.body, b'OTHER')
    @gen_test
    def test_body_sanity_checks(self):
        """A body is rejected for GET and required for POST."""
        hello_url = self.get_url('/hello')
        # GET with a body is rejected up front with ValueError.
        with self.assertRaises(ValueError) as context:
            yield self.http_client.fetch(hello_url, body='data')
        self.assertTrue('must be None' in str(context.exception))
        # POST without a body is likewise rejected.
        with self.assertRaises(ValueError) as context:
            yield self.http_client.fetch(hello_url, method='POST')
        self.assertTrue('must not be None' in str(context.exception))
# This test causes odd failures with the combination of
# curl_httpclient (at least with the version of libcurl available
# on ubuntu 12.04), TwistedIOLoop, and epoll. For POST (but not PUT),
# curl decides the response came back too soon and closes the connection
# to start again. It does this *before* telling the socket callback to
# unregister the FD. Some IOLoop implementations have special kernel
# integration to discover this immediately. Tornado's IOLoops
# ignore errors on remove_handler to accommodate this behavior, but
# Twisted's reactor does not. The removeReader call fails and so
# do all future removeAll calls (which our tests do at cleanup).
#
# def test_post_307(self):
# response = self.fetch("/redirect?status=307&url=/post",
# method="POST", body=b"arg1=foo&arg2=bar")
# self.assertEqual(response.body, b"Post arg1: foo, arg2: bar")
def test_put_307(self):
response = self.fetch("/redirect?status=307&url=/put",
method="PUT", body=b"hello")
response.rethrow()
self.assertEqual(response.body, b"Put body: hello")
class RequestProxyTest(unittest.TestCase):
    """Precedence rules of _RequestProxy: request values beat defaults."""

    def test_request_set(self):
        """A value set on the request is visible through the proxy."""
        wrapped = _RequestProxy(
            HTTPRequest('http://example.com/', user_agent='foo'), dict())
        self.assertEqual('foo', wrapped.user_agent)

    def test_default_set(self):
        """A default fills in when the request leaves the value unset."""
        wrapped = _RequestProxy(
            HTTPRequest('http://example.com/'), dict(network_interface='foo'))
        self.assertEqual('foo', wrapped.network_interface)

    def test_both_set(self):
        """The request value wins over a conflicting default."""
        wrapped = _RequestProxy(
            HTTPRequest('http://example.com/', proxy_host='foo'),
            dict(proxy_host='bar'))
        self.assertEqual('foo', wrapped.proxy_host)

    def test_neither_set(self):
        """An attribute unset everywhere resolves to None."""
        wrapped = _RequestProxy(HTTPRequest('http://example.com/'), dict())
        self.assertIs(None, wrapped.auth_username)

    def test_bad_attribute(self):
        """Unknown attributes raise AttributeError instead of returning None."""
        wrapped = _RequestProxy(HTTPRequest('http://example.com/'), dict())
        with self.assertRaises(AttributeError):
            wrapped.foo

    def test_defaults_none(self):
        """A None defaults mapping behaves like an empty one."""
        wrapped = _RequestProxy(HTTPRequest('http://example.com/'), None)
        self.assertIs(None, wrapped.auth_username)
class HTTPResponseTestCase(unittest.TestCase):
    """str() of an HTTPResponse should be informative."""

    def test_str(self):
        reply = HTTPResponse(HTTPRequest('http://example.com'),
                             200, headers={}, buffer=BytesIO())
        text = str(reply)
        # The repr-like form names the class and includes the status code.
        self.assertTrue(text.startswith('HTTPResponse('))
        self.assertIn('code=200', text)
class SyncHTTPClientTest(unittest.TestCase):
    """Exercise the blocking HTTPClient against a server on another thread."""

    def setUp(self):
        # Some IOLoop implementations cannot host a second loop on a
        # background thread; skip those configurations up front.
        if IOLoop.configured_class().__name__ in ('TwistedIOLoop',
                                                  'AsyncIOMainLoop'):
            # TwistedIOLoop only supports the global reactor, so we can't have
            # separate IOLoops for client and server threads.
            # AsyncIOMainLoop doesn't work with the default policy
            # (although it could with some tweaks to this test and a
            # policy that created loops for non-main threads).
            raise unittest.SkipTest(
                'Sync HTTPClient not compatible with TwistedIOLoop or '
                'AsyncIOMainLoop')
        self.server_ioloop = IOLoop()
        sock, self.port = bind_unused_port()
        app = Application([('/', HelloWorldHandler)])
        self.server = HTTPServer(app, io_loop=self.server_ioloop)
        self.server.add_socket(sock)
        # The server loop runs on its own thread so the synchronous
        # client on this thread can block without deadlocking it.
        self.server_thread = threading.Thread(target=self.server_ioloop.start)
        self.server_thread.start()
        self.http_client = HTTPClient()

    def tearDown(self):
        def stop_server():
            self.server.stop()
            # Delay the shutdown of the IOLoop by one iteration because
            # the server may still have some cleanup work left when
            # the client finishes with the response (this is noticeable
            # with http/2, which leaves a Future with an unexamined
            # StreamClosedError on the loop).
            self.server_ioloop.add_callback(self.server_ioloop.stop)
        # Ordering matters: ask the server loop to stop, join its thread,
        # then close client and loop resources.
        self.server_ioloop.add_callback(stop_server)
        self.server_thread.join()
        self.http_client.close()
        self.server_ioloop.close(all_fds=True)

    def get_url(self, path):
        """Absolute URL for *path* on the test server."""
        return 'http://127.0.0.1:%d%s' % (self.port, path)

    def test_sync_client(self):
        """A plain synchronous fetch returns the response directly."""
        response = self.http_client.fetch(self.get_url('/'))
        self.assertEqual(b'Hello world!', response.body)

    def test_sync_client_error(self):
        # Synchronous HTTPClient raises errors directly; no need for
        # response.rethrow()
        with self.assertRaises(HTTPError) as assertion:
            self.http_client.fetch(self.get_url('/notfound'))
        self.assertEqual(assertion.exception.code, 404)
class HTTPRequestTestCase(unittest.TestCase):
    """Attribute normalization on HTTPRequest."""

    def test_headers(self):
        req = HTTPRequest('http://example.com', headers={'foo': 'bar'})
        self.assertEqual({'foo': 'bar'}, req.headers)

    def test_headers_setter(self):
        req = HTTPRequest('http://example.com')
        req.headers = {'bar': 'baz'}
        self.assertEqual({'bar': 'baz'}, req.headers)

    def test_null_headers_setter(self):
        """Assigning None normalizes to an empty header mapping."""
        req = HTTPRequest('http://example.com')
        req.headers = None
        self.assertEqual({}, req.headers)

    def test_body(self):
        """A str body is encoded to utf-8 bytes."""
        req = HTTPRequest('http://example.com', body='foo')
        self.assertEqual(utf8('foo'), req.body)

    def test_body_setter(self):
        req = HTTPRequest('http://example.com')
        req.body = 'foo'
        self.assertEqual(utf8('foo'), req.body)

    def test_if_modified_since(self):
        """if_modified_since is rendered as an If-Modified-Since header."""
        stamp = datetime.datetime.utcnow()
        req = HTTPRequest('http://example.com', if_modified_since=stamp)
        self.assertEqual({'If-Modified-Since': format_timestamp(stamp)},
                         req.headers)
class HTTPErrorTestCase(unittest.TestCase):
    """copy() and str() behavior of HTTPError."""

    def test_copy(self):
        original = HTTPError(403)
        duplicate = copy.copy(original)
        # A shallow copy is a distinct object with the same status code.
        self.assertIsNot(original, duplicate)
        self.assertEqual(original.code, duplicate.code)

    def test_str(self):
        self.assertEqual("HTTP 403: Forbidden", str(HTTPError(403)))
|
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Lighting(_BaseTraceHierarchyType):
    """
    Lighting parameters for a mesh3d trace (plotly property path
    "mesh3d.lighting").  Every member is a plain numeric scalar; the
    valid interval for each is stated in its property docstring.
    NOTE(review): this class follows plotly's code-generation layout;
    keep edits limited to documentation.
    """

    # class properties
    # --------------------
    _parent_path_str = "mesh3d"
    _path_str = "mesh3d.lighting"
    _valid_props = {
        "ambient",
        "diffuse",
        "facenormalsepsilon",
        "fresnel",
        "roughness",
        "specular",
        "vertexnormalsepsilon",
    }

    # ambient
    # -------
    @property
    def ambient(self):
        """
        Ambient light increases overall color visibility but can wash
        out the image.

        The 'ambient' property is a number and may be specified as:
          - An int or float in the interval [0, 1]

        Returns
        -------
        int|float
        """
        return self["ambient"]

    @ambient.setter
    def ambient(self, val):
        self["ambient"] = val

    # diffuse
    # -------
    @property
    def diffuse(self):
        """
        Represents the extent that incident rays are reflected in a
        range of angles.

        The 'diffuse' property is a number and may be specified as:
          - An int or float in the interval [0, 1]

        Returns
        -------
        int|float
        """
        return self["diffuse"]

    @diffuse.setter
    def diffuse(self, val):
        self["diffuse"] = val

    # facenormalsepsilon
    # ------------------
    @property
    def facenormalsepsilon(self):
        """
        Epsilon for face normals calculation avoids math issues arising
        from degenerate geometry.

        The 'facenormalsepsilon' property is a number and may be specified as:
          - An int or float in the interval [0, 1]

        Returns
        -------
        int|float
        """
        return self["facenormalsepsilon"]

    @facenormalsepsilon.setter
    def facenormalsepsilon(self, val):
        self["facenormalsepsilon"] = val

    # fresnel
    # -------
    @property
    def fresnel(self):
        """
        Represents the reflectance as a dependency of the viewing
        angle; e.g. paper is reflective when viewing it from the edge
        of the paper (almost 90 degrees), causing shine.

        The 'fresnel' property is a number and may be specified as:
          - An int or float in the interval [0, 5]

        Returns
        -------
        int|float
        """
        return self["fresnel"]

    @fresnel.setter
    def fresnel(self, val):
        self["fresnel"] = val

    # roughness
    # ---------
    @property
    def roughness(self):
        """
        Alters specular reflection; the rougher the surface, the wider
        and less contrasty the shine.

        The 'roughness' property is a number and may be specified as:
          - An int or float in the interval [0, 1]

        Returns
        -------
        int|float
        """
        return self["roughness"]

    @roughness.setter
    def roughness(self, val):
        self["roughness"] = val

    # specular
    # --------
    @property
    def specular(self):
        """
        Represents the level that incident rays are reflected in a
        single direction, causing shine.

        The 'specular' property is a number and may be specified as:
          - An int or float in the interval [0, 2]

        Returns
        -------
        int|float
        """
        return self["specular"]

    @specular.setter
    def specular(self, val):
        self["specular"] = val

    # vertexnormalsepsilon
    # --------------------
    @property
    def vertexnormalsepsilon(self):
        """
        Epsilon for vertex normals calculation avoids math issues
        arising from degenerate geometry.

        The 'vertexnormalsepsilon' property is a number and may be specified as:
          - An int or float in the interval [0, 1]

        Returns
        -------
        int|float
        """
        return self["vertexnormalsepsilon"]

    @vertexnormalsepsilon.setter
    def vertexnormalsepsilon(self, val):
        self["vertexnormalsepsilon"] = val

    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        return """\
        ambient
            Ambient light increases overall color visibility but
            can wash out the image.
        diffuse
            Represents the extent that incident rays are reflected
            in a range of angles.
        facenormalsepsilon
            Epsilon for face normals calculation avoids math issues
            arising from degenerate geometry.
        fresnel
            Represents the reflectance as a dependency of the
            viewing angle; e.g. paper is reflective when viewing it
            from the edge of the paper (almost 90 degrees), causing
            shine.
        roughness
            Alters specular reflection; the rougher the surface,
            the wider and less contrasty the shine.
        specular
            Represents the level that incident rays are reflected
            in a single direction, causing shine.
        vertexnormalsepsilon
            Epsilon for vertex normals calculation avoids math
            issues arising from degenerate geometry.
        """

    def __init__(
        self,
        arg=None,
        ambient=None,
        diffuse=None,
        facenormalsepsilon=None,
        fresnel=None,
        roughness=None,
        specular=None,
        vertexnormalsepsilon=None,
        **kwargs
    ):
        """
        Construct a new Lighting object

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of
            :class:`plotly.graph_objs.mesh3d.Lighting`
        ambient
            Ambient light increases overall color visibility but
            can wash out the image.
        diffuse
            Represents the extent that incident rays are reflected
            in a range of angles.
        facenormalsepsilon
            Epsilon for face normals calculation avoids math issues
            arising from degenerate geometry.
        fresnel
            Represents the reflectance as a dependency of the
            viewing angle; e.g. paper is reflective when viewing it
            from the edge of the paper (almost 90 degrees), causing
            shine.
        roughness
            Alters specular reflection; the rougher the surface,
            the wider and less contrasty the shine.
        specular
            Represents the level that incident rays are reflected
            in a single direction, causing shine.
        vertexnormalsepsilon
            Epsilon for vertex normals calculation avoids math
            issues arising from degenerate geometry.

        Returns
        -------
        Lighting
        """
        super(Lighting, self).__init__("lighting")
        # Internal construction path: adopt the parent and skip validation.
        if "_parent" in kwargs:
            self._parent = kwargs["_parent"]
            return

        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.mesh3d.Lighting
constructor must be a dict or
an instance of :class:`plotly.graph_objs.mesh3d.Lighting`"""
            )

        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        self._validate = kwargs.pop("_validate", True)

        # Populate data dict with properties
        # ----------------------------------
        # Explicit keyword arguments take precedence over values in arg.
        _v = arg.pop("ambient", None)
        _v = ambient if ambient is not None else _v
        if _v is not None:
            self["ambient"] = _v
        _v = arg.pop("diffuse", None)
        _v = diffuse if diffuse is not None else _v
        if _v is not None:
            self["diffuse"] = _v
        _v = arg.pop("facenormalsepsilon", None)
        _v = facenormalsepsilon if facenormalsepsilon is not None else _v
        if _v is not None:
            self["facenormalsepsilon"] = _v
        _v = arg.pop("fresnel", None)
        _v = fresnel if fresnel is not None else _v
        if _v is not None:
            self["fresnel"] = _v
        _v = arg.pop("roughness", None)
        _v = roughness if roughness is not None else _v
        if _v is not None:
            self["roughness"] = _v
        _v = arg.pop("specular", None)
        _v = specular if specular is not None else _v
        if _v is not None:
            self["specular"] = _v
        _v = arg.pop("vertexnormalsepsilon", None)
        _v = vertexnormalsepsilon if vertexnormalsepsilon is not None else _v
        if _v is not None:
            self["vertexnormalsepsilon"] = _v

        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))

        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
|
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/vision_v1p2beta1/proto/geometry.proto
import sys
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name="google/cloud/vision_v1p2beta1/proto/geometry.proto",
package="google.cloud.vision.v1p2beta1",
syntax="proto3",
serialized_pb=_b(
'\n2google/cloud/vision_v1p2beta1/proto/geometry.proto\x12\x1dgoogle.cloud.vision.v1p2beta1"\x1e\n\x06Vertex\x12\t\n\x01x\x18\x01 \x01(\x05\x12\t\n\x01y\x18\x02 \x01(\x05"(\n\x10NormalizedVertex\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02"\x95\x01\n\x0c\x42oundingPoly\x12\x37\n\x08vertices\x18\x01 \x03(\x0b\x32%.google.cloud.vision.v1p2beta1.Vertex\x12L\n\x13normalized_vertices\x18\x02 \x03(\x0b\x32/.google.cloud.vision.v1p2beta1.NormalizedVertex"+\n\x08Position\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02\x12\t\n\x01z\x18\x03 \x01(\x02\x42|\n!com.google.cloud.vision.v1p2beta1B\rGeometryProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p2beta1;vision\xf8\x01\x01\x62\x06proto3'
),
)
_VERTEX = _descriptor.Descriptor(
name="Vertex",
full_name="google.cloud.vision.v1p2beta1.Vertex",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="x",
full_name="google.cloud.vision.v1p2beta1.Vertex.x",
index=0,
number=1,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="y",
full_name="google.cloud.vision.v1p2beta1.Vertex.y",
index=1,
number=2,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=85,
serialized_end=115,
)
_NORMALIZEDVERTEX = _descriptor.Descriptor(
name="NormalizedVertex",
full_name="google.cloud.vision.v1p2beta1.NormalizedVertex",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="x",
full_name="google.cloud.vision.v1p2beta1.NormalizedVertex.x",
index=0,
number=1,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="y",
full_name="google.cloud.vision.v1p2beta1.NormalizedVertex.y",
index=1,
number=2,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=117,
serialized_end=157,
)
_BOUNDINGPOLY = _descriptor.Descriptor(
name="BoundingPoly",
full_name="google.cloud.vision.v1p2beta1.BoundingPoly",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="vertices",
full_name="google.cloud.vision.v1p2beta1.BoundingPoly.vertices",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="normalized_vertices",
full_name="google.cloud.vision.v1p2beta1.BoundingPoly.normalized_vertices",
index=1,
number=2,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=160,
serialized_end=309,
)
_POSITION = _descriptor.Descriptor(
name="Position",
full_name="google.cloud.vision.v1p2beta1.Position",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="x",
full_name="google.cloud.vision.v1p2beta1.Position.x",
index=0,
number=1,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="y",
full_name="google.cloud.vision.v1p2beta1.Position.y",
index=1,
number=2,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="z",
full_name="google.cloud.vision.v1p2beta1.Position.z",
index=2,
number=3,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=311,
serialized_end=354,
)
_BOUNDINGPOLY.fields_by_name["vertices"].message_type = _VERTEX
_BOUNDINGPOLY.fields_by_name["normalized_vertices"].message_type = _NORMALIZEDVERTEX
DESCRIPTOR.message_types_by_name["Vertex"] = _VERTEX
DESCRIPTOR.message_types_by_name["NormalizedVertex"] = _NORMALIZEDVERTEX
DESCRIPTOR.message_types_by_name["BoundingPoly"] = _BOUNDINGPOLY
DESCRIPTOR.message_types_by_name["Position"] = _POSITION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Vertex = _reflection.GeneratedProtocolMessageType(
"Vertex",
(_message.Message,),
dict(
DESCRIPTOR=_VERTEX,
__module__="google.cloud.vision_v1p2beta1.proto.geometry_pb2",
__doc__="""X coordinate.
Attributes:
y:
Y coordinate.
""",
# @@protoc_insertion_point(class_scope:google.cloud.vision.v1p2beta1.Vertex)
),
)
_sym_db.RegisterMessage(Vertex)
NormalizedVertex = _reflection.GeneratedProtocolMessageType(
"NormalizedVertex",
(_message.Message,),
dict(
DESCRIPTOR=_NORMALIZEDVERTEX,
__module__="google.cloud.vision_v1p2beta1.proto.geometry_pb2",
__doc__="""X coordinate.
Attributes:
y:
Y coordinate.
""",
# @@protoc_insertion_point(class_scope:google.cloud.vision.v1p2beta1.NormalizedVertex)
),
)
_sym_db.RegisterMessage(NormalizedVertex)
BoundingPoly = _reflection.GeneratedProtocolMessageType(
"BoundingPoly",
(_message.Message,),
dict(
DESCRIPTOR=_BOUNDINGPOLY,
__module__="google.cloud.vision_v1p2beta1.proto.geometry_pb2",
__doc__="""A bounding polygon for the detected image annotation.
Attributes:
vertices:
The bounding polygon vertices.
normalized_vertices:
The bounding polygon normalized vertices.
""",
# @@protoc_insertion_point(class_scope:google.cloud.vision.v1p2beta1.BoundingPoly)
),
)
_sym_db.RegisterMessage(BoundingPoly)
Position = _reflection.GeneratedProtocolMessageType(
"Position",
(_message.Message,),
dict(
DESCRIPTOR=_POSITION,
__module__="google.cloud.vision_v1p2beta1.proto.geometry_pb2",
__doc__="""A 3D position in the image, used primarily for Face detection landmarks.
A valid Position must have both x and y coordinates. The position
coordinates are in the same scale as the original image.
Attributes:
x:
X coordinate.
y:
Y coordinate.
z:
Z coordinate (or depth).
""",
# @@protoc_insertion_point(class_scope:google.cloud.vision.v1p2beta1.Position)
),
)
_sym_db.RegisterMessage(Position)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(
descriptor_pb2.FileOptions(),
_b(
"\n!com.google.cloud.vision.v1p2beta1B\rGeometryProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p2beta1;vision\370\001\001"
),
)
# @@protoc_insertion_point(module_scope)
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Service that stores identities and issues and manages tokens
HEADERS
-------
HTTP_ is a standard http header
HTTP_X is an extended http header
> Coming in from initial call
HTTP_X_AUTH_TOKEN : the client token being passed in
HTTP_X_STORAGE_TOKEN: the client token being passed in (legacy Rackspace use)
to support cloud files
> Used for communication between components
www-authenticate : only used if this component is being used remotely
HTTP_AUTHORIZATION : basic auth password used to validate the connection
> What we add to the request for use by the OpenStack service
HTTP_X_AUTHORIZATION: the client identity being passed in
"""
import logging
import os
import routes
import sys
from webob import Response
from webob.exc import (HTTPNotFound,
HTTPConflict,
HTTPBadRequest)
# Make a source checkout importable: if the grandparent of the launched
# script contains the keystone package, put it at the front of sys.path.
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir,
                                                os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'keystone', '__init__.py')):
    sys.path.insert(0, POSSIBLE_TOPDIR)
from keystone.common import wsgi
import keystone.logic.service as serv
import keystone.logic.types.tenant as tenants
import keystone.logic.types.auth as auth
import keystone.logic.types.user as users
import keystone.common.template as template
import keystone.utils as utils
# Module-level logger for this server.
logger = logging.getLogger('keystone.server')
# Version metadata reported by the version endpoint.
VERSION_STATUS = "ALPHA"
VERSION_DATE = "2011-04-23T00:00:00Z"
# Shared identity service instance used by every controller below.
service = serv.IDMService()
class StaticFilesController(wsgi.Controller):
    """
    Static Files Controller -
    Controller for contract documents
    """

    def __init__(self, options):
        self.options = options

    @utils.wrap_error
    def get_pdf_contract(self, req):
        """Serve the developer guide as a PDF."""
        return template.static_file(Response(), req,
                                    "content/idmdevguide.pdf",
                                    root=utils.get_app_root(),
                                    mimetype="application/pdf")

    @utils.wrap_error
    def get_wadl_contract(self, req):
        """Serve the WADL contract document."""
        return template.static_file(Response(), req, "identity.wadl",
                                    root=utils.get_app_root(),
                                    mimetype="application/vnd.sun.wadl+xml")

    @utils.wrap_error
    def get_xsd_contract(self, req, xsd):
        """Serve an XSD schema document by name."""
        return template.static_file(Response(), req, "/xsd/" + xsd,
                                    root=utils.get_app_root(),
                                    mimetype="application/xml")

    @utils.wrap_error
    def get_xsd_atom_contract(self, req, xsd):
        """Serve an Atom-related XSD schema document by name."""
        return template.static_file(Response(), req, "/xsd/atom/" + xsd,
                                    root=utils.get_app_root(),
                                    mimetype="application/xml")
class VersionController(wsgi.Controller):
    """
    Version Controller -
    Controller for version related methods
    """

    def __init__(self, options):
        self.options = options

    @utils.wrap_error
    def get_version_info(self, req):
        """Render version info as XML or JSON depending on the request."""
        resp = Response()
        resp.charset = 'UTF-8'
        # Pick the template and content type matching the Accept rules.
        if utils.is_xml_response(req):
            tpl_name = "keystone/content/version.xml.tpl"
            resp.content_type = "application/xml"
        else:
            tpl_name = "keystone/content/version.json.tpl"
            resp.content_type = "application/json"
        resp_file = os.path.join(POSSIBLE_TOPDIR, tpl_name)
        resp.unicode_body = template.template(
            resp_file,
            HOST=req.environ.get("SERVER_NAME"),
            PORT=req.environ.get("SERVER_PORT"),
            VERSION_STATUS=VERSION_STATUS,
            VERSION_DATE=VERSION_DATE)
        return resp
class AuthController(wsgi.Controller):
    """
    Auth Controller -
    Controller for token related operations
    """

    def __init__(self, options):
        self.options = options
        self.request = None

    @utils.wrap_error
    def authenticate(self, req):
        """Issue a token for the supplied password credentials."""
        self.request = req
        credentials = utils.get_normalized_request_content(
            auth.PasswordCredentials, req)
        return utils.send_result(200, req, service.authenticate(credentials))

    @utils.wrap_error
    def validate_token(self, req, token_id):
        """Validate a token, optionally scoped via the ?belongsTo= query."""
        belongs_to = req.GET.get("belongsTo")
        result = service.validate_token(utils.get_auth_token(req),
                                        token_id, belongs_to)
        return utils.send_result(200, req, result)

    @utils.wrap_error
    def delete_token(self, req, token_id):
        """Revoke a token; responds 204 on success."""
        result = service.revoke_token(utils.get_auth_token(req), token_id)
        return utils.send_result(204, req, result)
class TenantController(wsgi.Controller):
    """
    Tenant Controller -
    Controller for Tenant and Tenant Group related operations
    """

    def __init__(self, options):
        self.options = options

    def _pagination_params(self, req):
        """Return the (marker, limit, url) pagination triple for *req*.

        Replaces three identical inline copies.  ``limit`` keeps the raw
        query-string value when the client supplies one (a string, as
        before) and defaults to the integer 10 otherwise, preserving the
        original behavior exactly.
        """
        marker = req.GET.get("marker")
        limit = req.GET.get("limit", 10)
        url = '%s://%s:%s%s' % (req.environ['wsgi.url_scheme'],
                                req.environ.get("SERVER_NAME"),
                                req.environ.get("SERVER_PORT"),
                                req.environ['PATH_INFO'])
        return marker, limit, url

    @utils.wrap_error
    def create_tenant(self, req):
        """Create a tenant from the normalized request body; 201 on success."""
        tenant = utils.get_normalized_request_content(tenants.Tenant, req)
        return utils.send_result(201, req,
            service.create_tenant(utils.get_auth_token(req), tenant))

    @utils.wrap_error
    def get_tenants(self, req):
        """List tenants, paginated by marker/limit."""
        marker, limit, url = self._pagination_params(req)
        # Named tenant_list to avoid shadowing the `tenants` types module.
        tenant_list = service.get_tenants(utils.get_auth_token(req), marker,
                                          limit, url)
        return utils.send_result(200, req, tenant_list)

    @utils.wrap_error
    def get_tenant(self, req, tenant_id):
        """Fetch a single tenant by id."""
        tenant = service.get_tenant(utils.get_auth_token(req), tenant_id)
        return utils.send_result(200, req, tenant)

    @utils.wrap_error
    def update_tenant(self, req, tenant_id):
        """Update an existing tenant from the request body."""
        tenant = utils.get_normalized_request_content(tenants.Tenant, req)
        rval = service.update_tenant(utils.get_auth_token(req), tenant_id,
                                     tenant)
        return utils.send_result(200, req, rval)

    @utils.wrap_error
    def delete_tenant(self, req, tenant_id):
        """Delete a tenant; responds 204 on success."""
        rval = service.delete_tenant(utils.get_auth_token(req), tenant_id)
        return utils.send_result(204, req, rval)

    @utils.wrap_error
    def create_tenant_group(self, req, tenant_id):
        """Create a group under the given tenant; 201 on success."""
        group = utils.get_normalized_request_content(tenants.Group, req)
        return utils.send_result(201, req,
            service.create_tenant_group(utils.get_auth_token(req),
                                        tenant_id, group))

    @utils.wrap_error
    def get_tenant_groups(self, req, tenant_id):
        """List a tenant's groups, paginated by marker/limit."""
        marker, limit, url = self._pagination_params(req)
        groups = service.get_tenant_groups(utils.get_auth_token(req),
                                           tenant_id, marker, limit, url)
        return utils.send_result(200, req, groups)

    @utils.wrap_error
    def get_tenant_group(self, req, tenant_id, group_id):
        """Fetch a single group belonging to a tenant."""
        group = service.get_tenant_group(utils.get_auth_token(req), tenant_id,
                                         group_id)
        return utils.send_result(200, req, group)

    @utils.wrap_error
    def update_tenant_group(self, req, tenant_id, group_id):
        """Update a tenant group from the request body."""
        group = utils.get_normalized_request_content(tenants.Group, req)
        rval = service.update_tenant_group(utils.get_auth_token(req),
                                           tenant_id, group_id, group)
        return utils.send_result(200, req, rval)

    @utils.wrap_error
    def delete_tenant_group(self, req, tenant_id, group_id):
        """Delete a tenant group; responds 204 on success."""
        rval = service.delete_tenant_group(utils.get_auth_token(req),
                                           tenant_id, group_id)
        return utils.send_result(204, req, rval)

    @utils.wrap_error
    def get_users_tenant_group(self, req, tenant_id, group_id):
        """List users in a tenant group, paginated by marker/limit."""
        marker, limit, url = self._pagination_params(req)
        members = service.get_users_tenant_group(utils.get_auth_token(req),
                                                 tenant_id, group_id, marker,
                                                 limit, url)
        return utils.send_result(200, req, members)

    @utils.wrap_error
    def add_user_tenant_group(self, req, tenant_id, group_id, user_id):
        """Add a user to a tenant group; responds 201 on success."""
        return utils.send_result(201, req, service.add_user_tenant_group(
            utils.get_auth_token(req), tenant_id, group_id, user_id))

    @utils.wrap_error
    def delete_user_tenant_group(self, req, tenant_id, group_id, user_id):
        """Remove a user from a tenant group; responds 204 on success."""
        return utils.send_result(204, req, service.delete_user_tenant_group(
            utils.get_auth_token(req), tenant_id, group_id, user_id))
class UserController(wsgi.Controller):
"""
User Controller -
Controller for User related operations
"""
def __init__(self, options):
self.options = options
@utils.wrap_error
def create_user(self, req, tenant_id):
user = utils.get_normalized_request_content(users.User, req)
return utils.send_result(201, req,
service.create_user(utils.get_auth_token(req), \
tenant_id, user))
@utils.wrap_error
def get_tenant_users(self, req, tenant_id):
marker = None
if "marker" in req.GET:
marker = req.GET["marker"]
if "limit" in req.GET:
limit = req.GET["limit"]
else:
limit = 10
url = '%s://%s:%s%s' % (req.environ['wsgi.url_scheme'],
req.environ.get("SERVER_NAME"),
req.environ.get("SERVER_PORT"),
req.environ['PATH_INFO'])
users = service.get_tenant_users(utils.get_auth_token(req), \
tenant_id, marker, limit, url)
return utils.send_result(200, req, users)
@utils.wrap_error
def get_user_groups(self, req, tenant_id, user_id):
marker = None
if "marker" in req.GET:
marker = req.GET["marker"]
if "limit" in req.GET:
limit = req.GET["limit"]
else:
limit = 10
url = '%s://%s:%s%s' % (req.environ['wsgi.url_scheme'],
req.environ.get("SERVER_NAME"),
req.environ.get("SERVER_PORT"),
req.environ['PATH_INFO'])
groups = service.get_user_groups(utils.get_auth_token(req),
tenant_id, user_id, marker, limit, url)
return utils.send_result(200, req, groups)
@utils.wrap_error
def get_user(self, req, tenant_id, user_id):
user = service.get_user(utils.get_auth_token(req), tenant_id, user_id)
return utils.send_result(200, req, user)
@utils.wrap_error
def update_user(self, req, user_id, tenant_id):
user = utils.get_normalized_request_content(users.User_Update, req)
rval = service.update_user(utils.get_auth_token(req),
user_id, user, tenant_id)
return utils.send_result(200, req, rval)
@utils.wrap_error
def delete_user(self, req, user_id, tenant_id):
rval = service.delete_user(utils.get_auth_token(req), user_id,
tenant_id)
return utils.send_result(204, req, rval)
@utils.wrap_error
def set_user_password(self, req, user_id, tenant_id):
user = utils.get_normalized_request_content(users.User_Update, req)
rval = service.set_user_password(utils.get_auth_token(req), user_id,
user, tenant_id)
return utils.send_result(200, req, rval)
@utils.wrap_error
def set_user_enabled(self, req, user_id, tenant_id):
    """Enable or disable a user based on the normalized request body."""
    payload = utils.get_normalized_request_content(users.User_Update, req)
    token = utils.get_auth_token(req)
    rval = service.enable_disable_user(token, user_id, payload, tenant_id)
    return utils.send_result(200, req, rval)
@utils.wrap_error
def add_user_tenant(self, req, user_id, tenant_id):
    """Attach an existing user to a tenant."""
    token = utils.get_auth_token(req)
    return utils.send_result(200, req,
                             service.add_user_tenant(token, user_id,
                                                     tenant_id))
class GroupsController(wsgi.Controller):
    """Controller for global (cross-tenant) group operations."""

    def __init__(self, options):
        self.options = options

    @utils.wrap_error
    def create_group(self, req):
        """Create a global group from the normalized request body."""
        group = utils.get_normalized_request_content(tenants.GlobalGroup, req)
        token = utils.get_auth_token(req)
        return utils.send_result(201, req,
                                 service.create_global_group(token, group))

    @utils.wrap_error
    def get_groups(self, req):
        """List global groups with marker/limit pagination."""
        marker = req.GET.get("marker")
        limit = req.GET.get("limit", 10)
        env = req.environ
        url = '%s://%s:%s%s' % (env['wsgi.url_scheme'],
                                env.get("SERVER_NAME"),
                                env.get("SERVER_PORT"),
                                env['PATH_INFO'])
        groups = service.get_global_groups(utils.get_auth_token(req),
                                           marker, limit, url)
        return utils.send_result(200, req, groups)

    @utils.wrap_error
    def get_group(self, req, group_id):
        """Fetch a single global group by id."""
        group = service.get_global_group(utils.get_auth_token(req), group_id)
        return utils.send_result(200, req, group)

    @utils.wrap_error
    def update_group(self, req, group_id):
        """Update a global group from the normalized request body."""
        group = utils.get_normalized_request_content(tenants.GlobalGroup, req)
        rval = service.update_global_group(utils.get_auth_token(req),
                                           group_id, group)
        return utils.send_result(200, req, rval)

    @utils.wrap_error
    def delete_group(self, req, group_id):
        """Delete a global group; responds 204 (No Content)."""
        rval = service.delete_global_group(utils.get_auth_token(req), group_id)
        return utils.send_result(204, req, rval)

    @utils.wrap_error
    def get_users_global_group(self, req, group_id):
        """List the users of a global group with marker/limit pagination."""
        marker = req.GET.get("marker")
        limit = req.GET.get("limit", 10)
        env = req.environ
        url = '%s://%s:%s%s' % (env['wsgi.url_scheme'],
                                env.get("SERVER_NAME"),
                                env.get("SERVER_PORT"),
                                env['PATH_INFO'])
        members = service.get_users_global_group(utils.get_auth_token(req),
                                                 group_id, marker, limit, url)
        return utils.send_result(200, req, members)

    @utils.wrap_error
    def add_user_global_group(self, req, group_id, user_id):
        """Add a user to a global group; responds 201 (Created)."""
        rval = service.add_user_global_group(utils.get_auth_token(req),
                                             group_id, user_id)
        return utils.send_result(201, req, rval)

    @utils.wrap_error
    def delete_user_global_group(self, req, group_id, user_id):
        """Remove a user from a global group; responds 204 (No Content)."""
        rval = service.delete_user_global_group(utils.get_auth_token(req),
                                                group_id, user_id)
        return utils.send_result(204, req, rval)
class KeystoneAPI(wsgi.Router):
    """WSGI entry point for all Keystone Auth API requests.

    Builds the Routes mapper that dispatches each /v1.0 URL + HTTP method
    to the matching controller action, then delegates to wsgi.Router.
    """

    def __init__(self, options):
        # `options` is the merged paste.deploy configuration dict; it is
        # passed through to every controller.
        self.options = options
        mapper = routes.Mapper()

        # Token Operations
        auth_controller = AuthController(options)
        # No method condition here: authenticate answers any HTTP method
        # on /v1.0/token.  NOTE(review): presumably POST-only was intended;
        # confirm before restricting.
        mapper.connect("/v1.0/token", controller=auth_controller,
                       action="authenticate")
        mapper.connect("/v1.0/token/{token_id}", controller=auth_controller,
                       action="validate_token",
                       conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/token/{token_id}", controller=auth_controller,
                       action="delete_token",
                       conditions=dict(method=["DELETE"]))

        # Tenant Operations (CRUD on /v1.0/tenants)
        tenant_controller = TenantController(options)
        mapper.connect("/v1.0/tenants", controller=tenant_controller,
                       action="create_tenant", conditions=dict(method=["POST"]))
        mapper.connect("/v1.0/tenants", controller=tenant_controller,
                       action="get_tenants", conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/tenants/{tenant_id}",
                       controller=tenant_controller,
                       action="get_tenant", conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/tenants/{tenant_id}",
                       controller=tenant_controller,
                       action="update_tenant", conditions=dict(method=["PUT"]))
        mapper.connect("/v1.0/tenants/{tenant_id}",
                       controller=tenant_controller,
                       action="delete_tenant", conditions=dict(method=["DELETE"]))

        # Tenant Group Operations
        # NOTE(review): these five routes use singular "/v1.0/tenant/..."
        # while the group-user routes below use plural "/v1.0/tenants/...".
        # Looks inconsistent -- confirm which form clients depend on before
        # changing either.
        mapper.connect("/v1.0/tenant/{tenant_id}/groups",
                       controller=tenant_controller,
                       action="create_tenant_group",
                       conditions=dict(method=["POST"]))
        mapper.connect("/v1.0/tenant/{tenant_id}/groups",
                       controller=tenant_controller,
                       action="get_tenant_groups",
                       conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/tenant/{tenant_id}/groups/{group_id}",
                       controller=tenant_controller,
                       action="get_tenant_group",
                       conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/tenant/{tenant_id}/groups/{group_id}",
                       controller=tenant_controller,
                       action="update_tenant_group",
                       conditions=dict(method=["PUT"]))
        mapper.connect("/v1.0/tenant/{tenant_id}/groups/{group_id}",
                       controller=tenant_controller,
                       action="delete_tenant_group",
                       conditions=dict(method=["DELETE"]))
        mapper.connect("/v1.0/tenants/{tenant_id}/groups/{group_id}/users",
                       controller=tenant_controller,
                       action="get_users_tenant_group",
                       conditions=dict(method=["GET"]))
        mapper.connect(
            "/v1.0/tenants/{tenant_id}/groups/{group_id}/users/{user_id}",
            controller=tenant_controller,
            action="add_user_tenant_group",
            conditions=dict(method=["PUT"]))
        mapper.connect(
            "/v1.0/tenants/{tenant_id}/groups/{group_id}/users/{user_id}",
            controller=tenant_controller,
            action="delete_user_tenant_group",
            conditions=dict(method=["DELETE"]))

        # User Operations (scoped under a tenant)
        user_controller = UserController(options)
        mapper.connect("/v1.0/tenants/{tenant_id}/users",
                       controller=user_controller,
                       action="create_user",
                       conditions=dict(method=["POST"]))
        mapper.connect("/v1.0/tenants/{tenant_id}/users",
                       controller=user_controller,
                       action="get_tenant_users",
                       conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/tenants/{tenant_id}/users/{user_id}/groups",
                       controller=user_controller,
                       action="get_user_groups",
                       conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/tenants/{tenant_id}/users/{user_id}",
                       controller=user_controller,
                       action="get_user",
                       conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/tenants/{tenant_id}/users/{user_id}",
                       controller=user_controller,
                       action="update_user",
                       conditions=dict(method=["PUT"]))
        mapper.connect("/v1.0/tenants/{tenant_id}/users/{user_id}",
                       controller=user_controller,
                       action="delete_user",
                       conditions=dict(method=["DELETE"]))
        mapper.connect("/v1.0/tenants/{tenant_id}/users/{user_id}/password",
                       controller=user_controller,
                       action="set_user_password",
                       conditions=dict(method=["PUT"]))
        mapper.connect("/v1.0/tenants/{tenant_id}/users/{user_id}/add",
                       controller=user_controller,
                       action="add_user_tenant",
                       conditions=dict(method=["PUT"]))
        # TODO(review): this route was annotated "Test this, test failed" --
        # verify the set_user_enabled mapping actually works end to end.
        mapper.connect("/v1.0/tenants/{tenant_id}/users/{user_id}/enabled",
                       controller=user_controller,
                       action="set_user_enabled",
                       conditions=dict(method=["PUT"]))

        # Global Groups (not scoped to any tenant)
        groups_controller = GroupsController(options)
        mapper.connect("/v1.0/groups", controller=groups_controller,
                       action="create_group", conditions=dict(method=["POST"]))
        mapper.connect("/v1.0/groups", controller=groups_controller,
                       action="get_groups", conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/groups/{group_id}", controller=groups_controller,
                       action="get_group", conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/groups/{group_id}", controller=groups_controller,
                       action="update_group", conditions=dict(method=["PUT"]))
        mapper.connect("/v1.0/groups/{group_id}", controller=groups_controller,
                       action="delete_group", conditions=dict(method=["DELETE"]))
        mapper.connect("/v1.0/groups/{group_id}/users",
                       controller=groups_controller,
                       action="get_users_global_group",
                       conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/groups/{group_id}/users/{user_id}",
                       controller=groups_controller,
                       action="add_user_global_group",
                       conditions=dict(method=["PUT"]))
        mapper.connect("/v1.0/groups/{group_id}/users/{user_id}",
                       controller=groups_controller,
                       action="delete_user_global_group",
                       conditions=dict(method=["DELETE"]))

        # Miscellaneous Operations: version discovery on /v1.0 and /v1.0/
        version_controller = VersionController(options)
        mapper.connect("/v1.0/", controller=version_controller,
                       action="get_version_info",
                       conditions=dict(method=["GET"]))
        mapper.connect("/v1.0", controller=version_controller,
                       action="get_version_info",
                       conditions=dict(method=["GET"]))

        # Static Files Controller (contract docs and schemas)
        static_files_controller = StaticFilesController(options)
        mapper.connect("/v1.0/idmdevguide.pdf",
                       controller=static_files_controller,
                       action="get_pdf_contract",
                       conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/identity.wadl",
                       controller=static_files_controller,
                       action="get_identity_wadl",
                       conditions=dict(method=["GET"]))
        # NOTE(review): both xsd routes dispatch to get_pdf_contract --
        # presumably an xsd-serving action was intended; confirm.
        mapper.connect("/v1.0/xsd/{xsd}",
                       controller=static_files_controller,
                       action="get_pdf_contract",
                       conditions=dict(method=["GET"]))
        mapper.connect("/v1.0/xsd/atom/{xsd}",
                       controller=static_files_controller,
                       action="get_pdf_contract",
                       conditions=dict(method=["GET"]))

        super(KeystoneAPI, self).__init__(mapper)
def app_factory(global_conf, **local_conf):
    """paste.deploy app factory for creating OpenStack API server apps.

    Merges the per-section (local) options over the paste global
    configuration and returns the wired-up KeystoneAPI WSGI application.
    """
    # The original wrapped the merge in try/except, printed the error with
    # the Python-2-only ``print err`` statement, and then crashed anyway
    # with a NameError because ``conf`` was never bound.  Letting any real
    # exception propagate gives paste.deploy a meaningful traceback.
    conf = global_conf.copy()
    conf.update(local_conf)
    return KeystoneAPI(conf)
|
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import state
class sid(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isis-neighbor-attribute/neighbors/neighbor/subTLVs/subTLVs/lan-adjacency-sid/sid. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Adjacency Segment-IDs List. An IGP-Adjacency Segment is an IGP
    segment attached to a unidirectional adjacency or a set of
    unidirectional adjacencies. By default, an IGP-Adjacency Segment is
    local to the node which advertises it.
    """

    # __slots__ keeps instances lightweight; "__state" is name-mangled to
    # _sid__state inside this class body.
    __slots__ = ("_path_helper", "_extmethods", "__state")

    _yang_name = "sid"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        # Initialize the single child: the read-only (config false)
        # "state" container.
        self._path_helper = False
        self._extmethods = False
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        # An optional single positional argument may supply an object with
        # the same elements; copy any changed elements across.
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                # Only copy elements that differ from their defaults.
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Walk up via _parent when attached to a tree; otherwise return the
        # absolute schema path of this container.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "mt-isis-neighbor-attribute",
                "neighbors",
                "neighbor",
                "subTLVs",
                "subTLVs",
                "lan-adjacency-sid",
                "sid",
            ]

    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/lan_adjacency_sid/sid/state (container)

        YANG Description: State parameters of LAN Adjacency-SID.
        """
        return self.__state

    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/lan_adjacency_sid/sid/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: State parameters of LAN Adjacency-SID.
        """
        # Coerce through the value's own unified type, if it declares one.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__state = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_state(self):
        # Reset the child container to a fresh default instance.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    # Read-only public property (no setter exposed: config false).
    state = __builtin__.property(_get_state)

    _pyangbind_elements = OrderedDict([("state", state)])
from . import state
class sid(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isis-neighbor-attribute/neighbors/neighbor/subTLVs/subTLVs/lan-adjacency-sid/sid. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Adjacency Segment-IDs List. An IGP-Adjacency Segment is an IGP
    segment attached to a unidirectional adjacency or a set of
    unidirectional adjacencies. By default, an IGP-Adjacency Segment is
    local to the node which advertises it.
    """

    # __slots__ keeps instances lightweight; "__state" is name-mangled to
    # _sid__state inside this class body.
    __slots__ = ("_path_helper", "_extmethods", "__state")

    _yang_name = "sid"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        # Initialize the single child: the read-only (config false)
        # "state" container.
        self._path_helper = False
        self._extmethods = False
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        # An optional single positional argument may supply an object with
        # the same elements; copy any changed elements across.
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                # Only copy elements that differ from their defaults.
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Walk up via _parent when attached to a tree; otherwise return the
        # absolute schema path of this container.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "mt-isis-neighbor-attribute",
                "neighbors",
                "neighbor",
                "subTLVs",
                "subTLVs",
                "lan-adjacency-sid",
                "sid",
            ]

    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/lan_adjacency_sid/sid/state (container)

        YANG Description: State parameters of LAN Adjacency-SID.
        """
        return self.__state

    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/subTLVs/subTLVs/lan_adjacency_sid/sid/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: State parameters of LAN Adjacency-SID.
        """
        # Coerce through the value's own unified type, if it declares one.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__state = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_state(self):
        # Reset the child container to a fresh default instance.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    # Read-only public property (no setter exposed: config false).
    state = __builtin__.property(_get_state)

    _pyangbind_elements = OrderedDict([("state", state)])
|
|
#!/usr/bin/python
import math
from collections import defaultdict
from olypy.oid import to_oid
from olypy.oid import to_int
import olymap.utilities as u
from olymap.utilities import get_oid, get_name, get_subkind, get_who_has
import olymap.maps as maps
import pathlib
from olypy.db import loop_here
from jinja2 import Environment, PackageLoader, select_autoescape
import olymap
from olymap.ship import build_basic_ship_dict
from olymap.item import build_basic_item_dict, get_magic_item
from olymap.player import build_complete_player_dict
from olymap.loc import build_basic_loc_dict, get_road_here, get_gate_here, get_gate_start_end, get_where_info, get_region
from olymap.char import build_basic_char_dict, get_items_list, get_loc
def ship_report(data, outdir):
    """Write the master ship report HTML page to *outdir*.

    Args:
        data: dict of unit id -> unit box for the whole game world.
        outdir: directory the report file is written into.
    """
    ship_list = [unit for unit in data if u.is_ship(data[unit])]
    ship_list.sort(key=int)
    ship = build_ship_dict(ship_list, data)
    template = olymap.env.get_template('master_ship_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_ship_report.html'),
              'w') as outf:
        outf.write(template.render(ship=ship))
def build_ship_dict(ship_list, data):
    """Return a list of basic ship dicts for the given ship ids."""
    return [build_basic_ship_dict(ship_id, data[ship_id], data)
            for ship_id in ship_list]
def item_report(data, trade_chain, outdir):
    """Write the master item report HTML page to *outdir*.

    Args:
        data: dict of unit id -> unit box.
        trade_chain: mapping of item -> trading cities, passed through to
            the per-item dict builder.
        outdir: directory the report file is written into.
    """
    item_list = [unit for unit in data if u.is_item(data[unit])]
    item_list.sort(key=int)
    itemz = build_item_dict(item_list, data, trade_chain)
    template = olymap.env.get_template('master_item_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_item_report.html'),
              'w') as outf:
        outf.write(template.render(itemz=itemz))
def build_item_dict(item_list, data, trade_chain):
    """Return item dicts (including magic-item info) for the given ids."""
    itemz = []
    for item_id in item_list:
        rec = data[item_id]
        entry = build_basic_item_dict(item_id, rec, data, trade_chain)
        entry['magic_item'] = get_magic_item(data, item_id, rec)
        itemz.append(entry)
    return itemz
def player_report(data, outdir):
    """Write the master player report HTML page to *outdir*."""
    player_list = [unit for unit in data if u.is_player(data[unit])]
    player_list.sort(key=int)
    player = build_player_dict(player_list, data)
    template = olymap.env.get_template('master_player_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_player_report.html'),
              'w') as outf:
        outf.write(template.render(player=player))
def build_player_dict(player_list, data):
    """Return complete player dicts for the given player ids."""
    return [build_complete_player_dict(player_id, data[player_id], data)
            for player_id in player_list]
def healing_potion_report(data, outdir):
    """Write the master healing-potion report HTML page to *outdir*.

    Healing potions are items whose use key is '2'.
    """
    potion_list = [unit for unit in data
                   if u.is_item(data[unit]) and
                   u.get_use_key(data[unit]) == '2']
    potion_list.sort(key=int)
    healing_potion = build_item_dict(potion_list, data, None)
    template = olymap.env.get_template('master_healing_potion_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_healing_potion_report.html'),
              'w') as outf:
        outf.write(template.render(healing_potion=healing_potion))
def orb_report(data, outdir):
    """Write the master orb report HTML page to *outdir*."""
    orb_list = [unit for unit in data
                if u.is_item(data[unit]) and u.is_orb(data[unit])]
    orb_list.sort(key=int)
    orb = build_item_dict(orb_list, data, None)
    template = olymap.env.get_template('master_orb_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_orb_report.html'),
              'w') as outf:
        outf.write(template.render(orb=orb))
def projected_cast_potion_report(data, outdir):
    """Write the master projected-cast potion report HTML page to *outdir*."""
    projected_cast_list = [unit for unit in data
                           if u.is_item(data[unit]) and
                           u.is_projected_cast(data[unit])]
    projected_cast_list.sort(key=int)
    projected_cast = build_item_dict(projected_cast_list, data, None)
    template = olymap.env.get_template('master_projected_cast_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_projected_cast_report.html'),
              'w') as outf:
        outf.write(template.render(projected_cast=projected_cast))
def location_report(data, outdir):
    """Write the master location report HTML page to *outdir*."""
    location_list = [unit for unit in data if u.is_loc(data[unit])]
    location_list.sort(key=int)
    # nbr_men_flag=True: include the men count for every location.
    loc = build_loc_dict(location_list, data, True, None)
    template = olymap.env.get_template('master_location_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_location_report.html'),
              'w') as outf:
        outf.write(template.render(loc=loc))
def build_loc_dict(loc_list, data, nbr_men_flag=False, garrisons_chain=None,
                   port_city_flag=False, nbr_provinces_flag=False):
    """Return location dicts for the given ids, with optional extras.

    Args:
        loc_list: iterable of location ids.
        data: dict of unit id -> unit box.
        nbr_men_flag: when true, add a 'nbr_men' count per location.
        garrisons_chain: passed through to build_basic_loc_dict.
        port_city_flag: when true, add a 'port_city' boolean.
        nbr_provinces_flag: when true, add a 'nbr_provinces' count.
    """
    loc = []
    for loc_id in loc_list:
        loc_rec = data[loc_id]
        entry = build_basic_loc_dict(loc_id, loc_rec, data, garrisons_chain)
        # Truthiness tests replace the original "== True" comparisons.
        if nbr_men_flag:
            nbrmen, _, _ = maps.count_stuff(loc_rec, data)
            entry['nbr_men'] = nbrmen
        if port_city_flag:
            entry['port_city'] = u.is_port_city(loc_rec, data)
        if nbr_provinces_flag:
            # Child locations live under LI/hl; absent means none.
            entry['nbr_provinces'] = len(loc_rec.get('LI', {}).get('hl', []))
        loc.append(entry)
    return loc
def skill_xref_report(data, teaches_chain, outdir):
    """Write the skill -> teaching-cities cross-reference report.

    Args:
        data: dict of unit id -> unit box.
        teaches_chain: mapping of skill id -> list of city ids teaching it.
        outdir: directory the report file is written into.
    """
    rows = []
    for skill_id in sorted(teaches_chain):
        city_list = teaches_chain[skill_id]
        if city_list and skill_id is not None:
            skill_rec = data[skill_id]
            for city in city_list:
                city_rec = data[city]
                # NOTE: the original also looked up the city's parent
                # location (where_rec) but never used it; dropped.
                loc_dict = {'id': city,
                            'oid': to_oid(city),
                            'name': get_name(city_rec)}
                rows.append({'id': skill_id,
                             'oid': to_oid(skill_id),
                             'name': get_name(skill_rec),
                             'loc_dict': loc_dict,
                             'where_dict': get_where_info(city_rec, data),
                             'region_dict': get_region(city, data)})
    template = olymap.env.get_template('master_skill_xref_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_skill_xref_report.html'),
              'w') as outf:
        outf.write(template.render(loc=rows))
def trade_report(data, trade_chain, outdir):
    """Write the master trade report: who buys and sells each item.

    Args:
        data: dict of unit id -> unit box.
        trade_chain: mapping of item id -> list of (city_id, kind) pairs,
            where kind '1' marks a buyer and anything else a seller.
        outdir: directory the report file is written into.
    """
    rows = []
    for item_id in sorted(trade_chain):
        city_list = trade_chain[item_id]
        if city_list and item_id is not None:
            item_rec = data[item_id]
            buy_list = []
            sell_list = []
            for city_id, kind in city_list:
                city_rec = data[city_id]
                region_id = u.region(city_id, data)
                region_rec = data[region_id]
                entry = {'id': city_id,
                         'oid': to_oid(city_id),
                         'name': get_name(city_rec),
                         'region_oid': to_oid(region_id),
                         'region_name': get_name(region_rec)}
                # kind '1' == the city buys the item; otherwise it sells.
                if kind == '1':
                    buy_list.append(entry)
                else:
                    sell_list.append(entry)
            rows.append({'id': item_id,
                         'oid': to_oid(item_id),
                         'name': get_name(item_rec),
                         'buy_list': buy_list,
                         'sell_list': sell_list})
    template = olymap.env.get_template('master_trade_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_trade_report.html'),
              'w') as outf:
        outf.write(template.render(loc=rows))
def road_report(data, outdir):
    """Write the master road report HTML page to *outdir*."""
    road_list = [unit for unit in data
                 if u.is_road_or_gate(data[unit]) and
                 get_road_here(data[unit])]
    road_list.sort(key=int)
    loc = build_road_dict(road_list, data)
    template = olymap.env.get_template('master_road_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_road_report.html'),
              'w') as outf:
        outf.write(template.render(loc=loc))
def build_road_dict(loc_list, data):
    """Return loc dicts for roads/gates, each with its start/end info."""
    out = []
    for loc_id in loc_list:
        rec = data[loc_id]
        entry = build_basic_loc_dict(loc_id, rec, data)
        entry['road'] = get_gate_start_end(rec, data)
        out.append(entry)
    return out
def gate_report(data, outdir):
    """Write the master gate report HTML page to *outdir*."""
    gate_list = [unit for unit in data
                 if u.is_road_or_gate(data[unit]) and
                 get_gate_here(data[unit])]
    gate_list.sort(key=int)
    # Gates share the road dict shape (start/end pair).
    loc = build_road_dict(gate_list, data)
    template = olymap.env.get_template('master_gate_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_gate_report.html'),
              'w') as outf:
        outf.write(template.render(loc=loc))
def character_report(data, outdir):
    """Write the master character report HTML page to *outdir*."""
    character_list = [unit for unit in data if u.is_char(data[unit])]
    character_list.sort(key=int)
    char = build_char_dict(character_list, data)
    template = olymap.env.get_template('master_character_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_character_report.html'),
              'w') as outf:
        outf.write(template.render(char=char))
def build_char_dict(char_list, data):
    """Return basic character dicts for the given character ids."""
    return [build_basic_char_dict(char_id, data[char_id], data)
            for char_id in char_list]
def graveyard_report(data, outdir):
    """Write the master graveyard report HTML page to *outdir*."""
    graveyard_list = [unit for unit in data if u.is_graveyard(data[unit])]
    graveyard_list.sort(key=int)
    rows = []
    for unit in graveyard_list:
        rec = data[unit]
        # A graveyard may link to a target location via SL/lt.
        target_dict = None
        if 'SL' in rec and 'lt' in rec['SL']:
            target_id = rec['SL']['lt'][0]
            target_rec = data[target_id]
            target_dict = {'id': target_id,
                           'oid': to_oid(target_id),
                           'name': get_name(target_rec)}
        # Key fixed from the original typo 'id:' (stray colon in the key).
        rows.append({'id': unit,
                     'oid': to_oid(unit),
                     'name': get_name(rec),
                     'where_dict': get_where_info(rec, data),
                     'region_dict': get_region(unit, data),
                     'target_dict': target_dict})
    template = olymap.env.get_template('master_graveyard_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_graveyard_report.html'),
              'w') as outf:
        outf.write(template.render(loc=rows))
def faeryhill_report(data, outdir):
    """Write the master faery-hill report HTML page to *outdir*."""
    faeryhill_list = [unit for unit in data if u.is_faeryhill(data[unit])]
    faeryhill_list.sort(key=int)
    rows = []
    for unit in faeryhill_list:
        rec = data[unit]
        # A faery hill may link to a target location via SL/lt.
        target_dict = None
        target_region_dict = None
        if 'SL' in rec and 'lt' in rec['SL']:
            target_id = rec['SL']['lt'][0]
            target_rec = data[target_id]
            target_dict = {'id': target_id,
                           'oid': to_oid(target_id),
                           'name': get_name(target_rec)}
            target_region_dict = get_region(target_id, data)
        # Key fixed from the original typo 'id:' (stray colon in the key).
        rows.append({'id': unit,
                     'oid': to_oid(unit),
                     'name': get_name(rec),
                     'where_dict': get_where_info(rec, data),
                     'region_dict': get_region(unit, data),
                     'target_dict': target_dict,
                     'target_region_dict': target_region_dict})
    template = olymap.env.get_template('master_faeryhill_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_faeryhill_report.html'),
              'w') as outf:
        outf.write(template.render(loc=rows))
def castle_report(data, outdir, garrisons_chain):
    """Write the master castle report HTML page to *outdir*.

    Args:
        data: dict of unit id -> unit box.
        outdir: directory the report file is written into.
        garrisons_chain: garrison lookup passed through to build_loc_dict.
    """
    castle_list = [unit for unit in data if u.is_castle(data[unit])]
    castle_list.sort(key=int)
    loc = build_loc_dict(castle_list, data, True, garrisons_chain)
    template = olymap.env.get_template('master_castle_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_castle_report.html'),
              'w') as outf:
        outf.write(template.render(loc=loc))
def city_report(data, outdir):
    """Write the master city report HTML page to *outdir*."""
    city_list = [unit for unit in data if u.is_city(data[unit])]
    city_list.sort(key=int)
    # nbr_men + port_city flags enabled for cities.
    loc = build_loc_dict(city_list, data, True, None, True)
    template = olymap.env.get_template('master_city_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_city_report.html'),
              'w') as outf:
        outf.write(template.render(loc=loc))
def region_report(data, outdir):
    """Write the master region report HTML page to *outdir*."""
    region_list = [unit for unit in data if u.is_region(data[unit])]
    region_list.sort(key=int)
    # Only the province count is requested for regions.
    loc = build_loc_dict(region_list, data, False, None, False, True)
    template = olymap.env.get_template('master_region_report.html')
    # with-statement fixes the leaked file handle in the original.
    with open(pathlib.Path(outdir).joinpath('master_region_report.html'),
              'w') as outf:
        outf.write(template.render(loc=loc))
def mage_report(data, outdir):
    """Write the master mage report HTML file into *outdir*.

    Args:
        data: mapping of unit id (numeric string) -> unit record (game data).
        outdir: directory in which ``master_mage_report.html`` is created.
    """
    # Collect every magician character, sorted numerically by id.
    mage_list = [unit for unit in data if u.is_magician(data[unit])]
    sort_mage_list = sorted(mage_list, key=lambda x: int(x))
    char = build_char_dict(sort_mage_list, data)
    template = olymap.env.get_template('master_mage_report.html')
    # 'with' guarantees the file handle is closed (original leaked it).
    with open(pathlib.Path(outdir).joinpath('master_mage_report.html'), 'w') as outf:
        outf.write(template.render(char=char))
def priest_report(data, outdir):
    """Write the master priest report HTML file into *outdir*.

    Args:
        data: mapping of unit id (numeric string) -> unit record (game data).
        outdir: directory in which ``master_priest_report.html`` is created.
    """
    # Collect every priest character, sorted numerically by id.
    priest_list = [unit for unit in data if u.is_priest(data[unit])]
    sort_priest_list = sorted(priest_list, key=lambda x: int(x))
    char = build_char_dict(sort_priest_list, data)
    template = olymap.env.get_template('master_priest_report.html')
    # 'with' guarantees the file handle is closed (original leaked it).
    with open(pathlib.Path(outdir).joinpath('master_priest_report.html'), 'w') as outf:
        outf.write(template.render(char=char))
def gold_report(data, outdir):
    """Write the master gold report: characters holding more than 10,000 gold.

    Args:
        data: mapping of unit id (numeric string) -> unit record (game data).
        outdir: directory in which ``master_gold_report.html`` is created.
    """
    # Collect every character, then walk them in numeric id order.
    character_list = [unit for unit in data if u.is_char(data[unit])]
    sort_gold_list = []
    for unit in sorted(character_list, key=lambda x: int(x)):
        character_rec = data[unit]
        # Item '1' is gold; request only that item's entry.
        items_list = get_items_list(character_rec, data, False, '1')
        # Report only characters holding more than 10,000 gold.
        if items_list and int(items_list[0]['qty']) > 10000:
            sort_gold_list.append({'id': unit,
                                   'oid': to_oid(unit),
                                   'name': get_name(character_rec),
                                   'loc': get_loc(character_rec, data),
                                   'qty': int(items_list[0]['qty'])})
    template = olymap.env.get_template('master_gold_report.html')
    # 'with' guarantees the file handle is closed (original leaked it).
    with open(pathlib.Path(outdir).joinpath('master_gold_report.html'), 'w') as outf:
        outf.write(template.render(char=sort_gold_list))
|
|
# Copyright (c) 2020 Dell Inc. or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""performance_constants.py"""
# Unisphere REST performance constants
PERFORMANCE = 'performance'
CATEGORY = 'category'
CATEGORY_NAME = 'categoryName'
CATEGORIES = 'categories'
KEYS = 'keys'
METRICS = 'metrics'
METRIC = 'metric'
METRIC_NAME = 'metricName'
METRICS_ALL = 'metrics_all'
METRICS_KPI = 'metrics_kpi'
KPI = 'kpi'
ALL = 'all'
All_CAP = 'All'
AVERAGE = 'Average'
MAXIMUM = 'Maximum'
LIST = 'list'
UPDATE = 'update'
START_DATE = 'startDate'
START_DATE_SN = 'start_date'
END_DATE = 'endDate'
END_DATE_SN = 'end_date'
FA_DATE = 'firstAvailableDate'
LA_DATE = 'lastAvailableDate'
DATA_FORMAT = 'dataFormat'
RESULT = 'result'
REP_LEVEL = 'reporting_level'
ONE_MINUTE = 60000
ONE_HOUR = 3600000
# Director Tags
BE_DIR_TAGS = ['DF', 'DX']
FE_DIR_TAGS = ['EF', 'FA', 'FE', 'SE']
RDF_DIR_TAGS = ['RF', 'RE']
IM_DIR_TAGS = ['IM']
EDS_DIR_TAGS = ['ED']
# Performance categories and keys
ARRAY = 'Array'
ARRAY_ID = 'array_id'
ARRAY_INFO = 'arrayInfo'
BE_DIR = 'BEDirector'
BE_DIR_INFO = 'beDirectorInfo'
BE_EMU = 'BeEmulation'
BE_EMU_ID = 'beEmulationId'
BE_EMU_INFO = 'beEmulationInfo'
BE_PORT = 'BEPort'
BE_PORT_INFO = 'bePortInfo'
BOARD = 'Board'
BOARD_ID = 'boardId'
BOARD_INFO = 'boardInfo'
CACHE_PART = 'CachePartition'
CACHE_PART_ID = 'cachePartitionId'
CACHE_PART_INFO = 'cachePartitionInfo'
COLLECTION_INT = 'collectionintervalmins'
CORE = 'Core'
CORE_ID = 'coreId'
CORE_INFO = 'coreInfo'
DAYS_TO_FULL = 'daystofull'
DAYS_TO_FULL_RESULT = 'daysToFullObjectResultType'
DB = 'Database'
DB_ID = 'databaseId'
DB_INFO = 'databaseInfo'
DEV_GRP = 'DeviceGroup'
DEV_GRP_ID = 'deviceGroupId'
DEV_GRP_INFO = 'deviceGroupInfo'
DIR_ID = 'directorId'
DISK = 'Disk'
DISK_ID = 'diskId'
DISK_INFO = 'diskInfo'
DISK_GRP = 'DiskGroup'
DISK_GRP_ID = 'diskGroupId'
DISK_GRP_INFO = 'diskGroupInfo'
DISK_TECH = 'diskTechnology'
DISK_TECH_POOL = 'DiskTechPool'
DISK_TECH_POOL_INFO = 'diskTechPoolInfo'
EDS_DIR = 'EDSDirector'
EDS_DIR_INFO = 'edsDirectorInfo'
EDS_EMU = 'EDSEmulation'
EDS_EMU_ID = 'edsEmulationId'
EDS_EMU_INFO = 'edsEmulationInfo'
EXT_DIR = 'ExternalDirector'
EXT_DIR_INFO = 'externalDirectorInfo'
EXT_DISK = 'ExternalDisk'
EXT_DISK_INFO = 'externalDiskInfo'
EXT_DISK_GRP = 'ExternalDiskGroup'
EXT_DISK_GRP_INFO = 'externalDiskGroupInfo'
EXT_PORT = 'ExternalPort'
FE_DIR = 'FEDirector'
FE_DIR_INFO = 'feDirectorInfo'
FE_EMU = 'FeEmulation'
FE_EMU_ID = 'feEmulationId'
FE_EMU_INFO = 'feEmulationInfo'
FE_PORT = 'FEPort'
FE_PORT_INFO = 'fePortInfo'
FICON_EMU = 'FiconEmulation'
FICON_EMU_ID = 'ficonEmulationId'
FICON_EMU_INFO = 'ficonEmulationInfo'
FICON_EMU_THR = 'FiconEmulationThread'
FICON_EMU_THR_ID = 'ficonEmulationThreadId'
FICON_EMU_THR_INFO = 'ficonEmulationThreadInfo'
FICON_PORT_THR = 'FiconPortThread'
FICON_PORT_THR_ID = 'ficonPortThreadId'
FICON_PORT_THR_INFO = 'ficonPortThreadInfo'
HELP = 'help'
HOST = 'Host'
HOST_ID = 'hostId'
HOST_INFO = 'hostInfo'
IM_DIR = 'IMDirector'
IM_DIR_INFO = 'imDirectorInfo'
IM_EMU = 'IMEmulation'
IM_EMU_ID = 'imEmulationId'
IM_EMU_INFO = 'iMEmulationInfo'
INIT = 'Initiator'
INIT_ID = 'initiatorId'
INIT_INFO = 'initiatorInfo'
INIT_BY_PORT = 'InitiatorByPort'
INIT_BY_PORT_ID = 'initiatorByPortId'
INIT_BY_PORT_INFO = 'initiatorByPortInfo'
INSTANCE_ID = 'instanceId'
INSTANCE_ID_SN = 'instance_id'
IP_INT = 'IPInterface'
IP_INT_ID = 'ipInterfaceId'
ISCSI_CLIENT_INFO = 'iSCSIClientInfo'
ISCSI_TGT = 'ISCSITarget'
ISCSI_TGT_ID_KEY = 'iscsiTargetId'
ISCSI_TGT_ID_METRICS = 'iSCSITargetId'
ISCSI_TGT_INFO = 'iSCSITargetInfo'
MV = 'MaskingView'
MV_ID = 'maskingViewId'
MV_INFO = 'maskingViewInfo'
PG = 'PortGroup'
PG_ID = 'portGroupId'
PG_INFO = 'portGroupInfo'
POOL_ID = 'poolId'
POOL_INFO = 'poolInfo'
PORT_ID = 'portId'
PROJECTION = 'projection'
RA_GRP_ID = 'raGroupId'
RA_GRP_INFO = 'raGroupId'
REAL_TIME = 'realtime'
REAL_TIME_SN = 'real_time'
REG = 'registration'
REG_DETAILS = 'registrationdetails'
REG_DETAILS_INFO = 'registrationDetailsInfo'
REG_DIAGNOSTIC = 'diagnostic'
REGISTER = 'register'
RDF_DIR = 'RDFDirector'
RDF_DIR_INFO = 'rdfDirectorInfo'
RDF_EMU = 'RDFEmulation'
RDF_EMU_ID = 'rdfEmulationId'
RDF_EMU_INFO = 'rdfEmulationInfo'
RDF_PORT = 'RDFPort'
RDF_PORT_INFO = 'rdfPortInfo'
RDFA = 'RDFA'
RDFA_INFO = 'rdfaInfo'
RDFS = 'RDFS'
RDFS_INFO = 'rdfsInfo'
SG = 'StorageGroup'
SG_ID = 'storageGroupId'
SG_INFO = 'storageGroupInfo'
SG_BY_POOL = 'StorageGroupByPool'
SRP = 'SRP'
SRP_ID = 'srpId'
SRP_INFO = 'srpInfo'
STORAGE_CONT = 'StorageContainer'
STORAGE_CONT_ID = 'storageContainerId'
STORAGE_CONT_INFO = 'storageContainerInfo'
STORAGE_RES = 'StorageResource'
STORAGE_RES_ID = 'storageResourceId'
STORAGE_RES_INFO = 'storageResourceInfo'
STORAGE_RES_BY_POOL = 'StorageResourceByPool'
STORAGE_TIER = 'StorageTier'
SUMMARY = 'Summary'
SYMM_ID = 'symmetrixId'
TIMES = 'times'
TIMESTAMP = 'timestamp'
THIN_POOL = 'ThinPool'
# Threshold constants
ALERT = 'alert'
ALERT_ERR = 'alertError'
THRESHOLD = 'threshold'
PERF_THRESH = 'performanceThreshold'
THRESH_CAT = 'endpoint'
INFO_LVL = 'INFORMATION'
WARN_LVL = 'WARNING'
CRIT_LVL = 'CRITICAL'
FIRST_THRESH = 'firstThreshold'
FIRST_THRESH_OCC = 'firstThresholdOccurrrences'
FIRST_THRESH_SAMP = 'firstThresholdSamples'
FIRST_THRESH_SEV = 'firstThresholdSeverity'
SEC_THRESH = 'secondThreshold'
SEC_THRESH_OCC = 'secondThresholdOccurrrences'
SEC_THRESH_SAMP = 'secondThresholdSamples'
SEC_THRESH_SEV = 'secondThresholdSeverity'
# Backup
BACKUP = 'backup'
FILENAME = 'filename'
FILENAME_PREFIX = 'PyU4V'
NAMED_RT_TRACES = 'namedrealtimetraces'
LAST_DAY_DIAG = 'lastdayofdiagnostic'
|
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
from mox3 import mox
from neutronclient.common import exceptions as n_exc
from neutronclient.v2_0 import client
from six.moves import range
from nova import context
from nova import exception
from nova.network.neutronv2 import api as neutronapi
from nova.network.security_group import neutron_driver
from nova import test
class TestNeutronDriver(test.NoDBTestCase):
    """Tests for the Neutron-backed security group driver.

    mox is used in record/replay mode: expectations are recorded on
    ``self.moxed_client`` in each test and ``self.mox.ReplayAll()``
    switches to replay before the driver call under test, so the
    recorded call order is significant.
    """
    def setUp(self):
        super(TestNeutronDriver, self).setUp()
        # Make every neutronapi.get_client() call return the same mock.
        self.mox.StubOutWithMock(neutronapi, 'get_client')
        self.moxed_client = self.mox.CreateMock(client.Client)
        neutronapi.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)
        self.context = context.RequestContext('userid', 'my_tenantid')
        setattr(self.context,
                'auth_token',
                'bff4a5a6b9eb4ea2a6efec6eefb77936')
    def test_list_with_project(self):
        """Listing with a project scopes the Neutron query by tenant_id."""
        project_id = '0af70a4d22cf4652824ddc1f2435dd85'
        security_groups_list = {'security_groups': []}
        self.moxed_client.list_security_groups(tenant_id=project_id).AndReturn(
            security_groups_list)
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        sg_api.list(self.context, project=project_id)
    def test_list_with_all_tenants_and_admin_context(self):
        """Admin with all_tenants lists groups unscoped (no tenant_id)."""
        project_id = '0af70a4d22cf4652824ddc1f2435dd85'
        search_opts = {'all_tenants': 1}
        security_groups_list = {'security_groups': []}
        admin_context = context.RequestContext('user1', project_id, True)
        self.mox.ReplayAll()
        with mock.patch.object(
                self.moxed_client,
                'list_security_groups',
                return_value=security_groups_list) as mock_list_secgroup:
            sg_api = neutron_driver.SecurityGroupAPI()
            sg_api.list(admin_context,
                        project=project_id,
                        search_opts=search_opts)
            mock_list_secgroup.assert_called_once_with()
    def test_list_without_all_tenants_and_admin_context(self):
        """Admin without all_tenants is still scoped to its project."""
        project_id = '0af70a4d22cf4652824ddc1f2435dd85'
        security_groups_list = {'security_groups': []}
        admin_context = context.RequestContext('user1', project_id, True)
        self.mox.ReplayAll()
        with mock.patch.object(
                self.moxed_client,
                'list_security_groups',
                return_value=security_groups_list) as mock_list_secgroup:
            sg_api = neutron_driver.SecurityGroupAPI()
            sg_api.list(admin_context, project=project_id)
            mock_list_secgroup.assert_called_once_with(tenant_id=project_id)
    def test_list_with_all_tenants_sec_name_and_admin_context(self):
        """Names filter plus all_tenants re-adds tenant scoping with names."""
        project_id = '0af70a4d22cf4652824ddc1f2435dd85'
        search_opts = {'all_tenants': 1}
        security_group_names = ['secgroup_ssh']
        security_groups_list = {'security_groups': []}
        admin_context = context.RequestContext('user1', project_id, True)
        self.mox.ReplayAll()
        with mock.patch.object(
                self.moxed_client,
                'list_security_groups',
                return_value=security_groups_list) as mock_list_secgroup:
            sg_api = neutron_driver.SecurityGroupAPI()
            sg_api.list(admin_context, project=project_id,
                        names=security_group_names,
                        search_opts=search_opts)
            mock_list_secgroup.assert_called_once_with(
                name=security_group_names,
                tenant_id=project_id)
    def test_list_with_all_tenants_sec_name_ids_and_admin_context(self):
        """Name and id filters are both forwarded along with tenant_id."""
        project_id = '0af70a4d22cf4652824ddc1f2435dd85'
        search_opts = {'all_tenants': 1}
        security_group_names = ['secgroup_ssh']
        security_group_ids = ['id1']
        security_groups_list = {'security_groups': []}
        admin_context = context.RequestContext('user1', project_id, True)
        self.mox.ReplayAll()
        with mock.patch.object(
                self.moxed_client,
                'list_security_groups',
                return_value=security_groups_list) as mock_list_secgroup:
            sg_api = neutron_driver.SecurityGroupAPI()
            sg_api.list(admin_context, project=project_id,
                        names=security_group_names,
                        ids=security_group_ids,
                        search_opts=search_opts)
            mock_list_secgroup.assert_called_once_with(
                name=security_group_names,
                id=security_group_ids,
                tenant_id=project_id)
    def test_list_with_all_tenants_not_admin(self):
        """Non-admin asking for all_tenants stays scoped to own tenant."""
        search_opts = {'all_tenants': 1}
        security_groups_list = {'security_groups': []}
        self.mox.ReplayAll()
        with mock.patch.object(
                self.moxed_client,
                'list_security_groups',
                return_value=security_groups_list) as mock_list_secgroup:
            sg_api = neutron_driver.SecurityGroupAPI()
            sg_api.list(self.context, project=self.context.tenant,
                        search_opts=search_opts)
            mock_list_secgroup.assert_called_once_with(
                tenant_id=self.context.tenant)
    def test_get_with_name_duplicated(self):
        """get() by name resolves the id first, then shows that group."""
        sg_name = 'web_server'
        expected_sg_id = '85cc3048-abc3-43cc-89b3-377341426ac5'
        list_security_groups = {'security_groups':
                                [{'name': sg_name,
                                  'id': expected_sg_id,
                                  'tenant_id': self.context.tenant,
                                  'description': 'server',
                                  'rules': []}
                                 ]}
        self.moxed_client.list_security_groups(name=sg_name, fields='id',
            tenant_id=self.context.tenant).AndReturn(list_security_groups)
        expected_sg = {'security_group': {'name': sg_name,
                                          'id': expected_sg_id,
                                          'tenant_id': self.context.tenant,
                                          'description': 'server', 'rules': []}}
        self.moxed_client.show_security_group(expected_sg_id).AndReturn(
            expected_sg)
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        observed_sg = sg_api.get(self.context, name=sg_name)
        # The driver translates Neutron's tenant_id to nova's project_id.
        expected_sg['security_group']['project_id'] = self.context.tenant
        del expected_sg['security_group']['tenant_id']
        self.assertEqual(expected_sg['security_group'], observed_sg)
    def test_get_with_invalid_name(self):
        """A failure in show_security_group maps to SecurityGroupNotFound."""
        sg_name = 'invalid_name'
        expected_sg_id = '85cc3048-abc3-43cc-89b3-377341426ac5'
        list_security_groups = {'security_groups':
                                [{'name': sg_name,
                                  'id': expected_sg_id,
                                  'tenant_id': self.context.tenant,
                                  'description': 'server',
                                  'rules': []}
                                 ]}
        self.moxed_client.list_security_groups(name=sg_name, fields='id',
            tenant_id=self.context.tenant).AndReturn(list_security_groups)
        self.moxed_client.show_security_group(expected_sg_id).AndRaise(
            TypeError)
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        self.assertRaises(exception.SecurityGroupNotFound,
                          sg_api.get, self.context, name=sg_name)
    def test_create_security_group_with_bad_request(self):
        """Neutron BadRequest on create maps to exception.Invalid."""
        name = 'test-security-group'
        description = None
        body = {'security_group': {'name': name,
                                   'description': description}}
        message = "Invalid input. Reason: 'None' is not a valid string."
        self.moxed_client.create_security_group(
            body).AndRaise(n_exc.BadRequest(message=message))
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        self.assertRaises(exception.Invalid,
                          sg_api.create_security_group, self.context, name,
                          description)
    def test_create_security_group_exceed_quota(self):
        """HTTP 409 on create maps to SecurityGroupLimitExceeded."""
        name = 'test-security-group'
        description = 'test-security-group'
        body = {'security_group': {'name': name,
                                   'description': description}}
        message = "Quota exceeded for resources: ['security_group']"
        self.moxed_client.create_security_group(
            body).AndRaise(n_exc.NeutronClientException(status_code=409,
                                                        message=message))
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        self.assertRaises(exception.SecurityGroupLimitExceeded,
                          sg_api.create_security_group, self.context, name,
                          description)
    def test_create_security_group_rules_exceed_quota(self):
        """HTTP 409 on rule create maps to SecurityGroupLimitExceeded."""
        vals = {'protocol': 'tcp', 'cidr': '0.0.0.0/0',
                'parent_group_id': '7ae75663-277e-4a0e-8f87-56ea4e70cb47',
                'group_id': None, 'from_port': 1025, 'to_port': 1025}
        body = {'security_group_rules': [{'remote_group_id': None,
                'direction': 'ingress', 'protocol': 'tcp', 'ethertype': 'IPv4',
                'port_range_max': 1025, 'port_range_min': 1025,
                'security_group_id': '7ae75663-277e-4a0e-8f87-56ea4e70cb47',
                'remote_ip_prefix': '0.0.0.0/0'}]}
        name = 'test-security-group'
        message = "Quota exceeded for resources: ['security_group_rule']"
        self.moxed_client.create_security_group_rule(
            body).AndRaise(n_exc.NeutronClientException(status_code=409,
                                                        message=message))
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        self.assertRaises(exception.SecurityGroupLimitExceeded,
                          sg_api.add_rules, self.context, None, name, [vals])
    def test_create_security_group_rules_bad_request(self):
        """HTTP 400 on rule create maps to exception.Invalid."""
        vals = {'protocol': 'icmp', 'cidr': '0.0.0.0/0',
                'parent_group_id': '7ae75663-277e-4a0e-8f87-56ea4e70cb47',
                'group_id': None, 'to_port': 255}
        body = {'security_group_rules': [{'remote_group_id': None,
                'direction': 'ingress', 'protocol': 'icmp',
                'ethertype': 'IPv4', 'port_range_max': 255,
                'security_group_id': '7ae75663-277e-4a0e-8f87-56ea4e70cb47',
                'remote_ip_prefix': '0.0.0.0/0'}]}
        name = 'test-security-group'
        message = "ICMP code (port-range-max) 255 is provided but ICMP type" \
                  " (port-range-min) is missing"
        self.moxed_client.create_security_group_rule(
            body).AndRaise(n_exc.NeutronClientException(status_code=400,
                                                        message=message))
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        self.assertRaises(exception.Invalid, sg_api.add_rules,
                          self.context, None, name, [vals])
    def test_list_security_group_with_no_port_range_and_not_tcp_udp_icmp(self):
        """Rules without port ranges translate to from/to_port of -1."""
        sg1 = {'description': 'default',
               'id': '07f1362f-34f6-4136-819a-2dcde112269e',
               'name': 'default',
               'tenant_id': 'c166d9316f814891bcb66b96c4c891d6',
               'security_group_rules':
                   [{'direction': 'ingress',
                     'ethertype': 'IPv4',
                     'id': '0a4647f1-e1aa-488d-90e1-97a7d0293beb',
                     'port_range_max': None,
                     'port_range_min': None,
                     'protocol': '51',
                     'remote_group_id': None,
                     'remote_ip_prefix': None,
                     'security_group_id':
                         '07f1362f-34f6-4136-819a-2dcde112269e',
                     'tenant_id': 'c166d9316f814891bcb66b96c4c891d6'}]}
        self.moxed_client.list_security_groups().AndReturn(
            {'security_groups': [sg1]})
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        result = sg_api.list(self.context)
        expected = [{'rules':
                     [{'from_port': -1, 'protocol': '51', 'to_port': -1,
                       'parent_group_id': '07f1362f-34f6-4136-819a-2dcde112269e',
                       'cidr': '0.0.0.0/0', 'group_id': None,
                       'id': '0a4647f1-e1aa-488d-90e1-97a7d0293beb'}],
                     'project_id': 'c166d9316f814891bcb66b96c4c891d6',
                     'id': '07f1362f-34f6-4136-819a-2dcde112269e',
                     'name': 'default', 'description': 'default'}]
        self.assertEqual(expected, result)
    def test_instances_security_group_bindings(self):
        """Bindings aggregate group names from all ports of a server."""
        server_id = 'c5a20e8d-c4b0-47cf-9dca-ebe4f758acb1'
        port1_id = '4c505aec-09aa-47bc-bcc0-940477e84dc0'
        port2_id = 'b3b31a53-6e29-479f-ae5c-00b7b71a6d44'
        sg1_id = '2f7ce969-1a73-4ef9-bbd6-c9a91780ecd4'
        sg2_id = '20c89ce5-9388-4046-896e-64ffbd3eb584'
        servers = [{'id': server_id}]
        ports = [{'id': port1_id, 'device_id': server_id,
                  'security_groups': [sg1_id]},
                 {'id': port2_id, 'device_id': server_id,
                  'security_groups': [sg2_id]}]
        port_list = {'ports': ports}
        sg1 = {'id': sg1_id, 'name': 'wol'}
        sg2 = {'id': sg2_id, 'name': 'eor'}
        security_groups_list = {'security_groups': [sg1, sg2]}
        sg_bindings = {server_id: [{'name': 'wol'}, {'name': 'eor'}]}
        self.moxed_client.list_ports(device_id=[server_id]).AndReturn(
            port_list)
        self.moxed_client.list_security_groups(
            id=mox.SameElementsAs([sg2_id, sg1_id])).AndReturn(
                security_groups_list)
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        result = sg_api.get_instances_security_groups_bindings(
                                  self.context, servers)
        self.assertEqual(result, sg_bindings)
    def _test_instances_security_group_bindings_scale(self, num_servers):
        """Helper: port listing is chunked in batches of max_query servers."""
        max_query = 150
        sg1_id = '2f7ce969-1a73-4ef9-bbd6-c9a91780ecd4'
        sg2_id = '20c89ce5-9388-4046-896e-64ffbd3eb584'
        sg1 = {'id': sg1_id, 'name': 'wol'}
        sg2 = {'id': sg2_id, 'name': 'eor'}
        security_groups_list = {'security_groups': [sg1, sg2]}
        servers = []
        device_ids = []
        ports = []
        sg_bindings = {}
        for i in range(0, num_servers):
            server_id = "server-%d" % i
            port_id = "port-%d" % i
            servers.append({'id': server_id})
            device_ids.append(server_id)
            ports.append({'id': port_id,
                          'device_id': server_id,
                          'security_groups': [sg1_id, sg2_id]})
            sg_bindings[server_id] = [{'name': 'wol'}, {'name': 'eor'}]
        # Expect one list_ports call per batch of max_query device ids.
        for x in range(0, num_servers, max_query):
            self.moxed_client.list_ports(
                device_id=device_ids[x:x + max_query]).\
                AndReturn({'ports': ports[x:x + max_query]})
        self.moxed_client.list_security_groups(
            id=mox.SameElementsAs([sg2_id, sg1_id])).AndReturn(
                security_groups_list)
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        result = sg_api.get_instances_security_groups_bindings(
                                  self.context, servers)
        self.assertEqual(result, sg_bindings)
    def test_instances_security_group_bindings_less_than_max(self):
        self._test_instances_security_group_bindings_scale(100)
    def test_instances_security_group_bindings_max(self):
        self._test_instances_security_group_bindings_scale(150)
    def test_instances_security_group_bindings_more_then_max(self):
        self._test_instances_security_group_bindings_scale(300)
    def test_instances_security_group_bindings_with_hidden_sg(self):
        """Groups the user cannot see are omitted from the bindings."""
        servers = [{'id': 'server_1'}]
        ports = [{'id': '1', 'device_id': 'dev_1', 'security_groups': ['1']},
                 {'id': '2', 'device_id': 'dev_1', 'security_groups': ['2']}]
        port_list = {'ports': ports}
        sg1 = {'id': '1', 'name': 'wol'}
        # User doesn't have access to sg2
        security_groups_list = {'security_groups': [sg1]}
        sg_bindings = {'dev_1': [{'name': 'wol'}]}
        self.moxed_client.list_ports(device_id=['server_1']).AndReturn(
            port_list)
        self.moxed_client.\
            list_security_groups(id=mox.SameElementsAs(['1', '2'])).AndReturn(
                security_groups_list)
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        result = sg_api.get_instances_security_groups_bindings(
                                  self.context, servers)
        self.assertEqual(result, sg_bindings)
    def test_instance_empty_security_groups(self):
        """An instance whose ports carry no groups yields an empty list."""
        port_list = {'ports': [{'id': 1, 'device_id': '1',
                     'security_groups': []}]}
        self.moxed_client.list_ports(device_id=['1']).AndReturn(port_list)
        self.mox.ReplayAll()
        sg_api = neutron_driver.SecurityGroupAPI()
        result = sg_api.get_instance_security_groups(self.context, '1')
        self.assertEqual([], result)
class TestNeutronDriverWithoutMock(test.NoDBTestCase):
    """Driver tests that need no mocked Neutron client."""
    def test_validate_property(self):
        """Names up to 255 chars (including empty) pass validation;
        over-long or None values raise exception.Invalid."""
        sg_api = neutron_driver.SecurityGroupAPI()
        for accepted in ('foo', ''):
            sg_api.validate_property(accepted, 'name', None)
        for rejected in ('a' * 256, None):
            self.assertRaises(exception.Invalid, sg_api.validate_property,
                              rejected, 'name', None)
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum
from six import with_metaclass
from azure.core import CaseInsensitiveEnumMeta
class AccessTier(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Required for storage accounts where kind = BlobStorage. The access tier used for billing.
    """

    HOT = "Hot"
    COOL = "Cool"

class AccountImmutabilityPolicyState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The ImmutabilityPolicy state defines the mode of the policy. Disabled state disables the
    policy, Unlocked state allows increase and decrease of immutability retention time and also
    allows toggling allowProtectedAppendWrites property, Locked state only allows the increase of
    the immutability retention time. A policy can only be created in a Disabled or Unlocked state
    and can be toggled between the two states. Only a policy in an Unlocked state can transition to
    a Locked state which cannot be reverted.
    """

    UNLOCKED = "Unlocked"
    LOCKED = "Locked"
    DISABLED = "Disabled"

class AccountStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Gets the status indicating whether the primary location of the storage account is available or
    unavailable.
    """

    AVAILABLE = "available"
    UNAVAILABLE = "unavailable"

class BlobInventoryPolicyName(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Name of the blob inventory policy; 'default' is the only value."""

    DEFAULT = "default"

class BlobRestoreProgressStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The status of blob restore progress. Possible values are: - InProgress: Indicates that blob
    restore is ongoing. - Complete: Indicates that blob restore has been completed successfully. -
    Failed: Indicates that blob restore is failed.
    """

    IN_PROGRESS = "InProgress"
    COMPLETE = "Complete"
    FAILED = "Failed"
class Bypass(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. Possible values are
    any combination of Logging|Metrics|AzureServices (For example, "Logging, Metrics"), or None to
    bypass none of those traffics.
    """

    NONE = "None"
    LOGGING = "Logging"
    METRICS = "Metrics"
    AZURE_SERVICES = "AzureServices"

class CorsRuleAllowedMethodsItem(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """HTTP methods that may be allowed by a CORS rule."""

    DELETE = "DELETE"
    GET = "GET"
    HEAD = "HEAD"
    MERGE = "MERGE"
    POST = "POST"
    OPTIONS = "OPTIONS"
    PUT = "PUT"

class CreatedByType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The type of identity that created the resource.
    """

    USER = "User"
    APPLICATION = "Application"
    MANAGED_IDENTITY = "ManagedIdentity"
    KEY = "Key"

class DefaultAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Specifies the default action of allow or deny when no other rules match.
    """

    ALLOW = "Allow"
    DENY = "Deny"

class DefaultSharePermission(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Default share permission for users using Kerberos authentication if RBAC role is not assigned.
    """

    NONE = "None"
    STORAGE_FILE_DATA_SMB_SHARE_READER = "StorageFileDataSmbShareReader"
    STORAGE_FILE_DATA_SMB_SHARE_CONTRIBUTOR = "StorageFileDataSmbShareContributor"
    STORAGE_FILE_DATA_SMB_SHARE_ELEVATED_CONTRIBUTOR = "StorageFileDataSmbShareElevatedContributor"
class DirectoryServiceOptions(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Indicates the directory service used.
    """

    NONE = "None"
    AADDS = "AADDS"
    AD = "AD"

class EnabledProtocols(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The authentication protocol that is used for the file share. Can only be specified when
    creating a share.
    """

    SMB = "SMB"
    NFS = "NFS"

class EncryptionScopeSource(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The provider for the encryption scope. Possible values (case-insensitive): Microsoft.Storage,
    Microsoft.KeyVault.
    """

    MICROSOFT_STORAGE = "Microsoft.Storage"
    MICROSOFT_KEY_VAULT = "Microsoft.KeyVault"

class EncryptionScopeState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The state of the encryption scope. Possible values (case-insensitive): Enabled, Disabled.
    """

    ENABLED = "Enabled"
    DISABLED = "Disabled"

class ExpirationAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The SAS expiration action. Can only be Log.
    """

    LOG = "Log"

class ExtendedLocationTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The type of extendedLocation.
    """

    EDGE_ZONE = "EdgeZone"

class Format(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """This is a required field, it specifies the format for the inventory files.
    """

    CSV = "Csv"
    PARQUET = "Parquet"

class GeoReplicationStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The status of the secondary location. Possible values are: - Live: Indicates that the secondary
    location is active and operational. - Bootstrap: Indicates initial synchronization from the
    primary location to the secondary location is in progress.This typically occurs when
    replication is first enabled. - Unavailable: Indicates that the secondary location is
    temporarily unavailable.
    """

    LIVE = "Live"
    BOOTSTRAP = "Bootstrap"
    UNAVAILABLE = "Unavailable"

class HttpProtocol(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The protocol permitted for a request made with the account SAS.
    """

    HTTPS_HTTP = "https,http"
    HTTPS = "https"

class IdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The identity type.
    """

    NONE = "None"
    SYSTEM_ASSIGNED = "SystemAssigned"
    USER_ASSIGNED = "UserAssigned"
    SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
class ImmutabilityPolicyState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The ImmutabilityPolicy state of a blob container, possible values include: Locked and Unlocked.
    """

    LOCKED = "Locked"
    UNLOCKED = "Unlocked"

class ImmutabilityPolicyUpdateType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The ImmutabilityPolicy update type of a blob container, possible values include: put, lock and
    extend.
    """

    PUT = "put"
    LOCK = "lock"
    EXTEND = "extend"

class InventoryRuleType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The valid value is Inventory
    """

    INVENTORY = "Inventory"

class KeyPermission(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Permissions for the key -- read-only or full permissions.
    """

    READ = "Read"
    FULL = "Full"

class KeySource(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The encryption keySource (provider). Possible values (case-insensitive): Microsoft.Storage,
    Microsoft.Keyvault
    """

    MICROSOFT_STORAGE = "Microsoft.Storage"
    MICROSOFT_KEYVAULT = "Microsoft.Keyvault"

class KeyType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Encryption key type to be used for the encryption service. 'Account' key type implies that an
    account-scoped encryption key will be used. 'Service' key type implies that a default service
    key is used.
    """

    SERVICE = "Service"
    ACCOUNT = "Account"

class Kind(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Indicates the type of storage account.
    """

    STORAGE = "Storage"
    STORAGE_V2 = "StorageV2"
    BLOB_STORAGE = "BlobStorage"
    FILE_STORAGE = "FileStorage"
    BLOCK_BLOB_STORAGE = "BlockBlobStorage"

class LargeFileSharesState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Allow large file shares if sets to Enabled. It cannot be disabled once it is enabled.
    """

    DISABLED = "Disabled"
    ENABLED = "Enabled"

class LeaseContainerRequestAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Specifies the lease action. Can be one of the available actions.
    """

    ACQUIRE = "Acquire"
    RENEW = "Renew"
    CHANGE = "Change"
    RELEASE = "Release"
    # NOTE: generated name; presumably suffixed with _ENUM by the code
    # generator to avoid a reserved-word clash — do not rename.
    BREAK_ENUM = "Break"
class LeaseDuration(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Specifies whether the lease on a container is of infinite or fixed duration, only when the
    container is leased.
    """

    INFINITE = "Infinite"
    FIXED = "Fixed"

class LeaseShareAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Specifies the lease action. Can be one of the available actions.
    """

    ACQUIRE = "Acquire"
    RENEW = "Renew"
    CHANGE = "Change"
    RELEASE = "Release"
    BREAK_ENUM = "Break"

class LeaseState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Lease state of the container.
    """

    AVAILABLE = "Available"
    LEASED = "Leased"
    EXPIRED = "Expired"
    BREAKING = "Breaking"
    BROKEN = "Broken"

class LeaseStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The lease status of the container.
    """

    LOCKED = "Locked"
    UNLOCKED = "Unlocked"

class ListContainersInclude(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Extra datasets to include when listing containers; 'deleted' is the only value."""

    DELETED = "deleted"

class ManagementPolicyName(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Name of the management policy; 'default' is the only value."""

    DEFAULT = "default"

class MigrationState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """This property denotes the container level immutability to object level immutability migration
    state.
    """

    IN_PROGRESS = "InProgress"
    COMPLETED = "Completed"

class MinimumTlsVersion(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Set the minimum TLS version to be permitted on requests to storage. The default interpretation
    is TLS 1.0 for this property.
    """

    TLS1_0 = "TLS1_0"
    TLS1_1 = "TLS1_1"
    TLS1_2 = "TLS1_2"

class Name(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Name of the policy. The valid value is AccessTimeTracking. This field is currently read only
    """

    ACCESS_TIME_TRACKING = "AccessTimeTracking"

class ObjectType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """This is a required field. This field specifies the scope of the inventory created either at the
    blob or container level.
    """

    BLOB = "Blob"
    CONTAINER = "Container"
class Permissions(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""The signed permissions for the account SAS. Possible values include: Read (r), Write (w),
Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p).
"""
R = "r"
D = "d"
W = "w"
L = "l"
A = "a"
C = "c"
U = "u"
P = "p"
class PrivateEndpointConnectionProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The current provisioning state.
    """
    SUCCEEDED = "Succeeded"
    CREATING = "Creating"
    DELETING = "Deleting"
    FAILED = "Failed"
class PrivateEndpointServiceConnectionStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The private endpoint connection status.
    """
    PENDING = "Pending"
    APPROVED = "Approved"
    REJECTED = "Rejected"
class ProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Gets the status of the storage account at the time the operation was called.
    """
    CREATING = "Creating"
    RESOLVING_DNS = "ResolvingDNS"
    SUCCEEDED = "Succeeded"
class PublicAccess(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Specifies whether data in the container may be accessed publicly and the level of access.
    """
    CONTAINER = "Container"
    BLOB = "Blob"
    # The literal string "None", not Python's None.
    NONE = "None"
class PublicNetworkAccess(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Allow or disallow public network access to Storage Account. Value is optional but if passed in,
    must be 'Enabled' or 'Disabled'.
    """
    ENABLED = "Enabled"
    DISABLED = "Disabled"
class Reason(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Gets the reason that a storage account name could not be used. The Reason element is only
    returned if NameAvailable is false.
    """
    ACCOUNT_NAME_INVALID = "AccountNameInvalid"
    ALREADY_EXISTS = "AlreadyExists"
class ReasonCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The reason for the restriction. As of now this can be "QuotaId" or
    "NotAvailableForSubscription". Quota Id is set when the SKU has requiredQuotas parameter as the
    subscription does not belong to that quota. The "NotAvailableForSubscription" is related to
    capacity at DC.
    """
    QUOTA_ID = "QuotaId"
    NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription"
class RootSquashType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The property is for NFS share only. The default is NoRootSquash.
    """
    NO_ROOT_SQUASH = "NoRootSquash"
    ROOT_SQUASH = "RootSquash"
    ALL_SQUASH = "AllSquash"
class RoutingChoice(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Routing Choice defines the kind of network routing opted by the user.
    """
    MICROSOFT_ROUTING = "MicrosoftRouting"
    INTERNET_ROUTING = "InternetRouting"
class RuleType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The valid value is Lifecycle
    """
    LIFECYCLE = "Lifecycle"
class Schedule(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """This is a required field. This field is used to schedule an inventory formation.
    """
    DAILY = "Daily"
    WEEKLY = "Weekly"
class Services(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The signed services accessible with the account SAS. Possible values include: Blob (b), Queue
    (q), Table (t), File (f).
    """
    B = "b"
    Q = "q"
    T = "t"
    F = "f"
class ShareAccessTier(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Access tier for specific share. GpV2 account can choose between TransactionOptimized (default),
    Hot, and Cool. FileStorage account can choose Premium.
    """
    TRANSACTION_OPTIMIZED = "TransactionOptimized"
    HOT = "Hot"
    COOL = "Cool"
    PREMIUM = "Premium"
class SignedResource(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The signed services accessible with the service SAS. Possible values include: Blob (b),
    Container (c), File (f), Share (s).
    """
    B = "b"
    C = "c"
    F = "f"
    S = "s"
class SignedResourceTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The signed resource types that are accessible with the account SAS. Service (s): Access to
    service-level APIs; Container (c): Access to container-level APIs; Object (o): Access to
    object-level APIs for blobs, queue messages, table entities, and files.
    """
    S = "s"
    C = "c"
    O = "o"
class SkuName(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The SKU name. Required for account creation; optional for update. Note that in older versions,
    SKU name was called accountType.
    """
    STANDARD_LRS = "Standard_LRS"
    STANDARD_GRS = "Standard_GRS"
    STANDARD_RAGRS = "Standard_RAGRS"
    STANDARD_ZRS = "Standard_ZRS"
    PREMIUM_LRS = "Premium_LRS"
    PREMIUM_ZRS = "Premium_ZRS"
    STANDARD_GZRS = "Standard_GZRS"
    STANDARD_RAGZRS = "Standard_RAGZRS"
class SkuTier(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The SKU tier. This is based on the SKU name.
    """
    STANDARD = "Standard"
    PREMIUM = "Premium"
class State(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Gets the state of virtual network rule.
    """
    PROVISIONING = "Provisioning"
    DEPROVISIONING = "Deprovisioning"
    SUCCEEDED = "Succeeded"
    FAILED = "Failed"
    NETWORK_SOURCE_DELETED = "NetworkSourceDeleted"
class StorageAccountExpand(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Optional account properties that can be expanded on read (geo-replication
    statistics, blob restore status).
    """
    GEO_REPLICATION_STATS = "geoReplicationStats"
    BLOB_RESTORE_STATUS = "blobRestoreStatus"
class UsageUnit(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Gets the unit of measurement.
    """
    COUNT = "Count"
    BYTES = "Bytes"
    SECONDS = "Seconds"
    PERCENT = "Percent"
    COUNTS_PER_SECOND = "CountsPerSecond"
    BYTES_PER_SECOND = "BytesPerSecond"
import mock
import pytest
from urlparse import urlparse
from addons.wiki.tests.factories import WikiFactory
from api.base.settings.defaults import API_BASE
from api.base.settings import osf_settings
from api_tests import utils as test_utils
from framework.auth import core
from osf.models import Guid
from osf_tests.factories import (
ProjectFactory,
AuthUserFactory,
CommentFactory,
RegistrationFactory,
PrivateLinkFactory,
)
from rest_framework import exceptions
@pytest.mark.django_db
@pytest.mark.enable_implicit_clean
class CommentDetailMixin(object):
    """Shared assertions for the comment-detail API endpoint.

    Concrete subclasses supply the project/comment fixtures declared below
    as ``raise NotImplementedError`` stubs, so the same tests run against
    comments targeting nodes, files, etc.
    """
    @pytest.fixture()
    def user(self):
        # Comment author / project creator in the concrete subclasses.
        return AuthUserFactory()
    @pytest.fixture()
    def contributor(self):
        # A project contributor who did not author the comment.
        return AuthUserFactory()
    @pytest.fixture()
    def non_contrib(self):
        # A logged-in user unrelated to the project.
        return AuthUserFactory()
    # check if all necessary fixtures are setup by subclass
    @pytest.fixture()
    def private_project(self):
        raise NotImplementedError
    @pytest.fixture()
    def comment(self):
        raise NotImplementedError
    @pytest.fixture()
    def private_url(self):
        raise NotImplementedError
    @pytest.fixture()
    def payload(self):
        raise NotImplementedError
    # public_project_with_comments
    @pytest.fixture()
    def public_project(self):
        raise NotImplementedError
    @pytest.fixture()
    def public_comment(self):
        raise NotImplementedError
    @pytest.fixture()
    def public_comment_reply(self):
        raise NotImplementedError
    @pytest.fixture()
    def public_url(self):
        raise NotImplementedError
    @pytest.fixture()
    def public_comment_payload(self):
        raise NotImplementedError
    # registration_with_comments
    @pytest.fixture()
    def registration(self):
        raise NotImplementedError
    @pytest.fixture()
    def registration_url(self):
        raise NotImplementedError
    @pytest.fixture()
    def registration_comment(self):
        raise NotImplementedError
    @pytest.fixture()
    def comment_url(self):
        raise NotImplementedError
    @pytest.fixture()
    def registration_comment_reply(self):
        raise NotImplementedError
    @pytest.fixture()
    def replies_url(self):
        raise NotImplementedError
@pytest.fixture()
def set_up_payload(self):
def payload(target_id, content='test', has_content=True):
payload = {
'data': {
'id': target_id,
'type': 'comments',
'attributes': {
'content': 'Updating this comment',
'deleted': False
}
}
}
if has_content:
payload['data']['attributes']['content'] = content
return payload
return payload
    def test_private_node_comments_related_auth(
            self, app, user, non_contrib,
            comment, private_url
    ):
        # Visibility of a private-node comment for contributor / non-contrib / anonymous.
        # test_private_node_logged_in_contributor_can_view_comment
        res = app.get(private_url, auth=user.auth)
        assert res.status_code == 200
        assert comment._id == res.json['data']['id']
        assert comment.content == res.json['data']['attributes']['content']
        # def test_private_node_logged_in_non_contrib_cannot_view_comment
        res = app.get(private_url, auth=non_contrib.auth, expect_errors=True)
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
        # def test_private_node_logged_out_user_cannot_view_comment
        res = app.get(private_url, expect_errors=True)
        assert res.status_code == 401
        assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail
    def test_private_node_user_with_private_and_anonymous_link_misc(
            self, app, private_project, comment):
        # View-only-link access: normal VOLs expose the comment, anonymous
        # VOLs additionally hide commenter identity and scrub mentions.
        # def test_private_node_user_with_private_link_can_see_comment
        private_link = PrivateLinkFactory(anonymous=False)
        private_link.nodes.add(private_project)
        private_link.save()
        res = app.get(
            '/{}comments/{}/'.format(API_BASE, comment._id),
            {'view_only': private_link.key}, expect_errors=True
        )
        assert res.status_code == 200
        assert comment._id == res.json['data']['id']
        assert comment.content == res.json['data']['attributes']['content']
        # test_private_node_user_with_anonymous_link_cannot_see_commenter_info
        private_link = PrivateLinkFactory(anonymous=True)
        private_link.nodes.add(private_project)
        private_link.save()
        res = app.get(
            '/{}comments/{}/'.format(API_BASE, comment._id),
            {'view_only': private_link.key}
        )
        assert res.status_code == 200
        assert comment._id == res.json['data']['id']
        assert comment.content == res.json['data']['attributes']['content']
        assert 'user' not in res.json['data']['relationships']
        # test_private_node_user_with_anonymous_link_cannot_see_mention_info
        comment.content = 'test with [@username](userlink) and @mention'
        comment.save()
        res = app.get(
            '/{}comments/{}/'.format(API_BASE, comment._id),
            {'view_only': private_link.key}
        )
        assert res.status_code == 200
        assert comment._id == res.json['data']['id']
        assert 'test with @A User and @mention' == res.json['data']['attributes']['content']
    def test_public_node_comment_can_view_misc(
            self, app, user, non_contrib,
            public_project, public_url,
            public_comment, registration_comment,
            comment_url
    ):
        # Public-node comments are readable by everyone; also checks
        # registration comments and VOL access on public projects.
        # test_public_node_logged_in_contributor_can_view_comment
        res = app.get(public_url, auth=user.auth)
        assert res.status_code == 200
        assert public_comment._id == res.json['data']['id']
        assert public_comment.content == res.json['data']['attributes']['content']
        # test_public_node_logged_in_non_contrib_can_view_comment
        res = app.get(public_url, auth=non_contrib.auth)
        assert res.status_code == 200
        assert public_comment._id == res.json['data']['id']
        assert public_comment.content == res.json['data']['attributes']['content']
        # test_public_node_logged_out_user_can_view_comment
        res = app.get(public_url)
        assert res.status_code == 200
        assert public_comment._id == res.json['data']['id']
        assert public_comment.content == res.json['data']['attributes']['content']
        # test_registration_logged_in_contributor_can_view_comment
        res = app.get(comment_url, auth=user.auth)
        assert res.status_code == 200
        assert registration_comment._id == res.json['data']['id']
        assert registration_comment.content == res.json['data']['attributes']['content']
        # test_public_node_user_with_private_link_can_view_comment
        private_link = PrivateLinkFactory(anonymous=False)
        private_link.nodes.add(public_project)
        private_link.save()
        res = app.get(
            '/{}comments/{}/'.format(API_BASE, public_comment._id),
            {'view_only': private_link.key}, expect_errors=True
        )
        assert public_comment._id == res.json['data']['id']
        assert public_comment.content == res.json['data']['attributes']['content']
    def test_comment_has_multiple_links(
            self, app, user, public_url, public_project, public_comment,
            public_comment_reply, comment_url, registration
    ):
        # Verifies the relationship links (user/node/replies/reports) in the
        # JSONAPI response resolve to the expected endpoints.
        res = app.get(public_url)
        assert res.status_code == 200
        # test_comment_has_user_link
        url_user = res.json['data']['relationships']['user']['links']['related']['href']
        expected_url = '/{}users/{}/'.format(API_BASE, user._id)
        assert urlparse(url_user).path == expected_url
        # test_comment_has_node_link
        url_node = res.json['data']['relationships']['node']['links']['related']['href']
        expected_url = '/{}nodes/{}/'.format(API_BASE, public_project._id)
        assert urlparse(url_node).path == expected_url
        # test_comment_has_replies_link
        url_replies = res.json['data']['relationships']['replies']['links']['related']['href']
        uri = test_utils.urlparse_drop_netloc(url_replies)
        res_uri = app.get(uri)
        assert res_uri.status_code == 200
        assert res_uri.json['data'][0]['type'] == 'comments'
        # test_comment_has_reports_link
        url_reports = res.json['data']['relationships']['reports']['links']['related']['href']
        expected_url = '/{}comments/{}/reports/'.format(
            API_BASE, public_comment._id)
        assert urlparse(url_reports).path == expected_url
        # test_registration_comment_has_node_link
        res = app.get(comment_url, auth=user.auth)
        url = res.json['data']['relationships']['node']['links']['related']['href']
        expected_url = '/{}registrations/{}/'.format(
            API_BASE, registration._id)
        assert res.status_code == 200
        assert urlparse(url).path == expected_url
    def test_private_node_comment_auth_misc(
            self, app, user, non_contrib, private_url, payload):
        # Only the contributor who authored the comment may update it.
        # test_private_node_only_logged_in_contributor_commenter_can_update_comment
        res = app.put_json_api(private_url, payload, auth=user.auth)
        assert res.status_code == 200
        assert payload['data']['attributes']['content'] == res.json['data']['attributes']['content']
        # test_private_node_logged_in_non_contrib_cannot_update_comment
        res = app.put_json_api(
            private_url, payload,
            auth=non_contrib.auth, expect_errors=True
        )
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
        # test_private_node_logged_out_user_cannot_update_comment
        res = app.put_json_api(private_url, payload, expect_errors=True)
        assert res.status_code == 401
        assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail
    def test_public_node_comment_update_misc(
            self, app, user, contributor,
            non_contrib, public_url,
            public_comment_payload
    ):
        # Update permissions on a public node: only the authoring contributor.
        # test_public_node_only_contributor_commenter_can_update_comment
        res = app.put_json_api(
            public_url, public_comment_payload,
            auth=user.auth
        )
        assert res.status_code == 200
        assert public_comment_payload['data']['attributes']['content'] == res.json['data']['attributes']['content']
        # test_public_node_contributor_cannot_update_other_users_comment
        res = app.put_json_api(
            public_url, public_comment_payload,
            auth=contributor.auth, expect_errors=True
        )
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
        # test_public_node_non_contrib_cannot_update_other_users_comment
        res = app.put_json_api(
            public_url, public_comment_payload,
            auth=non_contrib.auth, expect_errors=True
        )
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
        # test_public_node_logged_out_user_cannot_update_comment
        res = app.put_json_api(
            public_url, public_comment_payload,
            expect_errors=True
        )
        assert res.status_code == 401
        assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail
    def test_update_comment_misc(
            self, app, user, private_url,
            comment, set_up_payload
    ):
        # Content validation: max length and non-empty.
        # test_update_comment_cannot_exceed_max_length
        content = ('c' * (osf_settings.COMMENT_MAXLENGTH + 3))
        payload = set_up_payload(comment._id, content=content)
        res = app.put_json_api(
            private_url, payload,
            auth=user.auth, expect_errors=True
        )
        assert res.status_code == 400
        assert (res.json['errors'][0]['detail'] == 'Ensure this field has no more than {} characters.'.format(
            str(osf_settings.COMMENT_MAXLENGTH)))
        # test_update_comment_cannot_be_empty
        payload = set_up_payload(comment._id, content='')
        res = app.put_json_api(
            private_url, payload,
            auth=user.auth, expect_errors=True
        )
        assert res.status_code == 400
        assert res.json['errors'][0]['detail'] == 'This field may not be blank.'
    def test_private_node_only_logged_in_contributor_commenter_can_delete_comment(
            self, app, user, private_url):
        res = app.delete_json_api(private_url, auth=user.auth)
        assert res.status_code == 204
    def test_private_node_only_logged_in_contributor_commenter_can_delete_own_reply(
            self, app, user, private_project, comment):
        # A reply targets the parent comment's Guid rather than the node.
        reply_target = Guid.load(comment._id)
        reply = CommentFactory(
            node=private_project,
            target=reply_target, user=user
        )
        reply_url = '/{}comments/{}/'.format(API_BASE, reply._id)
        res = app.delete_json_api(reply_url, auth=user.auth)
        assert res.status_code == 204
    def test_private_node_only_logged_in_contributor_commenter_can_undelete_own_reply(
            self, app, user, private_project, comment, set_up_payload):
        reply_target = Guid.load(comment._id)
        reply = CommentFactory(
            node=private_project,
            target=reply_target, user=user
        )
        reply_url = '/{}comments/{}/'.format(API_BASE, reply._id)
        reply.is_deleted = True
        reply.save()
        # has_content=False: the undelete PATCH must not change the body.
        payload = set_up_payload(reply._id, has_content=False)
        res = app.patch_json_api(reply_url, payload, auth=user.auth)
        assert res.status_code == 200
        assert not res.json['data']['attributes']['deleted']
        assert res.json['data']['attributes']['content'] == reply.content
    def test_private_node_cannot_delete_comment_situation(
            self, app, user, contributor, non_contrib, private_url, comment):
        # def
        # test_private_node_contributor_cannot_delete_other_users_comment(self):
        res = app.delete_json_api(
            private_url, auth=contributor.auth,
            expect_errors=True
        )
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
        # def test_private_node_non_contrib_cannot_delete_comment(self):
        res = app.delete_json_api(
            private_url, auth=non_contrib.auth,
            expect_errors=True)
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
        # def test_private_node_logged_out_user_cannot_delete_comment(self):
        res = app.delete_json_api(private_url, expect_errors=True)
        assert res.status_code == 401
        # def test_private_node_user_cannot_delete_already_deleted_comment(self):
        comment.is_deleted = True
        comment.save()
        res = app.delete_json_api(
            private_url, auth=user.auth,
            expect_errors=True
        )
        assert res.status_code == 400
        assert res.json['errors'][0]['detail'] == 'Comment already deleted.'
    def test_private_node_only_logged_in_contributor_commenter_can_undelete_comment(
            self, app, user, comment, set_up_payload):
        comment.is_deleted = True
        comment.save()
        url = '/{}comments/{}/'.format(API_BASE, comment._id)
        payload = set_up_payload(comment._id, has_content=False)
        res = app.patch_json_api(url, payload, auth=user.auth)
        assert res.status_code == 200
        assert not res.json['data']['attributes']['deleted']
        assert res.json['data']['attributes']['content'] == comment.content
    def test_private_node_cannot_undelete_comment_situation(
            self, app, contributor, non_contrib, comment, set_up_payload):
        comment.is_deleted = True
        comment.save()
        url = '/{}comments/{}/'.format(API_BASE, comment._id)
        payload = set_up_payload(comment._id, has_content=False)
        # test_private_node_contributor_cannot_undelete_other_users_comment
        res = app.patch_json_api(
            url, payload, auth=contributor.auth,
            expect_errors=True)
        assert res.status_code == 403
        # test_private_node_non_contrib_cannot_undelete_comment
        res = app.patch_json_api(
            url, payload, auth=non_contrib.auth,
            expect_errors=True)
        assert res.status_code == 403
        # test_private_node_logged_out_user_cannot_undelete_comment
        res = app.patch_json_api(url, payload, expect_errors=True)
        assert res.status_code == 401
    def test_public_node_only_logged_in_contributor_commenter_can_delete_comment(
            self, app, user, public_url):
        res = app.delete_json_api(public_url, auth=user.auth)
        assert res.status_code == 204
    def test_public_node_cannot_delete_comment_situations(
            self, app, user, contributor, non_contrib, public_url, public_comment):
        # test_public_node_contributor_cannot_delete_other_users_comment
        res = app.delete_json_api(
            public_url, auth=contributor.auth,
            expect_errors=True)
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
        # test_public_node_non_contrib_cannot_delete_other_users_comment
        res = app.delete_json_api(
            public_url, auth=non_contrib.auth,
            expect_errors=True)
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
        # test_public_node_logged_out_user_cannot_delete_comment
        res = app.delete_json_api(public_url, expect_errors=True)
        assert res.status_code == 401
        assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail
        # test_public_node_user_cannot_delete_already_deleted_comment
        public_comment.is_deleted = True
        public_comment.save()
        res = app.delete_json_api(
            public_url, auth=user.auth,
            expect_errors=True)
        assert res.status_code == 400
        assert res.json['errors'][0]['detail'] == 'Comment already deleted.'
    def test_private_node_deleted_comment_auth_misc(
            self, app, user, contributor, comment, private_project):
        # Deleted comments: only the author sees the content; others get
        # a 200 with content None (the comment's existence stays visible).
        comment.is_deleted = True
        comment.save()
        # test_private_node_only_logged_in_commenter_can_view_deleted_comment
        url = '/{}comments/{}/'.format(API_BASE, comment._id)
        res = app.get(url, auth=user.auth)
        assert res.status_code == 200
        assert res.json['data']['attributes']['content'] == comment.content
        # test_private_node_contributor_cannot_see_other_users_deleted_comment
        url = '/{}comments/{}/'.format(API_BASE, comment._id)
        res = app.get(url, auth=contributor.auth)
        assert res.status_code == 200
        assert res.json['data']['attributes']['content'] is None
        # test_private_node_logged_out_user_cannot_see_deleted_comment
        url = '/{}comments/{}/'.format(API_BASE, comment._id)
        res = app.get(url, expect_errors=True)
        assert res.status_code == 401
        assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail
        # test_private_node_view_only_link_user_cannot_see_deleted_comment
        private_link = PrivateLinkFactory(anonymous=False)
        private_link.nodes.add(private_project)
        private_link.save()
        res = app.get('/{}comments/{}/'.format(API_BASE, comment._id),
                      {'view_only': private_link.key}, expect_errors=True)
        assert res.status_code == 200
        assert res.json['data']['attributes']['content'] is None
        # test_private_node_anonymous_view_only_link_user_cannot_see_deleted_comment
        anonymous_link = PrivateLinkFactory(anonymous=True)
        anonymous_link.nodes.add(private_project)
        anonymous_link.save()
        res = app.get('/{}comments/{}/'.format(API_BASE, comment._id),
                      {'view_only': anonymous_link.key}, expect_errors=True)
        assert res.status_code == 200
        assert res.json['data']['attributes']['content'] is None
    def test_public_node_deleted_comments_auth_misc(
            self, app, user, contributor, non_contrib,
            public_project, public_comment
    ):
        public_comment.is_deleted = True
        public_comment.save()
        url = '/{}comments/{}/'.format(API_BASE, public_comment._id)
        # test_public_node_only_logged_in_commenter_can_view_deleted_comment
        res = app.get(url, auth=user.auth)
        assert res.status_code == 200
        assert res.json['data']['attributes']['content'] == public_comment.content
        # test_public_node_contributor_cannot_view_other_users_deleted_comment
        res = app.get(url, auth=contributor.auth)
        assert res.status_code == 200
        assert res.json['data']['attributes']['content'] is None
        # test_public_node_non_contrib_cannot_view_other_users_deleted_comment
        res = app.get(url, auth=non_contrib.auth)
        assert res.status_code == 200
        assert res.json['data']['attributes']['content'] is None
        # test_public_node_logged_out_user_cannot_view_deleted_comments
        res = app.get(url)
        assert res.status_code == 200
        assert res.json['data']['attributes']['content'] is None
        # test_public_node_view_only_link_user_cannot_see_deleted_comment
        private_link = PrivateLinkFactory(anonymous=False)
        private_link.nodes.add(public_project)
        private_link.save()
        res = app.get(
            '/{}comments/{}/'.format(
                API_BASE, public_comment._id
            ),
            {'view_only': private_link.key},
            expect_errors=True
        )
        assert res.status_code == 200
        assert res.json['data']['attributes']['content'] is None
class TestCommentDetailView(CommentDetailMixin):
    """Runs CommentDetailMixin against comments targeting project nodes,
    plus node-specific extras (target link type, comment_level behavior).
    """
    # private_project_with_comments
    @pytest.fixture()
    def private_project(self, user, contributor):
        private_project = ProjectFactory.create(is_public=False, creator=user)
        private_project.add_contributor(contributor, save=True)
        return private_project
    @pytest.fixture()
    def comment(self, user, private_project):
        return CommentFactory(node=private_project, user=user)
    @pytest.fixture()
    def private_url(self, comment):
        return '/{}comments/{}/'.format(API_BASE, comment._id)
    @pytest.fixture()
    def payload(self, comment, set_up_payload):
        return set_up_payload(comment._id)
    # public_project_with_comments
    @pytest.fixture()
    def public_project(self, user, contributor):
        public_project = ProjectFactory.create(is_public=True, creator=user)
        public_project.add_contributor(contributor, save=True)
        return public_project
    @pytest.fixture()
    def public_comment(self, user, public_project):
        return CommentFactory(node=public_project, user=user)
    @pytest.fixture()
    def public_comment_reply(self, user, public_comment, public_project):
        # Replies target the parent comment's Guid, not the node.
        reply_target = Guid.load(public_comment._id)
        return CommentFactory(
            node=public_project,
            target=reply_target, user=user
        )
    @pytest.fixture()
    def public_url(self, public_comment):
        return '/{}comments/{}/'.format(API_BASE, public_comment._id)
    @pytest.fixture()
    def public_comment_payload(self, public_comment, set_up_payload):
        return set_up_payload(public_comment._id)
    # registration_with_comments
    @pytest.fixture()
    def registration(self, user):
        return RegistrationFactory(creator=user)
    @pytest.fixture()
    def registration_url(self, registration):
        return '/{}registrations/{}/'.format(API_BASE, registration._id)
    @pytest.fixture()
    def registration_comment(self, user, registration):
        return CommentFactory(node=registration, user=user)
    @pytest.fixture()
    def comment_url(self, registration_comment):
        return '/{}comments/{}/'.format(API_BASE, registration_comment._id)
    @pytest.fixture()
    def registration_comment_reply(
            self, user, registration,
            registration_comment
    ):
        reply_target = Guid.load(registration_comment._id)
        return CommentFactory(
            node=registration,
            target=reply_target, user=user
        )
    @pytest.fixture()
    def replies_url(self, registration, registration_comment):
        return '/{}registrations/{}/comments/?filter[target]={}'.format(
            API_BASE, registration._id, registration_comment._id)
    def test_comment_has_target_link_with_correct_type(
            self, app, public_url, public_project):
        # Node-targeted comments expose a 'nodes'-typed target relationship.
        res = app.get(public_url)
        url = res.json['data']['relationships']['target']['links']['related']['href']
        expected_url = '/{}nodes/{}/'.format(API_BASE, public_project._id)
        target_type = res.json['data']['relationships']['target']['links']['related']['meta']['type']
        expected_type = 'nodes'
        assert res.status_code == 200
        assert urlparse(url).path == expected_url
        assert target_type == expected_type
    def test_public_node_non_contrib_commenter_can_update_comment(
            self, app, non_contrib, set_up_payload):
        # comment_level='public' lets any logged-in user author (and thus
        # edit their own) comments.
        project = ProjectFactory(is_public=True, comment_level='public')
        comment = CommentFactory(node=project, user=non_contrib)
        url = '/{}comments/{}/'.format(API_BASE, comment._id)
        payload = set_up_payload(comment._id)
        res = app.put_json_api(url, payload, auth=non_contrib.auth)
        assert res.status_code == 200
        assert payload['data']['attributes']['content'] == res.json['data']['attributes']['content']
    def test_public_node_non_contrib_commenter_cannot_update_own_comment_if_comment_level_private(
            self, app, non_contrib, set_up_payload):
        project = ProjectFactory(is_public=True, comment_level='public')
        comment = CommentFactory(node=project, user=non_contrib)
        # Flipping comment_level to 'private' revokes the non-contributor's
        # edit rights even on their own comment.
        project.comment_level = 'private'
        project.save()
        url = '/{}comments/{}/'.format(API_BASE, comment._id)
        payload = set_up_payload(comment._id)
        res = app.put_json_api(
            url, payload, auth=non_contrib.auth,
            expect_errors=True
        )
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
    def test_public_node_non_contrib_commenter_can_delete_comment(
            self, app, non_contrib):
        project = ProjectFactory(is_public=True)
        comment = CommentFactory(node=project, user=non_contrib)
        url = '/{}comments/{}/'.format(API_BASE, comment._id)
        res = app.delete_json_api(url, auth=non_contrib.auth)
        assert res.status_code == 204
    def test_registration_comment_has_usable_replies_relationship_link(
            self, app, user, registration_url, registration_comment_reply):
        # Walks registration -> comments -> replies links end to end.
        res = app.get(registration_url, auth=user.auth)
        assert res.status_code == 200
        comments_url = res.json['data']['relationships']['comments']['links']['related']['href']
        comments_uri = test_utils.urlparse_drop_netloc(comments_url)
        comments_res = app.get(comments_uri, auth=user.auth)
        assert comments_res.status_code == 200
        replies_url = comments_res.json['data'][0]['relationships']['replies']['links']['related']['href']
        replies_uri = test_utils.urlparse_drop_netloc(replies_url)
        app.get(replies_uri, auth=user.auth)
        node_url = comments_res.json['data'][0]['relationships']['node']['links']['related']['href']
        node_uri = test_utils.urlparse_drop_netloc(node_url)
        assert node_uri == registration_url
    def test_registration_comment_has_usable_node_relationship_link(
            self, app, user, registration, registration_url,
            registration_comment_reply
    ):
        res = app.get(registration_url, auth=user.auth)
        assert res.status_code == 200
        comments_url = res.json['data']['relationships']['comments']['links']['related']['href']
        comments_uri = test_utils.urlparse_drop_netloc(comments_url)
        comments_res = app.get(comments_uri, auth=user.auth)
        assert comments_res.status_code == 200
        node_url = comments_res.json['data'][0]['relationships']['node']['links']['related']['href']
        node_uri = test_utils.urlparse_drop_netloc(node_url)
        node_res = app.get(node_uri, auth=user.auth)
        assert registration._id in node_res.json['data']['id']
class TestFileCommentDetailView(CommentDetailMixin):
    """Runs CommentDetailMixin against comments that target files attached
    to a node/registration (target is the file's Guid).
    """
    # private_project_with_comments
    @pytest.fixture()
    def private_project(self, user, contributor):
        private_project = ProjectFactory.create(is_public=False, creator=user)
        private_project.add_contributor(contributor, save=True)
        return private_project
    @pytest.fixture()
    def file(self, user, private_project):
        return test_utils.create_test_file(private_project, user)
    @pytest.fixture()
    def comment(self, user, private_project, file):
        return CommentFactory(
            node=private_project,
            target=file.get_guid(),
            user=user)
    @pytest.fixture()
    def private_url(self, comment):
        return '/{}comments/{}/'.format(API_BASE, comment._id)
    @pytest.fixture()
    def payload(self, comment, set_up_payload):
        return set_up_payload(comment._id)
    # public_project_with_comments
    @pytest.fixture()
    def public_project(self, user, contributor):
        public_project = ProjectFactory.create(
            is_public=True, creator=user, comment_level='private')
        public_project.add_contributor(contributor, save=True)
        return public_project
    @pytest.fixture()
    def public_file(self, user, public_project):
        return test_utils.create_test_file(public_project, user)
    @pytest.fixture()
    def public_comment(self, user, public_project, public_file):
        return CommentFactory(
            node=public_project,
            target=public_file.get_guid(),
            user=user)
    @pytest.fixture()
    def public_comment_reply(self, user, public_comment, public_project):
        reply_target = Guid.load(public_comment._id)
        return CommentFactory(
            node=public_project,
            target=reply_target, user=user
        )
    @pytest.fixture()
    def public_url(self, public_comment):
        return '/{}comments/{}/'.format(API_BASE, public_comment._id)
    @pytest.fixture()
    def public_comment_payload(self, public_comment, set_up_payload):
        return set_up_payload(public_comment._id)
    # registration_with_comments
    @pytest.fixture()
    def registration(self, user):
        return RegistrationFactory(creator=user, comment_level='private')
    @pytest.fixture()
    def registration_file(self, user, registration):
        return test_utils.create_test_file(registration, user)
    @pytest.fixture()
    def registration_comment(self, user, registration, registration_file):
        return CommentFactory(
            node=registration,
            target=registration_file.get_guid(),
            user=user)
    @pytest.fixture()
    def comment_url(self, registration_comment):
        return '/{}comments/{}/'.format(API_BASE, registration_comment._id)
    @pytest.fixture()
    def registration_comment_reply(
            self, user, registration,
            registration_comment
    ):
        reply_target = Guid.load(registration_comment._id)
        return CommentFactory(
            node=registration,
            target=reply_target,
            user=user)
    def test_file_comment_has_target_link_with_correct_type(
            self, app, public_url, public_file):
        # File-targeted comments expose a 'files'-typed target relationship.
        res = app.get(public_url)
        url = res.json['data']['relationships']['target']['links']['related']['href']
        expected_url = '/{}files/{}/'.format(API_BASE, public_file._id)
        target_type = res.json['data']['relationships']['target']['links']['related']['meta']['type']
        expected_type = 'files'
        assert res.status_code == 200
        assert urlparse(url).path == expected_url
        assert target_type == expected_type
    def test_public_node_non_contrib_commenter_can_update_file_comment(
            self, app, non_contrib, set_up_payload):
        project = ProjectFactory(is_public=True)
        test_file = test_utils.create_test_file(project, project.creator)
        comment = CommentFactory(
            node=project,
            target=test_file.get_guid(),
            user=non_contrib)
        url = '/{}comments/{}/'.format(API_BASE, comment._id)
        payload = set_up_payload(comment._id)
        res = app.put_json_api(url, payload, auth=non_contrib.auth)
        assert res.status_code == 200
        assert payload['data']['attributes']['content'] == res.json['data']['attributes']['content']
def test_public_node_non_contrib_commenter_cannot_update_own_file_comment_if_comment_level_private(
self, app, non_contrib, set_up_payload):
project = ProjectFactory(is_public=True)
test_file = test_utils.create_test_file(project, project.creator)
comment = CommentFactory(
node=project,
target=test_file.get_guid(),
user=non_contrib)
project.comment_level = 'private'
project.save()
url = '/{}comments/{}/'.format(API_BASE, comment._id)
payload = set_up_payload(comment._id)
res = app.put_json_api(
url, payload, auth=non_contrib.auth,
expect_errors=True
)
assert res.status_code == 403
assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail
def test_public_node_non_contrib_commenter_can_delete_file_comment(
self, app, non_contrib):
project = ProjectFactory(is_public=True, comment_level='public')
test_file = test_utils.create_test_file(project, project.creator)
comment = CommentFactory(
node=project,
target=test_file.get_guid(),
user=non_contrib)
url = '/{}comments/{}/'.format(API_BASE, comment._id)
res = app.delete_json_api(url, auth=non_contrib.auth)
assert res.status_code == 204
def test_comment_detail_for_deleted_file_is_not_returned(
self, app, user, private_project, file, private_url):
# Delete commented file
osfstorage = private_project.get_addon('osfstorage')
osfstorage.get_root()
file.delete()
res = app.get(private_url, auth=user.auth, expect_errors=True)
assert res.status_code == 404
class TestWikiCommentDetailView(CommentDetailMixin):
    """Comment-detail endpoint tests for comments whose target is a wiki page.

    Supplies the fixtures the shared ``CommentDetailMixin`` expects
    (private/public/registration nodes, their wiki pages, comments, URLs
    and update payloads) and adds wiki-specific assertions.
    """

    # --- fixtures: private project with a commented wiki page ---

    @pytest.fixture()
    def private_project(self, user, contributor):
        project = ProjectFactory.create(
            is_public=False, creator=user, comment_level='private')
        project.add_contributor(contributor, save=True)
        return project

    @pytest.fixture()
    def wiki(self, user, private_project):
        # Patch out search updating, which WikiFactory would otherwise trigger.
        with mock.patch('osf.models.AbstractNode.update_search'):
            return WikiFactory(
                user=user, node=private_project, page_name='not home')

    @pytest.fixture()
    def comment(self, user, private_project, wiki):
        return CommentFactory(
            node=private_project, target=Guid.load(wiki._id), user=user)

    @pytest.fixture()
    def private_url(self, comment):
        return f'/{API_BASE}comments/{comment._id}/'

    @pytest.fixture()
    def payload(self, comment, set_up_payload):
        return set_up_payload(comment._id)

    # --- fixtures: public project with a commented wiki page ---

    @pytest.fixture()
    def public_project(self, user, contributor):
        project = ProjectFactory.create(
            is_public=True, creator=user, comment_level='private')
        project.add_contributor(contributor, save=True)
        return project

    @pytest.fixture()
    def public_wiki(self, user, public_project):
        with mock.patch('osf.models.AbstractNode.update_search'):
            return WikiFactory(user=user, node=public_project)

    @pytest.fixture()
    def public_comment(self, user, public_project, public_wiki):
        return CommentFactory(
            node=public_project, target=Guid.load(public_wiki._id), user=user)

    @pytest.fixture()
    def public_comment_reply(self, user, public_comment, public_project):
        # A reply targets the parent comment's guid rather than the wiki.
        parent_guid = Guid.load(public_comment._id)
        return CommentFactory(
            node=public_project, target=parent_guid, user=user)

    @pytest.fixture()
    def public_url(self, public_comment):
        return f'/{API_BASE}comments/{public_comment._id}/'

    @pytest.fixture()
    def public_comment_payload(self, public_comment, set_up_payload):
        return set_up_payload(public_comment._id)

    # --- fixtures: registration with a commented wiki page ---

    @pytest.fixture()
    def registration(self, user):
        return RegistrationFactory(creator=user, comment_level='private')

    @pytest.fixture()
    def registration_wiki(self, registration, user):
        with mock.patch('osf.models.AbstractNode.update_search'):
            return WikiFactory(user=user, node=registration)

    @pytest.fixture()
    def registration_comment(self, user, registration, registration_wiki):
        return CommentFactory(
            node=registration,
            target=Guid.load(registration_wiki._id),
            user=user)

    @pytest.fixture()
    def comment_url(self, registration_comment):
        return f'/{API_BASE}comments/{registration_comment._id}/'

    @pytest.fixture()
    def registration_comment_reply(
            self, user, registration, registration_comment):
        parent_guid = Guid.load(registration_comment._id)
        return CommentFactory(
            node=registration, target=parent_guid, user=user)

    # --- tests ---

    def test_wiki_comment_has_target_link_with_correct_type(
            self, app, public_url, public_wiki):
        """Target relationship must point at the wiki with type 'wiki'."""
        res = app.get(public_url)
        related = res.json['data']['relationships']['target']['links']['related']
        assert res.status_code == 200
        assert related['href'] == public_wiki.get_absolute_url()
        assert related['meta']['type'] == 'wiki'

    def test_public_node_non_contrib_commenter_can_update_wiki_comment(
            self, app, user, non_contrib, set_up_payload):
        """On a public node a non-contributor may edit their own comment."""
        node = ProjectFactory(is_public=True)
        wiki_page = WikiFactory(user=user, node=node)
        own_comment = CommentFactory(
            node=node, target=Guid.load(wiki_page._id), user=non_contrib)
        payload = set_up_payload(own_comment._id)
        res = app.put_json_api(
            f'/{API_BASE}comments/{own_comment._id}/',
            payload, auth=non_contrib.auth)
        assert res.status_code == 200
        assert res.json['data']['attributes']['content'] == \
            payload['data']['attributes']['content']

    def test_public_node_non_contrib_commenter_cannot_update_own_wiki_comment_if_comment_level_private(
            self, app, user, non_contrib, set_up_payload):
        """Switching comment_level to 'private' revokes non-contrib edits."""
        node = ProjectFactory(is_public=True)
        wiki_page = WikiFactory(user=user, node=node)
        own_comment = CommentFactory(
            node=node, target=Guid.load(wiki_page._id), user=non_contrib)
        # Restrict commenting to contributors after the comment exists.
        node.comment_level = 'private'
        node.save()
        payload = set_up_payload(own_comment._id)
        res = app.put_json_api(
            f'/{API_BASE}comments/{own_comment._id}/',
            payload, auth=non_contrib.auth, expect_errors=True)
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail

    def test_public_node_non_contrib_commenter_can_delete_wiki_comment(
            self, app, user, non_contrib):
        """A non-contributor may delete their own comment on a public node."""
        node = ProjectFactory(is_public=True, comment_level='public')
        wiki_page = WikiFactory(user=user, node=node)
        own_comment = CommentFactory(
            node=node, target=Guid.load(wiki_page._id), user=non_contrib)
        res = app.delete_json_api(
            f'/{API_BASE}comments/{own_comment._id}/', auth=non_contrib.auth)
        assert res.status_code == 204

    def test_comment_detail_for_deleted_wiki_is_not_returned(
            self, app, user, wiki, private_url, private_project):
        """Deleting the commented wiki page makes the comment detail 404."""
        private_project.delete_node_wiki(wiki.page_name, core.Auth(user))
        res = app.get(private_url, auth=user.auth, expect_errors=True)
        assert res.status_code == 404
# NOTE(review): the trailing text below this module ("Subsets and Splits",
# "No community queries yet", ...) was non-source residue from a web/dataset
# viewer and was not valid Python; it has been neutralized into this comment.