repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
netscaler/neutron | neutron/tests/unit/test_policy.py | 8 | 23444 | # Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of Policy Engine For Neutron"""
import json
import StringIO
import urllib2
import fixtures
import mock
import neutron
from neutron.api.v2 import attributes
from neutron.common import exceptions
from neutron import context
from neutron import manager
from neutron.openstack.common import importutils
from neutron.openstack.common import policy as common_policy
from neutron import policy
from neutron.tests import base
class PolicyFileTestCase(base.BaseTestCase):
    """Verify that policy.enforce picks up changes to the on-disk policy file.

    Uses a temp directory as the policy-file location and rewrites the file
    between enforcement calls.
    """

    def setUp(self):
        super(PolicyFileTestCase, self).setUp()
        policy.reset()
        self.addCleanup(policy.reset)
        # Non-admin context so enforcement actually consults the rules.
        self.context = context.Context('fake', 'fake', is_admin=False)
        self.target = {}
        self.tempdir = self.useFixture(fixtures.TempDir())

    def test_modified_policy_reloads(self):
        def fake_find_config_file(_1, _2):
            # Redirect the policy engine to a file inside our temp dir.
            return self.tempdir.join('policy')
        with mock.patch.object(neutron.common.utils,
                               'find_config_file',
                               new=fake_find_config_file):
            tmpfilename = fake_find_config_file(None, None)
            action = "example:test"
            # First write an always-allow rule ("") and check it passes.
            with open(tmpfilename, "w") as policyfile:
                policyfile.write("""{"example:test": ""}""")
            policy.enforce(self.context, action, self.target)
            # Then rewrite it as always-deny ("!") and expect a failure.
            with open(tmpfilename, "w") as policyfile:
                policyfile.write("""{"example:test": "!"}""")
            # NOTE(vish): reset stored policy cache so we don't have to
            # sleep(1)
            policy._POLICY_CACHE = {}
            self.assertRaises(exceptions.PolicyNotAuthorized,
                              policy.enforce,
                              self.context,
                              action,
                              self.target)
class PolicyTestCase(base.BaseTestCase):
    """Generic policy-engine behaviour against a preloaded rule set.

    Rules are injected directly via common_policy.set_rules, bypassing any
    policy file, and exercised through policy.enforce / policy.check.
    """

    def setUp(self):
        super(PolicyTestCase, self).setUp()
        policy.reset()
        self.addCleanup(policy.reset)
        # NOTE(vish): preload rules to circumvent reloading from file
        policy.init()
        rules = {
            "true": '@',
            "example:allowed": '@',
            "example:denied": '!',
            "example:get_http": "http:http://www.example.com",
            "example:my_file": "role:compute_admin or tenant_id:%(tenant_id)s",
            "example:early_and_fail": "! and @",
            "example:early_or_success": "@ or !",
            "example:lowercase_admin": "role:admin or role:sysadmin",
            "example:uppercase_admin": "role:ADMIN or role:sysadmin",
        }
        # NOTE(vish): then overload underlying rules
        common_policy.set_rules(common_policy.Rules(
            dict((k, common_policy.parse_rule(v))
                 for k, v in rules.items())))
        self.context = context.Context('fake', 'fake', roles=['member'])
        self.target = {}

    def test_enforce_nonexistent_action_throws(self):
        action = "example:noexist"
        self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
                          self.context, action, self.target)

    def test_enforce_bad_action_throws(self):
        action = "example:denied"
        self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
                          self.context, action, self.target)

    def test_check_bad_action_noraise(self):
        # policy.check returns False instead of raising, unlike enforce.
        action = "example:denied"
        result = policy.check(self.context, action, self.target)
        self.assertEqual(result, False)

    def test_check_if_exists_non_existent_action_raises(self):
        action = "example:idonotexist"
        self.assertRaises(exceptions.PolicyRuleNotFound,
                          policy.check_if_exists,
                          self.context, action, self.target)

    def test_enforce_good_action(self):
        action = "example:allowed"
        result = policy.enforce(self.context, action, self.target)
        self.assertEqual(result, True)

    def test_enforce_http_true(self):
        # "http:" rules defer the decision to a remote endpoint; fake the
        # urllib2 call so it answers "True".
        def fakeurlopen(url, post_data):
            return StringIO.StringIO("True")
        with mock.patch.object(urllib2, 'urlopen', new=fakeurlopen):
            action = "example:get_http"
            target = {}
            result = policy.enforce(self.context, action, target)
            self.assertEqual(result, True)

    def test_enforce_http_false(self):
        # Same as above but the remote endpoint denies the request.
        def fakeurlopen(url, post_data):
            return StringIO.StringIO("False")
        with mock.patch.object(urllib2, 'urlopen', new=fakeurlopen):
            action = "example:get_http"
            target = {}
            self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
                              self.context, action, target)

    def test_templatized_enforcement(self):
        # tenant_id:%(tenant_id)s substitutes values from the target dict.
        target_mine = {'tenant_id': 'fake'}
        target_not_mine = {'tenant_id': 'another'}
        action = "example:my_file"
        policy.enforce(self.context, action, target_mine)
        self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
                          self.context, action, target_not_mine)

    def test_early_AND_enforcement(self):
        action = "example:early_and_fail"
        self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
                          self.context, action, self.target)

    def test_early_OR_enforcement(self):
        action = "example:early_or_success"
        policy.enforce(self.context, action, self.target)

    def test_ignore_case_role_check(self):
        lowercase_action = "example:lowercase_admin"
        uppercase_action = "example:uppercase_admin"
        # NOTE(dprince) we mix case in the Admin role here to ensure
        # case is ignored
        admin_context = context.Context('admin', 'fake', roles=['AdMiN'])
        policy.enforce(admin_context, lowercase_action, self.target)
        policy.enforce(admin_context, uppercase_action, self.target)
class DefaultPolicyTestCase(base.BaseTestCase):
    """Behaviour of the fallback rule when an action has no explicit rule."""

    def setUp(self):
        super(DefaultPolicyTestCase, self).setUp()
        policy.reset()
        policy.init()
        self.addCleanup(policy.reset)
        # "default" is an empty (always-pass) rule; "example:exist" denies.
        self.rules = {
            "default": '',
            "example:exist": '!',
        }
        self._set_rules('default')
        self.context = context.Context('fake', 'fake')

    def _set_rules(self, default_rule):
        # Install self.rules with `default_rule` as the name of the rule
        # consulted for unknown actions.
        rules = common_policy.Rules(
            dict((k, common_policy.parse_rule(v))
                 for k, v in self.rules.items()), default_rule)
        common_policy.set_rules(rules)

    def test_policy_called(self):
        self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
                          self.context, "example:exist", {})

    def test_not_found_policy_calls_default(self):
        # Unknown action falls back to the permissive "default" rule.
        policy.enforce(self.context, "example:noexist", {})

    def test_default_not_found(self):
        # Pointing the default at a nonexistent rule name must deny.
        self._set_rules("default_noexist")
        self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
                          self.context, "example:noexist", {})
# Name and attribute map for a fake API resource, injected into
# attributes.RESOURCE_ATTRIBUTE_MAP by NeutronPolicyTestCase to exercise
# attribute-level and sub-attribute-level policy checks.
FAKE_RESOURCE_NAME = 'something'
FAKE_RESOURCE = {"%ss" % FAKE_RESOURCE_NAME:
                 {'attr': {'allow_post': True,
                           'allow_put': True,
                           'is_visible': True,
                           'default': None,
                           'enforce_policy': True,
                           # 'attr' is a dict of two string sub-attributes;
                           # the tests restrict sub_attr_2 to admins.
                           'validate': {'type:dict':
                                        {'sub_attr_1': {'type:string': None},
                                         'sub_attr_2': {'type:string': None}}}
                           }}}
class NeutronPolicyTestCase(base.BaseTestCase):
    """Neutron-specific policy behaviour.

    Covers attribute and sub-attribute policy checks, tenant-ownership
    checks against parent resources (via the plugin), admin-role
    extraction, and translation of deprecated policy names.
    """

    def setUp(self):
        super(NeutronPolicyTestCase, self).setUp()
        policy.reset()
        policy.init()
        self.addCleanup(policy.reset)
        # Legacy (pre context_is_admin) rule strings used by the
        # *_no_context_is_admin_policy tests below.
        self.admin_only_legacy = "role:admin"
        self.admin_or_owner_legacy = "role:admin or tenant_id:%(tenant_id)s"
        # Add a Fake 'something' resource to RESOURCE_ATTRIBUTE_MAP
        attributes.RESOURCE_ATTRIBUTE_MAP.update(FAKE_RESOURCE)
        self.rules = dict((k, common_policy.parse_rule(v)) for k, v in {
            "context_is_admin": "role:admin",
            "admin_or_network_owner": "rule:context_is_admin or "
                                      "tenant_id:%(network:tenant_id)s",
            "admin_or_owner": ("rule:context_is_admin or "
                               "tenant_id:%(tenant_id)s"),
            "admin_only": "rule:context_is_admin",
            "regular_user": "role:user",
            "shared": "field:networks:shared=True",
            "external": "field:networks:router:external=True",
            "default": '@',

            "create_network": "rule:admin_or_owner",
            "create_network:shared": "rule:admin_only",
            "update_network": '@',
            "update_network:shared": "rule:admin_only",

            "get_network": "rule:admin_or_owner or "
                           "rule:shared or "
                           "rule:external",
            "create_port:mac": "rule:admin_or_network_owner",
            "create_something": "rule:admin_or_owner",
            "create_something:attr": "rule:admin_or_owner",
            "create_something:attr:sub_attr_1": "rule:admin_or_owner",
            "create_something:attr:sub_attr_2": "rule:admin_only",

            "get_firewall_policy": "rule:admin_or_owner or "
                                   "rule:shared",
            "get_firewall_rule": "rule:admin_or_owner or "
                                 "rule:shared"
        }.items())

        def fakepolicyinit():
            # Replace policy.init so each enforce() call re-installs
            # self.rules (which individual tests may have mutated).
            common_policy.set_rules(common_policy.Rules(self.rules))

        def remove_fake_resource():
            del attributes.RESOURCE_ATTRIBUTE_MAP["%ss" % FAKE_RESOURCE_NAME]

        self.patcher = mock.patch.object(neutron.policy,
                                         'init',
                                         new=fakepolicyinit)
        self.patcher.start()
        self.addCleanup(self.patcher.stop)
        self.addCleanup(remove_fake_resource)
        self.context = context.Context('fake', 'fake', roles=['user'])
        # Give the (mocked) manager a real DB plugin instance so parent
        # resource lookups (e.g. get_network) have something to call.
        plugin_klass = importutils.import_class(
            "neutron.db.db_base_plugin_v2.NeutronDbPluginV2")
        self.manager_patcher = mock.patch('neutron.manager.NeutronManager')
        fake_manager = self.manager_patcher.start()
        fake_manager_instance = fake_manager.return_value
        fake_manager_instance.plugin = plugin_klass()
        self.addCleanup(self.manager_patcher.stop)

    def _test_action_on_attr(self, context, action, attr, value,
                             exception=None):
        # Enforce "<action>_network" with a single extra attribute set;
        # expect `exception` if given, otherwise success.
        action = "%s_network" % action
        target = {'tenant_id': 'the_owner', attr: value}
        if exception:
            self.assertRaises(exception, policy.enforce,
                              context, action, target)
        else:
            result = policy.enforce(context, action, target)
            self.assertEqual(result, True)

    def _test_nonadmin_action_on_attr(self, action, attr, value,
                                      exception=None):
        user_context = context.Context('', "user", roles=['user'])
        self._test_action_on_attr(user_context, action, attr,
                                  value, exception)

    def test_nonadmin_write_on_private_fails(self):
        self._test_nonadmin_action_on_attr('create', 'shared', False,
                                           exceptions.PolicyNotAuthorized)

    def test_nonadmin_read_on_private_fails(self):
        self._test_nonadmin_action_on_attr('get', 'shared', False,
                                           exceptions.PolicyNotAuthorized)

    def test_nonadmin_write_on_shared_fails(self):
        self._test_nonadmin_action_on_attr('create', 'shared', True,
                                           exceptions.PolicyNotAuthorized)

    def test_nonadmin_read_on_shared_succeeds(self):
        self._test_nonadmin_action_on_attr('get', 'shared', True)

    def _test_enforce_adminonly_attribute(self, action):
        admin_context = context.get_admin_context()
        target = {'shared': True}
        result = policy.enforce(admin_context, action, target)
        self.assertEqual(result, True)

    def test_enforce_adminonly_attribute_create(self):
        self._test_enforce_adminonly_attribute('create_network')

    def test_enforce_adminonly_attribute_update(self):
        self._test_enforce_adminonly_attribute('update_network')

    def test_enforce_adminonly_attribute_no_context_is_admin_policy(self):
        # With context_is_admin removed, the legacy role-based rules must
        # still grant admin access.
        del self.rules[policy.ADMIN_CTX_POLICY]
        self.rules['admin_only'] = common_policy.parse_rule(
            self.admin_only_legacy)
        self.rules['admin_or_owner'] = common_policy.parse_rule(
            self.admin_or_owner_legacy)
        self._test_enforce_adminonly_attribute('create_network')

    def test_enforce_adminonly_attribute_nonadminctx_returns_403(self):
        action = "create_network"
        target = {'shared': True, 'tenant_id': 'somebody_else'}
        self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
                          self.context, action, target)

    def test_enforce_adminonly_nonadminctx_no_ctx_is_admin_policy_403(self):
        # Same denial must hold when falling back to the legacy rules.
        del self.rules[policy.ADMIN_CTX_POLICY]
        self.rules['admin_only'] = common_policy.parse_rule(
            self.admin_only_legacy)
        self.rules['admin_or_owner'] = common_policy.parse_rule(
            self.admin_or_owner_legacy)
        action = "create_network"
        target = {'shared': True, 'tenant_id': 'somebody_else'}
        self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
                          self.context, action, target)

    def _test_build_subattribute_match_rule(self, validate_value):
        # Temporarily swap in a (possibly bogus) validator and check that
        # no sub-attribute match rule can be built from it; restore after.
        bk = FAKE_RESOURCE['%ss' % FAKE_RESOURCE_NAME]['attr']['validate']
        FAKE_RESOURCE['%ss' % FAKE_RESOURCE_NAME]['attr']['validate'] = (
            validate_value)
        action = "create_something"
        target = {'tenant_id': 'fake', 'attr': {'sub_attr_1': 'x'}}
        self.assertFalse(policy._build_subattr_match_rule(
            'attr',
            FAKE_RESOURCE['%ss' % FAKE_RESOURCE_NAME]['attr'],
            action,
            target))
        FAKE_RESOURCE['%ss' % FAKE_RESOURCE_NAME]['attr']['validate'] = bk

    def test_build_subattribute_match_rule_empty_dict_validator(self):
        self._test_build_subattribute_match_rule({})

    def test_build_subattribute_match_rule_wrong_validation_info(self):
        self._test_build_subattribute_match_rule(
            {'type:dict': 'wrong_stuff'})

    def test_enforce_subattribute(self):
        action = "create_something"
        target = {'tenant_id': 'fake', 'attr': {'sub_attr_1': 'x'}}
        result = policy.enforce(self.context, action, target, None)
        self.assertEqual(result, True)

    def test_enforce_admin_only_subattribute(self):
        # sub_attr_2 is admin_only; an admin context may set it.
        action = "create_something"
        target = {'tenant_id': 'fake', 'attr': {'sub_attr_1': 'x',
                                                'sub_attr_2': 'y'}}
        result = policy.enforce(context.get_admin_context(),
                                action, target, None)
        self.assertEqual(result, True)

    def test_enforce_admin_only_subattribute_nonadminctx_returns_403(self):
        action = "create_something"
        target = {'tenant_id': 'fake', 'attr': {'sub_attr_1': 'x',
                                                'sub_attr_2': 'y'}}
        self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
                          self.context, action, target, None)

    def test_enforce_regularuser_on_read(self):
        action = "get_network"
        target = {'shared': True, 'tenant_id': 'somebody_else'}
        result = policy.enforce(self.context, action, target)
        self.assertTrue(result)

    def test_enforce_firewall_policy_shared(self):
        action = "get_firewall_policy"
        target = {'shared': True, 'tenant_id': 'somebody_else'}
        result = policy.enforce(self.context, action, target)
        self.assertTrue(result)

    def test_enforce_firewall_rule_shared(self):
        action = "get_firewall_rule"
        target = {'shared': True, 'tenant_id': 'somebody_else'}
        result = policy.enforce(self.context, action, target)
        self.assertTrue(result)

    def test_enforce_tenant_id_check(self):
        # Trigger a policy with rule admin_or_owner
        action = "create_network"
        target = {'tenant_id': 'fake'}
        result = policy.enforce(self.context, action, target)
        self.assertTrue(result)

    def test_enforce_tenant_id_check_parent_resource(self):
        # tenant_id:%(network:tenant_id)s requires fetching the parent
        # network from the plugin; fake that lookup.
        def fakegetnetwork(*args, **kwargs):
            return {'tenant_id': 'fake'}

        action = "create_port:mac"
        with mock.patch.object(manager.NeutronManager.get_instance().plugin,
                               'get_network', new=fakegetnetwork):
            target = {'network_id': 'whatever'}
            result = policy.enforce(self.context, action, target)
            self.assertTrue(result)

    def test_enforce_plugin_failure(self):
        def fakegetnetwork(*args, **kwargs):
            raise NotImplementedError('Blast!')

        # the policy check and plugin method we use in this test are irrelevant
        # so long that we verify that, if *f* blows up, the behavior of the
        # policy engine to propagate the exception is preserved
        action = "create_port:mac"
        with mock.patch.object(manager.NeutronManager.get_instance().plugin,
                               'get_network', new=fakegetnetwork):
            target = {'network_id': 'whatever'}
            self.assertRaises(NotImplementedError,
                              policy.enforce,
                              self.context,
                              action,
                              target)

    def test_enforce_tenant_id_check_parent_resource_bw_compatibility(self):
        # The old %(network_tenant_id)s spelling must keep working.
        def fakegetnetwork(*args, **kwargs):
            return {'tenant_id': 'fake'}

        del self.rules['admin_or_network_owner']
        self.rules['admin_or_network_owner'] = common_policy.parse_rule(
            "role:admin or tenant_id:%(network_tenant_id)s")
        action = "create_port:mac"
        with mock.patch.object(manager.NeutronManager.get_instance().plugin,
                               'get_network', new=fakegetnetwork):
            target = {'network_id': 'whatever'}
            result = policy.enforce(self.context, action, target)
            self.assertTrue(result)

    def test_tenant_id_check_no_target_field_raises(self):
        # Try and add a bad rule
        self.assertRaises(
            exceptions.PolicyInitError,
            common_policy.parse_rule,
            'tenant_id:(wrong_stuff)')

    def _test_enforce_tenant_id_raises(self, bad_rule):
        self.rules['admin_or_owner'] = common_policy.parse_rule(bad_rule)
        # Trigger a policy with rule admin_or_owner
        action = "create_network"
        target = {'tenant_id': 'fake'}
        self.assertRaises(exceptions.PolicyCheckError,
                          policy.enforce,
                          self.context, action, target)

    def test_enforce_tenant_id_check_malformed_target_field_raises(self):
        self._test_enforce_tenant_id_raises('tenant_id:%(malformed_field)s')

    def test_enforce_tenant_id_check_invalid_parent_resource_raises(self):
        self._test_enforce_tenant_id_raises('tenant_id:%(foobaz_tenant_id)s')

    def test_get_roles_context_is_admin_rule_missing(self):
        rules = dict((k, common_policy.parse_rule(v)) for k, v in {
            "some_other_rule": "role:admin",
        }.items())
        common_policy.set_rules(common_policy.Rules(rules))
        # 'admin' role is expected for bw compatibility
        self.assertEqual(['admin'], policy.get_admin_roles())

    def test_get_roles_with_role_check(self):
        rules = dict((k, common_policy.parse_rule(v)) for k, v in {
            policy.ADMIN_CTX_POLICY: "role:admin",
        }.items())
        common_policy.set_rules(common_policy.Rules(rules))
        self.assertEqual(['admin'], policy.get_admin_roles())

    def test_get_roles_with_rule_check(self):
        # get_admin_roles must follow rule: indirection.
        rules = dict((k, common_policy.parse_rule(v)) for k, v in {
            policy.ADMIN_CTX_POLICY: "rule:some_other_rule",
            "some_other_rule": "role:admin",
        }.items())
        common_policy.set_rules(common_policy.Rules(rules))
        self.assertEqual(['admin'], policy.get_admin_roles())

    def test_get_roles_with_or_check(self):
        self.rules = dict((k, common_policy.parse_rule(v)) for k, v in {
            policy.ADMIN_CTX_POLICY: "rule:rule1 or rule:rule2",
            "rule1": "role:admin_1",
            "rule2": "role:admin_2"
        }.items())
        self.assertEqual(['admin_1', 'admin_2'],
                         policy.get_admin_roles())

    def test_get_roles_with_other_rules(self):
        # Non-role checks in the expression are ignored.
        self.rules = dict((k, common_policy.parse_rule(v)) for k, v in {
            policy.ADMIN_CTX_POLICY: "role:xxx or other:value",
        }.items())
        self.assertEqual(['xxx'], policy.get_admin_roles())

    def _test_set_rules_with_deprecated_policy(self, input_rules,
                                               expected_rules):
        policy._set_rules(json.dumps(input_rules))
        # verify deprecated policy has been removed
        for pol in input_rules.keys():
            self.assertNotIn(pol, common_policy._rules)
        # verify deprecated policy was correctly translated. Iterate
        # over items for compatibility with unittest2 in python 2.6
        for rule in expected_rules:
            self.assertIn(rule, common_policy._rules)
            self.assertEqual(str(common_policy._rules[rule]),
                             expected_rules[rule])

    def test_set_rules_with_deprecated_view_policy(self):
        self._test_set_rules_with_deprecated_policy(
            {'extension:router:view': 'rule:admin_or_owner'},
            {'get_network:router:external': 'rule:admin_or_owner'})

    def test_set_rules_with_deprecated_set_policy(self):
        expected_policies = ['create_network:provider:network_type',
                             'create_network:provider:physical_network',
                             'create_network:provider:segmentation_id',
                             'update_network:provider:network_type',
                             'update_network:provider:physical_network',
                             'update_network:provider:segmentation_id']
        # NB: 'policy' below shadows the policy module, but only inside
        # the generator expression.
        self._test_set_rules_with_deprecated_policy(
            {'extension:provider_network:set': 'rule:admin_only'},
            dict((policy, 'rule:admin_only') for policy in
                 expected_policies))
| apache-2.0 |
joelthelion/autojump | tests/unit/autojump_utils_test.py | 6 | 3982 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import mock
import pytest
sys.path.append(os.path.join(os.getcwd(), 'bin')) # noqa
import autojump_utils
from autojump_utils import encode_local
from autojump_utils import first
from autojump_utils import get_tab_entry_info
from autojump_utils import has_uppercase
from autojump_utils import in_bash
from autojump_utils import is_python3
from autojump_utils import last
from autojump_utils import sanitize
from autojump_utils import second
from autojump_utils import surround_quotes
from autojump_utils import take
from autojump_utils import unico
# Python 3 compatibility shim: alias names the Python-2-oriented tests
# below rely on (os.getcwdu and xrange) so they run unchanged on py3.
if is_python3():
    os.getcwdu = os.getcwd
    xrange = range
def u(string):
    """
    This is a unicode() wrapper since u'string' is a Python3 compiler error.
    """
    if not is_python3():
        return unicode(string, encoding='utf-8', errors='strict')
    return string
# strings
@pytest.mark.skipif(is_python3(), reason='Unicode sucks.')
@mock.patch.object(sys, 'getfilesystemencoding', return_value='ascii')
def test_encode_local_ascii(_):
    """With an ASCII filesystem encoding, a plain string encodes to bytes."""
    encoded = encode_local(u('foo'))
    assert encoded == b'foo'
@pytest.mark.skipif(is_python3(), reason='Unicode sucks.')
@pytest.mark.xfail(reason='disabled due to pytest bug: https://bitbucket.org/hpk42/pytest/issue/534/pytest-fails-to-catch-unicodedecodeerrors')  # noqa
@mock.patch.object(sys, 'getfilesystemencoding', return_value='ascii')
def test_encode_local_ascii_fails(_):
    """Non-ASCII input cannot be encoded under an ASCII filesystem encoding."""
    text = u('日本語')
    with pytest.raises(UnicodeDecodeError):
        encode_local(text)
@pytest.mark.skipif(is_python3(), reason='Unicode sucks.')
@mock.patch.object(sys, 'getfilesystemencoding', return_value=None)
def test_encode_local_empty(_):
    """A missing filesystem encoding still yields a usable string."""
    result = encode_local(b'foo')
    assert result == u('foo')
@pytest.mark.skipif(is_python3(), reason='Unicode sucks.')
@mock.patch.object(sys, 'getfilesystemencoding', return_value='utf-8')
def test_encode_local_unicode(_):
    """Under UTF-8, both byte and unicode input normalize to unicode."""
    for value in (b'foo', u('foo')):
        assert encode_local(value) == u('foo')
def test_has_uppercase():
    """Any uppercase letter is detected; lowercase-only and empty are not."""
    for word in ('Foo', 'foO'):
        assert has_uppercase(word)
    for word in ('foo', ''):
        assert not has_uppercase(word)
@mock.patch.object(autojump_utils, 'in_bash', return_value=True)
def test_surround_quotes_in_bash(_):
    """Inside bash, completion entries get wrapped in double quotes."""
    assert '"foo"' == surround_quotes('foo')
@mock.patch.object(autojump_utils, 'in_bash', return_value=False)
def test_dont_surround_quotes_not_in_bash(_):
    """Outside bash, entries pass through unquoted."""
    assert 'foo' == surround_quotes('foo')
def test_sanitize():
    """sanitize strips trailing slashes (except for root) and unicodes."""
    cleaned = sanitize([r'/foo/bar/', r'/'])
    assert cleaned == [u('/foo/bar'), u('/')]
    assert sanitize([]) == []
@pytest.mark.skipif(is_python3(), reason='Unicode sucks.')
def test_unico():
    """unico converts byte strings to unicode and passes unicode through."""
    for text in ('blah', '日本語'):
        assert unico(str(text)) == u(text)
    assert unico(u('でもおれは中国人だ。')) == u('でもおれは中国人だ。')
# iteration
def test_first():
    """first() yields the initial element, or None for an empty iterable."""
    assert first([]) is None
    assert first(xrange(5)) == 0
def test_second():
    """second() yields the element at index 1, or None when absent."""
    assert second([]) is None
    assert second(xrange(5)) == 1
def test_last():
    """last() yields the final element, or None for an empty iterable."""
    assert last([]) is None
    assert last(xrange(4)) == 3
def test_take():
    """take(n, it) yields at most n leading elements."""
    cases = [(1, [0]), (2, [0, 1]), (4, [0, 1, 2])]
    for count, expected in cases:
        assert list(take(count, xrange(3))) == expected
    assert list(take(10, [])) == []
# environment variables
def test_in_bash():
    """in_bash() keys off $SHELL: true for bash paths, false for others."""
    for path in ['/bin/bash', '/usr/bin/bash']:
        os.environ['SHELL'] = path
        assert in_bash()
    for path in ['/bin/zsh', '/usr/bin/zsh']:
        # BUG FIX: the original assigned the literal '/usr/bin/zsh' here,
        # ignoring the loop variable, so '/bin/zsh' was never exercised.
        os.environ['SHELL'] = path
        assert not in_bash()
# helper functions
def test_get_needle():
    """A needle with only the separator suffix has no index and no path."""
    entry = 'foo__'
    assert get_tab_entry_info(entry, '__') == ('foo', None, None)
def test_get_index():
    """needle + separator + digits parses out the numeric index."""
    entry = 'foo__2'
    assert get_tab_entry_info(entry, '__') == ('foo', 2, None)
def test_get_path():
    """needle + index + path sections all parse out."""
    needle, index, path = get_tab_entry_info('foo__3__/foo/bar', '__')
    assert (needle, index, path) == ('foo', 3, '/foo/bar')
def test_get_none():
    """Input without the separator yields all-None."""
    result = get_tab_entry_info('gibberish content', '__')
    assert result == (None, None, None)
| gpl-3.0 |
damonkohler/sl4a | python/src/Lib/lib2to3/tests/data/py2_test_grammar.py | 51 | 30527 | # Python 2's Lib/test/test_grammar.py (r66189)
# Python test set -- part 1, grammar.
# This just tests whether the parser accepts them all.
# NOTE: When you run this test as a script from the command line, you
# get warnings about certain hex/oct constants. Since those are
# issued by the parser, you can't suppress them by adding a
# filterwarnings() call to this module. Therefore, to shut up the
# regression test, the filterwarnings() call has been added to
# regrtest.py.
from test.test_support import run_unittest, check_syntax_error
import unittest
import sys
# testing import *
from sys import *
class TokenTests(unittest.TestCase):
    """Tokenizer-level grammar checks: line continuation and numeric and
    string literal forms.

    NOTE: this is deliberately Python 2 source kept as lib2to3 fixture
    data; the literals below (0377, 0L, etc.) are not valid Python 3.
    """

    def testBackslash(self):
        # Backslash means line continuation:
        x = 1 \
        + 1
        self.assertEquals(x, 2, 'backslash for line continuation')
        # Backslash does not means continuation in comments :\
        x = 0
        self.assertEquals(x, 0, 'backslash ending comment')

    def testPlainIntegers(self):
        self.assertEquals(0xff, 255)
        self.assertEquals(0377, 255)
        self.assertEquals(2147483647, 017777777777)
        # "0x" is not a valid literal
        self.assertRaises(SyntaxError, eval, "0x")
        from sys import maxint
        if maxint == 2147483647:
            self.assertEquals(-2147483647-1, -020000000000)
            # XXX -2147483648
            self.assert_(037777777777 > 0)
            self.assert_(0xffffffff > 0)
            # Literals just past maxint must not overflow at parse time.
            for s in '2147483648', '040000000000', '0x100000000':
                try:
                    x = eval(s)
                except OverflowError:
                    self.fail("OverflowError on huge integer literal %r" % s)
        elif maxint == 9223372036854775807:
            self.assertEquals(-9223372036854775807-1, -01000000000000000000000)
            self.assert_(01777777777777777777777 > 0)
            self.assert_(0xffffffffffffffff > 0)
            for s in '9223372036854775808', '02000000000000000000000', \
                     '0x10000000000000000':
                try:
                    x = eval(s)
                except OverflowError:
                    self.fail("OverflowError on huge integer literal %r" % s)
        else:
            self.fail('Weird maxint value %r' % maxint)

    def testLongIntegers(self):
        # Both L and l suffixes, in hex/octal/decimal, must tokenize.
        x = 0L
        x = 0l
        x = 0xffffffffffffffffL
        x = 0xffffffffffffffffl
        x = 077777777777777777L
        x = 077777777777777777l
        x = 123456789012345678901234567890L
        x = 123456789012345678901234567890l

    def testFloats(self):
        # All accepted float literal spellings.
        x = 3.14
        x = 314.
        x = 0.314
        # XXX x = 000.314
        x = .314
        x = 3e14
        x = 3E14
        x = 3e-14
        x = 3e+14
        x = 3.e14
        x = .3e14
        x = 3.1e4

    def testStringLiterals(self):
        x = ''; y = ""; self.assert_(len(x) == 0 and x == y)
        x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39)
        x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34)
        x = "doesn't \"shrink\" does it"
        y = 'doesn\'t "shrink" does it'
        self.assert_(len(x) == 24 and x == y)
        x = "does \"shrink\" doesn't it"
        y = 'does "shrink" doesn\'t it'
        self.assert_(len(x) == 24 and x == y)
        # Triple-quoted, triple-single-quoted and backslash-continued
        # forms must all produce the identical string value.
        x = """
The "quick"
brown fox
jumps over
the 'lazy' dog.
"""
        y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n'
        self.assertEquals(x, y)
        y = '''
The "quick"
brown fox
jumps over
the 'lazy' dog.
'''
        self.assertEquals(x, y)
        y = "\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the 'lazy' dog.\n\
"
        self.assertEquals(x, y)
        y = '\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the \'lazy\' dog.\n\
'
        self.assertEquals(x, y)
class GrammarTests(unittest.TestCase):
# single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
# XXX can't test in a script -- this rule is only used when interactive
# file_input: (NEWLINE | stmt)* ENDMARKER
# Being tested as this very moment this very module
# expr_input: testlist NEWLINE
# XXX Hard to test -- used only in calls to input()
def testEvalInput(self):
# testlist ENDMARKER
x = eval('1, 0 or 1')
def testFuncdef(self):
### 'def' NAME parameters ':' suite
### parameters: '(' [varargslist] ')'
### varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' ('**'|'*' '*') NAME]
### | ('**'|'*' '*') NAME)
### | fpdef ['=' test] (',' fpdef ['=' test])* [',']
### fpdef: NAME | '(' fplist ')'
### fplist: fpdef (',' fpdef)* [',']
### arglist: (argument ',')* (argument | *' test [',' '**' test] | '**' test)
### argument: [test '='] test # Really [keyword '='] test
def f1(): pass
f1()
f1(*())
f1(*(), **{})
def f2(one_argument): pass
def f3(two, arguments): pass
def f4(two, (compound, (argument, list))): pass
def f5((compound, first), two): pass
self.assertEquals(f2.func_code.co_varnames, ('one_argument',))
self.assertEquals(f3.func_code.co_varnames, ('two', 'arguments'))
if sys.platform.startswith('java'):
self.assertEquals(f4.func_code.co_varnames,
('two', '(compound, (argument, list))', 'compound', 'argument',
'list',))
self.assertEquals(f5.func_code.co_varnames,
('(compound, first)', 'two', 'compound', 'first'))
else:
self.assertEquals(f4.func_code.co_varnames,
('two', '.1', 'compound', 'argument', 'list'))
self.assertEquals(f5.func_code.co_varnames,
('.0', 'two', 'compound', 'first'))
def a1(one_arg,): pass
def a2(two, args,): pass
def v0(*rest): pass
def v1(a, *rest): pass
def v2(a, b, *rest): pass
def v3(a, (b, c), *rest): return a, b, c, rest
f1()
f2(1)
f2(1,)
f3(1, 2)
f3(1, 2,)
f4(1, (2, (3, 4)))
v0()
v0(1)
v0(1,)
v0(1,2)
v0(1,2,3,4,5,6,7,8,9,0)
v1(1)
v1(1,)
v1(1,2)
v1(1,2,3)
v1(1,2,3,4,5,6,7,8,9,0)
v2(1,2)
v2(1,2,3)
v2(1,2,3,4)
v2(1,2,3,4,5,6,7,8,9,0)
v3(1,(2,3))
v3(1,(2,3),4)
v3(1,(2,3),4,5,6,7,8,9,0)
# ceval unpacks the formal arguments into the first argcount names;
# thus, the names nested inside tuples must appear after these names.
if sys.platform.startswith('java'):
self.assertEquals(v3.func_code.co_varnames, ('a', '(b, c)', 'rest', 'b', 'c'))
else:
self.assertEquals(v3.func_code.co_varnames, ('a', '.1', 'rest', 'b', 'c'))
self.assertEquals(v3(1, (2, 3), 4), (1, 2, 3, (4,)))
def d01(a=1): pass
d01()
d01(1)
d01(*(1,))
d01(**{'a':2})
def d11(a, b=1): pass
d11(1)
d11(1, 2)
d11(1, **{'b':2})
def d21(a, b, c=1): pass
d21(1, 2)
d21(1, 2, 3)
d21(*(1, 2, 3))
d21(1, *(2, 3))
d21(1, 2, *(3,))
d21(1, 2, **{'c':3})
def d02(a=1, b=2): pass
d02()
d02(1)
d02(1, 2)
d02(*(1, 2))
d02(1, *(2,))
d02(1, **{'b':2})
d02(**{'a': 1, 'b': 2})
def d12(a, b=1, c=2): pass
d12(1)
d12(1, 2)
d12(1, 2, 3)
def d22(a, b, c=1, d=2): pass
d22(1, 2)
d22(1, 2, 3)
d22(1, 2, 3, 4)
def d01v(a=1, *rest): pass
d01v()
d01v(1)
d01v(1, 2)
d01v(*(1, 2, 3, 4))
d01v(*(1,))
d01v(**{'a':2})
def d11v(a, b=1, *rest): pass
d11v(1)
d11v(1, 2)
d11v(1, 2, 3)
def d21v(a, b, c=1, *rest): pass
d21v(1, 2)
d21v(1, 2, 3)
d21v(1, 2, 3, 4)
d21v(*(1, 2, 3, 4))
d21v(1, 2, **{'c': 3})
def d02v(a=1, b=2, *rest): pass
d02v()
d02v(1)
d02v(1, 2)
d02v(1, 2, 3)
d02v(1, *(2, 3, 4))
d02v(**{'a': 1, 'b': 2})
def d12v(a, b=1, c=2, *rest): pass
d12v(1)
d12v(1, 2)
d12v(1, 2, 3)
d12v(1, 2, 3, 4)
d12v(*(1, 2, 3, 4))
d12v(1, 2, *(3, 4, 5))
d12v(1, *(2,), **{'c': 3})
def d22v(a, b, c=1, d=2, *rest): pass
d22v(1, 2)
d22v(1, 2, 3)
d22v(1, 2, 3, 4)
d22v(1, 2, 3, 4, 5)
d22v(*(1, 2, 3, 4))
d22v(1, 2, *(3, 4, 5))
d22v(1, *(2, 3), **{'d': 4})
def d31v((x)): pass
d31v(1)
def d32v((x,)): pass
d32v((1,))
# keyword arguments after *arglist
def f(*args, **kwargs):
return args, kwargs
self.assertEquals(f(1, x=2, *[3, 4], y=5), ((1, 3, 4),
{'x':2, 'y':5}))
self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)")
self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)")
# Check ast errors in *args and *kwargs
check_syntax_error(self, "f(*g(1=2))")
check_syntax_error(self, "f(**g(1=2))")
def testLambdef(self):
### lambdef: 'lambda' [varargslist] ':' test
l1 = lambda : 0
self.assertEquals(l1(), 0)
l2 = lambda : a[d] # XXX just testing the expression
l3 = lambda : [2 < x for x in [-1, 3, 0L]]
self.assertEquals(l3(), [0, 1, 0])
l4 = lambda x = lambda y = lambda z=1 : z : y() : x()
self.assertEquals(l4(), 1)
l5 = lambda x, y, z=2: x + y + z
self.assertEquals(l5(1, 2), 5)
self.assertEquals(l5(1, 2, 3), 6)
check_syntax_error(self, "lambda x: x = 2")
check_syntax_error(self, "lambda (None,): None")
### stmt: simple_stmt | compound_stmt
# Tested below
def testSimpleStmt(self):
### simple_stmt: small_stmt (';' small_stmt)* [';']
x = 1; pass; del x
def foo():
# verify statments that end with semi-colons
x = 1; pass; del x;
foo()
### small_stmt: expr_stmt | print_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt | exec_stmt
# Tested below
def testExprStmt(self):
# (exprlist '=')* exprlist
1
1, 2, 3
x = 1
x = 1, 2, 3
x = y = z = 1, 2, 3
x, y, z = 1, 2, 3
abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4)
check_syntax_error(self, "x + 1 = 1")
check_syntax_error(self, "a + 1 = b + 2")
def testPrintStmt(self):
# 'print' (test ',')* [test]
import StringIO
# Can't test printing to real stdout without comparing output
# which is not available in unittest.
save_stdout = sys.stdout
sys.stdout = StringIO.StringIO()
print 1, 2, 3
print 1, 2, 3,
print
print 0 or 1, 0 or 1,
print 0 or 1
# 'print' '>>' test ','
print >> sys.stdout, 1, 2, 3
print >> sys.stdout, 1, 2, 3,
print >> sys.stdout
print >> sys.stdout, 0 or 1, 0 or 1,
print >> sys.stdout, 0 or 1
# test printing to an instance
class Gulp:
def write(self, msg): pass
gulp = Gulp()
print >> gulp, 1, 2, 3
print >> gulp, 1, 2, 3,
print >> gulp
print >> gulp, 0 or 1, 0 or 1,
print >> gulp, 0 or 1
# test print >> None
def driver():
oldstdout = sys.stdout
sys.stdout = Gulp()
try:
tellme(Gulp())
tellme()
finally:
sys.stdout = oldstdout
# we should see this once
def tellme(file=sys.stdout):
print >> file, 'hello world'
driver()
# we should not see this at all
def tellme(file=None):
print >> file, 'goodbye universe'
driver()
self.assertEqual(sys.stdout.getvalue(), '''\
1 2 3
1 2 3
1 1 1
1 2 3
1 2 3
1 1 1
hello world
''')
sys.stdout = save_stdout
# syntax errors
check_syntax_error(self, 'print ,')
check_syntax_error(self, 'print >> x,')
def testDelStmt(self):
# 'del' exprlist
abc = [1,2,3]
x, y, z = abc
xyz = x, y, z
del abc
del x, y, (z, xyz)
def testPassStmt(self):
# 'pass'
pass
# flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt
# Tested below
def testBreakStmt(self):
# 'break'
while 1: break
def testContinueStmt(self):
# 'continue'
i = 1
while i: i = 0; continue
msg = ""
while not msg:
msg = "ok"
try:
continue
msg = "continue failed to continue inside try"
except:
msg = "continue inside try called except block"
if msg != "ok":
self.fail(msg)
msg = ""
while not msg:
msg = "finally block not called"
try:
continue
finally:
msg = "ok"
if msg != "ok":
self.fail(msg)
def test_break_continue_loop(self):
# This test warrants an explanation. It is a test specifically for SF bugs
# #463359 and #462937. The bug is that a 'break' statement executed or
# exception raised inside a try/except inside a loop, *after* a continue
# statement has been executed in that loop, will cause the wrong number of
# arguments to be popped off the stack and the instruction pointer reset to
# a very small number (usually 0.) Because of this, the following test
# *must* written as a function, and the tracking vars *must* be function
# arguments with default values. Otherwise, the test will loop and loop.
def test_inner(extra_burning_oil = 1, count=0):
big_hippo = 2
while big_hippo:
count += 1
try:
if extra_burning_oil and big_hippo == 1:
extra_burning_oil -= 1
break
big_hippo -= 1
continue
except:
raise
if count > 2 or big_hippo <> 1:
self.fail("continue then break in try/except in loop broken!")
test_inner()
def testReturn(self):
# 'return' [testlist]
def g1(): return
def g2(): return 1
g1()
x = g2()
check_syntax_error(self, "class foo:return 1")
def testYield(self):
check_syntax_error(self, "class foo:yield 1")
def testRaise(self):
# 'raise' test [',' test]
try: raise RuntimeError, 'just testing'
except RuntimeError: pass
try: raise KeyboardInterrupt
except KeyboardInterrupt: pass
def testImport(self):
# 'import' dotted_as_names
import sys
import time, sys
# 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names)
from time import time
from time import (time)
# not testable inside a function, but already done at top of the module
# from sys import *
from sys import path, argv
from sys import (path, argv)
from sys import (path, argv,)
def testGlobal(self):
# 'global' NAME (',' NAME)*
global a
global a, b
global one, two, three, four, five, six, seven, eight, nine, ten
def testExec(self):
# 'exec' expr ['in' expr [',' expr]]
z = None
del z
exec 'z=1+1\n'
if z != 2: self.fail('exec \'z=1+1\'\\n')
del z
exec 'z=1+1'
if z != 2: self.fail('exec \'z=1+1\'')
z = None
del z
import types
if hasattr(types, "UnicodeType"):
exec r"""if 1:
exec u'z=1+1\n'
if z != 2: self.fail('exec u\'z=1+1\'\\n')
del z
exec u'z=1+1'
if z != 2: self.fail('exec u\'z=1+1\'')"""
g = {}
exec 'z = 1' in g
if g.has_key('__builtins__'): del g['__builtins__']
if g != {'z': 1}: self.fail('exec \'z = 1\' in g')
g = {}
l = {}
import warnings
warnings.filterwarnings("ignore", "global statement", module="<string>")
exec 'global a; a = 1; b = 2' in g, l
if g.has_key('__builtins__'): del g['__builtins__']
if l.has_key('__builtins__'): del l['__builtins__']
if (g, l) != ({'a':1}, {'b':2}):
self.fail('exec ... in g (%s), l (%s)' %(g,l))
def testAssert(self):
# assert_stmt: 'assert' test [',' test]
assert 1
assert 1, 1
assert lambda x:x
assert 1, lambda x:x+1
try:
assert 0, "msg"
except AssertionError, e:
self.assertEquals(e.args[0], "msg")
else:
if __debug__:
self.fail("AssertionError not raised by assert 0")
### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef
# Tested below
def testIf(self):
# 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
if 1: pass
if 1: pass
else: pass
if 0: pass
elif 0: pass
if 0: pass
elif 0: pass
elif 0: pass
elif 0: pass
else: pass
def testWhile(self):
# 'while' test ':' suite ['else' ':' suite]
while 0: pass
while 0: pass
else: pass
# Issue1920: "while 0" is optimized away,
# ensure that the "else" clause is still present.
x = 0
while 0:
x = 1
else:
x = 2
self.assertEquals(x, 2)
def testFor(self):
# 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
for i in 1, 2, 3: pass
for i, j, k in (): pass
else: pass
class Squares:
def __init__(self, max):
self.max = max
self.sofar = []
def __len__(self): return len(self.sofar)
def __getitem__(self, i):
if not 0 <= i < self.max: raise IndexError
n = len(self.sofar)
while n <= i:
self.sofar.append(n*n)
n = n+1
return self.sofar[i]
n = 0
for x in Squares(10): n = n+x
if n != 285:
self.fail('for over growing sequence')
result = []
for x, in [(1,), (2,), (3,)]:
result.append(x)
self.assertEqual(result, [1, 2, 3])
def testTry(self):
### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
### | 'try' ':' suite 'finally' ':' suite
### except_clause: 'except' [expr [('as' | ',') expr]]
try:
1/0
except ZeroDivisionError:
pass
else:
pass
try: 1/0
except EOFError: pass
except TypeError as msg: pass
except RuntimeError, msg: pass
except: pass
else: pass
try: 1/0
except (EOFError, TypeError, ZeroDivisionError): pass
try: 1/0
except (EOFError, TypeError, ZeroDivisionError), msg: pass
try: pass
finally: pass
def testSuite(self):
# simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
if 1: pass
if 1:
pass
if 1:
#
#
#
pass
pass
#
pass
#
def testTest(self):
### and_test ('or' and_test)*
### and_test: not_test ('and' not_test)*
### not_test: 'not' not_test | comparison
if not 1: pass
if 1 and 1: pass
if 1 or 1: pass
if not not not 1: pass
if not 1 and 1 and 1: pass
if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass
def testComparison(self):
### comparison: expr (comp_op expr)*
### comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
if 1: pass
x = (1 == 1)
if 1 == 1: pass
if 1 != 1: pass
if 1 <> 1: pass
if 1 < 1: pass
if 1 > 1: pass
if 1 <= 1: pass
if 1 >= 1: pass
if 1 is 1: pass
if 1 is not 1: pass
if 1 in (): pass
if 1 not in (): pass
if 1 < 1 > 1 == 1 >= 1 <= 1 <> 1 != 1 in 1 not in 1 is 1 is not 1: pass
def testBinaryMaskOps(self):
x = 1 & 1
x = 1 ^ 1
x = 1 | 1
def testShiftOps(self):
x = 1 << 1
x = 1 >> 1
x = 1 << 1 >> 1
def testAdditiveOps(self):
x = 1
x = 1 + 1
x = 1 - 1 - 1
x = 1 - 1 + 1 - 1 + 1
def testMultiplicativeOps(self):
x = 1 * 1
x = 1 / 1
x = 1 % 1
x = 1 / 1 * 1 % 1
def testUnaryOps(self):
x = +1
x = -1
x = ~1
x = ~1 ^ 1 & 1 | 1 & 1 ^ -1
x = -1*1/1 + 1*1 - ---1*1
def testSelectors(self):
### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME
### subscript: expr | [expr] ':' [expr]
import sys, time
c = sys.path[0]
x = time.time()
x = sys.modules['time'].time()
a = '01234'
c = a[0]
c = a[-1]
s = a[0:5]
s = a[:5]
s = a[0:]
s = a[:]
s = a[-5:]
s = a[:-1]
s = a[-4:-3]
# A rough test of SF bug 1333982. http://python.org/sf/1333982
# The testing here is fairly incomplete.
# Test cases should include: commas with 1 and 2 colons
d = {}
d[1] = 1
d[1,] = 2
d[1,2] = 3
d[1,2,3] = 4
L = list(d)
L.sort()
self.assertEquals(str(L), '[1, (1,), (1, 2), (1, 2, 3)]')
def testAtoms(self):
### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictmaker] '}' | '`' testlist '`' | NAME | NUMBER | STRING
### dictmaker: test ':' test (',' test ':' test)* [',']
x = (1)
x = (1 or 2 or 3)
x = (1 or 2 or 3, 2, 3)
x = []
x = [1]
x = [1 or 2 or 3]
x = [1 or 2 or 3, 2, 3]
x = []
x = {}
x = {'one': 1}
x = {'one': 1,}
x = {'one' or 'two': 1 or 2}
x = {'one': 1, 'two': 2}
x = {'one': 1, 'two': 2,}
x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6}
x = `x`
x = `1 or 2 or 3`
self.assertEqual(`1,2`, '(1, 2)')
x = x
x = 'x'
x = 123
### exprlist: expr (',' expr)* [',']
### testlist: test (',' test)* [',']
# These have been exercised enough above
def testClassdef(self):
# 'class' NAME ['(' [testlist] ')'] ':' suite
class B: pass
class B2(): pass
class C1(B): pass
class C2(B): pass
class D(C1, C2, B): pass
class C:
def meth1(self): pass
def meth2(self, arg): pass
def meth3(self, a1, a2): pass
# decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
# decorators: decorator+
# decorated: decorators (classdef | funcdef)
def class_decorator(x):
x.decorated = True
return x
@class_decorator
class G:
pass
self.assertEqual(G.decorated, True)
def testListcomps(self):
# list comprehension tests
nums = [1, 2, 3, 4, 5]
strs = ["Apple", "Banana", "Coconut"]
spcs = [" Apple", " Banana ", "Coco nut "]
self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut'])
self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15])
self.assertEqual([x for x in nums if x > 2], [3, 4, 5])
self.assertEqual([(i, s) for i in nums for s in strs],
[(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'),
(2, 'Apple'), (2, 'Banana'), (2, 'Coconut'),
(3, 'Apple'), (3, 'Banana'), (3, 'Coconut'),
(4, 'Apple'), (4, 'Banana'), (4, 'Coconut'),
(5, 'Apple'), (5, 'Banana'), (5, 'Coconut')])
self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]],
[(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'),
(3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'),
(5, 'Banana'), (5, 'Coconut')])
self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)],
[[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]])
def test_in_func(l):
return [None < x < 3 for x in l if x > 2]
self.assertEqual(test_in_func(nums), [False, False, False])
def test_nested_front():
self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]],
[[1, 2], [3, 4], [5, 6]])
test_nested_front()
check_syntax_error(self, "[i, s for i in nums for s in strs]")
check_syntax_error(self, "[x if y]")
suppliers = [
(1, "Boeing"),
(2, "Ford"),
(3, "Macdonalds")
]
parts = [
(10, "Airliner"),
(20, "Engine"),
(30, "Cheeseburger")
]
suppart = [
(1, 10), (1, 20), (2, 20), (3, 30)
]
x = [
(sname, pname)
for (sno, sname) in suppliers
for (pno, pname) in parts
for (sp_sno, sp_pno) in suppart
if sno == sp_sno and pno == sp_pno
]
self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'),
('Macdonalds', 'Cheeseburger')])
def testGenexps(self):
# generator expression tests
g = ([x for x in range(10)] for x in range(1))
self.assertEqual(g.next(), [x for x in range(10)])
try:
g.next()
self.fail('should produce StopIteration exception')
except StopIteration:
pass
a = 1
try:
g = (a for d in a)
g.next()
self.fail('should produce TypeError')
except TypeError:
pass
self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd'])
self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy'])
a = [x for x in range(10)]
b = (x for x in (y for y in a))
self.assertEqual(sum(b), sum([x for x in range(10)]))
self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)]))
self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2]))
self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0)
check_syntax_error(self, "foo(x for x in range(10), 100)")
check_syntax_error(self, "foo(100, x for x in range(10))")
def testComprehensionSpecials(self):
# test for outmost iterable precomputation
x = 10; g = (i for i in range(x)); x = 5
self.assertEqual(len(list(g)), 10)
# This should hold, since we're only precomputing outmost iterable.
x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x))
x = 5; t = True;
self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g))
# Grammar allows multiple adjacent 'if's in listcomps and genexps,
# even though it's silly. Make sure it works (ifelse broke this.)
self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7])
self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7])
# verify unpacking single element tuples in listcomp/genexp.
self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6])
self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9])
def testIfElseExpr(self):
# Test ifelse expressions in various cases
def _checkeval(msg, ret):
"helper to check that evaluation of expressions is done correctly"
print x
return ret
self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True])
self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True])
self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True])
self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5)
self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5)
self.assertEqual((5 and 6 if 0 else 1), 1)
self.assertEqual(((5 and 6) if 0 else 1), 1)
self.assertEqual((5 and (6 if 1 else 1)), 6)
self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3)
self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1)
self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5)
self.assertEqual((not 5 if 1 else 1), False)
self.assertEqual((not 5 if 0 else 1), 1)
self.assertEqual((6 + 1 if 1 else 2), 7)
self.assertEqual((6 - 1 if 1 else 2), 5)
self.assertEqual((6 * 2 if 1 else 4), 12)
self.assertEqual((6 / 2 if 1 else 3), 3)
self.assertEqual((6 < 4 if 0 else 2), 2)
def test_main():
run_unittest(TokenTests, GrammarTests)
if __name__ == '__main__':
test_main()
| apache-2.0 |
bvisness/the-blue-alliance | cron_main.py | 3 | 8426 | #!/usr/bin/env python
import webapp2
import tba_config
from controllers.backup_controller import TbaCSVBackupEventsEnqueue, TbaCSVBackupEventDo, TbaCSVRestoreEventsEnqueue, TbaCSVRestoreEventDo
from controllers.backup_controller import TbaCSVBackupTeamsEnqueue, TbaCSVBackupTeamsDo
from controllers.datafeed_controller import TbaVideosGet, TbaVideosEnqueue
from controllers.datafeed_controller import FmsEventListGet, FmsTeamListGet
from controllers.datafeed_controller import OffseasonMatchesGet
from controllers.datafeed_controller import TwitterFrcfmsMatchesGet
from controllers.datafeed_controller import FMSAPIAwardsEnqueue, FMSAPIEventAlliancesEnqueue, FMSAPIEventRankingsEnqueue, FMSAPIMatchesEnqueue
from controllers.datafeed_controller import FMSAPIAwardsGet, FMSAPIEventAlliancesGet, FMSAPIEventRankingsGet, FMSAPIMatchesGet
from controllers.datafeed_controller import UsfirstEventDetailsEnqueue, UsfirstEventDetailsGet, UsfirstEventListGet
from controllers.datafeed_controller import UsfirstAwardsEnqueue, UsfirstAwardsGet
from controllers.datafeed_controller import UsfirstEventAlliancesEnqueue, UsfirstEventAlliancesGet
from controllers.datafeed_controller import UsfirstMatchesEnqueue, UsfirstMatchesGet, UsfirstEventRankingsEnqueue, UsfirstEventRankingsGet
from controllers.datafeed_controller import UsfirstTeamDetailsEnqueue, UsfirstTeamDetailsRollingEnqueue, UsfirstTeamDetailsGet, UsfirstTeamsTpidsGet
from controllers.datafeed_controller import UsfirstPre2003TeamEventsEnqueue, UsfirstPre2003TeamEventsGet
from controllers.cron_controller import DistrictPointsCalcEnqueue, DistrictPointsCalcDo
from controllers.cron_controller import EventShortNameCalcEnqueue, EventShortNameCalcDo
from controllers.cron_controller import EventTeamRepairDo, EventTeamUpdate, EventTeamUpdateEnqueue
from controllers.cron_controller import EventMatchstatsDo, EventMatchstatsEnqueue
from controllers.cron_controller import FinalMatchesRepairDo
from controllers.cron_controller import YearInsightsEnqueue, YearInsightsDo, OverallInsightsEnqueue, OverallInsightsDo, TypeaheadCalcEnqueue, TypeaheadCalcDo
from controllers.cron_controller import UpcomingNotificationDo
from controllers.admin.admin_cron_controller import AdminMobileClearEnqueue, AdminMobileClear, AdminSubsClearEnqueue, AdminSubsClear, \
AdminWebhooksClearEnqueue, AdminWebhooksClear
app = webapp2.WSGIApplication([('/tasks/enqueue/csv_backup_events', TbaCSVBackupEventsEnqueue),
('/tasks/enqueue/csv_backup_events/([0-9]*)', TbaCSVBackupEventsEnqueue),
('/tasks/do/csv_backup_event/(.*)', TbaCSVBackupEventDo),
('/tasks/enqueue/csv_restore_events', TbaCSVRestoreEventsEnqueue),
('/tasks/enqueue/csv_restore_events/([0-9]*)', TbaCSVRestoreEventsEnqueue),
('/tasks/do/csv_restore_event/(.*)', TbaCSVRestoreEventDo),
('/tasks/enqueue/csv_backup_teams', TbaCSVBackupTeamsEnqueue),
('/tasks/do/csv_backup_teams', TbaCSVBackupTeamsDo),
('/tasks/enqueue/tba_videos', TbaVideosEnqueue),
('/tasks/enqueue/fmsapi_awards/(.*)', FMSAPIAwardsEnqueue),
('/tasks/enqueue/fmsapi_event_alliances/(.*)', FMSAPIEventAlliancesEnqueue),
('/tasks/enqueue/fmsapi_event_rankings/(.*)', FMSAPIEventRankingsEnqueue),
('/tasks/enqueue/fmsapi_matches/(.*)', FMSAPIMatchesEnqueue),
('/tasks/enqueue/usfirst_event_alliances/(.*)', UsfirstEventAlliancesEnqueue),
('/tasks/enqueue/usfirst_event_details/([0-9]*)', UsfirstEventDetailsEnqueue),
('/tasks/enqueue/usfirst_event_rankings/(.*)', UsfirstEventRankingsEnqueue),
('/tasks/enqueue/usfirst_awards/(.*)', UsfirstAwardsEnqueue),
('/tasks/enqueue/usfirst_matches/(.*)', UsfirstMatchesEnqueue),
('/tasks/enqueue/usfirst_team_details', UsfirstTeamDetailsEnqueue),
('/tasks/enqueue/usfirst_team_details_rolling', UsfirstTeamDetailsRollingEnqueue),
('/tasks/enqueue/usfirst_pre2003_team_events', UsfirstPre2003TeamEventsEnqueue),
('/tasks/get/fms_event_list', FmsEventListGet),
('/tasks/get/fms_team_list', FmsTeamListGet),
('/tasks/get/offseason_matches/(.*)', OffseasonMatchesGet),
('/tasks/get/tba_videos/(.*)', TbaVideosGet),
('/tasks/get/twitter_frcfms_matches', TwitterFrcfmsMatchesGet),
('/tasks/get/fmsapi_awards/(.*)', FMSAPIAwardsGet),
('/tasks/get/fmsapi_event_alliances/(.*)', FMSAPIEventAlliancesGet),
('/tasks/get/fmsapi_event_rankings/(.*)', FMSAPIEventRankingsGet),
('/tasks/get/fmsapi_matches/(.*)', FMSAPIMatchesGet),
('/tasks/get/usfirst_event_alliances/(.*)', UsfirstEventAlliancesGet),
('/tasks/get/usfirst_event_list/([0-9]*)', UsfirstEventListGet),
('/tasks/get/usfirst_event_details/([0-9]*)/([0-9]*)', UsfirstEventDetailsGet),
('/tasks/get/usfirst_event_rankings/(.*)', UsfirstEventRankingsGet),
('/tasks/get/usfirst_awards/(.*)', UsfirstAwardsGet),
('/tasks/get/usfirst_matches/(.*)', UsfirstMatchesGet),
('/tasks/get/usfirst_team_details/(.*)', UsfirstTeamDetailsGet),
('/tasks/get/usfirst_teams_tpids/([0-9]*)', UsfirstTeamsTpidsGet),
('/tasks/get/usfirst_pre2003_team_events/(.*)', UsfirstPre2003TeamEventsGet),
('/tasks/math/enqueue/district_points_calc/([0-9]*)', DistrictPointsCalcEnqueue),
('/tasks/math/do/district_points_calc/(.*)', DistrictPointsCalcDo),
('/tasks/math/enqueue/event_short_name_calc_enqueue/([0-9]*)', EventShortNameCalcEnqueue),
('/tasks/math/do/event_short_name_calc_do/(.*)', EventShortNameCalcDo),
('/tasks/math/enqueue/event_matchstats/(.*)', EventMatchstatsEnqueue),
('/tasks/math/enqueue/eventteam_update/(.*)', EventTeamUpdateEnqueue),
('/tasks/math/do/event_matchstats/(.*)', EventMatchstatsDo),
('/tasks/math/do/eventteam_repair', EventTeamRepairDo),
('/tasks/math/do/eventteam_update/(.*)', EventTeamUpdate),
('/tasks/math/do/final_matches_repair/([0-9]*)', FinalMatchesRepairDo),
('/tasks/math/enqueue/overallinsights/(.*)', OverallInsightsEnqueue),
('/tasks/math/do/overallinsights/(.*)', OverallInsightsDo),
('/tasks/math/enqueue/insights/(.*)/([0-9]*)', YearInsightsEnqueue),
('/tasks/math/do/insights/(.*)/([0-9]*)', YearInsightsDo),
('/tasks/math/enqueue/typeaheadcalc', TypeaheadCalcEnqueue),
('/tasks/math/do/typeaheadcalc', TypeaheadCalcDo),
('/tasks/notifications/upcoming_match', UpcomingNotificationDo),
('/tasks/admin/enqueue/clear_mobile_duplicates', AdminMobileClearEnqueue),
('/tasks/admin/clear_mobile_duplicates', AdminMobileClear),
('/tasks/admin/enqueue/clear_old_subs', AdminSubsClearEnqueue),
('/tasks/admin/clear_old_subs', AdminSubsClear),
('/tasks/admin/enqueue/clear_old_webhooks', AdminWebhooksClearEnqueue),
('/tasks/admin/clear_old_webhooks', AdminWebhooksClear),
],
debug=tba_config.DEBUG)
| mit |
RonnyPfannschmidt/borg | src/borg/selftest.py | 1 | 2950 | """
Self testing module
===================
The selftest() function runs a small test suite of relatively fast tests that are meant to discover issues
with the way Borg was compiled or packaged and also bugs in Borg itself.
These tests are a subset of the borg/testsuite and are run with Pythons built-in unittest, hence none of
the tests used for this can or should be ported to py.test currently.
To assert that self test discovery works correctly the number of tests is kept in the SELFTEST_COUNT
variable. SELFTEST_COUNT must be updated if new tests are added or removed to or from any of the tests
used here.
"""
import sys
import time
from unittest import TestResult, TestSuite, defaultTestLoader
from .testsuite.hashindex import HashIndexDataTestCase, HashIndexRefcountingTestCase, HashIndexTestCase
from .testsuite.crypto import CryptoTestCase
from .testsuite.chunker import ChunkerTestCase
SELFTEST_CASES = [
HashIndexDataTestCase,
HashIndexRefcountingTestCase,
HashIndexTestCase,
CryptoTestCase,
ChunkerTestCase,
]
SELFTEST_COUNT = 37
class SelfTestResult(TestResult):
def __init__(self):
super().__init__()
self.successes = []
def addSuccess(self, test):
super().addSuccess(test)
self.successes.append(test)
def test_name(self, test):
return test.shortDescription() or str(test)
def log_results(self, logger):
for test, failure in self.errors + self.failures + self.unexpectedSuccesses:
logger.error('self test %s FAILED:\n%s', self.test_name(test), failure)
for test, reason in self.skipped:
logger.warning('self test %s skipped: %s', self.test_name(test), reason)
def successful_test_count(self):
return len(self.successes)
def selftest(logger):
selftest_started = time.perf_counter()
result = SelfTestResult()
test_suite = TestSuite()
for test_case in SELFTEST_CASES:
test_suite.addTest(defaultTestLoader.loadTestsFromTestCase(test_case))
test_suite.run(result)
result.log_results(logger)
successful_tests = result.successful_test_count()
count_mismatch = successful_tests != SELFTEST_COUNT
if result.wasSuccessful() and count_mismatch:
# only print this if all tests succeeded
logger.error("self test count (%d != %d) mismatch, either test discovery is broken or a test was added "
"without updating borg.selftest",
successful_tests, SELFTEST_COUNT)
if not result.wasSuccessful() or count_mismatch:
logger.error("self test failed\n"
"This is a bug either in Borg or in the package / distribution you use.")
sys.exit(2)
assert False, "sanity assertion failed: ran beyond sys.exit()"
selftest_elapsed = time.perf_counter() - selftest_started
logger.debug("%d self tests completed in %.2f seconds", successful_tests, selftest_elapsed)
| bsd-3-clause |
dcroc16/skunk_works | google_appengine/lib/protorpc-1.0/protorpc/util.py | 8 | 14626 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Common utility library."""
from __future__ import with_statement
__author__ = ['rafek@google.com (Rafe Kaplan)',
'guido@google.com (Guido van Rossum)',
]
import cgi
import datetime
import inspect
import os
import re
import sys
__all__ = ['AcceptItem',
'AcceptError',
'Error',
'choose_content_type',
'decode_datetime',
'get_package_for_module',
'pad_string',
'parse_accept_header',
'positional',
'PROTORPC_PROJECT_URL',
'TimeZoneOffset',
]
class Error(Exception):
"""Base class for protorpc exceptions."""
class AcceptError(Error):
"""Raised when there is an error parsing the accept header."""
PROTORPC_PROJECT_URL = 'http://code.google.com/p/google-protorpc'
_TIME_ZONE_RE_STRING = r"""
# Examples:
# +01:00
# -05:30
# Z12:00
((?P<z>Z) | (?P<sign>[-+])
(?P<hours>\d\d) :
(?P<minutes>\d\d))$
"""
_TIME_ZONE_RE = re.compile(_TIME_ZONE_RE_STRING, re.IGNORECASE | re.VERBOSE)
def pad_string(string):
"""Pad a string for safe HTTP error responses.
Prevents Internet Explorer from displaying their own error messages
when sent as the content of error responses.
Args:
string: A string.
Returns:
Formatted string left justified within a 512 byte field.
"""
return string.ljust(512)
def positional(max_positional_args):
"""A decorator to declare that only the first N arguments may be positional.
This decorator makes it easy to support Python 3 style keyword-only
parameters. For example, in Python 3 it is possible to write:
def fn(pos1, *, kwonly1=None, kwonly1=None):
...
All named parameters after * must be a keyword:
fn(10, 'kw1', 'kw2') # Raises exception.
fn(10, kwonly1='kw1') # Ok.
Example:
To define a function like above, do:
@positional(1)
def fn(pos1, kwonly1=None, kwonly2=None):
...
If no default value is provided to a keyword argument, it becomes a required
keyword argument:
@positional(0)
def fn(required_kw):
...
This must be called with the keyword parameter:
fn() # Raises exception.
fn(10) # Raises exception.
fn(required_kw=10) # Ok.
When defining instance or class methods always remember to account for
'self' and 'cls':
class MyClass(object):
@positional(2)
def my_method(self, pos1, kwonly1=None):
...
@classmethod
@positional(2)
def my_method(cls, pos1, kwonly1=None):
...
One can omit the argument to 'positional' altogether, and then no
arguments with default values may be passed positionally. This
would be equivalent to placing a '*' before the first argument
with a default value in Python 3. If there are no arguments with
default values, and no argument is given to 'positional', an error
is raised.
@positional
def fn(arg1, arg2, required_kw1=None, required_kw2=0):
...
fn(1, 3, 5) # Raises exception.
fn(1, 3) # Ok.
fn(1, 3, required_kw1=5) # Ok.
Args:
max_positional_arguments: Maximum number of positional arguments. All
parameters after the this index must be keyword only.
Returns:
A decorator that prevents using arguments after max_positional_args from
being used as positional parameters.
Raises:
TypeError if a keyword-only argument is provided as a positional parameter.
ValueError if no maximum number of arguments is provided and the function
has no arguments with default values.
"""
def positional_decorator(wrapped):
def positional_wrapper(*args, **kwargs):
if len(args) > max_positional_args:
plural_s = ''
if max_positional_args != 1:
plural_s = 's'
raise TypeError('%s() takes at most %d positional argument%s '
'(%d given)' % (wrapped.__name__,
max_positional_args,
plural_s, len(args)))
return wrapped(*args, **kwargs)
return positional_wrapper
if isinstance(max_positional_args, (int, long)):
return positional_decorator
else:
args, _, _, defaults = inspect.getargspec(max_positional_args)
if defaults is None:
raise ValueError(
'Functions with no keyword arguments must specify '
'max_positional_args')
return positional(len(args) - len(defaults))(max_positional_args)
# TODO(rafek): Support 'level' from the Accept header standard.
class AcceptItem(object):
"""Encapsulate a single entry of an Accept header.
Parses and extracts relevent values from an Accept header and implements
a sort order based on the priority of each requested type as defined
here:
http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
Accept headers are normally a list of comma separated items. Each item
has the format of a normal HTTP header. For example:
Accept: text/plain, text/html, text/*, */*
This header means to prefer plain text over HTML, HTML over any other
kind of text and text over any other kind of supported format.
This class does not attempt to parse the list of items from the Accept header.
The constructor expects the unparsed sub header and the index within the
Accept header that the fragment was found.
Properties:
index: The index that this accept item was found in the Accept header.
main_type: The main type of the content type.
sub_type: The sub type of the content type.
q: The q value extracted from the header as a float. If there is no q
value, defaults to 1.0.
values: All header attributes parsed form the sub-header.
sort_key: A tuple (no_main_type, no_sub_type, q, no_values, index):
no_main_type: */* has the least priority.
no_sub_type: Items with no sub-type have less priority.
q: Items with lower q value have less priority.
no_values: Items with no values have less priority.
index: Index of item in accept header is the last priority.
"""
__CONTENT_TYPE_REGEX = re.compile(r'^([^/]+)/([^/]+)$')
def __init__(self, accept_header, index):
"""Parse component of an Accept header.
Args:
accept_header: Unparsed sub-expression of accept header.
index: The index that this accept item was found in the Accept header.
"""
accept_header = accept_header.lower()
content_type, values = cgi.parse_header(accept_header)
match = self.__CONTENT_TYPE_REGEX.match(content_type)
if not match:
raise AcceptError('Not valid Accept header: %s' % accept_header)
self.__index = index
self.__main_type = match.group(1)
self.__sub_type = match.group(2)
self.__q = float(values.get('q', 1))
self.__values = values
if self.__main_type == '*':
self.__main_type = None
if self.__sub_type == '*':
self.__sub_type = None
self.__sort_key = (not self.__main_type,
not self.__sub_type,
-self.__q,
not self.__values,
self.__index)
@property
def index(self):
return self.__index
@property
def main_type(self):
return self.__main_type
@property
def sub_type(self):
return self.__sub_type
@property
def q(self):
return self.__q
@property
def values(self):
"""Copy the dictionary of values parsed from the header fragment."""
return dict(self.__values)
@property
def sort_key(self):
return self.__sort_key
def match(self, content_type):
"""Determine if the given accept header matches content type.
Args:
content_type: Unparsed content type string.
Returns:
True if accept header matches content type, else False.
"""
content_type, _ = cgi.parse_header(content_type)
match = self.__CONTENT_TYPE_REGEX.match(content_type.lower())
if not match:
return False
main_type, sub_type = match.group(1), match.group(2)
if not(main_type and sub_type):
return False
return ((self.__main_type is None or self.__main_type == main_type) and
(self.__sub_type is None or self.__sub_type == sub_type))
def __cmp__(self, other):
"""Comparison operator based on sort keys."""
if not isinstance(other, AcceptItem):
return NotImplemented
return cmp(self.sort_key, other.sort_key)
def __str__(self):
"""Rebuilds Accept header."""
content_type = '%s/%s' % (self.__main_type or '*', self.__sub_type or '*')
values = self.values
if values:
value_strings = ['%s=%s' % (i, v) for i, v in values.iteritems()]
return '%s; %s' % (content_type, '; '.join(value_strings))
else:
return content_type
def __repr__(self):
return 'AcceptItem(%r, %d)' % (str(self), self.__index)
def parse_accept_header(accept_header):
"""Parse accept header.
Args:
accept_header: Unparsed accept header. Does not include name of header.
Returns:
List of AcceptItem instances sorted according to their priority.
"""
accept_items = []
for index, header in enumerate(accept_header.split(',')):
accept_items.append(AcceptItem(header, index))
return sorted(accept_items)
def choose_content_type(accept_header, supported_types):
  """Choose most appropriate supported type based on what client accepts.

  Args:
    accept_header: Unparsed accept header.  Does not include name of header.
    supported_types: List of content-types supported by the server.  The
      index of the supported types determines which supported type is
      preferred by the server should the accept header match more than one
      at the same priority.

  Returns:
    The preferred supported type if the accept header matches any, else None.
  """
  for item in parse_accept_header(accept_header):
    candidates = [supported for supported in supported_types
                  if item.match(supported)]
    if candidates:
      # First match in server order wins within an accept item.
      return candidates[0]
  return None
@positional(1)
def get_package_for_module(module):
  """Get package name for a module.

  Helper calculates the package name of a module.

  Args:
    module: Module to get name for.  If module is a string, try to find
      module in sys.modules.

  Returns:
    If module contains 'package' attribute, uses that as package name.
    Else, if module is not the '__main__' module, the module __name__.
    Else, the base name of the module file name.  Else None.
  """
  if isinstance(module, basestring):
    try:
      module = sys.modules[module]
    except KeyError:
      return None

  try:
    return unicode(module.package)
  except AttributeError:
    if module.__name__ == '__main__':
      try:
        file_name = module.__file__
      except AttributeError:
        pass
      else:
        # os.path.splitext() always returns a (root, ext) pair, so the
        # previous "len(split_name) == 1" special case was unreachable;
        # simply return the base file name stripped of its extension.
        base_name = os.path.basename(file_name)
        return unicode(os.path.splitext(base_name)[0])
    return unicode(module.__name__)
class TimeZoneOffset(datetime.tzinfo):
  """Time zone information as encoded/decoded for DateTimeFields."""

  def __init__(self, offset):
    """Initialize a time zone offset.

    Args:
      offset: Integer or timedelta time zone offset, in minutes from UTC.
        This can be negative.
    """
    super(TimeZoneOffset, self).__init__()
    if isinstance(offset, datetime.timedelta):
      # Convert to minutes: total_seconds() alone leaves the value in
      # seconds, which made utcoffset() 60x too large for timedelta input.
      offset = offset.total_seconds() / 60
    self.__offset = offset

  def utcoffset(self, dt):
    """Get a timedelta with the time zone's offset from UTC.

    Returns:
      The time zone offset from UTC, as a timedelta.
    """
    return datetime.timedelta(minutes=self.__offset)

  def dst(self, dt):
    """Get the daylight savings time offset.

    The formats that ProtoRPC uses to encode/decode time zone information
    don't contain any information about daylight savings time.  So this
    always returns a timedelta of 0.

    Returns:
      A timedelta of 0.
    """
    return datetime.timedelta(0)
def decode_datetime(encoded_datetime):
  """Decode a DateTimeField parameter from a string to a python datetime.

  Args:
    encoded_datetime: A string in RFC 3339 format.

  Returns:
    A datetime object with the date and time specified in encoded_datetime.

  Raises:
    ValueError: If the string is not in a recognized format.
  """
  # Split off any trailing time zone specifier; all comparisons below are
  # case-insensitive, so normalize to uppercase.
  zone_match = _TIME_ZONE_RE.search(encoded_datetime)
  if zone_match:
    local_part = encoded_datetime[:zone_match.start(1)].upper()
  else:
    local_part = encoded_datetime.upper()

  has_fraction = '.' in local_part
  format_string = ('%Y-%m-%dT%H:%M:%S.%f' if has_fraction
                   else '%Y-%m-%dT%H:%M:%S')
  parsed = datetime.datetime.strptime(local_part, format_string)
  if not zone_match:
    return parsed

  # A time zone was present.  Datetimes are immutable, so rebuild the
  # result with an explicit tzinfo attached.
  if zone_match.group('z'):
    offset_minutes = 0
  else:
    hours = int(zone_match.group('hours'))
    minutes = int(zone_match.group('minutes'))
    offset_minutes = hours * 60 + minutes
    if zone_match.group('sign') == '-':
      offset_minutes = -offset_minutes
  return parsed.replace(tzinfo=TimeZoneOffset(offset_minutes))
| mit |
YesVideo/piggyphoto | piggyphoto/__init__.py | 1 | 28733 | # piggyphoto.py
# Copyright (C) 2010 Alex Dumitrache
# Based on:
# - a small code example by Mario Boikov, http://pysnippet.blogspot.com/2009/12/when-ctypes-comes-to-rescue.html
# - libgphoto2 Python bindings by David PHAM-VAN <david@ab2r.com>
# - ctypes_gphoto2.py by Hans Ulrich Niedermann <gp@n-dimensional.de>
import platform
# Some functions return errors which can be fixed by retrying.
# For example, capture_preview on Canon 550D fails the first time, but subsequent calls are OK.
# Retries are performed on: camera.capture_preview, camera.capture_image and camera.init()
retries = 1
# This is run if gp_camera_init returns -60 (Could not lock the device) and retries >= 1.
# Per-platform shared-library name and the shell command used to release a
# camera the OS has already claimed/auto-mounted.
if platform.system() == 'Darwin':
    unmount_cmd = 'killall PTPCamera'
    libgphoto2dll = 'libgphoto2.dylib'
elif platform.system() == 'Windows':
    libgphoto2dll = 'libgphoto2.dll'
    unmount_cmd = None
else:
    libgphoto2dll = 'libgphoto2.so'
    unmount_cmd = 'gvfs-mount -s gphoto2'
import re
import ctypes
# Load libgphoto2 at import time and create the global GPContext used by
# every wrapper call in this module.
gp = ctypes.CDLL(libgphoto2dll)
gp.gp_context_new.restype = ctypes.c_void_p
context = gp.gp_context_new()
def library_version(verbose = True):
    """Return the libgphoto2 version information as text, one entry per line."""
    gp.gp_library_version.restype = ctypes.POINTER(ctypes.c_char_p)
    flag = GP_VERSION_VERBOSE if verbose else GP_VERSION_SHORT
    entries = gp.gp_library_version(flag)
    lines = []
    # The C call returns a NULL-terminated array of strings.
    for entry in entries:
        if entry is None:
            break
        lines.append('%s\n' % entry)
    return ''.join(lines)
# os/time: used by camera.init() retry logic; string: Py2 string.join helpers.
import os, string, time
# Project-local PTP constants (e.g. PTP_OC_CANON_EOS_RequestDevicePropValue).
from ptp import *
# Shorthand for building ctypes pointers throughout this module.
PTR = ctypes.pointer
# gphoto structures
""" From 'gphoto2-camera.h'
typedef struct {
char name [128];
char folder [1024];
} CameraFilePath;
"""
class CameraFilePath(ctypes.Structure):
    # Mirrors CameraFilePath from 'gphoto2-camera.h'.  Field order and sizes
    # must match the C struct exactly — do not reorder or resize.
    _fields_ = [('name', (ctypes.c_char * 128)),
                ('folder', (ctypes.c_char * 1024))]
class CameraText(ctypes.Structure):
    # Mirrors CameraText from 'gphoto2-camera.h': a single fixed 32 KiB buffer.
    _fields_ = [('text', (ctypes.c_char * (32 * 1024)))]
# Constants mirrored from the libgphoto2 C headers.
#cdef extern from "gphoto2/gphoto2-port-version.h":
#  ctypedef enum GPVersionVerbosity:
GP_VERSION_SHORT = 0
GP_VERSION_VERBOSE = 1
#cdef extern from "gphoto2/gphoto2-abilities-list.h":
#  ctypedef enum CameraDriverStatus:
GP_DRIVER_STATUS_PRODUCTION = 0
GP_DRIVER_STATUS_TESTING = 1
GP_DRIVER_STATUS_EXPERIMENTAL = 2
GP_DRIVER_STATUS_DEPRECATED = 3
#  ctypedef enum CameraOperation:
# NOTE(review): the C headers define the *Operation enums as bit flags
# (1 << n); the sequential values below may not match — verify against
# gphoto2-abilities-list.h before using them as masks.
GP_OPERATION_NONE = 0
GP_OPERATION_CAPTURE_IMAGE = 1
GP_OPERATION_CAPTURE_VIDEO = 2
GP_OPERATION_CAPTURE_AUDIO = 3
GP_OPERATION_CAPTURE_PREVIEW = 4
GP_OPERATION_CONFIG = 5
#  ctypedef enum CameraFileOperation:
GP_FILE_OPERATION_NONE = 0
GP_FILE_OPERATION_DELETE = 1
GP_FILE_OPERATION_PREVIEW = 2
GP_FILE_OPERATION_RAW = 3
GP_FILE_OPERATION_AUDIO = 4
GP_FILE_OPERATION_EXIF = 5
#  ctypedef enum CameraFolderOperation:
GP_FOLDER_OPERATION_NONE = 0
GP_FOLDER_OPERATION_DELETE_ALL = 1
GP_FOLDER_OPERATION_PUT_FILE = 2
GP_FOLDER_OPERATION_MAKE_DIR = 3
GP_FOLDER_OPERATION_REMOVE_DIR = 4
#cdef extern from "gphoto2/gphoto2-port-info-list.h":
#  ctypedef enum GPPortType:
GP_PORT_NONE = 0
GP_PORT_SERIAL = 1
GP_PORT_USB = 2
class CameraAbilities(ctypes.Structure):
    # Mirrors CameraAbilities from 'gphoto2-abilities-list.h'.  Layout must
    # match the C struct byte-for-byte (including the reserved padding ints).
    _fields_ = [('model', (ctypes.c_char * 128)),
                ('status', ctypes.c_int),
                ('port', ctypes.c_int),
                ('speed', (ctypes.c_int * 64)),
                ('operations', ctypes.c_int),
                ('file_operations', ctypes.c_int),
                ('folder_operations', ctypes.c_int),
                ('usb_vendor', ctypes.c_int),
                ('usb_product', ctypes.c_int),
                ('usb_class', ctypes.c_int),
                ('usb_subclass', ctypes.c_int),
                ('usb_protocol', ctypes.c_int),
                ('library', (ctypes.c_char * 1024)),
                ('id', (ctypes.c_char * 1024)),
                ('device_type', ctypes.c_int),
                ('reserved2', ctypes.c_int),
                ('reserved3', ctypes.c_int),
                ('reserved4', ctypes.c_int),
                ('reserved5', ctypes.c_int),
                ('reserved6', ctypes.c_int),
                ('reserved7', ctypes.c_int),
                ('reserved8', ctypes.c_int)]
# the GPPortInfo data structure is a pointer in SVN
# in stable versions, it is a struct
# NOTE: this calls into the C library at import time to pick the right shape.
if library_version().split('\n')[0] == '2.4.99':
    class PortInfo(ctypes.c_void_p):
        pass
else:
    class PortInfo(ctypes.Structure):
        _fields_ = [
            ('type', ctypes.c_int), # enum is 32 bits on 32 and 64 bit Linux
            ('name', ctypes.c_char_p),
            ('path', ctypes.c_char_p),
            ('library_filename', ctypes.c_char_p)
        ]
# gphoto constants
# Defined in 'gphoto2-port-result.h'
GP_OK = 0
# CameraCaptureType enum in 'gphoto2-camera.h'
GP_CAPTURE_IMAGE = 0
# CameraFileType enum in 'gphoto2-file.h'
GP_FILE_TYPE_NORMAL = 1
# CameraWidgetType enum in 'gphoto2-widget.h'
GP_WIDGET_WINDOW = 0 # Window widget.  This is the toplevel configuration widget; it should likely contain multiple GP_WIDGET_SECTION entries.
GP_WIDGET_SECTION = 1 # Section widget (think Tab).
GP_WIDGET_TEXT = 2 # Text widget.
GP_WIDGET_RANGE = 3 # Slider widget.
GP_WIDGET_TOGGLE = 4 # Toggle widget (think check box).
GP_WIDGET_RADIO = 5 # Radio button widget.
GP_WIDGET_MENU = 6 # Menu widget (same as RADIO).
GP_WIDGET_BUTTON = 7 # Button press widget.
GP_WIDGET_DATE = 8 # Date entering widget.
# Human-readable names indexed by the GP_WIDGET_* values above.
widget_types = ['Window', 'Section', 'Text', 'Range', 'Toggle', 'Radio', 'Menu', 'Button', 'Date']
class CameraWidget(ctypes.Structure):
    # Mirrors the internal _CameraWidget struct layout.
    # NOTE(review): the wrappers below hold opaque c_void_p handles and never
    # access these fields directly; this struct appears unused — confirm
    # before relying on its layout.
    _fields_ = [('type', ctypes.c_int),
                ('label', (ctypes.c_char * 256)),
                ('info', (ctypes.c_char * 1024)),
                ('name', (ctypes.c_char * 256)),
                ('parent', (ctypes.c_void_p)),
                ('value_string', ctypes.c_char_p),
                ('value_int', ctypes.c_int),
                ('value_float', ctypes.c_float),
                ('choice', (ctypes.c_void_p)),
                ('choice_count', ctypes.c_int),
                ('min', ctypes.c_float),
                ('max', ctypes.c_float),
                ('increment', ctypes.c_float),
                ('children', (ctypes.c_void_p)),
                ('children_count', (ctypes.c_int)),
                ('changed', (ctypes.c_int)),
                ('readonly', (ctypes.c_int)),
                ('ref_count', (ctypes.c_int)),
                ('id', (ctypes.c_int)),
                ('callback', (ctypes.c_void_p))]
class libgphoto2error(Exception):
    """Error raised when a libgphoto2 call returns a negative result code."""

    def __init__(self, result, message):
        # result: numeric libgphoto2 error code; message: its text form.
        self.result = result
        self.message = message

    def __str__(self):
        return '%s (%s)' % (self.message, self.result)
def check(result):
    """Pass non-negative libgphoto2 results through; raise on errors."""
    if result >= 0:
        return result
    # Negative codes are errors; translate to text and raise.
    gp.gp_result_as_string.restype = ctypes.c_char_p
    raise libgphoto2error(result, gp.gp_result_as_string(result))
def check_unref(result, camfile):
    """Check a call that filled camfile; drop the file's reference on failure."""
    if result == 0:
        return
    # Failure: release the CameraFile before surfacing the error.
    gp.gp_file_unref(camfile._cf)
    gp.gp_result_as_string.restype = ctypes.c_char_p
    raise libgphoto2error(result, gp.gp_result_as_string(result))
class camera(object):
    """High-level wrapper around a gphoto2 Camera handle.

    Holds an opaque C pointer in self._cam; all methods delegate to
    libgphoto2 via the module-level `gp` CDLL and `context`.
    """
    def __init__(self, autoInit = True):
        self._cam = ctypes.c_void_p()
        self._leave_locked = False
        check(gp.gp_camera_new(PTR(self._cam)))
        self.initialized = False
        if autoInit:
            self.init()
    def init(self):
        """Connect to the camera, retrying and unmounting it if it is locked."""
        if self.initialized:
            print "Camera is already initialized."
        else:
            ans = 0
            for i in range(1 + retries):
                gp.gp_camera_init.argtypes = [ctypes.c_void_p]*2
                ans = gp.gp_camera_init(self._cam, context)
                if ans == 0:
                    break
                # -60: could not lock the device; -53: could not claim the
                # USB device — try releasing it from the OS, then retry.
                elif (ans == -60 or ans == -53) and (unmount_cmd != None):
                    print "***", unmount_cmd
                    os.system(unmount_cmd)
                    time.sleep(1)
                print "camera.init() retry #%d..." % (i)
            check(ans)
            self.initialized = True
    def reinit(self):
        """Free the camera handle and reconnect on the same port."""
        pi = self.port_info
        # Copy the port info out of C memory before freeing the old handle.
        pi_copy = PortInfo()
        ctypes.memmove(PTR(pi_copy), PTR(pi), ctypes.sizeof(PortInfo))
        gp.gp_camera_free(self._cam)
        self.__init__(autoInit=False)
        self.port_info = pi_copy
        self.init()
    def __del__(self):
        # Unless leave_locked() was called, release the camera on GC.
        if not self._leave_locked:
            check(gp.gp_camera_exit(self._cam))
            check(gp.gp_camera_free(self._cam))
    def leave_locked(self):
        # Keep the camera claimed (skip exit/free) when this object dies.
        self._leave_locked = True
    def ref(self):
        check(gp.gp_camera_ref(self._cam))
    def unref(self):
        check(gp.gp_camera_unref(self._cam))
    def exit(self):
        check(gp.gp_camera_exit(self._cam, context))
    def _get_summary(self):
        txt = CameraText()
        check(gp.gp_camera_get_summary(self._cam, PTR(txt), context))
        return txt.text
    summary = property(_get_summary, None)
    def _get_manual(self):
        txt = CameraText()
        check(gp.gp_camera_get_manual(self._cam, PTR(txt), context))
        return txt.text
    manual = property(_get_manual, None)
    def _get_about(self):
        txt = CameraText()
        check(gp.gp_camera_get_about(self._cam, PTR(txt), context))
        return txt.text
    about = property(_get_about, None)
    def _get_abilities(self):
        ab = cameraAbilities()
        check(gp.gp_camera_get_abilities(self._cam, PTR(ab._ab)))
        return ab
    def _set_abilities(self, ab):
        check(gp.gp_camera_set_abilities(self._cam, ab._ab))
    abilities = property(_get_abilities, _set_abilities)
    def _get_config(self):
        # Returns the root config widget with children exposed as attributes.
        window = cameraWidget(GP_WIDGET_WINDOW)
        gp.gp_camera_get_config.argtypes = [ctypes.c_void_p]*3
        check(gp.gp_camera_get_config(self._cam, PTR(window._w), context))
        window.populate_children()
        return window
    def _set_config(self, window):
        gp.gp_camera_set_config.argtypes = [ctypes.c_void_p]*3
        check(gp.gp_camera_set_config(self._cam, window._w, context))
    config = property(_get_config, _set_config)
    def _get_port_info(self):
        infop = ctypes.POINTER(PortInfo)()
        gp.gp_camera_get_port_info.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctypes.POINTER(PortInfo))]
        check(gp.gp_camera_get_port_info(self._cam, ctypes.byref(infop)))
        return infop.contents
    def _set_port_info(self, info):
        gp.gp_camera_set_port_info.argtypes = [ctypes.c_void_p]*2
        check(gp.gp_camera_set_port_info(self._cam, PTR(info)))
    port_info = property(_get_port_info, _set_port_info)
    def capture_image(self, destpath = None):
        """Capture an image; download it to destpath or return (folder, name)."""
        path = CameraFilePath()
        ans = 0
        for i in range(1 + retries):
            gp.gp_camera_capture.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p]
            ans = gp.gp_camera_capture(self._cam, GP_CAPTURE_IMAGE, PTR(path), context)
            if ans == 0: break
            else: print "capture_image(%s) retry #%d..." % (destpath, i)
        check(ans)
        if destpath:
            self.download_file(path.folder, path.name, destpath)
        else:
            return (path.folder, path.name)
    def capture_preview(self, destpath = None):
        """Capture a preview frame; save it to destpath or return a cameraFile."""
        path = CameraFilePath()
        cfile = cameraFile()
        ans = 0
        for i in range(1 + retries):
            gp.gp_camera_capture_preview.argtypes = [ctypes.c_void_p]*3
            ans = gp.gp_camera_capture_preview(self._cam, cfile._cf, context)
            if ans == 0: break
            else: print "capture_preview(%s) retry #%d..." % (destpath, i)
        check(ans)
        if destpath:
            cfile.save(destpath)
        else:
            return cfile
    def download_file(self, srcfolder, srcfilename, destpath):
        """Copy a file off the camera to destpath, then release it."""
        cfile = cameraFile(self._cam, srcfolder, srcfilename)
        cfile.save(destpath)
        gp.gp_file_unref(cfile._cf)
    def trigger_capture(self):
        check(gp.gp_camera_trigger_capture(self._cam, context))
    def wait_for_event(self, timeout):
        raise NotImplementedError
    def list_folders(self, path = "/"):
        """Return [(name, value)] for the subfolders of path on the camera."""
        l = cameraList()
        gp.gp_camera_folder_list_folders.argtypes = [ctypes.c_void_p]*4
        check(gp.gp_camera_folder_list_folders(self._cam, str(path), l._l, context));
        return l.toList()
    def list_files(self, path = "/"):
        """Return [(name, value)] for the files in path on the camera."""
        l = cameraList()
        gp.gp_camera_folder_list_files.argtypes = [ctypes.c_void_p]*4
        check(gp.gp_camera_folder_list_files(self._cam, str(path), l._l, context));
        return l.toList()
    def _list_config(self, widget, cfglist, path):
        # Depth-first walk; only leaf widgets are recorded (and printed).
        children = widget.children
        if children:
            for c in children:
                self._list_config(c, cfglist, path + "." + c.name)
        else:
            print path, "=", widget.value
            cfglist.append(path)
    def list_config(self):
        """Return the dotted paths of every leaf configuration widget."""
        cfglist = []
        cfg = self.config
        self._list_config(cfg, cfglist, cfg.name)
        return cfglist
    def ptp_canon_eos_requestdevicepropvalue(self, prop):
        # NOTE(review): relies on the PTPParams struct living 12 bytes into
        # the Camera struct — fragile against libgphoto2 layout changes.
        params = ctypes.c_void_p(self._cam.value + 12)
        gp.ptp_generic_no_data(params, PTP_OC_CANON_EOS_RequestDevicePropValue, 1, prop)
    # TODO: port_speed, init, config
class cameraFile(object):
    """Wrapper around gphoto2's CameraFile (an in-memory file object)."""
    def __init__(self, cam = None, srcfolder = None, srcfilename = None):
        self._cf = ctypes.c_void_p()
        check(gp.gp_file_new(PTR(self._cf)))
        if cam:
            # Immediately fetch the named file from the camera.
            gp.gp_camera_file_get.argtypes = [ctypes.c_void_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p]
            check_unref(gp.gp_camera_file_get(cam, srcfolder, srcfilename, GP_FILE_TYPE_NORMAL, self._cf, context), self)
    def open(self, filename):
        check(gp.gp_file_open(PTR(self._cf), filename))
    def save(self, filename = None):
        # Default to the camera-side file name when none is given.
        if filename is None: filename = self.name
        check(gp.gp_file_save(self._cf, filename))
    def ref(self):
        check(gp.gp_file_ref(self._cf))
    def unref(self):
        check(gp.gp_file_unref(self._cf))
    def clean(self):
        check(gp.gp_file_clean(self._cf))
    def copy(self, source):
        check(gp.gp_file_copy(self._cf, source._cf))
    def __dealoc__(self, filename):
        # NOTE(review): '__dealoc__' is not a Python special method (looks
        # like a Pyrex/Cython leftover) — this is never called automatically.
        check(gp.gp_file_free(self._cf))
    def _get_name(self):
        name = ctypes.c_char_p()
        check(gp.gp_file_get_name(self._cf, PTR(name)))
        return name.value
    def _set_name(self, name):
        check(gp.gp_file_set_name(self._cf, str(name)))
    name = property(_get_name, _set_name)
    # TODO: new_from_fd (?), new_from_handler (?), mime_tipe, mtime, detect_mime_type, adjust_name_for_mime_type, data_and_size, append, slurp, python file object?
class cameraAbilitiesList(object):
    """Process-wide singleton wrapper around gphoto2's CameraAbilitiesList."""
    # Shared C handle, created and loaded once per process.
    _static_l = None
    def __init__(self):
        if cameraAbilitiesList._static_l is None:
            cameraAbilitiesList._static_l = ctypes.c_void_p()
            check(gp.gp_abilities_list_new(PTR(cameraAbilitiesList._static_l)))
            check(gp.gp_abilities_list_load(cameraAbilitiesList._static_l, context))
        self._l = cameraAbilitiesList._static_l
    def __del__(self):
        # don't free, since it is only created once
        #check(gp.gp_abilities_list_free(self._l))
        pass
    def detect(self, il, l):
        # Fill cameraList l with cameras detected on the ports in il.
        check(gp.gp_abilities_list_detect(self._l, il._l, l._l, context))
    def lookup_model(self, model):
        return check(gp.gp_abilities_list_lookup_model(self._l, model))
    def get_abilities(self, model_index, ab):
        check(gp.gp_abilities_list_get_abilities(self._l, model_index, PTR(ab._ab)))
class cameraAbilities(object):
    """Read-only view over a CameraAbilities struct (self._ab)."""
    def __init__(self):
        self._ab = CameraAbilities()
    def __repr__(self):
        return "Model : %s\nStatus : %d\nPort : %d\nOperations : %d\nFile Operations : %d\nFolder Operations : %d\nUSB (vendor/product) : 0x%x/0x%x\nUSB class : 0x%x/0x%x/0x%x\nLibrary : %s\nId : %s\n" % (self._ab.model, self._ab.status, self._ab.port, self._ab.operations, self._ab.file_operations, self._ab.folder_operations, self._ab.usb_vendor, self._ab.usb_product, self._ab.usb_class, self._ab.usb_subclass, self._ab.usb_protocol, self._ab.library, self._ab.id)
    # Read-only pass-throughs to the underlying C struct fields.
    model = property(lambda self: self._ab.model, None)
    status = property(lambda self: self._ab.status, None)
    port = property(lambda self: self._ab.port, None)
    operations = property(lambda self: self._ab.operations, None)
    file_operations = property(lambda self: self._ab.file_operations, None)
    folder_operations = property(lambda self: self._ab.folder_operations, None)
    usb_vendor = property(lambda self: self._ab.usb_vendor, None)
    usb_product = property(lambda self: self._ab.usb_product, None)
    usb_class = property(lambda self: self._ab.usb_class, None)
    usb_subclass = property(lambda self: self._ab.usb_subclass, None)
    usb_protocol = property(lambda self: self._ab.usb_protocol, None)
    library = property(lambda self: self._ab.library, None)
    id = property(lambda self: self._ab.id, None)
class portInfoList(object):
    """Process-wide singleton wrapper around gphoto2's GPPortInfoList."""
    # Shared C handle, created and loaded once per process.
    _static_l = None
    def __init__(self):
        if portInfoList._static_l is None:
            portInfoList._static_l = ctypes.c_void_p()
            check(gp.gp_port_info_list_new(PTR(portInfoList._static_l)))
            check(gp.gp_port_info_list_load(portInfoList._static_l))
        self._l = portInfoList._static_l
    def __del__(self):
        # don't free, since it is only created once
        #check(gp.gp_port_info_list_free(self._l))
        pass
    def count(self):
        c = gp.gp_port_info_list_count(self._l)
        check(c)
        return c
    def lookup_path(self, path):
        index = gp.gp_port_info_list_lookup_path(self._l, path)
        check(index)
        return index
    def get_info(self, path_index):
        infop = ctypes.POINTER(PortInfo)()
        gp.gp_port_info_list_get_info.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.POINTER(ctypes.POINTER(PortInfo))]
        check(gp.gp_port_info_list_get_info(self._l, path_index, ctypes.byref(infop)))
        return infop.contents
class cameraList(object):
    """Wrapper around gphoto2's CameraList: an ordered list of (name, value) pairs."""
    def __init__(self, autodetect=False):
        self._l = ctypes.c_void_p()
        check(gp.gp_list_new(PTR(self._l)))
        if autodetect:
            if hasattr(gp, 'gp_camera_autodetect'):
                gp.gp_camera_autodetect.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
                gp.gp_camera_autodetect(self._l, context)
            else:
                # this is for stable versions of gphoto <= 2.4.10.1
                xlist = cameraList()
                il = portInfoList()
                il.count()
                al = cameraAbilitiesList()
                al.detect(il, xlist)
                # keep only entries whose path looks like a concrete USB port
                for i in xrange(xlist.count()):
                    model = xlist.get_name(i)
                    path = xlist.get_value(i)
                    if re.match(r'usb:\d{3},\d{3}', path):
                        self.append(model, path)
                del al
                del il
                del xlist
    def ref(self):
        check(gp.gp_list_ref(self._l))
    def unref(self):
        # Fixed: previously called gp_list_ref(), so every unref() actually
        # *added* a reference and leaked the list.
        check(gp.gp_list_unref(self._l))
    def __del__(self):
        # this failed once in gphoto 2.4.6
        check(gp.gp_list_free(self._l))
    def reset(self):
        check(gp.gp_list_reset(self._l))
    def append(self, name, value):
        check(gp.gp_list_append(self._l, str(name), str(value)))
    def sort(self):
        check(gp.gp_list_sort(self._l))
    def count(self):
        return check(gp.gp_list_count(self._l))
    def find_by_name(self, name):
        index = ctypes.c_int()
        check(gp.gp_list_find_by_name(self._l, PTR(index), str(name)))
        return index.value
    def get_name(self, index):
        name = ctypes.c_char_p()
        check(gp.gp_list_get_name(self._l, int(index), PTR(name)))
        return name.value
    def get_value(self, index):
        value = ctypes.c_char_p()
        check(gp.gp_list_get_value(self._l, int(index), PTR(value)))
        return value.value
    def set_name(self, index, name):
        check(gp.gp_list_set_name(self._l, int(index), str(name)))
    def set_value(self, index, value):
        check(gp.gp_list_set_value(self._l, int(index), str(value)))
    def __str__(self):
        header = "cameraList object with %d elements:\n" % self.count()
        contents = ["%d: (%s, %s)" % (i, self.get_name(i), self.get_value(i))
                    for i in range(self.count())]
        return header + string.join(contents, "\n")
    def toList(self):
        """Return the list contents as [(name, value), ...].

        Fixed: removed the unreachable fallback code that followed this
        return statement.
        """
        return [(self.get_name(i), self.get_value(i)) for i in xrange(self.count())]
    def toDict(self):
        return dict(self.toList())
class cameraWidget(object):
    """Wrapper around a gphoto2 CameraWidget configuration-tree node.

    Widgets form a tree (window -> sections -> leaf settings); leaves carry
    a typed value (text, range, toggle, date, menu/radio choice).
    """
    def __init__(self, type = None, label = ""):
        self._w = ctypes.c_void_p()
        if type is not None:
            check(gp.gp_widget_new(int(type), str(label), PTR(self._w)))
            check(gp.gp_widget_ref(self._w))
        else:
            self._w = ctypes.c_void_p()
    def ref(self):
        check(gp.gp_widget_ref(self._w))
    def unref(self):
        check(gp.gp_widget_unref(self._w))
    def __del__(self):
        # TODO fix this or find a good reason not to
        #print "widget(%s) __del__" % self.name
        #check(gp.gp_widget_unref(self._w))
        pass
    def _get_info(self):
        info = ctypes.c_char_p()
        check(gp.gp_widget_get_info(self._w, PTR(info)))
        return info.value
    def _set_info(self, info):
        check(gp.gp_widget_set_info(self._w, str(info)))
    info = property(_get_info, _set_info)
    def _get_name(self):
        name = ctypes.c_char_p()
        check(gp.gp_widget_get_name(self._w, PTR(name)))
        return name.value
    def _set_name(self, name):
        check(gp.gp_widget_set_name(self._w, str(name)))
    name = property(_get_name, _set_name)
    def _get_id(self):
        id = ctypes.c_int()
        check(gp.gp_widget_get_id(self._w, PTR(id)))
        return id.value
    id = property(_get_id, None)
    def _set_changed(self, changed):
        # Fixed: gp_widget_set_changed() takes an int flag; passing
        # str(changed) handed the C side a pointer instead of 0/1.
        check(gp.gp_widget_set_changed(self._w, int(changed)))
    def _get_changed(self):
        return gp.gp_widget_changed(self._w)
    changed = property(_get_changed, _set_changed)
    def _get_readonly(self):
        readonly = ctypes.c_int()
        check(gp.gp_widget_get_readonly(self._w, PTR(readonly)))
        return readonly.value
    def _set_readonly(self, readonly):
        check(gp.gp_widget_set_readonly(self._w, int(readonly)))
    readonly = property(_get_readonly, _set_readonly)
    def _get_type(self):
        type = ctypes.c_int()
        check(gp.gp_widget_get_type(self._w, PTR(type)))
        return type.value
    type = property(_get_type, None)
    def _get_typestr(self):
        return widget_types[self.type]
    typestr = property(_get_typestr, None)
    def _get_label(self):
        label = ctypes.c_char_p()
        check(gp.gp_widget_get_label(self._w, PTR(label)))
        return label.value
    def _set_label(self, label):
        check(gp.gp_widget_set_label(self._w, str(label)))
    label = property(_get_label, _set_label)
    def _get_value(self):
        """Read the widget value using a ctypes object matching its type."""
        widget_type = self.type
        if widget_type in (GP_WIDGET_MENU, GP_WIDGET_RADIO, GP_WIDGET_TEXT):
            value = ctypes.c_void_p()
            check(gp.gp_widget_get_value(self._w, PTR(value)))
            return ctypes.cast(value.value, ctypes.c_char_p).value
        elif widget_type == GP_WIDGET_RANGE:
            # Fixed: ctypes has no 'c_float_p' (the old cast raised
            # AttributeError); read straight into a real c_float instead.
            value = ctypes.c_float()
            check(gp.gp_widget_get_value(self._w, PTR(value)))
            return value.value
        elif widget_type in (GP_WIDGET_TOGGLE, GP_WIDGET_DATE):
            # Fixed: reading through c_void_p returned None for the value 0;
            # a c_int preserves zero correctly.
            value = ctypes.c_int()
            check(gp.gp_widget_get_value(self._w, PTR(value)))
            return value.value
        else:
            return None
    def _set_value(self, value):
        if self.type in (GP_WIDGET_MENU, GP_WIDGET_RADIO, GP_WIDGET_TEXT):
            value = ctypes.c_char_p(value)
        elif self.type == GP_WIDGET_RANGE:
            # Fixed: ctypes has no 'c_float_p'; pass a pointer to a c_float.
            value = PTR(ctypes.c_float(value))
        elif self.type in (GP_WIDGET_TOGGLE, GP_WIDGET_DATE):
            value = PTR(ctypes.c_int(value))
        else:
            return None # this line not tested
        gp.gp_widget_set_value.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
        check(gp.gp_widget_set_value(self._w, value))
    value = property(_get_value, _set_value)
    def append(self, child):
        check(gp.gp_widget_append(self._w, child._w))
    def prepend(self, child):
        check(gp.gp_widget_prepend(self._w, child._w))
    def count_children(self):
        return gp.gp_widget_count_children(self._w)
    def get_child(self, child_number):
        w = cameraWidget()
        check(gp.gp_widget_get_child(self._w, int(child_number), PTR(w._w)))
        check(gp.gp_widget_ref(w._w))
        return w
    def get_child_by_label(self, label):
        w = cameraWidget()
        check(gp.gp_widget_get_child_by_label(self._w, str(label), PTR(w._w)))
        return w
    def get_child_by_id(self, id):
        w = cameraWidget()
        check(gp.gp_widget_get_child_by_id(self._w, int(id), PTR(w._w)))
        return w
    def get_child_by_name(self, name):
        w = cameraWidget()
        # this fails in 2.4.6 (Ubuntu 9.10)
        check(gp.gp_widget_get_child_by_name(self._w, str(name), PTR(w._w)))
        return w
    def __getitem__(self, key):
        # Return self or the first direct child whose name or label == key.
        return next((x for x in ([self] + self.children) if key in [x.name, x.label]), None)
    def _get_children(self):
        children = []
        for i in range(self.count_children()):
            children.append(self.get_child(i))
        return children
    children = property(_get_children, None)
    def _get_parent(self):
        w = cameraWidget()
        check(gp.gp_widget_get_parent(self._w, PTR(w._w)))
        return w
    parent = property(_get_parent, None)
    def _get_root(self):
        w = cameraWidget()
        check(gp.gp_widget_get_root(self._w, PTR(w._w)))
        return w
    root = property(_get_root, None)
    def _set_range(self, range):
        """cameraWidget.range = (min, max, increment)"""
        float = ctypes.c_float
        min, max, increment = range
        check(gp.gp_widget_set_range(self._w, float(min), float(max), float(increment)))
        # Fixed: removed 'return w' — w was undefined here (NameError).
    def _get_range(self):
        """cameraWidget.range => (min, max, increment)

        Fixed: the getter took a bogus extra parameter (so the property
        always raised TypeError) and called gp_widget_set_range instead of
        gp_widget_get_range.
        """
        min, max, increment = ctypes.c_float(), ctypes.c_float(), ctypes.c_float()
        check(gp.gp_widget_get_range(self._w, PTR(min), PTR(max), PTR(increment)))
        return (min.value, max.value, increment.value)
    range = property(_get_range, _set_range)
    def add_choice(self, choice):
        check(gp.gp_widget_add_choice(self._w, str(choice)))
    def count_choices(self):
        return gp.gp_widget_count_choices(self._w)
    def get_choice(self, choice_number):
        choice = ctypes.c_char_p()
        check(gp.gp_widget_get_choice(self._w, int(choice_number), PTR(choice)))
        return choice.value
    def createdoc(self):
        """Build a human-readable description of this widget and its children."""
        label = "Label: " + self.label
        info = "Info: " + (self.info if self.info != "" else "n/a")
        type = "Type: " + self.typestr
        #value = "Current value: " + str(self.value)
        childs = []
        for c in self.children:
            childs.append(" - " + c.name + ": " + c.label)
        if len(childs):
            childstr = "Children:\n" + string.join(childs, "\n")
            return label + "\n" + info + "\n" + type + "\n" + childstr
        else:
            return label + "\n" + info + "\n" + type
    def _pop(widget, simplewidget):
        # Recursively mirror the widget tree onto simplewidget attributes:
        # containers become cameraWidgetSimple nodes, leaves the widget itself.
        for c in widget.children:
            simplechild = cameraWidgetSimple()
            if c.count_children():
                setattr(simplewidget, c.name, simplechild)
                simplechild.__doc__ = c.createdoc()
                c._pop(simplechild)
            else:
                setattr(simplewidget, c.name, c)
    def populate_children(self):
        """Expose the config tree as nested attributes (e.g. cam.config.main...)."""
        simplewidget = cameraWidgetSimple()
        setattr(self, self.name, simplewidget)
        simplewidget.__doc__ = self.createdoc()
        self._pop(simplewidget)
    def __repr__(self):
        return "%s:%s:%s:%s:%s" % (self.label, self.name, self.info, self.typestr, self.value)
class cameraWidgetSimple(object):
    # Empty attribute container filled in by cameraWidget.populate_children().
    pass
__version__ = "0.1dev" | mit |
isnnn/Sick-Beard-TPB | lib/requests/packages/charade/gb2312prober.py | 2994 | 1681 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import GB2312DistributionAnalysis
from .mbcssm import GB2312SMModel
class GB2312Prober(MultiByteCharSetProber):
    """Multi-byte charset prober specialized for the GB2312 encoding."""

    def __init__(self):
        super(GB2312Prober, self).__init__()
        self._mCodingSM = CodingStateMachine(GB2312SMModel)
        self._mDistributionAnalyzer = GB2312DistributionAnalysis()
        self.reset()

    def get_charset_name(self):
        """Return the canonical name of the charset this prober detects."""
        return "GB2312"
| gpl-3.0 |
bratsche/Neutron-Drive | neutron-drive/django/conf/locale/nl/formats.py | 329 | 3056 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Display formats (Django date-format syntax).
DATE_FORMAT = 'j F Y' # '20 januari 2009'
TIME_FORMAT = 'H:i' # '15:23'
DATETIME_FORMAT = 'j F Y H:i' # '20 januari 2009 15:23'
YEAR_MONTH_FORMAT = 'F Y' # 'januari 2009'
MONTH_DAY_FORMAT = 'j F' # '20 januari'
SHORT_DATE_FORMAT = 'j-n-Y' # '20-1-2009'
SHORT_DATETIME_FORMAT = 'j-n-Y H:i' # '20-1-2009 15:23'
FIRST_DAY_OF_WEEK = 1 # Monday (in Dutch 'maandag')
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Formats are tried in order; month-name variants are left disabled below.
DATE_INPUT_FORMATS = (
    '%d-%m-%Y', '%d-%m-%y', '%Y-%m-%d', # '20-01-2009', '20-01-09', '2009-01-20'
    # '%d %b %Y', '%d %b %y', # '20 jan 2009', '20 jan 09'
    # '%d %B %Y', '%d %B %y', # '20 januari 2009', '20 januari 09'
)
TIME_INPUT_FORMATS = (
    '%H:%M:%S', # '15:23:35'
    '%H.%M:%S', # '15.23:35'
    '%H.%M', # '15.23'
    '%H:%M', # '15:23'
)
DATETIME_INPUT_FORMATS = (
    # With time in %H:%M:%S :
    '%d-%m-%Y %H:%M:%S', '%d-%m-%y %H:%M:%S', '%Y-%m-%d %H:%M:%S', # '20-01-2009 15:23:35', '20-01-09 15:23:35', '2009-01-20 15:23:35'
    # '%d %b %Y %H:%M:%S', '%d %b %y %H:%M:%S', # '20 jan 2009 15:23:35', '20 jan 09 15:23:35'
    # '%d %B %Y %H:%M:%S', '%d %B %y %H:%M:%S', # '20 januari 2009 15:23:35', '20 januari 2009 15:23:35'
    # With time in %H.%M:%S :
    '%d-%m-%Y %H.%M:%S', '%d-%m-%y %H.%M:%S', # '20-01-2009 15.23:35', '20-01-09 15.23:35'
    # '%d %b %Y %H.%M:%S', '%d %b %y %H.%M:%S', # '20 jan 2009 15.23:35', '20 jan 09 15.23:35'
    # '%d %B %Y %H.%M:%S', '%d %B %y %H.%M:%S', # '20 januari 2009 15.23:35', '20 januari 2009 15.23:35'
    # With time in %H:%M :
    '%d-%m-%Y %H:%M', '%d-%m-%y %H:%M', '%Y-%m-%d %H:%M', # '20-01-2009 15:23', '20-01-09 15:23', '2009-01-20 15:23'
    # '%d %b %Y %H:%M', '%d %b %y %H:%M', # '20 jan 2009 15:23', '20 jan 09 15:23'
    # '%d %B %Y %H:%M', '%d %B %y %H:%M', # '20 januari 2009 15:23', '20 januari 2009 15:23'
    # With time in %H.%M :
    '%d-%m-%Y %H.%M', '%d-%m-%y %H.%M', # '20-01-2009 15.23', '20-01-09 15.23'
    # '%d %b %Y %H.%M', '%d %b %y %H.%M', # '20 jan 2009 15.23', '20 jan 09 15.23'
    # '%d %B %Y %H.%M', '%d %B %y %H.%M', # '20 januari 2009 15.23', '20 januari 2009 15.23'
    # Without time :
    '%d-%m-%Y', '%d-%m-%y', '%Y-%m-%d', # '20-01-2009', '20-01-09', '2009-01-20'
    # '%d %b %Y', '%d %b %y', # '20 jan 2009', '20 jan 09'
    # '%d %B %Y', '%d %B %y', # '20 januari 2009', '20 januari 2009'
)
# Number formatting: Dutch uses ',' for decimals and '.' for thousands.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| bsd-3-clause |
campbe13/openhatch | vendor/packages/sphinx/doc/conf.py | 15 | 4196 | # -*- coding: utf-8 -*-
#
# Sphinx documentation build configuration file
import re
import sphinx
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo',
'sphinx.ext.autosummary', 'sphinx.ext.extlinks']
master_doc = 'contents'
templates_path = ['_templates']
exclude_patterns = ['_build']
project = 'Sphinx'
copyright = '2007-2013, Georg Brandl'
version = sphinx.__released__
release = version
show_authors = True
html_theme = 'sphinx13'
html_theme_path = ['_themes']
modindex_common_prefix = ['sphinx.']
html_static_path = ['_static']
html_sidebars = {'index': ['indexsidebar.html', 'searchbox.html']}
html_additional_pages = {'index': 'index.html'}
html_use_opensearch = 'http://sphinx-doc.org'
htmlhelp_basename = 'Sphinxdoc'
epub_theme = 'epub'
epub_basename = 'sphinx'
epub_author = 'Georg Brandl'
epub_publisher = 'http://sphinx-doc.org/'
epub_scheme = 'url'
epub_identifier = epub_publisher
epub_pre_files = [('index.html', 'Welcome')]
epub_post_files = [('install.html', 'Installing Sphinx'),
('develop.html', 'Sphinx development')]
epub_exclude_files = ['_static/opensearch.xml', '_static/doctools.js',
'_static/jquery.js', '_static/searchtools.js', '_static/underscore.js',
'_static/basic.css', 'search.html', '_static/websupport.js']
epub_fix_images = False
epub_max_image_width = 0
epub_show_urls = 'inline'
epub_use_index = False
epub_guide = (('toc', 'contents.html', u'Table of Contents'),)
latex_documents = [('contents', 'sphinx.tex', 'Sphinx Documentation',
'Georg Brandl', 'manual', 1)]
latex_logo = '_static/sphinx.png'
latex_elements = {
'fontpkg': '\\usepackage{palatino}',
}
latex_show_urls = 'footnote'
autodoc_member_order = 'groupwise'
todo_include_todos = True
extlinks = {'duref': ('http://docutils.sourceforge.net/docs/ref/rst/'
'restructuredtext.html#%s', ''),
'durole': ('http://docutils.sourceforge.net/docs/ref/rst/'
'roles.html#%s', ''),
'dudir': ('http://docutils.sourceforge.net/docs/ref/rst/'
'directives.html#%s', '')}
man_pages = [
('contents', 'sphinx-all', 'Sphinx documentation generator system manual',
'Georg Brandl', 1),
('man/sphinx-build', 'sphinx-build', 'Sphinx documentation generator tool',
'', 1),
('man/sphinx-quickstart', 'sphinx-quickstart', 'Sphinx documentation '
'template generator', '', 1),
('man/sphinx-apidoc', 'sphinx-apidoc', 'Sphinx API doc generator tool',
'', 1),
]
texinfo_documents = [
('contents', 'sphinx', 'Sphinx Documentation', 'Georg Brandl',
'Sphinx', 'The Sphinx documentation builder.', 'Documentation tools',
1),
]
# We're not using intersphinx right now, but if we did, this would be part of
# the mapping:
intersphinx_mapping = {'python': ('http://docs.python.org/dev', None)}
# Sphinx document translation with sphinx gettext feature uses these settings:
locale_dirs = ['locale/']
gettext_compact = False
# -- Extension interface -------------------------------------------------------
from sphinx import addnodes
event_sig_re = re.compile(r'([a-zA-Z-]+)\s*\((.*)\)')
def parse_event(env, sig, signode):
    """Parse an ``event`` directive signature like ``name(arg1, arg2)``.

    Renders the event name (and its parameter list, when the signature
    matches ``event_sig_re``) into *signode* and returns the name used
    for cross-referencing/indexing.
    """
    match = event_sig_re.match(sig)
    if match is None:
        # Not of the "name(args)" form: render the raw signature verbatim.
        signode += addnodes.desc_name(sig, sig)
        return sig
    event_name, arglist = match.groups()
    signode += addnodes.desc_name(event_name, event_name)
    params = addnodes.desc_parameterlist()
    for raw_arg in arglist.split(','):
        cleaned = raw_arg.strip()
        params += addnodes.desc_parameter(cleaned, cleaned)
    signode += params
    return event_name
def setup(app):
    """Sphinx extension entry point used when building Sphinx's own docs.

    Registers the custom ``confval`` and ``event`` object types and trims
    module docstrings pulled in by autodoc.
    """
    from sphinx.ext.autodoc import cut_lines
    from sphinx.util.docfields import GroupedField
    # Strip the first four lines of each module docstring rendered by
    # autodoc (presumably a boilerplate header - confirm against sources).
    app.connect('autodoc-process-docstring', cut_lines(4, what=['module']))
    app.add_object_type('confval', 'confval',
                        objname='configuration value',
                        indextemplate='pair: %s; configuration value')
    # Group ":param x:" fields of an event under a single "Parameters" label.
    fdesc = GroupedField('parameter', label='Parameters',
                         names=['param'], can_collapse=True)
    # parse_event (defined above) handles "name(args)" style signatures.
    app.add_object_type('event', 'event', 'pair: %s; event', parse_event,
                        doc_field_types=[fdesc])
| agpl-3.0 |
computersalat/ansible | lib/ansible/module_utils/facts/system/fips.py | 232 | 1338 | # Determine if a system is in 'fips' mode
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.utils import get_file_content
from ansible.module_utils.facts.collector import BaseFactCollector
class FipsFactCollector(BaseFactCollector):
    """Collects the 'fips' fact: whether the kernel is in FIPS mode."""
    name = 'fips'
    _fact_ids = set()

    def collect(self, module=None, collected_facts=None):
        """Return {'fips': bool} read from /proc/sys/crypto/fips_enabled.

        The fact is always present; it is False when the proc file is
        missing, unreadable, or contains anything other than '1'.
        """
        # NOTE: this is populated even if it is not set
        data = get_file_content('/proc/sys/crypto/fips_enabled')
        # get_file_content() is falsy on failure, so the equality test
        # alone suffices; the original `data and data == '1'` guard was
        # redundant ('1' is truthy, equality already implies data is).
        return {'fips': data == '1'}
| gpl-3.0 |
openstack-hyper-v-python/numpy | numpy/polynomial/__init__.py | 17 | 1136 | """
A sub-package for efficiently dealing with polynomials.
Within the documentation for this sub-package, a "finite power series,"
i.e., a polynomial (also referred to simply as a "series") is represented
by a 1-D numpy array of the polynomial's coefficients, ordered from lowest
order term to highest. For example, array([1,2,3]) represents
``P_0 + 2*P_1 + 3*P_2``, where P_n is the n-th order basis polynomial
applicable to the specific module in question, e.g., `polynomial` (which
"wraps" the "standard" basis) or `chebyshev`. For optimal performance,
all operations on polynomials, including evaluation at an argument, are
implemented as operations on the coefficients. Additional (module-specific)
information can be found in the docstring for the module of interest.
"""
from __future__ import division, absolute_import, print_function
import warnings
from .polynomial import Polynomial
from .chebyshev import Chebyshev
from .legendre import Legendre
from .hermite import Hermite
from .hermite_e import HermiteE
from .laguerre import Laguerre
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
| bsd-3-clause |
stitzelj/Ficlatte | django-bbcode-master/bbcode/util.py | 3 | 3388 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# django-bbcode: util.py
##
import re
import bbcode.settings
def to_html(text,
            tags_alternative_definition=None,
            escape_html=True,
            method='disable',
            *tags):
    """
    Convert a string with BBCode markup into its corresponding HTML markup.

    Basic Usage
    -----------
    The first parameter is the string of BBCode markup to be processed

    >>> text = "[b]some bold text to markup[/b]"
    >>> output = bbcode.util.to_html(text)
    >>> print output
    <strong>some bold text to markup</strong>

    Custom BBCode translations
    --------------------------
    `tags_alternative_definition` is an optional dictionary of custom markup
    translations which overrides the defaults from bbcode.settings.TAGS.
    Passing None (the new default) or {} means "no overrides", so existing
    callers are unaffected.

    The dictionary takes the following format:

    'name': [regexp, replacement, description, example, enable_symbol]

    For example,

    custom_blockquote = {
        'Quote': [
            r"\[quote(:.*)?=(.*?)\](.*?)\[\/quote\1?\]",
            '<div class="quote"><p><cite>\\2</cite></p><blockquote>\\3</blockquote></div>',
            'Quote with citation',
            '[quote=mike]please quote me[/quote]',
            'quote' ],
    }

    Enable and Disable specific tags
    --------------------------------
    `method` selects how the trailing `*tags` symbols are interpreted:
    'enable' processes *only* the listed tags, 'disable' (the default)
    processes all tags *except* the listed ones.  Any other value leaves
    the BBCode tags untranslated, matching the original behaviour.

    bbcode.util.to_html(text, {}, True, 'enable', 'image', 'bold', 'quote')
    bbcode.util.to_html(text, {}, True, 'disable', 'image', 'video', 'color')
    """
    # Escape &, <, and > so raw HTML in the input is neutralised.  The
    # previous version replaced each character with itself (a no-op),
    # leaving the input unescaped despite escape_html=True.
    if escape_html:
        text = text.replace('&', '&amp;')
        text = text.replace('<', '&lt;')
        text = text.replace('>', '&gt;')

    # Merge the alternative definitions over a *copy* of the defaults.
    # The previous version assigned bbcode.settings.TAGS directly and then
    # wrote the overrides into it, permanently mutating the shared settings
    # dict across calls; it also used a mutable dict as a default argument.
    tags_definition = dict(bbcode.settings.TAGS)
    if tags_alternative_definition:
        tags_definition.update(tags_alternative_definition)

    # Select which tag definitions apply, according to `method`.
    if method == 'enable':
        def wanted(t):
            return t[4] in tags
    elif method == 'disable':
        # The default tags list is empty, so by default nothing is
        # disabled and every tag is processed.
        def wanted(t):
            return t[4] not in tags
    else:
        wanted = None

    # Apply each selected tag's regexp -> replacement translation.
    if wanted is not None:
        for x in tags_definition:
            t = tags_definition[x]
            if wanted(t):
                regex = re.compile(t[0], re.IGNORECASE | re.DOTALL)
                text = regex.sub(t[1], text)

    # parse spacing: normalise line endings, then newlines become <br />.
    text = re.sub(r"\r\n?", "\n", text)
    text = re.sub(r"\n", "<br />", text)

    # return markup
    return text
##
# End of File
##
| agpl-3.0 |
martonw/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/test/main.py | 120 | 10405 | # Copyright (C) 2012 Google, Inc.
# Copyright (C) 2010 Chris Jerdonek (cjerdonek@webkit.org)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""unit testing code for webkitpy."""
import logging
import multiprocessing
import optparse
import os
import StringIO
import sys
import time
import traceback
import unittest
from webkitpy.common.system.filesystem import FileSystem
from webkitpy.test.finder import Finder
from webkitpy.test.printer import Printer
from webkitpy.test.runner import Runner, unit_test_name
_log = logging.getLogger(__name__)
def main():
    """Entry point: register webkitpy/webkit2 test trees and run them.

    Returns a process exit code: 0 on success (the negation of
    Tester.run()'s boolean result).
    """
    up = os.path.dirname
    # This file lives five directory levels below the WebKit checkout root.
    webkit_root = up(up(up(up(up(os.path.abspath(__file__))))))

    tester = Tester()
    tester.add_tree(os.path.join(webkit_root, 'Tools', 'Scripts'), 'webkitpy')
    tester.add_tree(os.path.join(webkit_root, 'Source', 'WebKit2', 'Scripts'), 'webkit2')

    # Bug numbers track why each set of tests is skipped.
    tester.skip(('webkitpy.common.checkout.scm.scm_unittest',), 'are really, really, slow', 31818)
    if sys.platform == 'win32':
        tester.skip(('webkitpy.common.checkout', 'webkitpy.common.config', 'webkitpy.tool'), 'fail horribly on win32', 54526)

    # This only needs to run on Unix, so don't worry about win32 for now.
    appengine_sdk_path = '/usr/local/google_appengine'
    if os.path.exists(appengine_sdk_path):
        if not appengine_sdk_path in sys.path:
            sys.path.append(appengine_sdk_path)
        import dev_appserver
        from google.appengine.dist import use_library
        use_library('django', '1.2')
        dev_appserver.fix_sys_path()
        tester.add_tree(os.path.join(webkit_root, 'Tools', 'QueueStatusServer'))
    else:
        _log.info('Skipping QueueStatusServer tests; the Google AppEngine Python SDK is not installed.')

    return not tester.run()
class Tester(object):
    """Finds, loads, and runs webkitpy's unit tests, reporting results."""

    def __init__(self, filesystem=None):
        # filesystem: injectable for testing; defaults to the real FileSystem.
        self.finder = Finder(filesystem or FileSystem())
        self.printer = Printer(sys.stderr)
        self._options = None

    def add_tree(self, top_directory, starting_subdirectory=None):
        """Register a directory tree to be searched for tests."""
        self.finder.add_tree(top_directory, starting_subdirectory)

    def skip(self, names, reason, bugid):
        """Mark the given test names as skipped, citing a reason and bug id."""
        self.finder.skip(names, reason, bugid)

    def _parse_args(self, argv=None):
        """Build the option parser and return (options, args) from argv."""
        parser = optparse.OptionParser(usage='usage: %prog [options] [args...]')
        parser.add_option('-a', '--all', action='store_true', default=False,
                          help='run all the tests')
        parser.add_option('-c', '--coverage', action='store_true', default=False,
                          help='generate code coverage info (requires http://pypi.python.org/pypi/coverage)')
        parser.add_option('-i', '--integration-tests', action='store_true', default=False,
                          help='run integration tests as well as unit tests'),
        parser.add_option('-j', '--child-processes', action='store', type='int', default=(1 if sys.platform == 'win32' else multiprocessing.cpu_count()),
                          help='number of tests to run in parallel (default=%default)')
        parser.add_option('-p', '--pass-through', action='store_true', default=False,
                          help='be debugger friendly by passing captured output through to the system')
        parser.add_option('-q', '--quiet', action='store_true', default=False,
                          help='run quietly (errors, warnings, and progress only)')
        parser.add_option('-t', '--timing', action='store_true', default=False,
                          help='display per-test execution time (implies --verbose)')
        parser.add_option('-v', '--verbose', action='count', default=0,
                          help='verbose output (specify once for individual test results, twice for debug messages)')
        parser.epilog = ('[args...] is an optional list of modules, test_classes, or individual tests. '
                         'If no args are given, all the tests will be run.')
        return parser.parse_args(argv)

    def run(self):
        """Parse options, discover tests, run them; True means all passed."""
        self._options, args = self._parse_args()
        self.printer.configure(self._options)
        self.finder.clean_trees()
        names = self.finder.find_names(args, self._options.all)
        if not names:
            _log.error('No tests to run')
            return False
        return self._run_tests(names)

    def _run_tests(self, names):
        """Run the named tests (parallel then serial); True on no failures."""
        # Make sure PYTHONPATH is set up properly.
        sys.path = self.finder.additional_paths(sys.path) + sys.path
        # We autoinstall everything up so that we can run tests concurrently
        # and not have to worry about autoinstalling packages concurrently.
        self.printer.write_update("Checking autoinstalled packages ...")
        from webkitpy.thirdparty import autoinstall_everything
        installed_something = autoinstall_everything()
        # FIXME: There appears to be a bug in Python 2.6.1 that is causing multiprocessing
        # to hang after we install the packages in a clean checkout.
        if installed_something:
            _log.warning("We installed new packages, so running things serially at first")
            self._options.child_processes = 1
        if self._options.coverage:
            _log.warning("Checking code coverage, so running things serially")
            self._options.child_processes = 1
            import webkitpy.thirdparty.autoinstalled.coverage as coverage
            cov = coverage.coverage(omit=["/usr/*", "*/webkitpy/thirdparty/autoinstalled/*", "*/webkitpy/thirdparty/BeautifulSoup.py"])
            cov.start()
        self.printer.write_update("Checking imports ...")
        if not self._check_imports(names):
            return False
        self.printer.write_update("Finding the individual test methods ...")
        loader = _Loader()
        parallel_tests, serial_tests = self._test_names(loader, names)
        self.printer.write_update("Running the tests ...")
        self.printer.num_tests = len(parallel_tests) + len(serial_tests)
        start = time.time()
        test_runner = Runner(self.printer, loader)
        test_runner.run(parallel_tests, self._options.child_processes)
        # Serial tests always run in a single worker.
        test_runner.run(serial_tests, 1)
        self.printer.print_result(time.time() - start)
        if self._options.coverage:
            cov.stop()
            cov.save()
            cov.report(show_missing=False)
        return not self.printer.num_errors and not self.printer.num_failures

    def _check_imports(self, names):
        """Pre-import module-like names so bad modules fail with clear errors."""
        for name in names:
            if self.finder.is_module(name):
                # if we failed to load a name and it looks like a module,
                # try importing it directly, because loadTestsFromName()
                # produces lousy error messages for bad modules.
                try:
                    __import__(name)
                except ImportError:
                    _log.fatal('Failed to import %s:' % name)
                    self._log_exception()
                    return False
        return True

    def _test_names(self, loader, names):
        """Split the discovered tests into (parallel_tests, serial_tests)."""
        parallel_test_method_prefixes = ['test_']
        serial_test_method_prefixes = ['serial_test_']
        if self._options.integration_tests:
            parallel_test_method_prefixes.append('integration_test_')
            serial_test_method_prefixes.append('serial_integration_test_')
        parallel_tests = []
        loader.test_method_prefixes = parallel_test_method_prefixes
        for name in names:
            parallel_tests.extend(self._all_test_names(loader.loadTestsFromName(name, None)))
        serial_tests = []
        loader.test_method_prefixes = serial_test_method_prefixes
        for name in names:
            serial_tests.extend(self._all_test_names(loader.loadTestsFromName(name, None)))
        # loader.loadTestsFromName() will not verify that names begin with one of the test_method_prefixes
        # if the names were explicitly provided (e.g., MainTest.test_basic), so this means that any individual
        # tests will be included in both parallel_tests and serial_tests, and we need to de-dup them.
        serial_tests = list(set(serial_tests).difference(set(parallel_tests)))
        return (parallel_tests, serial_tests)

    def _all_test_names(self, suite):
        """Recursively flatten a TestSuite into a list of test names."""
        names = []
        if hasattr(suite, '_tests'):
            for t in suite._tests:
                names.extend(self._all_test_names(t))
        else:
            names.append(unit_test_name(suite))
        return names

    def _log_exception(self):
        # Route the current traceback through the logger, line by line.
        s = StringIO.StringIO()
        traceback.print_exc(file=s)
        for l in s.buflist:
            _log.error('  ' + l.rstrip())
class _Loader(unittest.TestLoader):
    """TestLoader that recognizes a configurable set of method prefixes."""

    # Overwritten by the caller (see Tester._test_names) before each pass.
    test_method_prefixes = []

    def getTestCaseNames(self, testCaseClass):
        """Return the sorted names of matching test methods on the class."""
        def _is_test_method(attr_name):
            attr = getattr(testCaseClass, attr_name)
            if not hasattr(attr, '__call__'):
                return False
            return any(attr_name.startswith(prefix)
                       for prefix in self.test_method_prefixes)

        return sorted(name for name in dir(testCaseClass)
                      if _is_test_method(name))
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
josefschneider/switchboard | setup.py | 1 | 2064 |
import sys
import os
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
    """setuptools ``test`` command that delegates to pytest."""
    # `$ python setup.py test' installs minimal requirements and runs tests
    def finalize_options(self):
        TestCommand.finalize_options(self)
        # Run doctests as well, be verbose, and disable output capture (-s).
        self.test_args = [
            '--doctest-modules', '--verbose', '-s',
            './switchboard', './tests'
        ]
        self.test_suite = True

    def run_tests(self):
        # Imported here so pytest only needs to be installed when the
        # command actually runs; exit with pytest's return code.
        import pytest
        sys.exit(pytest.main(self.test_args))
def get_console_scripts():
    """Build the console_scripts entry-point list for setup().

    Scans the apps/ directory next to this setup.py for packages named
    ``swb_*`` and registers one entry point per app, in addition to the
    fixed ``switchboard`` and ``swbclient`` entry points.

    Side effect: regenerates apps/app_list.py so that the command-line
    interface can tab-complete the apps available for launching.

    Returns:
        A list of "name=module:callable" entry-point strings.
    """
    console_scripts = [
        'switchboard=switchboard.__main__:main',
        'swbclient=cli.__main__:main'
    ]

    setup_py_path = os.path.dirname(os.path.realpath(__file__))
    apps_dir = setup_py_path + '/apps'

    # Create a file called app_list.py that can be imported so that
    # the command-line interface can tab-complete for apps to launch.
    # Use a context manager so the file is closed (and flushed) even if
    # the directory scan below raises; the original left it open on error.
    with open(apps_dir + '/app_list.py', 'w') as app_list:
        app_list.write('# WARNING: this file is autogenerated and should not be modified\n')
        app_list.write('APP_LIST = [\n')
        for f in os.listdir(apps_dir):
            if f.startswith('swb_'):
                swb_client_name = os.path.splitext(f)[0]
                print('Installing {}'.format(swb_client_name))
                app_list.write(' "{}",\n'.format(swb_client_name))
                console_scripts.append('{0}=apps.{0}.__main__:main'.format(swb_client_name))
        app_list.write(']\n')

    return console_scripts
# Extra requirements needed only to run the test suite.
tests_require = [
    'pytest',
    'mock'
]

# Runtime dependencies of switchboard itself.
install_requires = [
    'requests',
    'termcolor',
    'bottle',
    'bottle_websocket',
    'gevent',
    'websocket-client',
    'psutil'
]

# NOTE: evaluated at import time and regenerates apps/app_list.py as a
# side effect (see get_console_scripts above).
console_scripts = get_console_scripts()

setup(
    name='switchboard',
    version='0.1.0',
    packages=find_packages(),
    package_data={ '': ['*.html'] },
    entry_points={
        'console_scripts': console_scripts,
    },
    tests_require=tests_require,
    install_requires=install_requires,
    # Wires `python setup.py test` to the pytest-based command.
    cmdclass={'test': PyTest}
)
| mit |
MiniSEC/GRR_clone | lib/build.py | 1 | 37732 | #!/usr/bin/env python
"""Classes for handling build and repackaging of clients.
This handles invocations for the build across the supported platforms including
handling Visual Studio, pyinstaller and other packaging mechanisms.
"""
import cStringIO
import logging
import os
import shutil
import struct
import subprocess
import sys
import time
import zipfile
from grr.lib import config_lib
from grr.lib import rdfvalue
from grr.lib import type_info
from grr.lib import utils
class PathTypeInfo(type_info.String):
  """A path to a file or a directory."""

  def __init__(self, must_exist=True, **kwargs):
    # must_exist: when True, Validate() requires the path to be readable.
    self.must_exist = must_exist
    super(PathTypeInfo, self).__init__(**kwargs)

  def Validate(self, value):
    """String-validate value, then check readability if must_exist is set.

    Raises:
      type_info.TypeValueError: if must_exist and the path is not readable.
    """
    value = super(PathTypeInfo, self).Validate(value)
    if self.must_exist and not os.access(value, os.R_OK):
      raise type_info.TypeValueError(
          "Path %s does not exist for %s" % (value, self.name))

    return value

  def FromString(self, string):
    # Normalize separators and redundant components so equivalent
    # spellings of the same path compare equal.
    return os.path.normpath(string)
# PyInstaller build configuration.
config_lib.DEFINE_option(PathTypeInfo(
name="PyInstaller.path", must_exist=False,
default="c:/grr_build/pyinstaller/pyinstaller.py",
help="Path to the main pyinstaller.py file."))
config_lib.DEFINE_option(PathTypeInfo(
name="PyInstaller.pathex", must_exist=False,
default="", help="Additional paths for searching for libraries."))
config_lib.DEFINE_bool(
"ClientBuilder.console", default=False,
help="Should the application be built as a console program. "
"This aids debugging in windows.")
config_lib.DEFINE_string(
name="PyInstaller.spec",
help="The spec file contents to use for building the client.",
default=r"""
# By default build in one dir mode.
a = Analysis\(
["%(ClientBuilder.source)/grr/client/client.py"],
pathex=%(PyInstaller.pathex),
hiddenimports=[],
hookspath=None\)
pyz = PYZ\(
a.pure\)
exe = EXE\(
pyz,
a.scripts,
exclude_binaries=1,
name='build/%(Client.binary_name)',
debug=False,
strip=False,
upx=True,
console=True,
version='%(PyInstaller.build_dir)/version.txt',
icon='%(PyInstaller.build_dir)/grr.ico'\)
coll = COLLECT\(
exe,
a.binaries,
a.zipfiles,
a.datas,
strip=False,
upx=True,
name='%(PyInstaller.output_basename)'\)
""")
config_lib.DEFINE_string(
name="PyInstaller.distpath",
help=("Passed to PyInstaller as the --distpath flag. This sets the output "
"directory for PyInstaller."),
default="./dist")
config_lib.DEFINE_string(
name="PyInstaller.version",
help="The version.txt file contents to use for building the client.",
default=r"""
VSVersionInfo\(
ffi=FixedFileInfo\(
filevers=\(%(Client.version_major), %(Client.version_minor),
%(Client.version_revision), %(Client.version_release)\),
prodvers=\(%(Client.version_major), %(Client.version_minor),
%(Client.version_revision), %(Client.version_release)\),
mask=0x3f,
flags=0x0,
OS=0x40004,
fileType=0x1,
subtype=0x0,
date=\(0, 0\)
\),
kids=[
StringFileInfo\(
[
StringTable\(
'040904B0',
[StringStruct\('CompanyName', "%(Client.company_name)"\),
StringStruct\('FileDescription', "%(Client.description)"\),
StringStruct\('FileVersion', '%(Client.version_string)'\),
StringStruct\('InternalName', '%(Client.description)' \),
StringStruct\('OriginalFilename', '%(ClientBuilder.package_name)' \)]\),
]\),
VarFileInfo\([VarStruct\('Translation', [1033, 1200]\)]\)
]
\)
""")
config_lib.DEFINE_bytes(
"PyInstaller.icon",
"%(%(ClientBuilder.source)/grr/gui/static/images/grr.ico|file)",
"The icon file contents to use for building the client.")
config_lib.DEFINE_string(
"PyInstaller.build_dir",
"./build",
"The path to the build directory.")
config_lib.DEFINE_string(
"PyInstaller.dist_dir",
"./dist/",
"The path to the build directory.")
config_lib.DEFINE_string(
name="PyInstaller.output_basename",
default="%(Client.name)_%(Client.version_string)_%(Client.arch)",
help="The base name of the output package.")
config_lib.DEFINE_string(
name="ClientBuilder.source",
default=os.path.normpath(os.path.dirname(__file__) + "/../.."),
help="The location of the source tree.")
config_lib.DEFINE_option(type_info.PathTypeInfo(
name="ClientBuilder.nanny_source_dir", must_exist=True,
default="%(ClientBuilder.source)/grr/client/nanny/",
help="Path to the windows nanny VS solution file."))
config_lib.DEFINE_choice(
name="ClientBuilder.build_type",
default="Release",
choices=["Release", "Debug"],
help="Type of build (Debug, Release)")
config_lib.DEFINE_string(name="ClientBuilder.template_extension",
default=".zip",
help="The extension to appear on templates.")
config_lib.DEFINE_option(type_info.PathTypeInfo(
name="ClientBuilder.template_path", must_exist=False,
default=(
"%(ClientBuilder.executables_path)/%(Client.platform)"
"/templates/%(Client.arch)/%(Client.version_string)/"
"%(PyInstaller.output_basename)%(ClientBuilder.template_extension)"),
help="The full path to the executable template file."))
config_lib.DEFINE_option(type_info.PathTypeInfo(
name="ClientBuilder.executables_path", must_exist=False,
default="%(ClientBuilder.source)/grr/executables",
help="The path to the grr executables directory."))
config_lib.DEFINE_option(type_info.PathTypeInfo(
name="ClientBuilder.output_path", must_exist=False,
default=(
"%(ClientBuilder.executables_path)/%(Client.platform)"
"/installers/%(Client.arch)/%(Client.version_string)/"
"%(PyInstaller.output_basename)"
"%(ClientBuilder.output_extension)"),
help="The full path to the generated installer file."))
config_lib.DEFINE_option(type_info.PathTypeInfo(
name="ClientBuilder.generated_config_path", must_exist=False,
default=(
"%(ClientBuilder.executables_path)/%(Client.platform)"
"/config/%(PyInstaller.output_basename).yaml"),
help="The full path to where we write a generated config."))
config_lib.DEFINE_option(type_info.PathTypeInfo(
name="ClientBuilder.unzipsfx_stub", must_exist=False,
default=("%(ClientBuilder.executables_path)/%(Client.platform)"
"/templates/unzipsfx/unzipsfx-%(Client.arch).exe"),
help="The full path to the zip self extracting stub."))
config_lib.DEFINE_string(
name="ClientBuilder.config_filename",
default="%(Client.binary_name).yaml",
help=("The name of the configuration file which will be embedded in the "
"deployable binary."))
config_lib.DEFINE_string(
name="ClientBuilder.autorun_command_line",
default=("%(Client.binary_name) --install "
"--config %(ClientBuilder.config_filename)"),
help=("The command that the installer will execute after "
"unpacking the package."))
config_lib.DEFINE_list(
name="ClientBuilder.installer_plugins",
default=[],
help="Plugins that will copied to the client installation file and run "
"at install time.")
config_lib.DEFINE_list(
name="ClientBuilder.plugins",
default=[],
help="Plugins that will copied to the client installation file and run when"
"the client is running.")
config_lib.DEFINE_string(
name="ClientBuilder.client_logging_filename",
default="%(Logging.path)/GRRlog.txt",
help="Filename for logging, to be copied to Client section in the client "
"that gets built.")
config_lib.DEFINE_string(
name="ClientBuilder.client_logging_path",
default="/tmp",
help="Filename for logging, to be copied to Client section in the client "
"that gets built.")
config_lib.DEFINE_list(
name="ClientBuilder.client_logging_engines",
default=["stderr", "file"],
help="Enabled logging engines, to be copied to Logging.engines in client "
"configuration.")
config_lib.DEFINE_string(
name="ClientBuilder.client_installer_logfile",
default="%(Logging.path)/%(Client.name)_installer.txt",
help="Logfile for logging the client installation process, to be copied to"
" Installer.logfile in client built.")
config_lib.DEFINE_string(
name="ClientBuilder.maintainer",
default="GRR <grr-dev@googlegroups.com>",
help="The client package's maintainer.")
config_lib.DEFINE_string(
name="ClientBuilder.debian_build_time",
default=time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()),
help="The build time put into the debian package. Needs to be formatted"
" like the output of 'date -R'.")
config_lib.DEFINE_string(
name="ClientBuilder.debian_version",
default="%(Client.version_numeric)",
help="The version of the debian package.")
config_lib.DEFINE_string(
name="ClientBuilder.debian_package_base",
default=("%(ClientBuilder.package_name)_"
"%(ClientBuilder.debian_version)_%(Client.arch)"),
help="The filename of the debian package without extension.")
config_lib.DEFINE_string(
name="ClientBuilder.package_name",
default="%(Client.name)",
help="The debian package name.")
class ClientBuilder(object):
"""Abstract client builder class, used by the OS specific implementations."""
CONFIG_SECTIONS = ["CA", "Client", "Logging", "Config", "Nanny", "Installer"]
# Config options that should never make it to a deployable binary.
SKIP_OPTION_LIST = ["Client.certificate", "Client.private_key"]
  def __init__(self, context=None):
    # Work on a copy of the global config context (or the supplied one)
    # and prepend the builder-specific context element so options scoped
    # to "ClientBuilder Context" take effect during the build.
    self.context = context or config_lib.CONFIG.context[:]
    self.context = ["ClientBuilder Context"] + self.context
  def FindLibraryPaths(self):
    """Locate optional native libraries so PyInstaller can find them.

    Records distorm3's directory in PyInstaller.pathex and warns when
    distorm3 or the Volatility tech preview are not importable.
    """
    logging.info("Searching for external libraries.")
    librarypaths = ["."]
    try:
      import distorm3  # pylint: disable=g-import-not-at-top
      librarypaths.append(os.path.dirname(distorm3.__file__))
    except ImportError:
      # Non-fatal: the client builds, but with reduced functionality.
      logging.warn("Distorm not found - expect reduced functionality.")

    # repr() because the value is substituted verbatim into the PyInstaller
    # spec file (see the PyInstaller.spec template).
    config_lib.CONFIG.Set("PyInstaller.pathex", repr(librarypaths))

    try:
      from volatility import session  # pylint: disable=g-import-not-at-top
      _ = session
    except ImportError:
      logging.warn("Volatility Tech Preview was not found. "
                   "Client side Memory Analysis will not be available.")
  def ValidateEndConfig(self, config, errors_fatal=True):
    """Given a generated client config, attempt to check for common errors.

    Checks that Client.location is an http(s) URL, that the public
    signing keys and CA certificate are present and well formed, and
    that no client certificate or private key leaked into the config.

    Args:
      config: the generated config object to inspect.
      errors_fatal: when True, print all errors and raise; when False,
        return the list of error strings instead.

    Returns:
      The (possibly empty) list of error strings when errors_fatal is
      False.

    Raises:
      RuntimeError: if errors were found and errors_fatal is True.
    """
    errors = []

    location = config.Get("Client.location", context=self.context)
    if not location.startswith("http"):
      errors.append("Bad Client.location specified %s" % location)

    keys = ["Client.executable_signing_public_key",
            "Client.driver_signing_public_key"]
    for key in keys:
      key_data = config.Get(key, default=None, context=self.context)
      if key_data is None:
        errors.append("Missing private %s." % key)
        continue
      # Only PEM-encoded *public* keys may ship with the client.
      if not key_data.startswith("-----BEGIN PUBLIC"):
        errors.append("Invalid private %s" % key)

    certificate = config.Get("CA.certificate", default=None,
                             context=self.context)
    if (certificate is None or
        not certificate.startswith("-----BEGIN CERTIF")):
      errors.append("CA certificate missing from config.")

    # These must never be present in a deployable config.
    for bad_opt in ["Client.certificate", "Client.private_key"]:
      if config.Get(bad_opt, context=self.context, default=""):
        errors.append("Client cert in conf, this should be empty at deployment"
                      " %s" % bad_opt)

    if errors_fatal and errors:
      for error in errors:
        print "Build Config Error: %s" % error
      raise RuntimeError("Bad configuration generated. Terminating.")
    else:
      return errors
  def MakeBuildDirectory(self):
    """Prepares (clears and recreates) the build directory.

    Side effect: sets self.build_dir, which later steps such as
    BuildWithPyInstaller() rely on.
    """
    # Create the build directory and let pyinstaller loose on it.
    self.build_dir = config_lib.CONFIG.Get("PyInstaller.build_dir",
                                           context=self.context)
    self.CleanDirectory(self.build_dir)
def CleanDirectory(self, directory):
logging.info("Clearing directory %s", directory)
try:
shutil.rmtree(directory)
except OSError:
pass
self.EnsureDirExists(directory)
def EnsureDirExists(self, path):
try:
os.makedirs(path)
except OSError:
pass
  def BuildWithPyInstaller(self):
    """Use pyinstaller to build a client package.

    Writes the spec, version and icon support files into self.build_dir
    (so MakeBuildDirectory() must have run first) and then invokes
    PyInstaller as a subprocess. Sets self.output_dir to the expected
    onedir output location.

    Raises:
      subprocess.CalledProcessError: if the PyInstaller run fails.
    """
    self.CleanDirectory(config_lib.CONFIG.Get("PyInstaller.dist_dir",
                                              context=self.context))
    self.FindLibraryPaths()

    logging.info("Copying pyinstaller support files")
    self.spec_file = os.path.join(self.build_dir, "grr.spec")

    with open(self.spec_file, "wb") as fd:
      fd.write(config_lib.CONFIG.Get("PyInstaller.spec", context=self.context))

    with open(os.path.join(self.build_dir, "version.txt"), "wb") as fd:
      fd.write(config_lib.CONFIG.Get("PyInstaller.version",
                                     context=self.context))

    with open(os.path.join(self.build_dir, "grr.ico"), "wb") as fd:
      fd.write(config_lib.CONFIG.Get("PyInstaller.icon", context=self.context))

    # We expect the onedir output at this location.
    self.output_dir = os.path.join(
        config_lib.CONFIG.Get("PyInstaller.dist_dir", context=self.context),
        config_lib.CONFIG.Get("PyInstaller.output_basename",
                              context=self.context))

    # Run PyInstaller under the same interpreter that is running us.
    subprocess.check_call([sys.executable,
                           config_lib.CONFIG.Get("PyInstaller.path",
                                                 context=self.context),
                           "--distpath",
                           config_lib.CONFIG.Get("PyInstaller.distpath",
                                                 context=self.context),
                           self.spec_file,
                           ])
  def MakeExecutableTemplate(self):
    """Create the executable template.
    The client is built in two phases. First an executable template is created
    with the client binaries contained inside a zip file. Then the installation
    package is created by appending the SFX extractor to this template and
    writing a config file into the zip file.
    This technique allows the client build to be carried out once on the
    supported platform (e.g. windows with MSVS), but the deployable installer
    can be built on any platform which supports python.
    """
    self.MakeBuildDirectory()
    self.BuildWithPyInstaller()
    # Make sure the destination directory for the template exists before
    # zipping the PyInstaller output into it.
    self.EnsureDirExists(os.path.dirname(
        config_lib.CONFIG.Get("ClientBuilder.template_path",
                              context=self.context)))
    output_file = config_lib.CONFIG.Get("ClientBuilder.template_path",
                                        context=self.context)
    logging.info("Generating zip template file at %s", output_file)
    self.MakeZip(self.output_dir, output_file)
def GetClientConfig(self, context):
"""Generates the client config file for inclusion in deployable binaries."""
with utils.TempDirectory() as tmp_dir:
# Make sure we write the file in yaml format.
filename = os.path.join(
tmp_dir, config_lib.CONFIG.Get(
"ClientBuilder.config_filename", context=context))
new_config = config_lib.CONFIG.MakeNewConfig()
new_config.SetWriteBack(filename)
new_config.Set("Client.build_time",
str(rdfvalue.RDFDatetime().Now()))
# Only copy certain sections to the client. We enumerate all
# defined options and then resolve those from the config in the
# client's context. The result is the raw option as if the
# client read our config file.
for descriptor in sorted(config_lib.CONFIG.type_infos,
key=lambda x: x.name):
if descriptor.name in self.SKIP_OPTION_LIST:
continue
if descriptor.section in self.CONFIG_SECTIONS:
value = config_lib.CONFIG.GetRaw(
descriptor.name, context=context,
default=None)
if value is not None:
logging.debug("Copying config option to client: %s",
descriptor.name)
new_config.SetRaw(descriptor.name, value)
new_config.Write()
self.ValidateEndConfig(new_config)
return open(filename, "rb").read()
def GenerateDirectory(self, input_dir=None, output_dir=None,
replacements=None):
input_dir = utils.NormalizePath(input_dir)
output_dir = utils.NormalizePath(output_dir)
replacements = replacements or []
for (root, _, files) in os.walk(input_dir):
for filename in files:
in_file = utils.JoinPath(root, filename)
out_file = in_file.replace(input_dir, output_dir)
for (s, replacement) in replacements:
out_file = out_file.replace(s, replacement)
self.EnsureDirExists(os.path.dirname(out_file))
self.GenerateFile(in_file, out_file)
def GenerateFile(self, input_filename=None, output_filename=None):
"""Generates a file from a template, interpolating config values."""
if input_filename is None:
input_filename = output_filename + ".in"
if output_filename[-3:] == ".in":
output_filename = output_filename[:-3]
data = open(input_filename, "rb").read()
print "Generating file %s from %s" % (output_filename, input_filename)
with open(output_filename, "wb") as fd:
fd.write(config_lib.CONFIG.InterpolateValue(data, context=self.context))
def MakeZip(self, input_dir, output_file):
"""Creates a ZIP archive of the files in the input directory.
Args:
input_dir: the name of the input directory.
output_file: the name of the output ZIP archive without extension.
"""
basename, _ = os.path.splitext(output_file)
shutil.make_archive(basename, "zip",
base_dir=".",
root_dir=input_dir,
verbose=True)
class WindowsClientBuilder(ClientBuilder):
  """Builder class for the Windows client."""
  def __init__(self, context=None):
    """Initialize the Windows client builder."""
    super(WindowsClientBuilder, self).__init__(context=context)
    self.context.append("Target:Windows")
  def BuildNanny(self):
    """Use VS2010 to build the windows Nanny service."""
    logging.info("Copying Nanny build files.")
    self.nanny_dir = os.path.join(self.build_dir, "grr/client/nanny")
    shutil.copytree(config_lib.CONFIG.Get("ClientBuilder.nanny_source_dir",
                                          context=self.context), self.nanny_dir)
    build_type = config_lib.CONFIG.Get(
        "ClientBuilder.build_type", context=self.context)
    vs_arch = config_lib.CONFIG.Get("ClientBuilder.vs_arch", default=None,
                                    context=self.context)
    # We have to set up the Visual Studio environment first and then call
    # msbuild.
    env_script = config_lib.CONFIG.Get("ClientBuilder.vs_env_script",
                                       default=None, context=self.context)
    if vs_arch is None or env_script is None or not os.path.exists(env_script):
      raise RuntimeError("no such Visual Studio script: %s" % env_script)
    # check_call raises CalledProcessError if the msbuild step fails.
    subprocess.check_call(
        "cmd /c \"\"%s\" && cd \"%s\" && msbuild /p:Configuration=%s\"" % (
            env_script, self.nanny_dir, build_type))
    # Install the built nanny binary into the PyInstaller output directory
    # under its configured service binary name.
    shutil.copy(
        os.path.join(self.nanny_dir, vs_arch, build_type, "GRRNanny.exe"),
        os.path.join(self.output_dir,
                     config_lib.CONFIG.Get("Nanny.service_binary_name",
                                           context=self.context)))
  def MakeExecutableTemplate(self):
    """Windows templates also include the nanny."""
    self.MakeBuildDirectory()
    self.BuildWithPyInstaller()
    self.BuildNanny()
    self.EnsureDirExists(os.path.dirname(
        config_lib.CONFIG.Get("ClientBuilder.template_path",
                              context=self.context)))
    output_file = config_lib.CONFIG.Get("ClientBuilder.template_path",
                                        context=self.context)
    logging.info("Generating zip template file at %s", output_file)
    self.MakeZip(self.output_dir, output_file)
  def ValidateEndConfig(self, config, errors_fatal=True):
    """Windows specific config validations."""
    # Accumulate on top of the base class checks.
    errors = super(WindowsClientBuilder, self).ValidateEndConfig(
        config, errors_fatal=errors_fatal)
    if config.GetRaw("Logging.path").startswith("/"):
      errors.append("Logging.path starts with /, probably has Unix path. %s" %
                    config["Logging.path"])
    if "Windows\\" in config.GetRaw("Logging.path"):
      errors.append("Windows in Logging.path, you probably want "
                    "%(WINDIR|env) instead")
    if not config["Client.binary_name"].endswith(".exe"):
      errors.append("Missing .exe extension on binary_name %s" %
                    config["Client.binary_name"])
    if not config["Nanny.nanny_binary"].endswith(".exe"):
      errors.append("Missing .exe extension on nanny_binary")
    if errors_fatal and errors:
      for error in errors:
        print "Build Config Error: %s" % error
      raise RuntimeError("Bad configuration generated. Terminating.")
    else:
      return errors
  def MakeDeployableBinary(self, template_path, output_path=None):
    """Repackage the template zip with the installer."""
    if output_path is None:
      output_path = config_lib.CONFIG.Get("ClientBuilder.output_path",
                                          context=self.context)
    context = self.context + ["Client Context"]
    client_config_content = self.GetClientConfig(context)
    # Build the payload zip entirely in memory.
    zip_data = cStringIO.StringIO()
    output_zip = zipfile.ZipFile(
        zip_data, mode="w", compression=zipfile.ZIP_DEFLATED)
    z_template = zipfile.ZipFile(open(template_path, "rb"))
    completed_files = []  # Track which files we've copied already.
    # Change the name of the main binary to the configured name.
    client_bin_name = config_lib.CONFIG.Get(
        "Client.binary_name", context=context)
    try:
      bin_name = z_template.getinfo(client_bin_name)
    except KeyError:
      # Fall back to the default name used inside the template.
      bin_name = z_template.getinfo("GRR.exe")
    bin_dat = cStringIO.StringIO()
    bin_dat.write(z_template.read(bin_name))
    # Set output to console on binary if needed.
    SetPeSubsystem(bin_dat, console=config_lib.CONFIG.Get(
        "ClientBuilder.console", context=context))
    output_zip.writestr(client_bin_name, bin_dat.getvalue())
    CopyFileInZip(z_template, "%s.manifest" % bin_name.filename, output_zip,
                  "%s.manifest" % client_bin_name)
    completed_files.append(bin_name.filename)
    completed_files.append("%s.manifest" % bin_name.filename)
    # Change the name of the service binary to the configured name.
    service_bin_name = config_lib.CONFIG.Get("Nanny.service_binary_name",
                                             context=context)
    try:
      bin_name = z_template.getinfo(service_bin_name)
    except KeyError:
      bin_name = z_template.getinfo("GRRservice.exe")
    # Set output to console on service binary if needed.
    service_bin_dat = cStringIO.StringIO()
    service_bin_dat.write(z_template.read(bin_name))
    SetPeSubsystem(service_bin_dat,
                   console=config_lib.CONFIG.Get("ClientBuilder.console",
                                                 context=context))
    # NOTE(review): unlike the client binary above, this writes under the
    # template's ZipInfo (original name), not service_bin_name - verify the
    # rename is intentionally skipped for the service binary.
    output_zip.writestr(bin_name, service_bin_dat.getvalue())
    completed_files.append(bin_name.filename)
    # Copy the rest of the files from the template to the new zip.
    for template_file in z_template.namelist():
      if template_file not in completed_files:
        CopyFileInZip(z_template, template_file, output_zip)
    # The zip file comment is used by the self extractor to run
    # the installation script
    output_zip.comment = "$AUTORUN$>%s" % config_lib.CONFIG.Get(
        "ClientBuilder.autorun_command_line", context=context)
    # Add any additional plugins to the deployment binary.
    plugins = (config_lib.CONFIG.Get(
        "ClientBuilder.plugins", context=context) +
               config_lib.CONFIG.Get(
                   "ClientBuilder.installer_plugins", context=context))
    for plugin in plugins:
      output_zip.writestr(os.path.basename(plugin),
                          open(plugin, "rb").read(), zipfile.ZIP_STORED)
    # The client config is stored uncompressed so the installer can patch it.
    output_zip.writestr(
        config_lib.CONFIG.Get(
            "ClientBuilder.config_filename", context=context),
        client_config_content, compress_type=zipfile.ZIP_STORED)
    output_zip.close()
    self.EnsureDirExists(os.path.dirname(output_path))
    with open(output_path, "wb") as fd:
      # First write the installer stub
      stub_data = cStringIO.StringIO()
      unzipsfx_stub = config_lib.CONFIG.Get("ClientBuilder.unzipsfx_stub",
                                            context=context)
      stub_raw = open(unzipsfx_stub, "rb").read()
      # Check stub has been compiled with the requireAdministrator manifest.
      if "level=\"requireAdministrator" not in stub_raw:
        raise RuntimeError("Bad unzip binary in use. Not compiled with the"
                           "requireAdministrator manifest option.")
      stub_data.write(stub_raw)
      # If in verbose mode, modify the unzip bins PE header to run in console
      # mode for easier debugging.
      SetPeSubsystem(
          stub_data,
          console=config_lib.CONFIG.Get(
              "ClientBuilder.console", context=context))
      # Now patch up the .rsrc section to contain the payload.
      end_of_file = zip_data.tell() + stub_data.tell()
      # This is the IMAGE_SECTION_HEADER.Name which is also the start of
      # IMAGE_SECTION_HEADER.
      offset_to_rsrc = stub_data.getvalue().find(".rsrc")
      # IMAGE_SECTION_HEADER.PointerToRawData is a 32 bit int.
      stub_data.seek(offset_to_rsrc + 20)
      start_of_rsrc_section = struct.unpack("<I", stub_data.read(4))[0]
      # Adjust IMAGE_SECTION_HEADER.SizeOfRawData to span from the old start to
      # the end of file.
      stub_data.seek(offset_to_rsrc + 16)
      stub_data.write(struct.pack("<I", end_of_file - start_of_rsrc_section))
      # Now write the file out. Stub data first.
      fd.write(stub_data.getvalue())
      # Then append the payload zip file.
      fd.write(zip_data.getvalue())
    logging.info("Deployable binary generated at %s", output_path)
    return output_path
class DarwinClientBuilder(ClientBuilder):
  """Builder class for the Mac OS X (Darwin) client."""
  def __init__(self, context=None):
    """Initialize the Mac OS X client builder."""
    super(DarwinClientBuilder, self).__init__(context=context)
    self.context.append("Target:Darwin")
  def MakeExecutableTemplate(self):
    """Create the executable template.
    This technique allows the client build to be carried out once on the
    supported platform (e.g. windows with MSVS), but the deployable installer
    can be build on any platform which supports python.
    """
    self.MakeBuildDirectory()
    self.BuildWithPyInstaller()
    self.BuildInstallerPkg()
  # WARNING: change with care since the PackageMaker files are fragile!
  def BuildInstallerPkg(self):
    """Builds a package (.pkg) using PackageMaker."""
    build_files_dir = os.path.join(self.src_dir, "config", "macosx", "client")
    pmdoc_dir = os.path.join(build_files_dir, "grr.pmdoc")
    plist_dir = config_lib.CONFIG.Get(
        "ClientBuildDarwin.plist_binary_directory", context=self.context)
    plist_name = config_lib.CONFIG.Get("ClientBuildDarwin.plist_filename",
                                       context=self.context)
    out_build_files_dir = build_files_dir.replace(self.src_dir, self.build_dir)
    out_pmdoc_dir = os.path.join(self.build_dir, "%s.pmdoc" % plist_dir)
    self.EnsureDirExists(out_build_files_dir)
    self.EnsureDirExists(out_pmdoc_dir)
    self.EnsureDirExists(config_lib.CONFIG.Get("ClientBuildDarwin.package_dir",
                                               context=self.context))
    # Render each PackageMaker input from its ".in" template, interpolating
    # config values (see GenerateFile).
    self.GenerateFile(
        input_filename=os.path.join(build_files_dir, "grr.plist.in"),
        output_filename=os.path.join(self.build_dir, plist_name))
    self.GenerateFile(
        input_filename=os.path.join(pmdoc_dir, "index.xml.in"),
        output_filename=os.path.join(out_pmdoc_dir, "index.xml"))
    self.GenerateFile(
        input_filename=os.path.join(pmdoc_dir, "01grr.xml.in"),
        output_filename=os.path.join(out_pmdoc_dir, "01%s.xml" % plist_dir))
    self.GenerateFile(
        input_filename=os.path.join(pmdoc_dir, "01grr-contents.xml"),
        output_filename=os.path.join(out_pmdoc_dir,
                                     "01%s-contents.xml" % plist_dir))
    self.GenerateFile(
        input_filename=os.path.join(pmdoc_dir, "02com.xml.in"),
        output_filename=os.path.join(out_pmdoc_dir, "02com.xml"))
    self.GenerateFile(
        input_filename=os.path.join(pmdoc_dir, "02com-contents.xml"),
        output_filename=os.path.join(out_pmdoc_dir, "02com-contents.xml"))
    self.GenerateFile(
        input_filename=os.path.join(build_files_dir, "preinstall.sh.in"),
        output_filename=os.path.join(self.build_dir, "preinstall.sh"))
    self.GenerateFile(
        input_filename=os.path.join(build_files_dir, "postinstall.sh.in"),
        output_filename=os.path.join(self.build_dir, "postinstall.sh"))
    # Generate a config file.
    with open(os.path.join(
        config_lib.CONFIG.Get("PyInstaller.build_dir", context=self.context),
        config_lib.CONFIG.Get("PyInstaller.output_basename",
                              context=self.context),
        config_lib.CONFIG.Get("PyInstaller.config_filename",
                              context=self.context)), "wb") as fd:
      fd.write(self.GetClientConfig(("Client Context", "Platform:Darwin")))
    print "Fixing file ownership and permissions"
    command = ["sudo", "chown", "-R", "root:wheel", self.build_dir]
    # Change the owner, group and permissions of the binaries
    print "Running: %s" % " ".join(command)
    subprocess.call(command)
    command = ["sudo", "chmod", "-R", "755", self.build_dir]
    print "Running: %s" % " ".join(command)
    subprocess.call(command)
    pkg = "%s-%s.pkg" % (
        config_lib.CONFIG.Get("ClientBuildDarwin.package_maker_name",
                              context=self.context),
        config_lib.CONFIG.Get("Client.version_string", context=self.context))
    # Invoke PackageMaker on the generated pmdoc to produce the .pkg.
    command = [
        config_lib.CONFIG.Get("ClientBuildDarwin.package_maker_path",
                              context=self.context),
        "--doc", out_pmdoc_dir, "--out",
        os.path.join(config_lib.CONFIG.Get("ClientBuildDarwin.package_dir",
                                           context=self.context), pkg)]
    subprocess.call(command)
  def MakeDeployableBinary(self, template_path, output_path=None):
    """This will add the config to the client template."""
    if output_path is None:
      output_path = config_lib.CONFIG.Get("ClientBuilder.output_path",
                                          context=self.context)
    context = self.context + ["Client Context"]
    self.EnsureDirExists(os.path.dirname(output_path))
    client_config_data = self.GetClientConfig(context)
    # Copy the template and append the config as "config.txt" inside it.
    shutil.copyfile(template_path, output_path)
    zip_file = zipfile.ZipFile(output_path, mode="a")
    zip_info = zipfile.ZipInfo(filename="config.txt")
    zip_file.writestr(zip_info, client_config_data)
    zip_file.close()
    return output_path
class LinuxClientBuilder(ClientBuilder):
  """Builder class for the Linux client."""
  def __init__(self, context=None):
    super(LinuxClientBuilder, self).__init__(context=context)
    self.context.append("Target:Linux")
  def MakeExecutableTemplate(self):
    """Builds the Linux template zip from the PyInstaller output."""
    self.MakeBuildDirectory()
    self.CleanDirectory(config_lib.CONFIG.Get("PyInstaller.dpkg_root",
                                              context=self.context))
    self.BuildWithPyInstaller()
    self.BuildInstallerPkg()
  def BuildInstallerPkg(self):
    """This builds the .deb package."""
    dpkg_dir = config_lib.CONFIG.Get("PyInstaller.dpkg_root",
                                     context=self.context)
    src_dir = config_lib.CONFIG.Get("PyInstaller.build_root_dir",
                                    context=self.context)
    # Copy files needed for dpkg-buildpackage.
    self.GenerateDirectory(
        os.path.join(src_dir, "grr/config/debian/dpkg_client/"),
        dpkg_dir,
        [("grr-client", config_lib.CONFIG.Get("ClientBuilder.package_name",
                                              context=self.context))])
    # Generate directories for the /usr/sbin link.
    self.EnsureDirExists(os.path.join(
        dpkg_dir, "debian/%s/usr/sbin" %
        config_lib.CONFIG.Get("ClientBuilder.package_name",
                              context=self.context)))
    # Generate the upstart template.
    self.GenerateFile(
        os.path.join(src_dir, "grr/config/debian/upstart/grr-client.conf"),
        os.path.join(dpkg_dir, "debian/%s.upstart" %
                     config_lib.CONFIG.Get("ClientBuilder.package_name",
                                           context=self.context)))
    # Now zip up the template.
    template_path = config_lib.CONFIG.Get("ClientBuilder.template_path",
                                          context=self.context)
    self.EnsureDirExists(os.path.dirname(template_path))
    zf = zipfile.ZipFile(template_path, "w")
    # chdir into the dpkg root so archive members get "debian/..." relative
    # paths; the original working directory is restored below.
    oldwd = os.getcwd()
    os.chdir(config_lib.CONFIG.Get("PyInstaller.dpkg_root",
                                   context=self.context))
    for root, _, files in os.walk("debian"):
      for f in files:
        zf.write(os.path.join(root, f))
    zf.close()
    os.chdir(oldwd)
  def MakeDeployableBinary(self, template_path, output_path=None):
    """This will add the config to the client template and create a .deb."""
    if output_path is None:
      output_path = config_lib.CONFIG.Get("ClientBuilder.output_path",
                                          context=self.context)
    with utils.TempDirectory() as tmp_dir:
      template_dir = os.path.join(tmp_dir, "dist")
      self.EnsureDirExists(template_dir)
      # Unpack the template zip into the temporary build tree.
      zf = zipfile.ZipFile(template_path)
      for name in zf.namelist():
        dirname = os.path.dirname(name)
        self.EnsureDirExists(os.path.join(template_dir, dirname))
        with open(os.path.join(template_dir, name), "wb") as fd:
          fd.write(zf.read(name))
      # Create a client config.
      client_config_content = self.GetClientConfig(
          ("Client Context", "Platform:Linux"))
      # We need to strip leading /'s or .join will ignore everything that comes
      # before it.
      target_dir = config_lib.CONFIG.Get("ClientBuilder.target_dir",
                                         context=self.context).lstrip("/")
      agent_dir = os.path.join(
          template_dir, "debian",
          config_lib.CONFIG.Get("ClientBuilder.package_name",
                                context=self.context),
          target_dir)
      with open(os.path.join(agent_dir,
                             config_lib.CONFIG.Get(
                                 "ClientBuilder.config_filename",
                                 context=self.context)),
                "wb") as fd:
        fd.write(client_config_content)
      # Set the daemon to executable.
      # 0755 is Python 2 octal syntax (0o755 in Python 3).
      os.chmod(os.path.join(
          agent_dir, config_lib.CONFIG.Get(
              "Client.binary_name", context=self.context)),
               0755)
      buildpackage_binary = "/usr/bin/dpkg-buildpackage"
      if not os.path.exists(buildpackage_binary):
        print "dpkg-buildpackage not found, unable to repack client."
        return
      # dpkg-buildpackage must run from inside the unpacked template tree.
      oldwd = os.getcwd()
      os.chdir(template_dir)
      command = [buildpackage_binary, "-b"]
      subprocess.call(command)
      os.chdir(oldwd)
      # dpkg-buildpackage writes its outputs to the parent directory
      # (tmp_dir); move them to the requested output location.
      filename_base = config_lib.CONFIG.Get("ClientBuilder.debian_package_base",
                                            context=self.context)
      package_name = "%s%s" % (
          filename_base, config_lib.CONFIG.Get(
              "ClientBuilder.output_extension",
              context=self.context))
      changes = "%s%s" % (filename_base, ".changes")
      changes_output = "%s.changes" % (config_lib.CONFIG.Get(
          "PyInstaller.output_basename", context=self.context))
      self.EnsureDirExists(os.path.dirname(output_path))
      shutil.move(os.path.join(tmp_dir, package_name), output_path)
      shutil.move(os.path.join(tmp_dir, changes),
                  os.path.join(os.path.dirname(output_path), changes_output))
      print "Created package %s" % output_path
      return output_path
def CopyFileInZip(from_zip, from_name, to_zip, to_name=None):
  """Copies a single member from one ZipFile into another.

  Args:
    from_zip: Source zipfile.ZipFile (open for reading).
    from_name: Member name to read from the source archive.
    to_zip: Destination zipfile.ZipFile (open for writing).
    to_name: Optional name for the member in the destination; defaults to
        the source name.
  """
  if to_name is None:
    to_name = from_name
  to_zip.writestr(to_name, from_zip.read(from_name))
def SetPeSubsystem(fd, console=True):
  """Patches the PE optional-header Subsystem field of a binary in place.

  The previous docstring claimed a (offset, value) return; the function has
  always returned None - it mutates fd directly. Byte literals are used so
  the function also works on binary (bytes) streams under Python 3; under
  Python 2 b"..." is identical to "...".

  Args:
    fd: Seekable file-like object holding the PE image. Its position is
        restored before returning.
    console: If True set subsystem 3 (console app), else 2 (GUI app).
  """
  current_pos = fd.tell()
  fd.seek(0x3c)  # _IMAGE_DOS_HEADER.e_lfanew
  header_offset = struct.unpack("<I", fd.read(4))[0]
  # _IMAGE_NT_HEADERS.OptionalHeader.Subsystem ( 0x18 + 0x44)
  subsystem_offset = header_offset + 0x5c
  fd.seek(subsystem_offset)
  if console:
    fd.write(b"\x03")  # IMAGE_SUBSYSTEM_WINDOWS_CUI
  else:
    fd.write(b"\x02")  # IMAGE_SUBSYSTEM_WINDOWS_GUI
  fd.seek(current_pos)
| apache-2.0 |
mchelem/cref2 | cref/app/terminal.py | 1 | 4737 | #!/usr/bin/env python
import os
import argparse
import logging
import importlib
import tempfile
import subprocess
import pandas
from Bio import SeqIO
from cref.app import BaseApp
from cref.libs import rcsb
logger = logging.getLogger('CReF')
class TerminalApp(BaseApp):
    """
    App to be run on the terminal.

    Progress-state callbacks are ignored; terminal runs have no
    interactive UI to update.
    """
    def reporter(self, state):
        # Intentionally a no-op: nothing to report to on the terminal.
        pass
def run_cref(aa_sequence, output_dir, params):
    """Run a CReF prediction for a single amino-acid sequence.

    Creates output_dir if needed and returns whatever TerminalApp.run
    produces for the sequence.
    """
    # Keep pandas console output compact.
    pandas.set_option('display.max_columns', 0)
    pandas.set_option('display.max_rows', 5)
    os.makedirs(output_dir, exist_ok=True)
    return TerminalApp(params).run(aa_sequence, output_dir)
def configure_logger(log_level='INFO', include_pathname=False):
    """Configure the 'CReF' logger to print to stderr.

    Fixes two defects of the previous version: the logger was fetched twice
    (dead code), and a new StreamHandler was appended on every call, so
    reconfiguring produced duplicate log lines.

    Args:
        log_level: Logging level name (e.g. 'DEBUG', 'INFO').
        include_pathname: Include file path and line number in messages.

    Raises:
        ValueError: If log_level is not a valid logging level name.
    """
    level = getattr(logging, log_level.upper(), None)
    if not isinstance(level, int):
        raise ValueError('Invalid log level: %s' % log_level)
    logger = logging.getLogger('CReF')
    logger.propagate = False
    logger.setLevel(level)
    ch = logging.StreamHandler()
    ch.setLevel(level)
    if include_pathname:
        template = ('%(asctime)s - %(name)s - %(levelname)s'
                    '(%(pathname)s, %(lineno)d)- %(message)s')
    else:
        template = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(template, datefmt='%d/%m/%Y %I:%M:%S %p')
    ch.setFormatter(formatter)
    # Replace any previously installed handlers instead of appending, so
    # repeated configuration does not duplicate output.
    logger.handlers = [ch]
def parse_args(argv=None):
    """Parse CReF command-line arguments.

    Args:
        argv: Optional list of argument strings. Defaults to None, in which
            case argparse reads sys.argv as before (backward compatible);
            passing an explicit list makes the function unit-testable.

    Returns:
        argparse.Namespace with the parsed options.
    """
    parser = argparse.ArgumentParser(
        description='CReF: Protein structure prediction')
    # Exactly one input source must be given.
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        '--sequence', dest='sequence',
        help='Aminoacid sequence using one letter code',
    )
    group.add_argument(
        '--fasta', dest='fasta',
        help='File containing the fasta sequence',
    )
    group.add_argument(
        '--pdb', dest='pdb',
        help='PDB Code from where the sequence will be extracted',
    )
    parser.add_argument(
        '--config', dest='config',
        help='File specifying the configurations'
    )
    parser.add_argument(
        '--output', dest='output_dir',
        default='predictions/tmp',
        help='Directory to save the results'
    )
    parser.add_argument(
        '--log', dest='log_level',
        default='INFO',
        help='Log level to be used (DEBUG, INFO, WARN, ERROR)'
    )
    parser.add_argument(
        '--pymol', dest='pymol', action='store_true',
        help='View prediction in PyMOL'
    )
    return parser.parse_args(argv)
def read_fasta(filepath):
    """Parse all sequence records from a FASTA file.

    Args:
        filepath: Path to the FASTA file.

    Returns:
        List of Bio.SeqRecord objects parsed from the file.
    """
    # The 'U' (universal newlines) open mode was deprecated and removed in
    # Python 3.11; plain text mode already performs newline translation.
    with open(filepath) as fasta_file:
        return list(SeqIO.parse(fasta_file, 'fasta'))
def predict_fasta(filepath, output_dir, params):
    """Run a CReF prediction for every sequence in a FASTA file.

    Args:
        filepath: Path to the FASTA file.
        output_dir: Base directory; each sequence gets its own subdirectory.
        params: Prediction parameters passed through to run_cref.

    Returns:
        List of output file paths, one per sequence.
    """
    sequences = read_fasta(filepath)
    output_filepaths = []
    for sequence in sequences:
        seq = str(sequence.seq).replace('X', '')
        # Bug fix: the original rebound output_dir itself on each iteration,
        # so every subsequent sequence's results were nested inside the
        # previous sequence's directory. Use a per-sequence variable instead.
        seq_output_dir = os.path.join(
            output_dir, sequence.id.split(':')[0] + '/')
        output = run_cref(seq, seq_output_dir, params)
        sequence_file = os.path.join(seq_output_dir, 'sequence.txt')
        with open(sequence_file, 'w') as sequence_output:
            sequence_output.write(seq)
        output_filepaths.append(output)
    return output_filepaths
def read_config(module):
    """Import a configuration module by its dotted module name.

    Args:
        module: Importable module name (e.g. 'configs.default').

    Returns:
        The imported module object.

    Raises:
        Exception: If the module cannot be imported; the original error is
            chained as the cause for easier debugging.
    """
    try:
        config = importlib.import_module(module)
    except Exception as e:
        logger.error(e)
        # Chain the original exception so the real import failure is visible.
        raise Exception('Invalid config file') from e
    return config
def run_pymol(pdb_code, predicted_filepath):
    """Open the predicted and experimental structures together in PyMOL.

    Downloads the experimental structure for pdb_code next to the
    prediction, then launches PyMOL with both files and the CReF script.
    """
    destination = os.path.join(
        os.path.dirname(predicted_filepath),
        'experimental_structure.pdb'
    )
    experimental_pdb = rcsb.download_pdb(pdb_code, destination)
    command = [
        'pymol',
        predicted_filepath,
        experimental_pdb,
        '-r',
        'cref/utils/pymol.py'
    ]
    subprocess.call(command)
def main():
    """Command-line entry point: parse args and dispatch the prediction."""
    params = {}
    args = parse_args()
    configure_logger(args.log_level)
    if args.config:
        config = read_config(args.config)
        params = config.params
    # Sequence input
    if args.sequence:
        run_cref(args.sequence, args.output_dir, params)
    # Fasta file input
    elif args.fasta:
        predict_fasta(args.fasta, args.output_dir, params)
    # PDB code input
    elif args.pdb:
        handler, fasta_file = tempfile.mkstemp(suffix='.fasta', prefix='tmp')
        # mkstemp returns an open OS-level file descriptor; close it so the
        # descriptor is not leaked and the file can be removed cleanly.
        os.close(handler)
        rcsb.download_fasta(args.pdb, fasta_file)
        params['pdb'] = args.pdb
        output_files = predict_fasta(fasta_file, args.output_dir, params)
        os.remove(fasta_file)
        if args.pymol:
            run_pymol(args.pdb, output_files[0])
    else:
        raise ValueError('You must specify a sequence, fasta file or pdb code')
if __name__ == '__main__':
main()
| mit |
ikegwukc/INFO597-DeepLearning-GameTheory | basicGames/strategies/deep_q_learning.py | 1 | 18366 | import random
import math
import tensorflow as tf
import numpy as np
from collections import deque
class MLP(object):
    """Fully-connected multi-layer perceptron built on TF1 graph ops.

    Layers are created inside a tf.variable_scope named `scope`. `Layer` is
    defined elsewhere in this file.
    """
    def __init__(self, input_sizes, hiddens, nonlinearities, scope=None, given_layers=None):
        """Builds (or adopts) the input layer and hidden layers.

        Args:
            input_sizes: Size(s) of the network input(s).
            hiddens: List of hidden/output layer sizes.
            nonlinearities: One activation per layer (first entry applies
                to the input layer's output).
            scope: Variable scope name; defaults to "MLP".
            given_layers: Optional pre-built [input_layer, *hidden_layers]
                to share weights with another network (used by copy()).
        """
        self.input_sizes = input_sizes
        self.hiddens = hiddens
        self.input_nonlinearity, self.layer_nonlinearities = nonlinearities[0], nonlinearities[1:]
        self.scope = scope or "MLP"
        assert len(hiddens) == len(nonlinearities), \
                "Number of hiddens must be equal to number of nonlinearities"
        with tf.variable_scope(self.scope):
            if given_layers is not None:
                self.input_layer = given_layers[0]
                self.layers = given_layers[1:]
            else:
                self.input_layer = Layer(input_sizes, hiddens[0], scope=self.scope+"_input_layer")
                self.layers = []
                for l_idx, (h_from, h_to) in enumerate(zip(hiddens[:-1], hiddens[1:])):
                    self.layers.append(Layer(h_from, h_to, scope="hidden_layer_%d" % (l_idx,)))
    def __call__(self, xs):
        """Applies the network to input tensor(s) xs and returns the output."""
        if type(xs) != list:
            xs = [xs]
        with tf.variable_scope(self.scope):
            hidden = self.input_nonlinearity(self.input_layer(xs))
            for layer, nonlinearity in zip(self.layers, self.layer_nonlinearities):
                hidden = nonlinearity(layer(hidden))
            return hidden
    def variables(self):
        """Returns all trainable variables of every layer."""
        res = self.input_layer.variables()
        for layer in self.layers:
            res.extend(layer.variables())
        return res
    def copy(self, scope=None):
        """Returns a structural copy of this network with copied layers.

        Bug fix: the explicit `scope` argument was previously overwritten
        unconditionally with self.scope + "_copy", making it dead; it now
        takes effect, with the old value kept as the default.
        """
        scope = scope or self.scope + "_copy"
        nonlinearities = [self.input_nonlinearity] + self.layer_nonlinearities
        given_layers = [self.input_layer.copy()] + [layer.copy() for layer in self.layers]
        return MLP(self.input_sizes, self.hiddens, nonlinearities, scope=scope,
                given_layers=given_layers)
class TargetMLP(object):
    """Target-network variant of MLP; layer scopes carry a "_copy" suffix.

    NOTE(review): this class duplicates MLP almost verbatim and copy()
    returns an MLP (not a TargetMLP) - presumably intentional, verify.
    """
    def __init__(self, input_sizes, hiddens, nonlinearities, scope=None, given_layers=None):
        """See MLP.__init__; identical except for the "_copy" scope names."""
        self.input_sizes = input_sizes
        self.hiddens = hiddens
        self.input_nonlinearity, self.layer_nonlinearities = nonlinearities[0], nonlinearities[1:]
        self.scope = scope or "MLP"
        assert len(hiddens) == len(nonlinearities), \
                "Number of hiddens must be equal to number of nonlinearities"
        with tf.variable_scope(self.scope):
            if given_layers is not None:
                self.input_layer = given_layers[0]
                self.layers = given_layers[1:]
            else:
                self.input_layer = Layer(input_sizes, hiddens[0], scope=self.scope+"_input_layer_copy")
                self.layers = []
                for l_idx, (h_from, h_to) in enumerate(zip(hiddens[:-1], hiddens[1:])):
                    self.layers.append(Layer(h_from, h_to, scope="hidden_layer_%d_copy" % (l_idx,)))
    def __call__(self, xs):
        """Applies the network to input tensor(s) xs and returns the output."""
        if type(xs) != list:
            xs = [xs]
        with tf.variable_scope(self.scope):
            hidden = self.input_nonlinearity(self.input_layer(xs))
            for layer, nonlinearity in zip(self.layers, self.layer_nonlinearities):
                hidden = nonlinearity(layer(hidden))
            return hidden
    def variables(self):
        """Returns all trainable variables of every layer."""
        res = self.input_layer.variables()
        for layer in self.layers:
            res.extend(layer.variables())
        return res
    def copy(self, scope=None):
        """Returns a structural copy (as an MLP) with copied layers.

        Bug fix: the explicit `scope` argument was previously overwritten
        unconditionally; it now takes effect, defaulting to the old value.
        """
        scope = scope or self.scope + "_copy"
        nonlinearities = [self.input_nonlinearity] + self.layer_nonlinearities
        given_layers = [self.input_layer.copy()] + [layer.copy() for layer in self.layers]
        return MLP(self.input_sizes, self.hiddens, nonlinearities, scope=scope,
                given_layers=given_layers)
class DiscreteDeepQ(object):
    def __init__(self, observation_size,
                             num_actions,
                             observation_to_actions,
                             target_actions,
                             optimizer,
                             session,
                             exploration_random_prob=1.0,
                             exploitation_random_prob=0.05,
                             exploration_period=1000,
                             store_every_nth=5,
                             train_every_nth=5,
                             minibatch_size=32,
                             discount_rate=0.95,
                             max_experience=30000,
                             target_network_update_rate=0.01,
                             summary_writer=None):
        """Warning, this class is a modification of:
        https://github.com/nivwusquorum/tensorflow-deepq/blob/master/tf_rl/controller/discrete_deepq.py
        Initialized the Deepq object.
        Based on:
        https://www.cs.toronto.edu/~vmnih/docs/dqn.pdf
        Parameters
        -------
        observation_size : int
            length of the vector passed as observation
        num_actions : int
            number of actions that the model can execute
        observation_to_actions: dali model
            model that implements activate function
            that can take in observation vector or a batch
            and returns scores (of unbounded values) for each
            action for each observation.
            input shape: [batch_size, observation_size]
            output shape: [batch_size, num_actions]
        target_actions: model
            target network used to compute future reward
            estimates (same interface as observation_to_actions).
        optimizer: tf.solver.*
            optimizer for prediction error
        session: tf.Session
            session on which to execute the computation
        random_action_probability: float (0 to 1)
        exploration_period: int
            probability of choosing a random
            action (epsilon form paper) annealed linearly
            from 1 to random_action_probability over
            exploration_period
        store_every_nth: int
            to further decorrelate samples do not all
            transitions, but rather every nth transition.
            For example if store_every_nth is 5, then
            only 20% of all the transitions is stored.
        train_every_nth: int
            normally training_step is invoked every
            time action is executed. Depending on the
            setup that might be too often. When this
            variable is set set to n, then only every
            n-th time training_step is called will
            the training procedure actually be executed.
        minibatch_size: int
            number of state,action,reward,newstate
            tuples considered during experience reply
        discount_rate: float (0 to 1)
            how much we care about future rewards.
        max_experience: int
            maximum size of the reply buffer
        target_network_update_rate: float
            how much to update target network after each
            iteration. Let's call target_network_update_rate
            alpha, target network T, and network N. Every
            time N gets updated we execute:
            T = (1-alpha)*T + alpha*N
        summary_writer: tf.train.SummaryWriter
            writer to log metrics
        """
        # memorize arguments
        self.observation_size = observation_size
        self.num_actions = num_actions
        self.q_network = observation_to_actions
        self.target_q_network = target_actions
        self.optimizer = optimizer
        self.s = session
        self.exploration_random_prob = exploration_random_prob
        self.exploitation_random_prob = exploitation_random_prob
        self.exploration_period = exploration_period
        self.store_every_nth = store_every_nth
        self.train_every_nth = train_every_nth
        self.minibatch_size = minibatch_size
        # Stored as TF constants because they enter graph computations.
        self.discount_rate = tf.constant(discount_rate)
        self.max_experience = max_experience
        self.target_network_update_rate = \
                tf.constant(target_network_update_rate)
        # deepq state
        self.actions_executed_so_far = 0
        # Replay buffer of (state, action, reward, newstate) transitions.
        self.experience = deque()
        self.iteration = 0
        self.summary_writer = summary_writer
        self.number_of_times_store_called = 0
        self.number_of_times_train_called = 0
        # Build the TF graph (placeholders, loss, train and update ops).
        self.create_variables()
def linear_annealing(self, n, total, p_initial, p_final):
"""Linear annealing between p_initial and p_final
over total steps - computes value at step n"""
if n >= total:
return p_final
else:
return p_initial - (n * (p_initial - p_final)) / (total)
def create_variables(self):
    """Construct the TensorFlow graph used by this agent.

    Builds four pieces:
      * action scoring: greedy action selection from an observation,
      * target-value estimation: discounted future rewards predicted by
        the (frozen) target network,
      * the TD-error loss with gradient clipping and the training op,
      * a soft target-network update op that moves the target network
        towards the online network by `target_network_update_rate`.
    Also registers TensorBoard summaries for scores, gradients and the
    prediction error.
    """
    #self.target_q_network = self.q_network.copy() # scope="target_network"
    # FOR REGULAR ACTION SCORE COMPUTATION
    with tf.name_scope("taking_action"):
        # Scores for every action given the current observation; the
        # greedy action is the argmax over those scores.
        self.observation = tf.placeholder(tf.float32, (None, self.observation_size), name="observation")
        self.action_scores = tf.identity(self.q_network(self.observation), name="action_scores")
        tf.summary.histogram("action_scores", self.action_scores)
        self.predicted_actions = tf.argmax(self.action_scores, dimension=1, name="predicted_actions")
    with tf.name_scope("estimating_future_rewards"):
        # FOR PREDICTING TARGET FUTURE REWARDS
        self.next_observation = tf.placeholder(tf.float32, (None, self.observation_size), name="next_observation")
        # Mask is 0 for terminal transitions, so their future reward is 0.
        self.next_observation_mask = tf.placeholder(tf.float32, (None,), name="next_observation_mask")
        # stop_gradient: the target network is updated only via the soft
        # update op below, never through backprop on the loss.
        self.next_action_scores = tf.stop_gradient(self.target_q_network(self.next_observation))
        tf.summary.histogram("target_action_scores", self.next_action_scores)
        self.rewards = tf.placeholder(tf.float32, (None,), name="rewards")
        target_values = tf.reduce_max(self.next_action_scores, reduction_indices=[1,]) * self.next_observation_mask
        # Bellman target: r + gamma * max_a' Q_target(s', a')
        self.future_rewards = self.rewards + self.discount_rate * target_values
    # NOTE: scope name contains a typo ("precition"); kept as-is so that
    # existing graph/node names and checkpoints stay compatible.
    with tf.name_scope("q_value_precition"):
        # FOR PREDICTION ERROR
        # One-hot mask selecting, per row, the Q-value of the action taken.
        self.action_mask = tf.placeholder(tf.float32, (None, self.num_actions), name="action_mask")
        self.masked_action_scores = tf.reduce_sum(self.action_scores * self.action_mask, reduction_indices=[1,])
        temp_diff = self.masked_action_scores - self.future_rewards
        # Mean squared TD error.
        self.prediction_error = tf.reduce_mean(tf.square(temp_diff))
        gradients = self.optimizer.compute_gradients(self.prediction_error)
        # Clip each gradient's norm to 5 for stability.
        for i, (grad, var) in enumerate(gradients):
            if grad is not None:
                gradients[i] = (tf.clip_by_norm(grad, 5), var)
        # Add histograms for gradients.
        for grad, var in gradients:
            tf.summary.histogram(var.name, var)
            if grad is not None:
                tf.summary.histogram(var.name + '/gradients', grad)
        self.train_op = self.optimizer.apply_gradients(gradients)
    # UPDATE TARGET NETWORK
    with tf.name_scope("target_network_update"):
        self.target_network_update = []
        for v_source, v_target in zip(self.q_network.variables(), self.target_q_network.variables()):
            # this is equivalent to target = (1-alpha) * target + alpha * source
            update_op = v_target.assign_sub(self.target_network_update_rate * (v_target - v_source))
            self.target_network_update.append(update_op)
        # Group all per-variable updates into a single op.
        self.target_network_update = tf.group(*self.target_network_update)
    # summaries
    tf.summary.scalar("prediction_error", self.prediction_error)
    self.summarize = tf.summary.merge_all()
    # no_op stands in for the summary fetch when summaries are skipped.
    self.no_op1 = tf.no_op()
def action(self, observation):
    """Choose an action for a single observation (epsilon-greedy).

    With probability given by the annealed exploration schedule a random
    action is returned; otherwise the greedy action according to the
    current Q network.  No training happens here.
    """
    assert len(observation.shape) == 1, \
        "Action is performed based on single observation."
    self.actions_executed_so_far += 1
    epsilon = self.linear_annealing(self.actions_executed_so_far,
                                    self.exploration_period,
                                    self.exploration_random_prob,
                                    self.exploitation_random_prob)
    if random.random() < epsilon:
        # Explore: uniformly random action.
        return random.randint(0, self.num_actions - 1)
    # Exploit: score a batch of one observation and take the argmax.
    batched = observation[np.newaxis, :]
    return self.s.run(self.predicted_actions, {self.observation: batched})[0]
def store(self, observation, action, reward, newobservation):
    """Record one (s, a, r, s') transition in the replay buffer.

    Only every `store_every_nth` call is actually kept; the buffer is
    capped at `max_experience` by dropping the oldest transition.  A
    `newobservation` of None marks a terminal state/action pair.
    """
    keep_this_one = (self.number_of_times_store_called % self.store_every_nth == 0)
    if keep_this_one:
        self.experience.append((observation, action, reward, newobservation))
        if len(self.experience) > self.max_experience:
            self.experience.popleft()
    self.number_of_times_store_called += 1
def training_step(self):
    """Sample `minibatch_size` experiences from the replay buffer and
    backpropagate the TD error through the value function.

    Only every `train_every_nth` call performs an update; each update is
    followed by a soft target-network update, and summaries are written
    every 100 iterations when a writer is attached.
    """
    if self.number_of_times_train_called % self.train_every_nth == 0:
        # Not enough experience collected yet to form a minibatch.
        if len(self.experience) < self.minibatch_size:
            return
        # sample experience: indices without replacement, then gather tuples
        samples = random.sample(range(len(self.experience)), self.minibatch_size)
        samples = [self.experience[i] for i in samples]
        # batch states
        states = np.empty((len(samples), self.observation_size))
        newstates = np.empty((len(samples), self.observation_size))
        action_mask = np.zeros((len(samples), self.num_actions))
        newstates_mask = np.empty((len(samples),))
        rewards = np.empty((len(samples),))
        #print(samples)
        for i, (state, action, reward, newstate) in enumerate(samples):
            states[i] = state
            action_mask[i] = 0
            #if action == 'defect': action = 0
            #elif action == ''
            # One-hot mask selecting the action that was taken.
            action_mask[i][action] = 1
            rewards[i] = reward
            if newstate is not None:
                newstates[i] = newstate
                newstates_mask[i] = 1
            else:
                # Terminal transition: zero state, masked out of the target.
                newstates[i] = 0
                newstates_mask[i] = 0
        calculate_summaries = self.iteration % 100 == 0 and \
            self.summary_writer is not None
        # cost is fetched but not used further here.
        cost, _, summary_str = self.s.run([
            self.prediction_error,
            self.train_op,
            self.summarize if calculate_summaries else self.no_op1,
        ], {
            self.observation: states,
            self.next_observation: newstates,
            self.next_observation_mask: newstates_mask,
            self.action_mask: action_mask,
            self.rewards: rewards,
        })
        # Soft-update the target network towards the online network.
        self.s.run(self.target_network_update)
        if calculate_summaries:
            self.summary_writer.add_summary(summary_str, self.iteration)
        self.iteration += 1
    self.number_of_times_train_called += 1
def linear_annealing(n, total=8000, p_initial=1.0, p_final=0.05):
    """Linearly anneal from p_initial down to p_final over `total` steps.

    Returns the scheduled value at step `n`; once n >= total the value
    is clamped at p_final.
    """
    if n >= total:
        return p_final
    return p_initial + (p_final - p_initial) * n / total
def base_name(var):
    """Extracts value passed to name= when creating a variable.

    TensorFlow variable names look like "scope/sub/name:0"; this strips
    every enclosing scope prefix and the trailing output index.
    """
    unscoped = var.name.rsplit('/', 1)[-1]
    return unscoped.partition(':')[0]
class Layer(object):
    """A fully-connected layer: y = sum_i(x_i @ W_i) + b.

    Supports multiple input tensors (one weight matrix per input) that
    share a single bias vector.
    """

    def __init__(self, input_sizes, output_size, scope):
        """Creates a neural network layer."""
        # Accept either a single size or a list (one per input tensor).
        if type(input_sizes) != list:
            input_sizes = [input_sizes]
        self.input_sizes = input_sizes
        self.output_size = output_size
        self.scope = scope or "Layer"
        # print('scope: ', self.scope)
        with tf.variable_scope(self.scope):
            self.Ws = []
            for input_idx, input_size in enumerate(input_sizes):
                W_name = "W_%d" % (input_idx,)
                # print(W_name)
                # Uniform init scaled by 1/sqrt(fan_in) keeps initial
                # activations roughly unit-scale.
                W_initializer = tf.random_uniform_initializer(
                    -1.0 / math.sqrt(input_size), 1.0 / math.sqrt(input_size))
                W_var = tf.get_variable(W_name, (input_size, output_size), initializer=W_initializer)
                self.Ws.append(W_var)
            self.b = tf.get_variable("b", (output_size,), initializer=tf.constant_initializer(0))

    def __call__(self, xs):
        """Apply the layer to one tensor, or a list of tensors matched
        one-to-one with the weight matrices."""
        if type(xs) != list:
            xs = [xs]
        assert len(xs) == len(self.Ws), \
            "Expected %d input vectors, got %d" % (len(self.Ws), len(xs))
        with tf.variable_scope(self.scope):
            return sum([tf.matmul(x, W) for x, W in zip(xs, self.Ws)]) + self.b

    def variables(self):
        # Bias first, then the weight matrices; copy() recreates them in
        # this same order.
        return [self.b] + self.Ws

    def copy(self, scope=None):
        """Create a new Layer whose variables are initialized from this
        layer's current initial values, under a fresh variable scope."""
        scope = scope or self.scope + "_copy"
        with tf.variable_scope(scope) as sc:
            for v in self.variables():
                # Recreate each variable under the new scope, seeded with
                # this layer's value via initialized_value().
                tf.get_variable(base_name(v), v.get_shape(),
                                initializer=lambda x, dtype=tf.float32: v.initialized_value())
            # Reuse so the Layer constructed below picks up the variables
            # just created instead of making new ones.
            sc.reuse_variables()
            return Layer(self.input_sizes, self.output_size, scope=sc)
| mit |
Juniper/tempest | tempest/lib/services/identity/v2/roles_client.py | 2 | 4575 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.lib.common import rest_client
class RolesClient(rest_client.RestClient):
    """Client for the identity v2 OS-KSADM roles extension API."""
    api_version = "v2.0"

    def _json_body(self, resp, body):
        # Shared tail for every 200-with-JSON endpoint: verify the status
        # code, decode the payload and wrap it.
        self.expected_success(200, resp.status)
        return rest_client.ResponseBody(resp, json.loads(body))

    def create_role(self, **kwargs):
        """Create a role.

        For a full list of available parameters, please refer to the official
        API reference:
        https://developer.openstack.org/api-ref/identity/v2-ext/index.html#create-a-role
        """
        payload = json.dumps({'role': kwargs})
        resp, body = self.post('OS-KSADM/roles', payload)
        return self._json_body(resp, body)

    def show_role(self, role_id_or_name):
        """Get a role by its id or name.

        For a full list of available parameters, please refer to the official
        API reference:
        https://developer.openstack.org/api-ref/identity/v2-ext/index.html#show-a-role
        OR
        https://developer.openstack.org/api-ref/identity/v2-ext/index.html#show-role-information-by-name
        """
        resp, body = self.get('OS-KSADM/roles/%s' % role_id_or_name)
        return self._json_body(resp, body)

    def list_roles(self, **params):
        """Returns roles.

        For a full list of available parameters, please refer to the official
        API reference:
        https://developer.openstack.org/api-ref/identity/v2-ext/index.html#list-all-roles
        """
        url = 'OS-KSADM/roles'
        if params:
            url = '%s?%s' % (url, urllib.urlencode(params))
        resp, body = self.get(url)
        return self._json_body(resp, body)

    def delete_role(self, role_id):
        """Delete a role.

        For a full list of available parameters, please refer to the official
        API reference:
        https://developer.openstack.org/api-ref/identity/v2-ext/index.html#delete-a-role
        """
        resp, body = self.delete('OS-KSADM/roles/%s' % role_id)
        # DELETE returns 204 No Content with an empty body: no decoding.
        self.expected_success(204, resp.status)
        return rest_client.ResponseBody(resp, body)

    def create_user_role_on_project(self, tenant_id, user_id, role_id):
        """Add roles to a user on a tenant.

        For a full list of available parameters, please refer to the official
        API reference:
        https://developer.openstack.org/api-ref/identity/v2-ext/index.html#grant-roles-to-user-on-tenant
        """
        url = '/tenants/%s/users/%s/roles/OS-KSADM/%s' % (tenant_id,
                                                          user_id, role_id)
        resp, body = self.put(url, "")
        return self._json_body(resp, body)

    def list_user_roles_on_project(self, tenant_id, user_id, **params):
        """Returns a list of roles assigned to a user for a tenant."""
        # TODO(gmann): Need to write API-ref link, Bug# 1592711
        url = '/tenants/%s/users/%s/roles' % (tenant_id, user_id)
        if params:
            url = '%s?%s' % (url, urllib.urlencode(params))
        resp, body = self.get(url)
        return self._json_body(resp, body)

    def delete_role_from_user_on_project(self, tenant_id, user_id, role_id):
        """Removes a role assignment for a user on a tenant.

        For a full list of available parameters, please refer to the official
        API reference:
        https://developer.openstack.org/api-ref/identity/v2-ext/index.html#revoke-role-from-user-on-tenant
        """
        url = '/tenants/%s/users/%s/roles/OS-KSADM/%s' % (tenant_id,
                                                          user_id, role_id)
        resp, body = self.delete(url)
        self.expected_success(204, resp.status)
        return rest_client.ResponseBody(resp, body)
| apache-2.0 |
ScreamingUdder/mantid | Framework/PythonInterface/test/python/mantid/kernel/InstrumentInfoTest.py | 3 | 1738 | from __future__ import (absolute_import, division, print_function)
import unittest
from mantid.kernel import InstrumentInfo, ConfigService
class InstrumentInfoTest(unittest.TestCase):
    """Exercises the InstrumentInfo attributes exposed to Python.

    Uses CRISP at the ISIS facility as a representative instrument.
    Replaces the deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual``; behavior is unchanged.
    """

    def _get_test_instrument(self):
        # Look the instrument up through the facility so the test also
        # covers ConfigService/facility wiring.
        facility = ConfigService.getFacility("ISIS")
        return facility.instrument("CRISP")

    def test_construction_raies_an_error(self):
        # InstrumentInfo cannot be constructed directly from Python.
        self.assertRaises(RuntimeError, InstrumentInfo)

    def test_instrument_attributes(self):
        inst = self._get_test_instrument()
        # Just testing functionality; values can be updated if needed
        self.assertEqual(inst.name(), "CRISP")
        self.assertEqual(inst.shortName(), "CSP")
        self.assertEqual(str(inst), "CSP")
        # Runs up to 99777 use 5-digit zero padding, later runs use 8 and
        # switch to the long file prefix.
        self.assertEqual(inst.zeroPadding(99777), 5)
        self.assertEqual(inst.zeroPadding(99778), 8)
        self.assertEqual(inst.filePrefix(99777), "CSP")
        self.assertEqual(inst.filePrefix(99778), "CRISP")
        self.assertEqual(inst.delimiter(), "")
        # techniques() returns a set-like wrapper with a custom str().
        self.assertEqual(str(inst.techniques()), "set('Reflectometry')")
        self.assertEqual(inst.facility().name(), "ISIS")
        self.assertEqual(inst.liveListener(), "ISISHistoDataListener")
        self.assertEqual(inst.liveListener("histo"), "ISISHistoDataListener")
        self.assertRaises(RuntimeError, inst.liveListener, "invalid_name")
        self.assertEqual(inst.liveDataAddress(), "NDXCRISP:6789")
        self.assertEqual(inst.liveDataAddress("histo"), "NDXCRISP:6789")
        self.assertRaises(RuntimeError, inst.liveDataAddress, "invalid_name")
        self.assertTrue(inst.hasLiveListenerInfo())
        self.assertEqual(len(inst.liveListenerInfoList()), 1)
# Allow the tests to be run directly (outside a test-runner harness).
if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
ilius/hazm | data.py | 1 | 5329 | # coding: utf8
from __future__ import print_function, unicode_literals
import codecs, subprocess
from collections import Counter
from sklearn.cross_validation import train_test_split
from hazm import *
from hazm.Chunker import tree2brackets
def create_words_file(dic_file='resources/persian.dic', output='hazm/data/words.dat'):
    """Prepare a sorted list of Persian words from a
    [Virastyar](https://sourceforge.net/projects/virastyar/) dic file.

    Each dic line is "word<TAB>extra columns"; only the word column is
    kept.  The sorted words are written one per line to `output`.
    """
    # Use context managers so both files are closed (and the output file
    # flushed to disk) even on error; the original leaked both handles.
    with codecs.open(dic_file, encoding='utf8') as dic:
        dic_words = sorted(line.split('\t')[0] for line in dic)
    with codecs.open(output, 'w', 'utf8') as out:
        print(*dic_words, sep='\n', file=out)
    print(output, 'created')
def evaluate_lemmatizer(conll_file='resources/train.conll', peykare_root='corpora/peykare'):
    """Report lemmatizer errors against the Dadegan treebank and verbs
    the lemmatizer misses on the Peykare corpus.

    Writes count-descending reports to resources/lemmatizer_errors.txt
    and resources/lemmatizer_missed.txt.
    """
    lemmatizer = Lemmatizer()
    errors = []
    with codecs.open('resources/lemmatizer_errors.txt', 'w', 'utf8') as output:
        dadegan = DadeganReader(conll_file)
        for tree in dadegan.trees():
            # skip index 0 (presumably the artificial root node) — TODO confirm
            for node in tree.nodelist[1:]:
                word, lemma, pos = node['word'], node['lemma'], node['mtag']
                # An error is any word whose predicted lemma differs from gold.
                if lemmatizer.lemmatize(word, pos) != lemma:
                    errors.append((word, lemma, pos, lemmatizer.lemmatize(word, pos)))
        print(len(errors), 'errors', file=output)
        counter = Counter(errors)
        # Most frequent errors first.
        for item, count in sorted(counter.items(), key=lambda t: t[1], reverse=True):
            print(count, *item, file=output)
    missed = []
    with codecs.open('resources/lemmatizer_missed.txt', 'w', 'utf8') as output:
        peykare = PeykareReader(peykare_root)
        for sentence in peykare.sents():
            for word in sentence:
                # Verbs only: word is (form, tag) and 'V' marks a verb.
                if word[1] == 'V':
                    # lemmatize returning the input unchanged means no
                    # verb root was found.
                    if word[0] == lemmatizer.lemmatize(word[0]):
                        missed.append(word[0])
        print(len(missed), 'missed', file=output)
        counter = Counter(missed)
        for item, count in sorted(counter.items(), key=lambda t: t[1], reverse=True):
            print(count, item, file=output)
def evaluate_chunker(treebank_root='corpora/treebank'):
    """Evaluate the Chunker against the treebank's gold chunks.

    Prints the chunker's evaluation score and writes every mismatching
    sentence (predicted then gold, bracket notation) to
    resources/chunker_errors.txt.
    """
    treebank = TreebankReader(treebank_root, join_clitics=True, join_verb_parts=True)
    chunker = Chunker()
    chunked_trees = list(treebank.chunked_trees())
    print(chunker.evaluate(chunked_trees))
    # Fix: the error-report handle was never closed; use a context manager
    # so buffered output is flushed to disk.
    with codecs.open('resources/chunker_errors.txt', 'w', 'utf8') as output:
        for sentence, gold in zip(treebank.sents(), chunked_trees):
            chunked = chunker.parse(sentence)
            if chunked != gold:
                print(tree2brackets(chunked), file=output)
                print(tree2brackets(gold), file=output)
                print(file=output)
def train_postagger(peykare_root='corpora/peykare', path_to_model='resources/persian.tagger', path_to_jar='resources/stanford-postagger.jar', properties_file='resources/stanford-postagger.props', memory_min='-Xms1g', memory_max='-Xmx6g', test_size=.1):
    """Train the Stanford POS tagger on a Peykare train split and report
    accuracy on the held-out test split.

    Training data is written in word/TAG format (one sentence per line)
    to resources/tagger_train_data.txt before invoking the Java trainer.
    """
    peykare = PeykareReader(peykare_root)
    train_file = 'resources/tagger_train_data.txt'
    # random_state=0 keeps the split reproducible across runs.
    train, test = train_test_split(list(peykare.sents()), test_size=float(test_size), random_state=0)
    print('Peykare loaded.')
    # Fix: the training file must be closed (flushed) BEFORE the Java
    # process reads it; the original left the handle open, so buffered
    # sentences could be missing from the training data.
    with codecs.open(train_file, 'w', 'utf8') as output:
        for sentence in train:
            # Join each (word, tag) pair with '/' and protect embedded
            # spaces with underscores.
            print(*(map(lambda w: '/'.join(w).replace(' ', '_'), sentence)), file=output)
    subprocess.Popen(['java', memory_min, memory_max, '-classpath', path_to_jar, 'edu.stanford.nlp.tagger.maxent.MaxentTagger', '-prop', properties_file, '-model', path_to_model, '-trainFile', train_file, '-tagSeparator', '/', '-search', 'owlqn2']).wait()
    tagger = POSTagger()
    print('Tagger Accuracy on Test Split:')
    print(tagger.evaluate(test))
def train_maltparser(train_file='resources/train.conll', validation_file='resources/validation.conll', test_file='resources/test.conll', model_file='langModel.mco', path_to_jar='resources/malt.jar', options_file='resources/malt-options.xml', features_file='resources/malt-features.xml', memory_min='-Xms7g', memory_max='-Xmx8g'):
    """Train MaltParser on train+validation dependency data, then
    evaluate the resulting model on the test set with MaltEval.

    CoNLL training data (re-tagged and re-lemmatized) is written next to
    `train_file` with a ".data" suffix; evaluation gold/result files get
    ".data" and ".results" suffixes next to `test_file`.
    """
    lemmatizer, tagger = Lemmatizer(), POSTagger()
    train, validation, test = DadeganReader(train_file), DadeganReader(validation_file), DadeganReader(test_file)
    train_sents = list(train.sents()) + list(validation.sents())
    train_trees = list(train.trees()) + list(validation.trees())
    # Re-tag/re-lemmatize the gold trees, then dump 10-column CoNLL rows.
    train_data = train_file + '.data'
    with codecs.open(train_data, 'w', 'utf8') as output:
        for tree, sentence in zip(train_trees, tagger.tag_sents(train_sents)):
            for i, (node, word) in enumerate(zip(tree.nodelist[1:], sentence), start=1):
                node['tag'] = word[1]
                node['lemma'] = lemmatizer.lemmatize(node['word'].replace('_', ' '), node['tag'])
                print(i, node['word'].replace(' ', '_'), node['lemma'].replace(' ', '_'), node['tag'], node['tag'], '_', node['head'], node['rel'], '_', '_', sep='\t', file=output)
            print(file=output)
    subprocess.Popen(['java', memory_min, memory_max, '-jar', path_to_jar, '-w', 'resources', '-c', model_file, '-i', train_data, '-f', options_file, '-F', features_file, '-m', 'learn']).wait()
    # evaluation
    print('\nEvaluating trained model on test data:')
    parser = DependencyParser(tagger=tagger, model_file=model_file)
    tagged = tagger.tag_sents(test.sents())
    parsed = parser.tagged_parse_sents(tagged)
    test_data, test_results = test_file + '.data', test_file + '.results'
    # Fix: the gold/result files must be closed (flushed) before the
    # MaltEval subprocess reads them; the original passed unclosed
    # handles to print(), so Java could see truncated files.
    with codecs.open(test_data, 'w', 'utf8') as gold_out:
        print('\n'.join([sentence.to_conll(10).replace('/', '') for sentence in test.trees()]).strip(), file=gold_out)
    with codecs.open(test_results, 'w', 'utf8') as results_out:
        print('\n'.join([sentence.to_conll(10) for sentence in parsed]).strip(), file=results_out)
    subprocess.Popen(['java', '-jar', 'resources/MaltEval.jar', '-g', test_data, '-s', test_results]).wait()
| mit |
rodoviario/Tomb | extras/dismissed/qt/tombqt/ui_create.py | 10 | 22605 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'create.ui'
#
# Created: Thu Nov 3 00:16:51 2011
# by: PyQt4 UI code generator 4.8.6
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# PyQt API v1 exposes QString.fromUtf8; under API v2 (or Python 3)
# QString does not exist, so fall back to an identity function.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s
class Ui_Wizard(object):
def setupUi(self, Wizard):
Wizard.setObjectName(_fromUtf8("Wizard"))
Wizard.resize(710, 368)
Wizard.setWindowTitle(QtGui.QApplication.translate("Wizard", "Wizard", None, QtGui.QApplication.UnicodeUTF8))
Wizard.setOptions(QtGui.QWizard.HaveHelpButton|QtGui.QWizard.IndependentPages)
self.wizardPage_intro = QtGui.QWizardPage()
self.wizardPage_intro.setTitle(QtGui.QApplication.translate("Wizard", "Tomb", None, QtGui.QApplication.UnicodeUTF8))
self.wizardPage_intro.setSubTitle(QtGui.QApplication.translate("Wizard", "tomb creation", None, QtGui.QApplication.UnicodeUTF8))
self.wizardPage_intro.setObjectName(_fromUtf8("wizardPage_intro"))
self.verticalLayout = QtGui.QVBoxLayout(self.wizardPage_intro)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label = QtGui.QLabel(self.wizardPage_intro)
self.label.setText(QtGui.QApplication.translate("Wizard", "This wizard will guide you to the creation of a tomb.<br> It will be fun!", None, QtGui.QApplication.UnicodeUTF8))
self.label.setTextFormat(QtCore.Qt.RichText)
self.label.setWordWrap(True)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout.addWidget(self.label)
Wizard.addPage(self.wizardPage_intro)
self.wizardPage_check = QtGui.QWizardPage()
self.wizardPage_check.setTitle(QtGui.QApplication.translate("Wizard", "Requirements check", None, QtGui.QApplication.UnicodeUTF8))
self.wizardPage_check.setObjectName(_fromUtf8("wizardPage_check"))
self.verticalLayout_12 = QtGui.QVBoxLayout(self.wizardPage_check)
self.verticalLayout_12.setObjectName(_fromUtf8("verticalLayout_12"))
self.label_check = QtGui.QLabel(self.wizardPage_check)
self.label_check.setText(QtGui.QApplication.translate("Wizard", "Checking...", None, QtGui.QApplication.UnicodeUTF8))
self.label_check.setObjectName(_fromUtf8("label_check"))
self.verticalLayout_12.addWidget(self.label_check)
self.groupBox_swap = QtGui.QGroupBox(self.wizardPage_check)
self.groupBox_swap.setEnabled(True)
self.groupBox_swap.setTitle(QtGui.QApplication.translate("Wizard", "Swap error", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_swap.setObjectName(_fromUtf8("groupBox_swap"))
self.verticalLayout_13 = QtGui.QVBoxLayout(self.groupBox_swap)
self.verticalLayout_13.setObjectName(_fromUtf8("verticalLayout_13"))
self.label_7 = QtGui.QLabel(self.groupBox_swap)
self.label_7.setText(QtGui.QApplication.translate("Wizard", "It seems that you have swap activated. It is very dangerous, since you could leave LOT of traces on your computer UNencrypted. You have some options:", None, QtGui.QApplication.UnicodeUTF8))
self.label_7.setWordWrap(True)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.verticalLayout_13.addWidget(self.label_7)
self.verticalLayout_swapoff = QtGui.QVBoxLayout()
self.verticalLayout_swapoff.setObjectName(_fromUtf8("verticalLayout_swapoff"))
self.radioButton_swapoff = QtGui.QRadioButton(self.groupBox_swap)
self.radioButton_swapoff.setText(QtGui.QApplication.translate("Wizard", "Swapoff", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_swapoff.setObjectName(_fromUtf8("radioButton_swapoff"))
self.verticalLayout_swapoff.addWidget(self.radioButton_swapoff)
self.horizontalLayout_6 = QtGui.QHBoxLayout()
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
spacerItem = QtGui.QSpacerItem(20, 20, QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_6.addItem(spacerItem)
self.label_swapoff = QtGui.QLabel(self.groupBox_swap)
self.label_swapoff.setText(QtGui.QApplication.translate("Wizard", "Note: swapoff could take a long time, and, in case the memory is not enough, could even make your system crash. Your system seems to have %freeram%MB free", None, QtGui.QApplication.UnicodeUTF8))
self.label_swapoff.setWordWrap(True)
self.label_swapoff.setObjectName(_fromUtf8("label_swapoff"))
self.horizontalLayout_6.addWidget(self.label_swapoff)
self.verticalLayout_swapoff.addLayout(self.horizontalLayout_6)
self.verticalLayout_13.addLayout(self.verticalLayout_swapoff)
self.verticalLayout_11 = QtGui.QVBoxLayout()
self.verticalLayout_11.setObjectName(_fromUtf8("verticalLayout_11"))
self.radioButton_ignore = QtGui.QRadioButton(self.groupBox_swap)
self.radioButton_ignore.setText(QtGui.QApplication.translate("Wizard", "Ignore", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_ignore.setObjectName(_fromUtf8("radioButton_ignore"))
self.verticalLayout_11.addWidget(self.radioButton_ignore)
self.horizontalLayout_7 = QtGui.QHBoxLayout()
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
spacerItem1 = QtGui.QSpacerItem(20, 20, QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_7.addItem(spacerItem1)
self.label_9 = QtGui.QLabel(self.groupBox_swap)
self.label_9.setText(QtGui.QApplication.translate("Wizard", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'DejaVu Sans\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:600;\">Note:</span> You should use this only if you are sure that your swap is encrypted, or that you are using compcache.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">If this is not the case, DON\'T select this, as it is <span style=\" font-weight:600;\">VERY DANGEROUS </span>to use encryption with swap activated</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.label_9.setWordWrap(True)
self.label_9.setObjectName(_fromUtf8("label_9"))
self.horizontalLayout_7.addWidget(self.label_9)
self.verticalLayout_11.addLayout(self.horizontalLayout_7)
self.verticalLayout_13.addLayout(self.verticalLayout_11)
self.verticalLayout_12.addWidget(self.groupBox_swap)
Wizard.addPage(self.wizardPage_check)
self.wizardPage_tomb_size = QtGui.QWizardPage()
self.wizardPage_tomb_size.setObjectName(_fromUtf8("wizardPage_tomb_size"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.wizardPage_tomb_size)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.label_2 = QtGui.QLabel(self.wizardPage_tomb_size)
self.label_2.setText(QtGui.QApplication.translate("Wizard", "Please enter tomb size. Digging the tomb will require some time: usually, one minute per GB, but your mileage may vary. <br>Keep in mind that resizing it in the future is still NOT implemented", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setWordWrap(True)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.verticalLayout_2.addWidget(self.label_2)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.label_3 = QtGui.QLabel(self.wizardPage_tomb_size)
self.label_3.setText(QtGui.QApplication.translate("Wizard", "Enter tomb size, in MB. 1GB=1000MB)", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout.addWidget(self.label_3)
self.spinBox_size = QtGui.QSpinBox(self.wizardPage_tomb_size)
self.spinBox_size.setMinimum(10)
self.spinBox_size.setMaximum(100000)
self.spinBox_size.setProperty("value", 100)
self.spinBox_size.setObjectName(_fromUtf8("spinBox_size"))
self.horizontalLayout.addWidget(self.spinBox_size)
self.verticalLayout_2.addLayout(self.horizontalLayout)
Wizard.addPage(self.wizardPage_tomb_size)
self.wizardPage_tomb_location = QtGui.QWizardPage()
self.wizardPage_tomb_location.setObjectName(_fromUtf8("wizardPage_tomb_location"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.wizardPage_tomb_location)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.label_4 = QtGui.QLabel(self.wizardPage_tomb_location)
self.label_4.setText(QtGui.QApplication.translate("Wizard", "Where do you want your tomb to be digged?", None, QtGui.QApplication.UnicodeUTF8))
self.label_4.setWordWrap(True)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.verticalLayout_3.addWidget(self.label_4)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.lineEdit_tombpath = QtGui.QLineEdit(self.wizardPage_tomb_location)
self.lineEdit_tombpath.setFrame(True)
self.lineEdit_tombpath.setPlaceholderText(QtGui.QApplication.translate("Wizard", "/path/to/file.tomb", None, QtGui.QApplication.UnicodeUTF8))
self.lineEdit_tombpath.setObjectName(_fromUtf8("lineEdit_tombpath"))
self.horizontalLayout_2.addWidget(self.lineEdit_tombpath)
self.button_tombpath = QtGui.QPushButton(self.wizardPage_tomb_location)
self.button_tombpath.setText(QtGui.QApplication.translate("Wizard", "Open file", None, QtGui.QApplication.UnicodeUTF8))
self.button_tombpath.setObjectName(_fromUtf8("button_tombpath"))
self.horizontalLayout_2.addWidget(self.button_tombpath)
self.verticalLayout_3.addLayout(self.horizontalLayout_2)
Wizard.addPage(self.wizardPage_tomb_location)
self.wizardPage_key_location = QtGui.QWizardPage()
self.wizardPage_key_location.setTitle(QtGui.QApplication.translate("Wizard", "Key creation", None, QtGui.QApplication.UnicodeUTF8))
self.wizardPage_key_location.setSubTitle(QtGui.QApplication.translate("Wizard", "Choose the location for your key", None, QtGui.QApplication.UnicodeUTF8))
self.wizardPage_key_location.setObjectName(_fromUtf8("wizardPage_key_location"))
self.verticalLayout_6 = QtGui.QVBoxLayout(self.wizardPage_key_location)
self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
self.label_5 = QtGui.QLabel(self.wizardPage_key_location)
self.label_5.setText(QtGui.QApplication.translate("Wizard", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans Serif\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Now, you have to decide where to put the <span style=\" font-weight:600;\">key</span> for your tomb<br />You should not leave your key at the door, as this will lower security A LOT</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Also, the keyfile is very small (less than a KB), so disk space is not an issue</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.label_5.setTextFormat(QtCore.Qt.RichText)
self.label_5.setWordWrap(True)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.verticalLayout_6.addWidget(self.label_5)
self.verticalLayout_5 = QtGui.QVBoxLayout()
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.verticalLayout_4 = QtGui.QVBoxLayout()
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.radioButton_usb = QtGui.QRadioButton(self.wizardPage_key_location)
self.radioButton_usb.setEnabled(False)
self.radioButton_usb.setText(QtGui.QApplication.translate("Wizard", "On a USB pen (best security)", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_usb.setCheckable(True)
self.radioButton_usb.setChecked(False)
self.radioButton_usb.setObjectName(_fromUtf8("radioButton_usb"))
self.verticalLayout_4.addWidget(self.radioButton_usb)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem2)
self.label_6 = QtGui.QLabel(self.wizardPage_key_location)
self.label_6.setEnabled(False)
self.label_6.setText(QtGui.QApplication.translate("Wizard", "If you choose to do so, do not insert it NOW. Do it when you are asked to do so", None, QtGui.QApplication.UnicodeUTF8))
self.label_6.setObjectName(_fromUtf8("label_6"))
self.horizontalLayout_4.addWidget(self.label_6)
self.verticalLayout_4.addLayout(self.horizontalLayout_4)
self.verticalLayout_5.addLayout(self.verticalLayout_4)
self.radioButton_near = QtGui.QRadioButton(self.wizardPage_key_location)
self.radioButton_near.setToolTip(QtGui.QApplication.translate("Wizard", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans Serif\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">It will be created as a regular file in the same directory of your tomb.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">It is much easier to use, but also much more <span style=\" font-style:italic;\">insecure</span>: all your security will be guaranteed by your <span style=\" font-weight:600;\">password</span>. If you really want to do this, choose a strong password (lot of random/non-dictionary words, spaces, numbers, odd characters)</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_near.setText(QtGui.QApplication.translate("Wizard", "Near to the tomb itself (this is BAD)", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_near.setChecked(True)
self.radioButton_near.setObjectName(_fromUtf8("radioButton_near"))
self.verticalLayout_5.addWidget(self.radioButton_near)
self.radioButton_custom = QtGui.QRadioButton(self.wizardPage_key_location)
self.radioButton_custom.setText(QtGui.QApplication.translate("Wizard", "Specify location", None, QtGui.QApplication.UnicodeUTF8))
self.radioButton_custom.setObjectName(_fromUtf8("radioButton_custom"))
self.verticalLayout_5.addWidget(self.radioButton_custom)
self.verticalLayout_6.addLayout(self.verticalLayout_5)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem3)
self.lineEdit_custom = QtGui.QLineEdit(self.wizardPage_key_location)
self.lineEdit_custom.setEnabled(False)
self.lineEdit_custom.setObjectName(_fromUtf8("lineEdit_custom"))
self.horizontalLayout_3.addWidget(self.lineEdit_custom)
self.pushButton_custom = QtGui.QPushButton(self.wizardPage_key_location)
self.pushButton_custom.setEnabled(False)
self.pushButton_custom.setText(QtGui.QApplication.translate("Wizard", "Choose location", None, QtGui.QApplication.UnicodeUTF8))
self.pushButton_custom.setObjectName(_fromUtf8("pushButton_custom"))
self.horizontalLayout_3.addWidget(self.pushButton_custom)
self.verticalLayout_6.addLayout(self.horizontalLayout_3)
self.label_11 = QtGui.QLabel(self.wizardPage_key_location)
self.label_11.setText(QtGui.QApplication.translate("Wizard", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans Serif\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:600;\">Help: </span>the key file is very small, so disk usage is not an issue</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.label_11.setWordWrap(True)
self.label_11.setObjectName(_fromUtf8("label_11"))
self.verticalLayout_6.addWidget(self.label_11)
Wizard.addPage(self.wizardPage_key_location)
self.wizardPage_progress = QtGui.QWizardPage()
self.wizardPage_progress.setTitle(QtGui.QApplication.translate("Wizard", "Creating", None, QtGui.QApplication.UnicodeUTF8))
self.wizardPage_progress.setSubTitle(QtGui.QApplication.translate("Wizard", "Please wait", None, QtGui.QApplication.UnicodeUTF8))
self.wizardPage_progress.setObjectName(_fromUtf8("wizardPage_progress"))
self.verticalLayout_9 = QtGui.QVBoxLayout(self.wizardPage_progress)
self.verticalLayout_9.setObjectName(_fromUtf8("verticalLayout_9"))
self.verticalLayout_7 = QtGui.QVBoxLayout()
self.verticalLayout_7.setObjectName(_fromUtf8("verticalLayout_7"))
self.progressBar = QtGui.QProgressBar(self.wizardPage_progress)
self.progressBar.setProperty("value", 0)
self.progressBar.setTextVisible(True)
self.progressBar.setInvertedAppearance(False)
self.progressBar.setObjectName(_fromUtf8("progressBar"))
self.verticalLayout_7.addWidget(self.progressBar)
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.label_progress = QtGui.QLabel(self.wizardPage_progress)
self.label_progress.setText(QtGui.QApplication.translate("Wizard", "Creating tomb, please wait...", None, QtGui.QApplication.UnicodeUTF8))
self.label_progress.setObjectName(_fromUtf8("label_progress"))
self.horizontalLayout_5.addWidget(self.label_progress)
self.verticalLayout_7.addLayout(self.horizontalLayout_5)
self.verticalLayout_9.addLayout(self.verticalLayout_7)
self.textBrowser_log = QtGui.QTextBrowser(self.wizardPage_progress)
self.textBrowser_log.setDocumentTitle(QtGui.QApplication.translate("Wizard", "Log", None, QtGui.QApplication.UnicodeUTF8))
self.textBrowser_log.setHtml(QtGui.QApplication.translate("Wizard", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><title>Log</title><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'DejaVu Sans\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Sans Serif\'; font-size:9pt; font-weight:600;\">Log</span></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-family:\'Sans Serif\'; font-size:9pt; font-weight:600;\"></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.textBrowser_log.setObjectName(_fromUtf8("textBrowser_log"))
self.verticalLayout_9.addWidget(self.textBrowser_log)
Wizard.addPage(self.wizardPage_progress)
self.wizardPage_end = QtGui.QWizardPage()
self.wizardPage_end.setObjectName(_fromUtf8("wizardPage_end"))
self.verticalLayout_8 = QtGui.QVBoxLayout(self.wizardPage_end)
self.verticalLayout_8.setObjectName(_fromUtf8("verticalLayout_8"))
self.label_10 = QtGui.QLabel(self.wizardPage_end)
self.label_10.setText(QtGui.QApplication.translate("Wizard", "You successfully created the tomb!", None, QtGui.QApplication.UnicodeUTF8))
self.label_10.setObjectName(_fromUtf8("label_10"))
self.verticalLayout_8.addWidget(self.label_10)
self.checkBox_open = QtGui.QCheckBox(self.wizardPage_end)
self.checkBox_open.setEnabled(False)
self.checkBox_open.setText(QtGui.QApplication.translate("Wizard", "Open the just-created tomb NOW!", None, QtGui.QApplication.UnicodeUTF8))
self.checkBox_open.setChecked(False)
self.checkBox_open.setTristate(False)
self.checkBox_open.setObjectName(_fromUtf8("checkBox_open"))
self.verticalLayout_8.addWidget(self.checkBox_open)
Wizard.addPage(self.wizardPage_end)
self.label_3.setBuddy(self.spinBox_size)
self.label_4.setBuddy(self.lineEdit_tombpath)
self.retranslateUi(Wizard)
QtCore.QObject.connect(self.radioButton_custom, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), self.lineEdit_custom.setEnabled)
QtCore.QObject.connect(self.radioButton_custom, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), self.pushButton_custom.setEnabled)
QtCore.QObject.connect(Wizard, QtCore.SIGNAL(_fromUtf8("currentIdChanged(int)")), self.label_11.hide)
QtCore.QObject.connect(Wizard, QtCore.SIGNAL(_fromUtf8("helpRequested()")), self.label_11.show)
QtCore.QMetaObject.connectSlotsByName(Wizard)
    def retranslateUi(self, Wizard):
        # pyuic-generated stub: every user-visible string is already set via
        # explicit QApplication.translate() calls in setupUi(), so there is
        # nothing left to retranslate here.
        pass
| gpl-3.0 |
lihui7115/ChromiumGStreamerBackend | third_party/tlslite/tlslite/utils/rijndael.py | 205 | 11056 | # Authors:
# Bram Cohen
# Trevor Perrin - various changes
#
# See the LICENSE file for legal information regarding use of this file.
# Also see Bram Cohen's statement below
"""
A pure python (slow) implementation of rijndael with a decent interface
To include -
from rijndael import rijndael
To do a key setup -
r = rijndael(key, block_size = 16)
key must be a string of length 16, 24, or 32
blocksize must be 16, 24, or 32. Default is 16
To use -
ciphertext = r.encrypt(plaintext)
plaintext = r.decrypt(ciphertext)
If any strings are of the wrong length a ValueError is thrown
"""
# ported from the Java reference code by Bram Cohen, bram@gawth.com, April 2001
# this code is public domain, unless someone makes
# an intellectual property claim against the reference
# code, in which case it can be made public domain by
# deleting all the comments and renaming all the variables
import copy
import string
# ShiftRows offsets, indexed as shifts[SC][row] -> [encrypt, decrypt],
# where SC selects the block size (0: 128-bit, 1: 192-bit, 2: 256-bit).
shifts = [[[0, 0], [1, 3], [2, 2], [3, 1]],
          [[0, 0], [1, 5], [2, 4], [3, 3]],
          [[0, 0], [1, 7], [3, 5], [4, 4]]]

# [keysize][block_size]
num_rounds = {16: {16: 10, 24: 12, 32: 14}, 24: {16: 12, 24: 12, 32: 14}, 32: {16: 14, 24: 14, 32: 14}}

# Bit matrix of the affine transformation used when building the S-box.
A = [[1, 1, 1, 1, 1, 0, 0, 0],
     [0, 1, 1, 1, 1, 1, 0, 0],
     [0, 0, 1, 1, 1, 1, 1, 0],
     [0, 0, 0, 1, 1, 1, 1, 1],
     [1, 0, 0, 0, 1, 1, 1, 1],
     [1, 1, 0, 0, 0, 1, 1, 1],
     [1, 1, 1, 0, 0, 0, 1, 1],
     [1, 1, 1, 1, 0, 0, 0, 1]]

# produce log and alog tables, needed for multiplying in the
# field GF(2^m) (generator = 3)
alog = [1]
for i in range(255):
    j = (alog[-1] << 1) ^ alog[-1]
    if j & 0x100 != 0:
        j ^= 0x11B
    alog.append(j)

log = [0] * 256
for i in range(1, 255):
    log[alog[i]] = i

# multiply two elements of GF(2^m)
def mul(a, b):
    if a == 0 or b == 0:
        return 0
    return alog[(log[a & 0xFF] + log[b & 0xFF]) % 255]

# substitution box based on F^{-1}(x)
box = [[0] * 8 for i in range(256)]
box[1][7] = 1
for i in range(2, 256):
    j = alog[255 - log[i]]
    for t in range(8):
        box[i][t] = (j >> (7 - t)) & 0x01

B = [0, 1, 1, 0, 0, 0, 1, 1]

# affine transform: box[i] <- B + A*box[i]
cox = [[0] * 8 for i in range(256)]
for i in range(256):
    for t in range(8):
        cox[i][t] = B[t]
        for j in range(8):
            cox[i][t] ^= A[t][j] * box[i][j]

# S-boxes and inverse S-boxes
S = [0] * 256
Si = [0] * 256
for i in range(256):
    S[i] = cox[i][0] << 7
    for t in range(1, 8):
        S[i] ^= cox[i][t] << (7-t)
    Si[S[i] & 0xFF] = i

# T-boxes: G is the MixColumns matrix; iG (computed below) is its inverse.
G = [[2, 1, 1, 3],
     [3, 2, 1, 1],
     [1, 3, 2, 1],
     [1, 1, 3, 2]]

# Augment G with the identity and invert it by Gauss-Jordan elimination
# over GF(2^8); the right half of AA ends up holding iG.
AA = [[0] * 8 for i in range(4)]
for i in range(4):
    for j in range(4):
        AA[i][j] = G[i][j]
    AA[i][i+4] = 1

for i in range(4):
    pivot = AA[i][i]
    if pivot == 0:
        # Swap in a row with a non-zero entry in this column.
        t = i + 1
        while AA[t][i] == 0 and t < 4:
            t += 1
        assert t != 4, 'G matrix must be invertible'
        for j in range(8):
            AA[i][j], AA[t][j] = AA[t][j], AA[i][j]
        pivot = AA[i][i]
    # Normalize the pivot row (division is subtraction of logs in GF(2^8)).
    for j in range(8):
        if AA[i][j] != 0:
            AA[i][j] = alog[(255 + log[AA[i][j] & 0xFF] - log[pivot & 0xFF]) % 255]
    # Eliminate this column from every other row.
    for t in range(4):
        if i != t:
            for j in range(i+1, 8):
                AA[t][j] ^= mul(AA[i][j], AA[t][i])
            AA[t][i] = 0

iG = [[0] * 4 for i in range(4)]
for i in range(4):
    for j in range(4):
        iG[i][j] = AA[i][j + 4]

# Multiply scalar a against each byte of bs, packing the products into one int.
def mul4(a, bs):
    if a == 0:
        return 0
    r = 0
    for b in bs:
        r <<= 8
        if b != 0:
            r = r | mul(a, b)
    return r

T1 = []
T2 = []
T3 = []
T4 = []
T5 = []
T6 = []
T7 = []
T8 = []
U1 = []
U2 = []
U3 = []
U4 = []
# T1-T4 combine SubBytes+MixColumns for encryption, T5-T8 the inverses for
# decryption; U1-U4 apply the inverse MixColumns alone (for the key schedule).
for t in range(256):
    s = S[t]
    T1.append(mul4(s, G[0]))
    T2.append(mul4(s, G[1]))
    T3.append(mul4(s, G[2]))
    T4.append(mul4(s, G[3]))

    s = Si[t]
    T5.append(mul4(s, iG[0]))
    T6.append(mul4(s, iG[1]))
    T7.append(mul4(s, iG[2]))
    T8.append(mul4(s, iG[3]))

    U1.append(mul4(t, iG[0]))
    U2.append(mul4(t, iG[1]))
    U3.append(mul4(t, iG[2]))
    U4.append(mul4(t, iG[3]))

# round constants
rcon = [1]
r = 1
for t in range(1, 30):
    r = mul(2, r)
    rcon.append(r)

# Drop the construction-time scaffolding (including leaked loop variables)
# so only the finished lookup tables remain in the module namespace.
del A
del AA
del pivot
del B
del G
del box
del log
del alog
del i
del j
del r
del s
del t
del mul
del mul4
del cox
del iG
class rijndael:
    """Rijndael (AES) block cipher supporting 128/192/256-bit keys and blocks.

    The constructor precomputes the encryption (Ke) and decryption (Kd)
    key schedules; encrypt()/decrypt() then each process exactly one block
    of ``block_size`` bytes.  Inputs must be byte sequences whose items are
    ints (e.g. bytearray); the result is returned as a bytearray.

    Bug fix vs. the previous revision: decrypt()'s wrong-length error
    message referenced the undefined name ``plaintext``, so raising the
    intended ValueError crashed with a NameError instead.  It now reports
    len(ciphertext).
    """

    def __init__(self, key, block_size = 16):
        # Validate parameters up front; only the three AES-family sizes exist.
        if block_size != 16 and block_size != 24 and block_size != 32:
            raise ValueError('Invalid block size: ' + str(block_size))
        if len(key) != 16 and len(key) != 24 and len(key) != 32:
            raise ValueError('Invalid key size: ' + str(len(key)))
        self.block_size = block_size

        ROUNDS = num_rounds[len(key)][block_size]
        BC = block_size // 4
        # encryption round keys
        Ke = [[0] * BC for i in range(ROUNDS + 1)]
        # decryption round keys
        Kd = [[0] * BC for i in range(ROUNDS + 1)]
        ROUND_KEY_COUNT = (ROUNDS + 1) * BC
        KC = len(key) // 4

        # copy user material bytes into temporary ints (one 32-bit word each)
        tk = []
        for i in range(0, KC):
            tk.append((key[i * 4] << 24) | (key[i * 4 + 1] << 16) |
                      (key[i * 4 + 2] << 8) | key[i * 4 + 3])

        # copy values into round key arrays
        t = 0
        j = 0
        while j < KC and t < ROUND_KEY_COUNT:
            Ke[t // BC][t % BC] = tk[j]
            Kd[ROUNDS - (t // BC)][t % BC] = tk[j]
            j += 1
            t += 1
        tt = 0
        rconpointer = 0
        while t < ROUND_KEY_COUNT:
            # extrapolate using phi (the round key evolution function)
            tt = tk[KC - 1]
            tk[0] ^= (S[(tt >> 16) & 0xFF] & 0xFF) << 24 ^ \
                     (S[(tt >> 8) & 0xFF] & 0xFF) << 16 ^ \
                     (S[ tt & 0xFF] & 0xFF) << 8 ^ \
                     (S[(tt >> 24) & 0xFF] & 0xFF) ^ \
                     (rcon[rconpointer] & 0xFF) << 24
            rconpointer += 1
            if KC != 8:
                for i in range(1, KC):
                    tk[i] ^= tk[i-1]
            else:
                # 256-bit keys get an extra SubWord in mid-schedule (FIPS-197).
                for i in range(1, KC // 2):
                    tk[i] ^= tk[i-1]
                tt = tk[KC // 2 - 1]
                tk[KC // 2] ^= (S[ tt & 0xFF] & 0xFF) ^ \
                               (S[(tt >> 8) & 0xFF] & 0xFF) << 8 ^ \
                               (S[(tt >> 16) & 0xFF] & 0xFF) << 16 ^ \
                               (S[(tt >> 24) & 0xFF] & 0xFF) << 24
                for i in range(KC // 2 + 1, KC):
                    tk[i] ^= tk[i-1]
            # copy values into round key arrays
            j = 0
            while j < KC and t < ROUND_KEY_COUNT:
                Ke[t // BC][t % BC] = tk[j]
                Kd[ROUNDS - (t // BC)][t % BC] = tk[j]
                j += 1
                t += 1
        # inverse MixColumn where needed (all but first and last round keys)
        for r in range(1, ROUNDS):
            for j in range(BC):
                tt = Kd[r][j]
                Kd[r][j] = U1[(tt >> 24) & 0xFF] ^ \
                           U2[(tt >> 16) & 0xFF] ^ \
                           U3[(tt >> 8) & 0xFF] ^ \
                           U4[ tt & 0xFF]
        self.Ke = Ke
        self.Kd = Kd

    def encrypt(self, plaintext):
        """Encrypt one block; returns a bytearray of len(self.block_size)."""
        if len(plaintext) != self.block_size:
            raise ValueError('wrong block length, expected ' + str(self.block_size) + ' got ' + str(len(plaintext)))
        Ke = self.Ke

        BC = self.block_size // 4
        ROUNDS = len(Ke) - 1
        # SC selects the ShiftRows offset row for this block size.
        if BC == 4:
            SC = 0
        elif BC == 6:
            SC = 1
        else:
            SC = 2
        s1 = shifts[SC][1][0]
        s2 = shifts[SC][2][0]
        s3 = shifts[SC][3][0]
        a = [0] * BC
        # temporary work array
        t = []
        # plaintext to ints + key
        for i in range(BC):
            t.append((plaintext[i * 4] << 24 |
                      plaintext[i * 4 + 1] << 16 |
                      plaintext[i * 4 + 2] << 8 |
                      plaintext[i * 4 + 3]) ^ Ke[0][i])
        # apply round transforms (table lookups fuse SubBytes/ShiftRows/MixColumns)
        for r in range(1, ROUNDS):
            for i in range(BC):
                a[i] = (T1[(t[ i ] >> 24) & 0xFF] ^
                        T2[(t[(i + s1) % BC] >> 16) & 0xFF] ^
                        T3[(t[(i + s2) % BC] >> 8) & 0xFF] ^
                        T4[ t[(i + s3) % BC] & 0xFF]) ^ Ke[r][i]
            t = copy.copy(a)
        # last round is special (no MixColumns, plain S-box)
        result = []
        for i in range(BC):
            tt = Ke[ROUNDS][i]
            result.append((S[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
            result.append((S[(t[(i + s1) % BC] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
            result.append((S[(t[(i + s2) % BC] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
            result.append((S[ t[(i + s3) % BC] & 0xFF] ^ tt) & 0xFF)
        return bytearray(result)

    def decrypt(self, ciphertext):
        """Decrypt one block; returns a bytearray of len(self.block_size)."""
        if len(ciphertext) != self.block_size:
            # FIX: previously reported len(plaintext), an undefined name here,
            # which turned the intended ValueError into a NameError.
            raise ValueError('wrong block length, expected ' + str(self.block_size) + ' got ' + str(len(ciphertext)))
        Kd = self.Kd

        BC = self.block_size // 4
        ROUNDS = len(Kd) - 1
        if BC == 4:
            SC = 0
        elif BC == 6:
            SC = 1
        else:
            SC = 2
        # Inverse ShiftRows offsets (second entry of each shifts pair).
        s1 = shifts[SC][1][1]
        s2 = shifts[SC][2][1]
        s3 = shifts[SC][3][1]
        a = [0] * BC
        # temporary work array
        t = [0] * BC
        # ciphertext to ints + key
        for i in range(BC):
            t[i] = (ciphertext[i * 4] << 24 |
                    ciphertext[i * 4 + 1] << 16 |
                    ciphertext[i * 4 + 2] << 8 |
                    ciphertext[i * 4 + 3]) ^ Kd[0][i]
        # apply round transforms (inverse T-boxes)
        for r in range(1, ROUNDS):
            for i in range(BC):
                a[i] = (T5[(t[ i ] >> 24) & 0xFF] ^
                        T6[(t[(i + s1) % BC] >> 16) & 0xFF] ^
                        T7[(t[(i + s2) % BC] >> 8) & 0xFF] ^
                        T8[ t[(i + s3) % BC] & 0xFF]) ^ Kd[r][i]
            t = copy.copy(a)
        # last round is special (plain inverse S-box)
        result = []
        for i in range(BC):
            tt = Kd[ROUNDS][i]
            result.append((Si[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
            result.append((Si[(t[(i + s1) % BC] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
            result.append((Si[(t[(i + s2) % BC] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
            result.append((Si[ t[(i + s3) % BC] & 0xFF] ^ tt) & 0xFF)
        return bytearray(result)
def encrypt(key, block):
    """One-shot helper: build a throwaway key schedule and encrypt one block."""
    cipher = rijndael(key, len(block))
    return cipher.encrypt(block)
def decrypt(key, block):
    """One-shot helper: build a throwaway key schedule and decrypt one block."""
    cipher = rijndael(key, len(block))
    return cipher.decrypt(block)
def test():
    """Round-trip smoke test over every supported (key size, block size) pair.

    Bug fix: the previous version fed plain str objects to rijndael, but the
    cipher bit-shifts individual elements (``key[i * 4] << 24``), which raises
    TypeError for 1-character strings on both Python 2 and 3.  bytearray
    indexes to int on both versions, so keys/blocks are built from it.
    """
    def t(kl, bl):
        b = bytearray(b'b' * bl)
        r = rijndael(bytearray(b'a' * kl), bl)
        # decrypt(encrypt(x)) must reproduce x exactly.
        assert r.decrypt(r.encrypt(b)) == b
    for kl in (16, 24, 32):
        for bl in (16, 24, 32):
            t(kl, bl)
| bsd-3-clause |
waseem18/oh-mainline | vendor/packages/html5lib/html5lib/tests/mockParser.py | 74 | 1164 | import sys
import os
if __name__ == '__main__':
    # Allow us to import from the src directory when run as a script:
    # move into this file's directory, then put ../src on the import path
    # before the `tokenizer` import below runs.
    os.chdir(os.path.split(os.path.abspath(__file__))[0])
    sys.path.insert(0, os.path.abspath(os.path.join(os.pardir, "src")))

from tokenizer import HTMLTokenizer
class HTMLParser(object):
""" Fake parser to test tokenizer output """
def parse(self, stream, output=True):
tokenizer = HTMLTokenizer(stream)
for token in tokenizer:
if output:
print token
if __name__ == "__main__":
x = HTMLParser()
if len(sys.argv) > 1:
if len(sys.argv) > 2:
import hotshot, hotshot.stats
prof = hotshot.Profile('stats.prof')
prof.runcall(x.parse, sys.argv[1], False)
prof.close()
stats = hotshot.stats.load('stats.prof')
stats.strip_dirs()
stats.sort_stats('time')
stats.print_stats()
else:
x.parse(sys.argv[1])
else:
print """Usage: python mockParser.py filename [stats]
If stats is specified the hotshots profiler will run and output the
stats instead.
"""
| agpl-3.0 |
pdellaert/ansible | test/units/modules/network/check_point/test_cp_mgmt_group.py | 19 | 3888 | # Ansible module to manage CheckPoint Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleExitJson
from ansible.module_utils import basic
from ansible.modules.network.check_point import cp_mgmt_group
# Object as the mocked API layer returns it after a successful create.
OBJECT = {
    "name": "New Group 5",
    "members": [
        "New Host 1",
        "My Test Host 3"
    ]
}

# Module arguments used to create the group.
CREATE_PAYLOAD = {
    "name": "New Group 5",
    "members": [
        "New Host 1",
        "My Test Host 3"
    ]
}

# Module arguments used to update the group (name only).
UPDATE_PAYLOAD = {
    "name": "New Group 5"
}

OBJECT_AFTER_UPDATE = UPDATE_PAYLOAD

# Module arguments used to delete the group.
DELETE_PAYLOAD = {
    "name": "New Group 5",
    "state": "absent"
}

# Dotted path patched in each test so no real API call is made.
function_path = 'ansible.modules.network.check_point.cp_mgmt_group.api_call'
api_call_object = 'group'
class TestCheckpointGroup(object):
    """Unit tests for cp_mgmt_group's create/update/delete flows."""

    module = cp_mgmt_group

    @pytest.fixture(autouse=True)
    def module_mock(self, mocker):
        # Replace AnsibleModule's exit/fail handlers so module results surface
        # as catchable exceptions instead of terminating the test process.
        return mocker.patch.multiple(basic.AnsibleModule,
                                     exit_json=exit_json,
                                     fail_json=fail_json)

    @pytest.fixture
    def connection_mock(self, mocker):
        # Mock the persistent-connection class and expose its instance.
        return mocker.patch(
            'ansible.module_utils.network.checkpoint.checkpoint.Connection'
        ).return_value

    def test_create(self, mocker, connection_mock):
        api_stub = mocker.patch(function_path)
        api_stub.return_value = {'changed': True, api_call_object: OBJECT}
        outcome = self._run_module(CREATE_PAYLOAD)
        assert outcome['changed']
        assert OBJECT.items() == outcome[api_call_object].items()

    def test_create_idempotent(self, mocker, connection_mock):
        api_stub = mocker.patch(function_path)
        api_stub.return_value = {'changed': False, api_call_object: OBJECT}
        outcome = self._run_module(CREATE_PAYLOAD)
        assert not outcome['changed']

    def test_update(self, mocker, connection_mock):
        api_stub = mocker.patch(function_path)
        api_stub.return_value = {'changed': True,
                                 api_call_object: OBJECT_AFTER_UPDATE}
        outcome = self._run_module(UPDATE_PAYLOAD)
        assert outcome['changed']
        assert OBJECT_AFTER_UPDATE.items() == outcome[api_call_object].items()

    def test_update_idempotent(self, mocker, connection_mock):
        api_stub = mocker.patch(function_path)
        api_stub.return_value = {'changed': False,
                                 api_call_object: OBJECT_AFTER_UPDATE}
        outcome = self._run_module(UPDATE_PAYLOAD)
        assert not outcome['changed']

    def test_delete(self, mocker, connection_mock):
        api_stub = mocker.patch(function_path)
        api_stub.return_value = {'changed': True}
        outcome = self._run_module(DELETE_PAYLOAD)
        assert outcome['changed']

    def test_delete_idempotent(self, mocker, connection_mock):
        api_stub = mocker.patch(function_path)
        api_stub.return_value = {'changed': False}
        outcome = self._run_module(DELETE_PAYLOAD)
        assert not outcome['changed']

    def _run_module(self, module_args):
        """Run the module's main() with args and return its exit payload."""
        set_module_args(module_args)
        with pytest.raises(AnsibleExitJson) as ex:
            self.module.main()
        return ex.value.args[0]
| gpl-3.0 |
mavenlin/tensorflow | tensorflow/python/kernel_tests/bcast_ops_test.py | 102 | 4542 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.kernels.bcast_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops.gen_array_ops import _broadcast_args
from tensorflow.python.ops.gen_array_ops import _broadcast_gradient_args
from tensorflow.python.platform import test
class BcastOpsTest(test.TestCase):
    """Exercises the broadcast-shape and broadcast-gradient-args kernels."""

    def _GetBroadcastShape(self, xs, ys):
        # Evaluate the broadcasted shape of xs vs ys inside a session.
        with self.test_session() as sess:
            return sess.run(_broadcast_args(xs, ys))

    def _GetGradientArgs(self, xs, ys):
        # Evaluate the reduction indices needed to backprop a broadcast.
        with self.test_session() as sess:
            return sess.run(_broadcast_gradient_args(xs, ys))

    def testBasic(self):
        # Every pair below broadcasts (in either argument order) to [2, 3, 5].
        pairs = [
            ([2, 3, 5], [1]),
            ([2, 3, 5], [5]),
            ([2, 3, 5], [3, 5]),
            ([2, 3, 5], [3, 1]),
            ([2, 1, 5], [3, 1]),
        ]
        for xs, ys in pairs:
            self.assertAllEqual(self._GetBroadcastShape(xs, ys), [2, 3, 5])
            self.assertAllEqual(self._GetBroadcastShape(ys, xs), [2, 3, 5])

    def testBasicGradient(self):
        # (xs, ys, reduce-dims for xs, reduce-dims for ys); swapping the
        # operands swaps the two result lists.
        cases = [
            ([2, 3, 5], [1], [], [0, 1, 2]),
            ([2, 3, 5], [5], [], [0, 1]),
            ([2, 3, 5], [3, 5], [], [0]),
            ([2, 3, 5], [3, 1], [], [0, 2]),
            ([2, 1, 5], [3, 1], [1], [0, 2]),
        ]
        for xs, ys, gx, gy in cases:
            r0, r1 = self._GetGradientArgs(xs, ys)
            self.assertAllEqual(r0, gx)
            self.assertAllEqual(r1, gy)
            r0, r1 = self._GetGradientArgs(ys, xs)
            self.assertAllEqual(r0, gy)
            self.assertAllEqual(r1, gx)

    def testZeroDims(self):
        # Zero-sized dimensions broadcast like any other against 1.
        pairs = [
            ([2, 0, 3, 0, 5], [3, 0, 5]),
            ([2, 0, 3, 0, 5], [3, 1, 5]),
        ]
        for xs, ys in pairs:
            self.assertAllEqual(self._GetBroadcastShape(xs, ys),
                                [2, 0, 3, 0, 5])
            self.assertAllEqual(self._GetBroadcastShape(ys, xs),
                                [2, 0, 3, 0, 5])

    def testZeroDimsGradient(self):
        cases = [
            ([2, 0, 3, 0, 5], [3, 0, 5], [], [0, 1]),
            ([2, 0, 3, 0, 5], [3, 1, 5], [], [0, 1, 3]),
        ]
        for xs, ys, gx, gy in cases:
            r0, r1 = self._GetGradientArgs(xs, ys)
            self.assertAllEqual(r0, gx)
            self.assertAllEqual(r1, gy)
            r0, r1 = self._GetGradientArgs(ys, xs)
            self.assertAllEqual(r0, gy)
            self.assertAllEqual(r1, gx)
if __name__ == "__main__":
test.main()
| apache-2.0 |
anntzer/numpy | numpy/ma/__init__.py | 17 | 1404 | """
=============
Masked Arrays
=============
Arrays sometimes contain invalid or missing data. When doing operations
on such arrays, we wish to suppress invalid values, which is the purpose masked
arrays fulfill (an example of typical use is given below).
For example, examine the following array:
>>> x = np.array([2, 1, 3, np.nan, 5, 2, 3, np.nan])
When we try to calculate the mean of the data, the result is undetermined:
>>> np.mean(x)
nan
The mean is calculated using roughly ``np.sum(x)/len(x)``, but since
any number added to ``NaN`` [1]_ produces ``NaN``, this doesn't work. Enter
masked arrays:
>>> m = np.ma.masked_array(x, np.isnan(x))
>>> m
masked_array(data = [2.0 1.0 3.0 -- 5.0 2.0 3.0 --],
mask = [False False False True False False False True],
fill_value=1e+20)
Here, we construct a masked array that suppress all ``NaN`` values. We
may now proceed to calculate the mean of the other values:
>>> np.mean(m)
2.6666666666666665
.. [1] Not-a-Number, a floating point value that is the result of an
invalid operation.
.. moduleauthor:: Pierre Gerard-Marchant
.. moduleauthor:: Jarrod Millman
"""
from . import core
from .core import *

from . import extras
from .extras import *

# Export the two submodules themselves plus everything each declares public.
__all__ = ['core', 'extras']
__all__ += core.__all__
__all__ += extras.__all__

# Installs the `np.ma.test()` entry point used to run this package's tests;
# the class itself is deleted so it does not leak into the namespace.
from numpy._pytesttester import PytestTester
test = PytestTester(__name__)
del PytestTester
| bsd-3-clause |
leighpauls/k2cro4 | tools/swarm_client/tests/isolateserver_archive_smoke_test.py | 1 | 1956 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
import tempfile
import time
import unittest
# Repository root: two directories up from this test file.
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Ensure that the testing machine has access to this server.
ISOLATE_SERVER = 'https://isolateserver.appspot.com/'

# The directory containing the test data files.
TEST_DATA_DIR = os.path.join(ROOT_DIR, 'tests', 'isolateserver_archive')

# Some basic binary data stored as a byte string.
BINARY_DATA = (chr(0) + chr(57) + chr(128) + chr(255)) * 2
class IsolateServerArchiveSmokeTest(unittest.TestCase):
    """Archives files against the live isolate server (smoke test)."""

    def setUp(self):
        # Unique per-run namespace so concurrent test runs don't collide.
        self.namespace = 'temporary' + str(long(time.time())).split('.', 1)[0]

    def _archive_given_files(self, files):
        """Given a list of files, call isolateserver_archive.py with them."""
        command = [
            sys.executable,
            os.path.join(ROOT_DIR, 'isolateserver_archive.py'),
            '--outdir', ISOLATE_SERVER,
            '--namespace', self.namespace
        ]
        command.extend(os.path.join(TEST_DATA_DIR, name) for name in files)
        proc = subprocess.Popen(command)
        proc.wait()
        return proc.returncode

    def test_archive_empty_file(self):
        self.assertEqual(0, self._archive_given_files(['empty_file.txt']))

    def test_archive_small_file(self):
        self.assertEqual(0, self._archive_given_files(['small_file.txt']))

    def test_archive_huge_file(self):
        # Exercise the >2 GiB upload path; the temp file is always removed.
        big = None
        try:
            big = tempfile.NamedTemporaryFile(delete=False)
            big.write(BINARY_DATA * int(128 * 1024 * 1024 * 2.1))
            big.close()
            self.assertEqual(0, self._archive_given_files([big.name]))
        finally:
            if big:
                os.remove(big.name)
if __name__ == '__main__':
    # Discover and run the unittest cases defined above.
    unittest.main()
| bsd-3-clause |
tangyiyong/odoo | addons/hw_posbox_homepage/__openerp__.py | 313 | 1691 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Addon manifest: this bare dict is evaluated (not imported as code) by the
# OpenERP/Odoo module loader to register the "PosBox Homepage" addon.
{
    'name': 'PosBox Homepage',
    'version': '1.0',
    'category': 'Hardware Drivers',
    'sequence': 6,
    'website': 'https://www.odoo.com/page/point-of-sale',
    'summary': 'A homepage for the PosBox',
    'description': """
PosBox Homepage
===============
This module overrides openerp web interface to display a simple
Homepage that explains what's the posbox and show the status,
and where to find documentation.
If you activate this module, you won't be able to access the
regular openerp interface anymore.
""",
    'author': 'OpenERP SA',
    'depends': ['hw_proxy'],
    # Marked not installable / not auto-installed in this release.
    'installable': False,
    'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ewindisch/nova | nova/openstack/common/fixture/config.py | 69 | 3062 | #
# Copyright 2013 Mirantis, Inc.
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
from oslo.config import cfg
import six
class Config(fixtures.Fixture):
    """Allows overriding configuration settings for the test.

    `conf` will be reset on cleanup.
    """

    def __init__(self, conf=cfg.CONF):
        self.conf = conf

    def setUp(self):
        super(Config, self).setUp()
        # NOTE(morganfainberg): cleanups run in reverse registration order,
        # so the unregister cleanup is added first to guarantee that
        # conf.reset() happens before any options are unregistered.
        self.addCleanup(self._unregister_config_opts)
        self.addCleanup(self.conf.reset)

        self._registered_config_opts = {}

    def config(self, **kw):
        """Override configuration values.

        The keyword arguments name the configuration options to override
        and give their new values.  An optional `group` keyword selects the
        option group; when absent the ``default`` group is used.
        """
        group = kw.pop('group', None)
        for option_name, override in six.iteritems(kw):
            self.conf.set_override(option_name, override, group)

    def _unregister_config_opts(self):
        # Undo every registration recorded by register_opt()/register_opts().
        for group, opts in self._registered_config_opts.items():
            self.conf.unregister_opts(opts, group=group)

    def register_opt(self, opt, group=None):
        """Register a single option for the test run.

        The option is automatically unregistered during cleanup.  When
        `group` is given the option is registered under that group,
        otherwise under the ``default`` group.
        """
        self.conf.register_opt(opt, group=group)
        self._registered_config_opts.setdefault(group, set()).add(opt)

    def register_opts(self, opts, group=None):
        """Register multiple options for the test run.

        Works like register_opt() but takes an iterable of options; all of
        them are registered under the same `group` (or ``default``).
        """
        for single_opt in opts:
            self.register_opt(single_opt, group=group)
| apache-2.0 |
an7oine/WinVHS | Cygwin/lib/python2.7/runpy.py | 123 | 10698 | """runpy.py - locating and running Python code using the module namespace
Provides support for locating and running Python scripts using the Python
module namespace instead of the native filesystem.
This allows Python code to play nicely with non-filesystem based PEP 302
importers when locating support scripts as well as when importing modules.
"""
# Written by Nick Coghlan <ncoghlan at gmail.com>
# to implement PEP 338 (Executing Modules as Scripts)
import sys
import imp
from pkgutil import read_code
try:
from imp import get_loader
except ImportError:
from pkgutil import get_loader
# Public API of this module; the underscore-prefixed helpers below are
# used by the interpreter itself (-m switch) but are deliberately private.
__all__ = [
    "run_module", "run_path",
]
class _TempModule(object):
    """Temporarily replace a module in sys.modules with an empty namespace."""

    def __init__(self, mod_name):
        self.mod_name = mod_name
        self.module = imp.new_module(mod_name)
        self._saved_module = []

    def __enter__(self):
        name = self.mod_name
        if name in sys.modules:
            # Remember whatever was registered under this name so that
            # __exit__ can put it back afterwards.
            self._saved_module.append(sys.modules[name])
        sys.modules[name] = self.module
        return self

    def __exit__(self, *args):
        previous = self._saved_module
        if previous:
            sys.modules[self.mod_name] = previous[0]
        else:
            del sys.modules[self.mod_name]
        self._saved_module = []
class _ModifiedArgv0(object):
def __init__(self, value):
self.value = value
self._saved_value = self._sentinel = object()
def __enter__(self):
if self._saved_value is not self._sentinel:
raise RuntimeError("Already preserving saved value")
self._saved_value = sys.argv[0]
sys.argv[0] = self.value
def __exit__(self, *args):
self.value = self._sentinel
sys.argv[0] = self._saved_value
def _run_code(code, run_globals, init_globals=None,
              mod_name=None, mod_fname=None,
              mod_loader=None, pkg_name=None):
    """Helper to run code in nominated namespace.

    Seeds *run_globals* with *init_globals* (when given), installs the
    special module attributes, executes *code* in that namespace and
    returns the same (mutated) namespace dict.
    """
    if init_globals is not None:
        run_globals.update(init_globals)
    # Special attributes are set after init_globals so callers cannot
    # accidentally override them.
    run_globals.update(__name__ = mod_name,
                       __file__ = mod_fname,
                       __loader__ = mod_loader,
                       __package__ = pkg_name)
    # Python 2 statement form of exec.
    exec code in run_globals
    return run_globals
def _run_module_code(code, init_globals=None,
                     mod_name=None, mod_fname=None,
                     mod_loader=None, pkg_name=None):
    """Run *code* in a fresh module namespace with sys state patched.

    While the code runs, sys.modules[mod_name] points at a temporary
    module and sys.argv[0] is replaced by *mod_fname*.
    """
    with _TempModule(mod_name) as temp_module:
        with _ModifiedArgv0(mod_fname):
            mod_globals = temp_module.module.__dict__
            _run_code(code, mod_globals, init_globals,
                      mod_name, mod_fname, mod_loader, pkg_name)
    # The temporary module is discarded on exit, so hand back a snapshot
    # of its globals rather than the soon-to-be-cleared live dict.
    return mod_globals.copy()
# This helper is needed due to a missing component in the PEP 302
# loader protocol (specifically, "get_filename" is non-standard)
# Since we can't introduce new features in maintenance releases,
# support was added to zipimporter under the name '_get_filename'
def _get_filename(loader, mod_name):
for attr in ("get_filename", "_get_filename"):
meth = getattr(loader, attr, None)
if meth is not None:
return meth(mod_name)
return None
# Helper to get the loader, code and filename for a module
def _get_module_details(mod_name):
    """Return (mod_name, loader, code, filename) for *mod_name*.

    For a package, recurses into its ``__main__`` submodule so the
    returned details describe ``<pkg>.__main__``.  Raises ImportError when
    the module cannot be found or no code object is available for it.
    """
    loader = get_loader(mod_name)
    if loader is None:
        raise ImportError("No module named %s" % mod_name)
    if loader.is_package(mod_name):
        if mod_name == "__main__" or mod_name.endswith(".__main__"):
            # Guard against recursing forever into <pkg>.__main__.__main__.
            raise ImportError("Cannot use package as __main__ module")
        try:
            pkg_main_name = mod_name + ".__main__"
            return _get_module_details(pkg_main_name)
        # Python 2 'except Exc, name' syntax.
        except ImportError, e:
            raise ImportError(("%s; %r is a package and cannot " +
                              "be directly executed") %(e, mod_name))
    code = loader.get_code(mod_name)
    if code is None:
        raise ImportError("No code object available for %s" % mod_name)
    filename = _get_filename(loader, mod_name)
    return mod_name, loader, code, filename
def _get_main_module_details():
    """Locate __main__, with a nicer error for zipfile/directory execution.

    When __main__ cannot be found at all, the generic ImportError is
    rewritten to name the sys.path[0] entry being searched.
    """
    main_name = "__main__"
    try:
        return _get_module_details(main_name)
    except ImportError as exc:
        # Only rewrite the "can't find __main__" failure; anything else
        # (e.g. an ImportError raised *by* __main__'s code) propagates.
        if main_name not in str(exc):
            raise
        raise ImportError("can't find %r module in %r" %
                          (main_name, sys.path[0]))
# This function is the actual implementation of the -m switch and direct
# execution of zipfiles and directories and is deliberately kept private.
# This avoids a repeat of the situation where run_module() no longer met the
# needs of mainmodule.c, but couldn't be changed because it was public
def _run_module_as_main(mod_name, alter_argv=True):
    """Runs the designated module in the __main__ namespace

    Note that the executed module will have full access to the
    __main__ namespace. If this is not desirable, the run_module()
    function should be used to run the module code in a fresh namespace.

    At the very least, these variables in __main__ will be overwritten:
        __name__
        __file__
        __loader__
        __package__
    """
    try:
        if alter_argv or mod_name != "__main__": # i.e. -m switch
            mod_name, loader, code, fname = _get_module_details(mod_name)
        else:          # i.e. directory or zipfile execution
            mod_name, loader, code, fname = _get_main_module_details()
    except ImportError as exc:
        # Abort with an interpreter-style one-line message instead of a
        # traceback, matching CPython's behaviour for a bad -m target.
        msg = "%s: %s" % (sys.executable, str(exc))
        sys.exit(msg)
    pkg_name = mod_name.rpartition('.')[0]
    # Execute directly in the real __main__ dict, not a copy.
    main_globals = sys.modules["__main__"].__dict__
    if alter_argv:
        sys.argv[0] = fname
    return _run_code(code, main_globals, None,
                     "__main__", fname, loader, pkg_name)
def run_module(mod_name, init_globals=None,
               run_name=None, alter_sys=False):
    """Execute a module's code without importing it.

    Returns the resulting top level namespace dictionary.
    """
    mod_name, loader, code, fname = _get_module_details(mod_name)
    if run_name is None:
        run_name = mod_name
    pkg_name = mod_name.rpartition('.')[0]
    if not alter_sys:
        # Leave the sys module alone: run in a plain, throwaway dict.
        return _run_code(code, {}, init_globals, run_name,
                         fname, loader, pkg_name)
    # Temporarily install the module in sys.modules and patch sys.argv[0].
    return _run_module_code(code, init_globals, run_name,
                            fname, loader, pkg_name)
# XXX (ncoghlan): Perhaps expose the C API function
# as imp.get_importer instead of reimplementing it in Python?
def _get_importer(path_name):
    """Python version of PyImport_GetImporter C API function"""
    cache = sys.path_importer_cache
    try:
        importer = cache[path_name]
    except KeyError:
        # Not yet cached. Flag as using the
        # standard machinery until we finish
        # checking the hooks
        cache[path_name] = None
        for hook in sys.path_hooks:
            try:
                importer = hook(path_name)
                break
            except ImportError:
                pass
        else:
            # for/else: runs only when no hook claimed the path.
            # The following check looks a bit odd. The trick is that
            # NullImporter raises ImportError if the supplied path is a
            # *valid* directory entry (and hence able to be handled
            # by the standard import machinery)
            try:
                importer = imp.NullImporter(path_name)
            except ImportError:
                # Leave the cached None in place: it marks the path as
                # handled by the standard import machinery.
                return None
        cache[path_name] = importer
    return importer
def _get_code_from_file(fname):
    """Return a code object for *fname*.

    Tries to read the file as compiled bytecode first; on failure,
    falls back to compiling it as ordinary source code.
    """
    # Check for a compiled file first
    with open(fname, "rb") as f:
        code = read_code(f)
    if code is None:
        # That didn't work, so try it as normal source code
        # ("rU" is Python 2's universal-newlines text mode).
        with open(fname, "rU") as f:
            code = compile(f.read(), fname, 'exec')
    return code
def run_path(path_name, init_globals=None, run_name=None):
    """Execute code located at the specified filesystem location

    Returns the resulting top level namespace dictionary

    The file path may refer directly to a Python script (i.e.
    one that could be directly executed with execfile) or else
    it may refer to a zipfile or directory containing a top
    level __main__.py script.
    """
    if run_name is None:
        run_name = "<run_path>"
    importer = _get_importer(path_name)
    if isinstance(importer, imp.NullImporter):
        # Not a valid sys.path entry, so run the code directly
        # execfile() doesn't help as we want to allow compiled files
        code = _get_code_from_file(path_name)
        return _run_module_code(code, init_globals, run_name, path_name)
    else:
        # Importer is defined for path, so add it to
        # the start of sys.path
        sys.path.insert(0, path_name)
        try:
            # Here's where things are a little different from the run_module
            # case. There, we only had to replace the module in sys while the
            # code was running and doing so was somewhat optional. Here, we
            # have no choice and we have to remove it even while we read the
            # code. If we don't do this, a __loader__ attribute in the
            # existing __main__ module may prevent location of the new module.
            main_name = "__main__"
            saved_main = sys.modules[main_name]
            del sys.modules[main_name]
            try:
                mod_name, loader, code, fname = _get_main_module_details()
            finally:
                # Always restore the real __main__, even on lookup failure.
                sys.modules[main_name] = saved_main
            pkg_name = ""
            with _TempModule(run_name) as temp_module, \
                 _ModifiedArgv0(path_name):
                mod_globals = temp_module.module.__dict__
                return _run_code(code, mod_globals, init_globals,
                                    run_name, fname, loader, pkg_name).copy()
        finally:
            try:
                sys.path.remove(path_name)
            except ValueError:
                # Someone already removed our sys.path entry; nothing to undo.
                pass
if __name__ == "__main__":
    # Run the module specified as the next command line argument
    if len(sys.argv) < 2:
        # Python 2 print-to-stream syntax.
        print >> sys.stderr, "No module specified for execution"
    else:
        del sys.argv[0] # Make the requested module sys.argv[0]
        _run_module_as_main(sys.argv[0])
| gpl-3.0 |
zilantian/p2pool | p2pool/util/p2protocol.py | 22 | 3674 | '''
Generic message-based protocol used by Bitcoin and P2Pool for P2P communication
'''
import hashlib
import struct
from twisted.internet import protocol
from twisted.python import log
import p2pool
from p2pool.util import datachunker, variable
class TooLong(Exception):
    """Raised by Protocol.sendPacket() when a serialized payload exceeds
    the connection's maximum payload length."""
    pass
class Protocol(protocol.Protocol):
    """Length-prefixed, double-SHA256-checksummed message framing over a
    Twisted transport.

    Wire format (as built by sendPacket): message prefix, 12-byte
    NUL-padded command, little-endian uint32 payload length, first 4 bytes
    of sha256(sha256(payload)), then the payload itself.  Incoming
    messages are dispatched to ``handle_<command>`` methods; outgoing
    messages are serialized via ``message_<command>`` type objects.
    """

    # NOTE(review): the default Event() is created once at class-definition
    # time and therefore shared by every instance that omits the argument —
    # confirm this sharing is intended.
    def __init__(self, message_prefix, max_payload_length, traffic_happened=variable.Event()):
        self._message_prefix = message_prefix
        self._max_payload_length = max_payload_length
        # DataChunker drives the dataReceiver() generator with byte counts.
        self.dataReceived2 = datachunker.DataChunker(self.dataReceiver())
        self.traffic_happened = traffic_happened

    def dataReceived(self, data):
        # Account inbound traffic, then feed the framing state machine.
        self.traffic_happened.happened('p2p/in', len(data))
        self.dataReceived2(data)

    def dataReceiver(self):
        """Generator-based parser; each ``yield n`` receives n bytes."""
        while True:
            # Scan byte-by-byte until the magic message prefix is seen.
            start = ''
            while start != self._message_prefix:
                start = (start + (yield 1))[-len(self._message_prefix):]
            command = (yield 12).rstrip('\0')
            length, = struct.unpack('<I', (yield 4))
            if length > self._max_payload_length:
                print 'length too large'
                continue
            checksum = yield 4
            payload = yield length
            # Verify the double-SHA256 checksum before trusting the payload.
            if hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4] != checksum:
                print 'invalid hash for', self.transport.getPeer().host, repr(command), length, checksum.encode('hex'), hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4].encode('hex'), payload.encode('hex')
                self.badPeerHappened()
                continue
            # Look up the deserializer declared for this command, if any.
            type_ = getattr(self, 'message_' + command, None)
            if type_ is None:
                if p2pool.DEBUG:
                    print 'no type for', repr(command)
                continue
            try:
                self.packetReceived(command, type_.unpack(payload))
            # NOTE(review): bare except — also swallows KeyboardInterrupt /
            # SystemExit raised inside a handler.
            except:
                print 'RECV', command, payload[:100].encode('hex') + ('...' if len(payload) > 100 else '')
                log.err(None, 'Error handling message: (see RECV line)')
                self.transport.loseConnection()

    def packetReceived(self, command, payload2):
        """Dispatch a decoded message dict to handle_<command>(**payload2)."""
        handler = getattr(self, 'handle_' + command, None)
        if handler is None:
            if p2pool.DEBUG:
                print 'no handler for', repr(command)
            return
        # Skip dispatch once the connection is gone or going away.
        if getattr(self, 'connected', True) and not getattr(self, 'disconnecting', False):
            handler(**payload2)

    def badPeerHappened(self):
        # Default misbehaving-peer policy: just drop the connection.
        self.transport.loseConnection()

    def sendPacket(self, command, payload2):
        """Serialize and transmit one message; raises ValueError/TooLong."""
        if len(command) >= 12:
            raise ValueError('command too long')
        type_ = getattr(self, 'message_' + command, None)
        if type_ is None:
            raise ValueError('invalid command')
        #print 'SEND', command, repr(payload2)[:500]
        payload = type_.pack(payload2)
        if len(payload) > self._max_payload_length:
            raise TooLong('payload too long')
        data = self._message_prefix + struct.pack('<12sI', command, len(payload)) + hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4] + payload
        self.traffic_happened.happened('p2p/out', len(data))
        self.transport.write(data)

    def __getattr__(self, attr):
        # Synthesize send_<command>(**payload) helpers on demand.
        prefix = 'send_'
        if attr.startswith(prefix):
            command = attr[len(prefix):]
            return lambda **payload2: self.sendPacket(command, payload2)
        #return protocol.Protocol.__getattr__(self, attr)
        raise AttributeError(attr)
| gpl-3.0 |
Maspear/odoo | addons/lunch/tests/test_lunch.py | 345 | 5045 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.tests import common
class Test_Lunch(common.TransactionCase):
    """Exercise the lunch order-line state machine
    (new -> ordered -> confirmed -> cancelled) and the cashmove records
    created or deleted alongside those transitions."""

    def setUp(self):
        """*****setUp*****"""
        super(Test_Lunch, self).setUp()
        cr, uid = self.cr, self.uid

        # Model registries used throughout the tests.
        self.res_users = self.registry('res.users')
        self.lunch_order = self.registry('lunch.order')
        self.lunch_order_line = self.registry('lunch.order.line')
        self.lunch_cashmove = self.registry('lunch.cashmove')
        self.lunch_product = self.registry('lunch.product')
        self.lunch_alert = self.registry('lunch.alert')
        self.lunch_product_category = self.registry('lunch.product.category')

        self.demo_id = self.res_users.search(cr, uid, [('name', '=', 'Demo User')])
        self.product_bolognese_ref = self.registry('ir.model.data').get_object_reference(cr, uid, 'lunch', 'product_Bolognese')
        self.product_Bolognese_id = self.product_bolognese_ref and self.product_bolognese_ref[1] or False
        # One order holding a single 'Bolognese' line; every test uses it.
        self.new_id_order = self.lunch_order.create(cr,uid,{
            'user_id': self.demo_id[0],
            'order_line_ids':'[]',
            },context=None)
        self.new_id_order_line = self.lunch_order_line.create(cr,uid,{
            'order_id':self.new_id_order,
            'product_id':self.product_Bolognese_id,
            'note': '+Emmental',
            'cashmove': [],
            'price': self.lunch_product.browse(cr,uid,self.product_Bolognese_id,context=None).price,
            })

    def test_00_lunch_order(self):
        """Change the state of an order line from 'new' to 'ordered'. Check that there are no cashmove linked to that order line"""
        cr, uid = self.cr, self.uid
        self.order_one = self.lunch_order_line.browse(cr,uid,self.new_id_order_line,context=None)
        #we check that our order_line is a 'new' one and that there are no cashmove linked to that order_line:
        self.assertEqual(self.order_one.state,'new')
        self.assertEqual(list(self.order_one.cashmove), [])
        #we order that orderline so it's state will be 'ordered'
        self.order_one.order()
        # Re-browse to pick up the updated state written by order().
        self.order_one = self.lunch_order_line.browse(cr,uid,self.new_id_order_line,context=None)
        #we check that our order_line is a 'ordered' one and that there are no cashmove linked to that order_line:
        self.assertEqual(self.order_one.state,'ordered')
        self.assertEqual(list(self.order_one.cashmove), [])

    def test_01_lunch_order(self):
        """Change the state of an order line from 'new' to 'ordered' then to 'confirmed'. Check that there is a cashmove linked to the order line"""
        cr, uid = self.cr, self.uid
        # Chains on test_00 to reach the 'ordered' state first.
        self.test_00_lunch_order()
        #We receive the order so we confirm the order line so it's state will be 'confirmed'
        #A cashmove will be created and we will test that the cashmove amount equals the order line price
        self.order_one.confirm()
        self.order_one = self.lunch_order_line.browse(cr,uid,self.new_id_order_line,context=None)
        #we check that our order_line is a 'confirmed' one and that there are a cashmove linked to that order_line with an amount equals to the order line price:
        self.assertEqual(self.order_one.state,'confirmed')
        self.assertTrue(self.order_one.cashmove)
        # The cashmove debits the user: amount is the negated line price.
        self.assertTrue(self.order_one.cashmove[0].amount==-self.order_one.price)

    def test_02_lunch_order(self):
        """Change the state of an order line from 'confirmed' to 'cancelled' and check that the cashmove linked to that order line will be deleted"""
        cr, uid = self.cr, self.uid
        # Chains on test_01 to reach the 'confirmed' state (with cashmove).
        self.test_01_lunch_order()
        #We have a confirmed order with its associate cashmove
        #We execute the cancel function
        self.order_one.cancel()
        self.order_one = self.lunch_order_line.browse(cr,uid,self.new_id_order_line,context=None)
        #We check that the state is cancelled and that the cashmove has been deleted
        self.assertEqual(self.order_one.state,'cancelled')
self.assertFalse(self.order_one.cashmove) | agpl-3.0 |
yippeecw/sfa | sfa/client/common.py | 2 | 3567 | # a few utilities common to sfi and sfaadmin
def optparse_listvalue_callback(option, opt, value, parser):
    """optparse callback that accumulates comma-separated values in a list.

    The literal value 'none' (any case) leaves the accumulated list
    untouched; this lets callers write e.g. ``sfi update -r none`` instead
    of ``-r ''``, which is painful and does not pass well through ssh.
    """
    current = getattr(parser.values, option.dest) or []
    if value.lower() == 'none':
        updated = current
    else:
        updated = current + value.split(',')
    setattr(parser.values, option.dest, updated)
def optparse_dictvalue_callback(option, option_string, value, parser):
    """optparse callback that parses KEY=VALUE arguments into a dict option.

    *value* must contain at least one '='; the text before the first '='
    becomes the key, everything after it the value.  The pair is stored in
    the dict held in ``parser.values.<option.dest>``.  On malformed input
    the parser help is printed and the process exits with status 1.
    """
    try:
        (k, v) = value.split('=', 1)
        d = getattr(parser.values, option.dest)
        d[k] = v
    except Exception:
        # Bug fix: the original bare 'except:' called sys.exit() although
        # this module never imports sys, so the error path crashed with a
        # NameError instead of exiting cleanly.  Import locally and exit.
        parser.print_help()
        import sys
        sys.exit(1)
# a code fragment that could be helpful for argparse which unfortunately is
# available with 2.7 only, so this feels like too strong a requirement for the client side
#class ExtraArgAction (argparse.Action):
# def __call__ (self, parser, namespace, values, option_string=None):
# would need a try/except of course
# (k,v)=values.split('=')
# d=getattr(namespace,self.dest)
# d[k]=v
#####
#parser.add_argument ("-X","--extra",dest='extras', default={}, action=ExtraArgAction,
# help="set extra flags, testbed dependent, e.g. --extra enabled=true")
##############################
# these are not needed from the outside
def terminal_render_plural(how_many, name, names=None):
    """Return a count phrase such as 'No key', '1 key' or '3 keys'.

    *names* supplies an irregular plural form; when omitted (or empty),
    the plural defaults to *name* + 's'.  Zero and negative counts render
    as 'No <name>' (singular, matching the original behaviour).
    """
    plural = names if names else "%ss" % name
    if how_many <= 0:
        return "No %s" % name
    if how_many == 1:
        return "1 %s" % name
    return "%d %s" % (how_many, plural)
def terminal_render_default (record,options):
    """Fallback one-line rendering: '<hrn> (<type>)'."""
    print "%s (%s)" % (record['hrn'], record['type'])
def terminal_render_user (record, options):
    """Render a user record: hrn, PI authorities, slices, and keys.

    With options.verbose the keys are printed one per (indented) line;
    otherwise only a key count is shown.
    """
    # Trailing commas suppress the newline (Python 2 print continuation).
    print "%s (User)"%record['hrn'],
    if record.get('reg-pi-authorities',None): print " [PI at %s]"%(" and ".join(record['reg-pi-authorities'])),
    if record.get('reg-slices',None): print " [IN slices %s]"%(" and ".join(record['reg-slices'])),
    user_keys=record.get('reg-keys',[])
    if not options.verbose:
        print " [has %s]"%(terminal_render_plural(len(user_keys),"key"))
    else:
        print ""
        for key in user_keys: print 8*' ',key.strip("\n")
def terminal_render_slice (record, options):
    """Render a slice record: hrn plus its researchers, on one line."""
    print "%s (Slice)"%record['hrn'],
    if record.get('reg-researchers',None): print " [USERS %s]"%(" and ".join(record['reg-researchers'])),
#    print record.keys()
    print ""
def terminal_render_authority (record, options):
    """Render an authority record: hrn plus its PIs, on one line."""
    print "%s (Authority)"%record['hrn'],
    if record.get('reg-pis',None): print " [PIS %s]"%(" and ".join(record['reg-pis'])),
    print ""
def terminal_render_node (record, options):
    """Render a node record: just '<hrn> (Node)'."""
    print "%s (Node)"%record['hrn']
### used in sfi list
def terminal_render (records,options):
    """Group *records* by type and print each group with its renderer.

    The per-type renderer is looked up by name (terminal_render_<type>);
    unknown types fall back to terminal_render_default.
    """
    # sort records by type
    grouped_by_type={}
    for record in records:
        type=record['type']
        if type not in grouped_by_type: grouped_by_type[type]=[]
        grouped_by_type[type].append(record)
    # Python 2: keys() returns a plain list, sorted in place.
    group_types=grouped_by_type.keys()
    group_types.sort()
    for type in group_types:
        group=grouped_by_type[type]
#        print 20 * '-', type
        # Name-based dispatch; eval resolves the module-level renderer.
        try: renderer=eval('terminal_render_'+type)
        except: renderer=terminal_render_default
        for record in group: renderer(record,options)
####################
def filter_records(type, records):
    """Return the records whose 'type' field equals *type*.

    The special value "all" matches every record.  A new list is always
    returned; *records* is never modified.
    """
    return [record for record in records
            if type == "all" or record['type'] == type]
| mit |
davidovich/pip | pip/commands/list.py | 269 | 7251 | from __future__ import absolute_import
import logging
from pip._vendor import pkg_resources
from pip.basecommand import Command
from pip.exceptions import DistributionNotFound
from pip.index import FormatControl, fmt_ctl_formats, PackageFinder, Search
from pip.req import InstallRequirement
from pip.utils import get_installed_distributions, dist_is_editable
from pip.wheel import WheelCache
from pip.cmdoptions import make_option_group, index_group
logger = logging.getLogger(__name__)
class ListCommand(Command):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        """Declare the command-line options understood by `pip list`."""
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages (excluding editables)')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages (excluding editables)')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        # Index-related options (index URL, trusted hosts, ...) are shared
        # with other commands via the common option group.
        index_opts = make_option_group(index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_external=options.allow_external,
            allow_unverified=options.allow_unverified,
            allow_all_external=options.allow_all_external,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            process_dependency_links=options.process_dependency_links,
            session=session,
        )

    def run(self, options, args):
        """Dispatch to the listing mode selected by the mutually-exclusive
        --outdated / --uptodate / --editable flags (default: list all)."""
        if options.outdated:
            self.run_outdated(options)
        elif options.uptodate:
            self.run_uptodate(options)
        elif options.editable:
            self.run_editables(options)
        else:
            self.run_listing(options)

    def run_outdated(self, options):
        """Print every installed package whose index version is newer."""
        for dist, version, typ in self.find_packages_latest_versions(options):
            if version > dist.parsed_version:
                logger.info(
                    '%s (Current: %s Latest: %s [%s])',
                    dist.project_name, dist.version, version, typ,
                )

    def find_packages_latest_versions(self, options):
        """Yield (dist, latest_remote_version, 'wheel'|'sdist') for each
        installed (non-editable) distribution that has a newer or equal
        version on the configured index(es)."""
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.info('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        # Collect dependency links advertised by installed distributions.
        dependency_links = []
        for dist in get_installed_distributions(local_only=options.local,
                                                user_only=options.user):
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)
            finder.add_dependency_links(dependency_links)

            installed_packages = get_installed_distributions(
                local_only=options.local,
                user_only=options.user,
                include_editables=False,
            )
            # No binary/source restrictions for the lookup.
            format_control = FormatControl(set(), set())
            wheel_cache = WheelCache(options.cache_dir, format_control)
            for dist in installed_packages:
                req = InstallRequirement.from_line(
                    dist.key, None, isolated=options.isolated_mode,
                    wheel_cache=wheel_cache
                )
                typ = 'unknown'
                try:
                    link = finder.find_requirement(req, True)

                    # If link is None, means installed version is most
                    # up-to-date
                    if link is None:
                        continue
                except DistributionNotFound:
                    # Package not on the index at all: nothing to report.
                    continue
                else:
                    canonical_name = pkg_resources.safe_name(req.name).lower()
                    formats = fmt_ctl_formats(format_control, canonical_name)
                    search = Search(
                        req.name,
                        canonical_name,
                        formats)
                    remote_version = finder._link_package_versions(
                        link, search).version
                    if link.is_wheel:
                        typ = 'wheel'
                    else:
                        typ = 'sdist'
                yield dist, remote_version, typ

    def run_listing(self, options):
        """List every installed package (default mode)."""
        installed_packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
        )
        self.output_package_listing(installed_packages)

    def run_editables(self, options):
        """List only packages installed in editable (-e) mode."""
        installed_packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=True,
        )
        self.output_package_listing(installed_packages)

    def output_package_listing(self, installed_packages):
        """Log one line per package, sorted case-insensitively by name;
        editable installs additionally show their source location."""
        installed_packages = sorted(
            installed_packages,
            key=lambda dist: dist.project_name.lower(),
        )
        for dist in installed_packages:
            if dist_is_editable(dist):
                line = '%s (%s, %s)' % (
                    dist.project_name,
                    dist.version,
                    dist.location,
                )
            else:
                line = '%s (%s)' % (dist.project_name, dist.version)
            logger.info(line)

    def run_uptodate(self, options):
        """List packages whose installed version matches the index's latest."""
        uptodate = []
        for dist, version, typ in self.find_packages_latest_versions(options):
            if dist.parsed_version == version:
                uptodate.append(dist)
        self.output_package_listing(uptodate)
| mit |
sookasa/heroku-buildpack-python-pipeline | vendor/distribute-0.6.34/setuptools/tests/test_sandbox.py | 204 | 1724 | """develop tests
"""
import sys
import os
import shutil
import unittest
import tempfile
from setuptools.sandbox import DirectorySandbox, SandboxViolation
def has_win32com():
    """
    Run this to determine if the local machine has win32com, and if it
    does, include additional tests.

    Returns False on non-Windows platforms or when win32com is not
    importable; True otherwise.
    """
    if not sys.platform.startswith('win32'):
        return False
    try:
        # Probe for availability only; the binding to a local name in the
        # original was unused, so the import result is discarded.
        __import__('win32com')
    except ImportError:
        return False
    return True
class TestSandbox(unittest.TestCase):
    """Tests for setuptools' DirectorySandbox write restrictions."""

    def setUp(self):
        # Each test gets its own scratch directory to sandbox into.
        self.dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.dir)

    def test_devnull(self):
        # Writing to os.devnull must be allowed even inside the sandbox.
        if sys.version < '2.4':
            return
        sandbox = DirectorySandbox(self.dir)
        sandbox.run(self._file_writer(os.devnull))

    def _file_writer(path):
        """Return a zero-argument callable that writes 'xxx' to *path*."""
        def do_write():
            f = open(path, 'w')
            f.write('xxx')
            f.close()
        return do_write

    # Pre-decorator staticmethod idiom (kept for old-Python compatibility).
    _file_writer = staticmethod(_file_writer)

    # This test only exists when win32com is importable on this machine.
    if has_win32com():
        def test_win32com(self):
            """
            win32com should not be prevented from caching COM interfaces
            in gen_py.
            """
            import win32com
            gen_py = win32com.__gen_path__
            target = os.path.join(gen_py, 'test_write')
            sandbox = DirectorySandbox(self.dir)
            try:
                try:
                    sandbox.run(self._file_writer(target))
                except SandboxViolation:
                    self.fail("Could not create gen_py file due to SandboxViolation")
            finally:
                # Never leave the probe file behind in the gen_py cache.
                if os.path.exists(target): os.remove(target)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| mit |
abhikumar22/MYBLOG | blg/Lib/site-packages/pip/_vendor/requests/packages/chardet/big5freq.py | 3133 | 82594 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Big5 frequency table
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
#
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ratio = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
# ~25% of the ideal ratio computed above; still far above the random
# distribution ratio, which is what the detector relies on.
BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75

#Char to FreqOrder table
BIG5_TABLE_SIZE = 5376
Big5CharToFreqOrder = (
1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512
#Everything below is of no interest for detection purpose
2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392
2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408
5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424
5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440
5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456
5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472
5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488
5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504
5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520
5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536
5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552
5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568
5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584
5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600
6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616
6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632
6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648
6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664
6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680
6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696
6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712
6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728
6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744
6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760
6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776
6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792
6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808
6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824
6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840
6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856
6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872
6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888
6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904
6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920
6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936
6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952
6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968
6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984
6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000
6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016
6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032
6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048
6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064
6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080
6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096
6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112
6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128
6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144
6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160
6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176
6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192
6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208
6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224
6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240
6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256
3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272
6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288
6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304
3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320
6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336
6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352
6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368
6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384
6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400
6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416
6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432
4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448
6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464
6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480
3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496
6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512
6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528
6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544
6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560
6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576
6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592
6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608
6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624
6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640
6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656
6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672
7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688
7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704
7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720
7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736
7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752
7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768
7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784
7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800
7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816
7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832
7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848
7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864
7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880
7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896
7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912
7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928
7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944
7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960
7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976
7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992
7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008
7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024
7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040
7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056
7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072
7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088
7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104
7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120
7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136
7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152
7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168
7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184
7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200
7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216
7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248
7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264
7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280
7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296
7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312
7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328
7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344
7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360
7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376
7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392
7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408
7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424
7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440
3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456
7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472
7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488
7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504
7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520
4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536
7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552
7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568
7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584
7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600
7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616
7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632
7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648
7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664
7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680
7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696
7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712
8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728
8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744
8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760
8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776
8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792
8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808
8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824
8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840
8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856
8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872
8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888
8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904
8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920
8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936
8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952
8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968
8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984
8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016
8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032
8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048
8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064
8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080
8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096
8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112
8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128
8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144
8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160
8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176
8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192
8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208
8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224
8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240
8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256
8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272
8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288
8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304
8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320
8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336
8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352
8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368
8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384
8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400
8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416
8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448
8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464
8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480
8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496
8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512
8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528
8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544
8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560
8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576
8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592
8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608
8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624
8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640
8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656
8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672
8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688
4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704
8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720
8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736
8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752
8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768
9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784
9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800
9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816
9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832
9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848
9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864
9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880
9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896
9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912
9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928
9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944
9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960
9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976
9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992
9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008
9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024
9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040
9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056
9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072
9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088
9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104
9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120
9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136
9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152
9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168
9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184
9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200
9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216
9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232
9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248
9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264
9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280
9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296
9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312
9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328
9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344
9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360
9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376
3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392
9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408
9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424
9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440
4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456
9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472
9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488
9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504
9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520
9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536
9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552
9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568
9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584
9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600
9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616
9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632
9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648
9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664
9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680
9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696
9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712
9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728
9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744
9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760
9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776
9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792
9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808
9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824
10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840
10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856
10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872
10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888
10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904
10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920
10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936
10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952
10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968
4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984
10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000
10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016
10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032
10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048
10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064
10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080
10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096
10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112
4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128
10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144
10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160
10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176
10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192
10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208
10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224
10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240
10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256
10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272
10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288
10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304
10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320
10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336
10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352
10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368
10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384
10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400
4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416
10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432
10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448
10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464
10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480
10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496
10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512
10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528
10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544
10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560
10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576
10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592
10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608
10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624
10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640
10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656
10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672
10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688
10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704
10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720
10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736
10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752
10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768
10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784
10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800
10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816
10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832
10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848
10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864
10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880
10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896
11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912
11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928
11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944
4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960
11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976
11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992
11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008
11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024
11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040
11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056
11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072
11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088
11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104
11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120
11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136
11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152
11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168
11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184
11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200
11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216
11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232
11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248
11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264
11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280
11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296
11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312
11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328
11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344
11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360
11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376
11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392
11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408
11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424
11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440
11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456
11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472
4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488
11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504
11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520
11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536
11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552
11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568
11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584
11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600
11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616
11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632
11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648
11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664
11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680
11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696
11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712
11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728
11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744
11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760
11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776
11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792
11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808
11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824
11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840
11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856
11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872
11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888
11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904
11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920
11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936
12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952
12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968
12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984
12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000
12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016
12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032
12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048
12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064
12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080
12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096
12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112
12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128
12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144
12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160
12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176
4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192
4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208
4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224
12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240
12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256
12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272
12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288
12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304
12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320
12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336
12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352
12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368
12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384
12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400
12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416
12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432
12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448
12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464
12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480
12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496
12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512
12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528
12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544
12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560
12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576
12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592
12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608
12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624
12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640
12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656
12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672
12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688
12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704
12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720
12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736
12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752
12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768
12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784
12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800
12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816
12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832
12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848
12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864
12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880
12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896
12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912
12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928
12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944
12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960
12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976
4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992
13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008
13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024
13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040
13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056
13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072
13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088
13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104
4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120
13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136
13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152
13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168
13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184
13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200
13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216
13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232
13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248
13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264
13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280
13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296
13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312
13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328
13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344
13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360
5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376
13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392
13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408
13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424
13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440
13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456
13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472
13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488
13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504
13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520
13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536
13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552
13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568
13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584
13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600
13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616
13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632
13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648
13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664
13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680
13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696
13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712
13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728
13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744
13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760
13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776
13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792
13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808
13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824
13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840
13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856
13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872
13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888
13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904
13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920
13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936
13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952
13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968
13968,13969,13970,13971,13972) #13973
# flake8: noqa
| gpl-3.0 |
rspavel/spack | var/spack/repos/builtin/packages/minivite/package.py | 5 | 1683 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Minivite(MakefilePackage):
    """miniVite is a proxy application that implements a single phase of
    Louvain method in distributed memory for graph community detection.
    """

    tags = ['proxy-app', 'ecp-proxy-app']

    homepage = "http://hpc.pnl.gov/people/hala/grappolo.html"
    git = "https://github.com/Exa-Graph/miniVite.git"

    version('develop', branch='master')
    version('1.0', tag='v1.0')
    version('1.1', tag='v1.1')

    variant('openmp', default=True, description='Build with OpenMP support')
    variant('opt', default=True, description='Optimization flags')

    depends_on('mpi')

    @property
    def build_targets(self):
        """Make variables handed to the package Makefile."""
        # Flags used when compiling; link flags are collected separately.
        compile_flags = ['-std=c++11 -g -DCHECK_NUM_EDGES -DPRINT_EXTRA_NEDGES']
        link_flags = []

        if '+openmp' in self.spec:
            # OpenMP is needed at both compile and link time.
            compile_flags.append(self.compiler.openmp_flag)
            link_flags.append(self.compiler.openmp_flag)
        if '+opt' in self.spec:
            compile_flags.append(' -O3 ')

        return [
            'CXXFLAGS={0}'.format(' '.join(compile_flags)),
            'OPTFLAGS={0}'.format(' '.join(link_flags)),
            # Always build with the MPI compiler wrapper.
            'CXX={0}'.format(self.spec['mpi'].mpicxx),
        ]

    def install(self, spec, prefix):
        """Copy the built binary (renamed between releases) into prefix.bin."""
        mkdirp(prefix.bin)
        if self.version >= Version('1.1'):
            install('miniVite', prefix.bin)
        elif self.version >= Version('1.0'):
            install('dspl', prefix.bin)
| lgpl-2.1 |
karthik-sethuraman/ONFOpenTransport | RI/flask_server/tapi_server/util.py | 9 | 3385 | import datetime
import six
import typing
def _deserialize(data, klass):
    """Deserializes dict, list, str into an object.

    :param data: dict, list or str.
    :param klass: class literal, or string of class name.

    :return: object.
    """
    if data is None:
        return None

    if klass in six.integer_types or klass in (float, str, bool):
        return _deserialize_primitive(data, klass)
    elif klass == object:
        return _deserialize_object(data)
    elif klass == datetime.date:
        return deserialize_date(data)
    elif klass == datetime.datetime:
        return deserialize_datetime(data)

    # Generic container types (typing.List[X] / typing.Dict[K, V]).
    # Python < 3.7 exposes the concrete container via ``__extra__`` on
    # GenericMeta instances; Python 3.7+ removed GenericMeta and exposes it
    # via ``__origin__``. The previous ``type(klass) == typing.GenericMeta``
    # check raised AttributeError on Python 3.7+.
    origin = getattr(klass, '__extra__', None) or getattr(klass, '__origin__', None)
    if origin is list:
        return _deserialize_list(data, klass.__args__[0])
    if origin is dict:
        return _deserialize_dict(data, klass.__args__[1])

    # Anything else is treated as a generated model class.
    return deserialize_model(data, klass)
def _deserialize_primitive(data, klass):
"""Deserializes to primitive type.
:param data: data to deserialize.
:param klass: class literal.
:return: int, long, float, str, bool.
:rtype: int | long | float | str | bool
"""
try:
value = klass(data)
except UnicodeEncodeError:
value = six.u(data)
except TypeError:
value = data
return value
def _deserialize_object(value):
"""Return an original value.
:return: object.
"""
return value
def deserialize_date(string):
"""Deserializes string to date.
:param string: str.
:type string: str
:return: date.
:rtype: date
"""
try:
from dateutil.parser import parse
return parse(string).date()
except ImportError:
return string
def deserialize_datetime(string):
"""Deserializes string to datetime.
The string should be in iso8601 datetime format.
:param string: str.
:type string: str
:return: datetime.
:rtype: datetime
"""
try:
from dateutil.parser import parse
return parse(string)
except ImportError:
return string
def deserialize_model(data, klass):
    """Deserializes list or dict to model.

    :param data: dict, list.
    :type data: dict | list
    :param klass: class literal.
    :return: model object.
    """
    instance = klass()

    # Models without declared attribute types cannot be populated;
    # return the raw payload instead.
    if not instance.openapi_types:
        return data

    for attr, attr_type in six.iteritems(instance.openapi_types):
        key = instance.attribute_map[attr]
        if data is not None and key in data and isinstance(data, (list, dict)):
            setattr(instance, attr, _deserialize(data[key], attr_type))

    return instance
def _deserialize_list(data, boxed_type):
"""Deserializes a list and its elements.
:param data: list to deserialize.
:type data: list
:param boxed_type: class literal.
:return: deserialized list.
:rtype: list
"""
return [_deserialize(sub_data, boxed_type)
for sub_data in data]
def _deserialize_dict(data, boxed_type):
    """Deserializes a dict and its elements.

    :param data: dict to deserialize.
    :type data: dict
    :param boxed_type: class literal.

    :return: deserialized dict.
    :rtype: dict
    """
    # Keys are kept as-is; values are deserialized against the declared type.
    result = {}
    for key, value in six.iteritems(data):
        result[key] = _deserialize(value, boxed_type)
    return result
| apache-2.0 |
w1z2g3/crossbar | crossbar/router/protocol.py | 1 | 23832 | #####################################################################################
#
# Copyright (C) Tavendo GmbH
#
# Unless a separate license agreement exists between you and Tavendo GmbH (e.g. you
# have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import
import os
import traceback
import crossbar
from autobahn.twisted import websocket
from autobahn.twisted import rawsocket
from autobahn.websocket.compress import * # noqa
from txaio import make_logger
from crossbar.router.cookiestore import CookieStoreMemoryBacked, CookieStoreFileBacked
# Module-level logger (txaio); classes below define their own `log` as well.
log = make_logger()

# Public API of this module: WAMP transport factories/protocols for both
# WebSocket and RawSocket, server and client side.
# NOTE(review): the RawSocket and client classes are expected to be defined
# later in this file (not visible in this chunk) — confirm.
__all__ = (
    'WampWebSocketServerFactory',
    'WampRawSocketServerFactory',
    'WampWebSocketServerProtocol',
    'WampRawSocketServerProtocol',
    'WampWebSocketClientFactory',
    'WampRawSocketClientFactory',
    'WampWebSocketClientProtocol',
    'WampRawSocketClientProtocol',
)
def set_websocket_options(factory, options):
    """
    Set WebSocket options on a WebSocket or WAMP-WebSocket factory.

    Translates Crossbar.io transport configuration keys into Autobahn
    ``setProtocolOptions()`` keyword arguments, converting millisecond
    timeouts to seconds and wiring up permessage-deflate compression.

    :param factory: The WebSocket or WAMP-WebSocket factory to set options on.
    :type factory: Instance of :class:`autobahn.twisted.websocket.WampWebSocketServerFactory`
       or :class:`autobahn.twisted.websocket.WebSocketServerFactory`.
    :param options: Options from Crossbar.io transport configuration.
    :type options: dict
    """
    c = options

    # we need to pop() this, since it is not a WebSocket option to be consumed
    # by setProtocolOption(), but will get used in onConnect() ("STRICT_PROTOCOL_NEGOTIATION")
    #
    # NOTE(review): pop() mutates the caller's options dict — confirm callers
    # do not reuse the dict expecting this key to still be present.
    factory._requireWebSocketSubprotocol = c.pop("require_websocket_subprotocol", True)

    # WebSocket protocol versions to accept: Hybi-10 is version 8,
    # RFC6455 is version 13; both enabled by default.
    versions = []
    if c.get("enable_hybi10", True):
        versions.append(8)
    if c.get("enable_rfc6455", True):
        versions.append(13)

    # FIXME: enforce!!
    #
    # self.connectionCap = c.get("max_connections")

    # convert to seconds
    # (config values are in milliseconds; 0 means "use Autobahn default")
    #
    openHandshakeTimeout = float(c.get("open_handshake_timeout", 0))
    if openHandshakeTimeout:
        openHandshakeTimeout = openHandshakeTimeout / 1000.

    closeHandshakeTimeout = float(c.get("close_handshake_timeout", 0))
    if closeHandshakeTimeout:
        closeHandshakeTimeout = closeHandshakeTimeout / 1000.

    # auto-ping interval/timeout: only set when explicitly configured,
    # also converted from milliseconds to seconds
    autoPingInterval = None
    if "auto_ping_interval" in c:
        autoPingInterval = float(c["auto_ping_interval"]) / 1000.

    autoPingTimeout = None
    if "auto_ping_timeout" in c:
        autoPingTimeout = float(c["auto_ping_timeout"]) / 1000.

    factory.setProtocolOptions(versions=versions,
                               webStatus=c.get("enable_webstatus", True),
                               utf8validateIncoming=c.get("validate_utf8", True),
                               maskServerFrames=c.get("mask_server_frames", False),
                               requireMaskedClientFrames=c.get("require_masked_client_frames", True),
                               applyMask=c.get("apply_mask", True),
                               maxFramePayloadSize=c.get("max_frame_size", 0),
                               maxMessagePayloadSize=c.get("max_message_size", 0),
                               autoFragmentSize=c.get("auto_fragment_size", 0),
                               failByDrop=c.get("fail_by_drop", False),
                               echoCloseCodeReason=c.get("echo_close_codereason", False),
                               openHandshakeTimeout=openHandshakeTimeout,
                               closeHandshakeTimeout=closeHandshakeTimeout,
                               tcpNoDelay=c.get("tcp_nodelay", True),
                               autoPingInterval=autoPingInterval,
                               autoPingTimeout=autoPingTimeout,
                               autoPingSize=c.get("auto_ping_size", None),
                               serveFlashSocketPolicy=c.get("enable_flash_policy", None),
                               flashSocketPolicy=c.get("flash_policy", None),
                               allowedOrigins=c.get("allowed_origins", ["*"]))

    # WebSocket compression
    #
    # default: decline every compression offer (the accept callback
    # returning None means "no compression")
    factory.setProtocolOptions(perMessageCompressionAccept=lambda _: None)
    if 'compression' in c:

        # permessage-deflate
        #
        if 'deflate' in c['compression']:

            log.debug("enabling WebSocket compression (permessage-deflate)")

            params = c['compression']['deflate']

            requestNoContextTakeover = params.get('request_no_context_takeover', False)
            requestMaxWindowBits = params.get('request_max_window_bits', 0)
            noContextTakeover = params.get('no_context_takeover', None)
            windowBits = params.get('max_window_bits', None)
            memLevel = params.get('memory_level', None)

            def accept(offers):
                # accept the first permessage-deflate offer that is compatible
                # with our configured request_* constraints; falls through to
                # an implicit None (no compression) when nothing matches
                for offer in offers:
                    if isinstance(offer, PerMessageDeflateOffer):
                        if (requestMaxWindowBits == 0 or offer.acceptMaxWindowBits) and \
                           (not requestNoContextTakeover or offer.acceptNoContextTakeover):
                            return PerMessageDeflateOfferAccept(offer,
                                                                requestMaxWindowBits=requestMaxWindowBits,
                                                                requestNoContextTakeover=requestNoContextTakeover,
                                                                noContextTakeover=noContextTakeover,
                                                                windowBits=windowBits,
                                                                memLevel=memLevel)

            factory.setProtocolOptions(perMessageCompressionAccept=accept)
class WampWebSocketServerProtocol(websocket.WampWebSocketServerProtocol):
    """
    Crossbar.io WAMP-over-WebSocket server protocol.

    Extends the stock Autobahn server protocol with cookie tracking,
    optional cookie-based authentication, and collection of transport-level
    info for later exposure via WAMP meta events.
    """

    log = make_logger()

    def __init__(self):
        super(WampWebSocketServerProtocol, self).__init__()
        # cookie tracking ID for this connection; assigned in onConnect()
        # when the factory has a cookie store configured
        self._cbtid = None

    def onConnect(self, request):
        """
        Handle the WebSocket opening handshake.

        Performs subprotocol negotiation via the superclass, then parses or
        creates the tracking cookie, optionally authenticates the client from
        the cookie store, and records transport-level info.

        :param request: The incoming WebSocket connection request.
        :returns: Tuple of (accepted subprotocol, HTTP response headers).
        """
        if self.factory.debug_traffic:
            from twisted.internet import reactor

            def print_traffic():
                # log wire-level traffic stats for this peer once per second
                self.log.info("Traffic {}: {} / {} in / out bytes - {} / {} in / out msgs".format(self.peer,
                                                                                                 self.trafficStats.incomingOctetsWireLevel,
                                                                                                 self.trafficStats.outgoingOctetsWireLevel,
                                                                                                 self.trafficStats.incomingWebSocketMessages,
                                                                                                 self.trafficStats.outgoingWebSocketMessages))
                reactor.callLater(1, print_traffic)

            print_traffic()

        # if WebSocket client did not set WS subprotocol, assume "wamp.2.json"
        #
        self.STRICT_PROTOCOL_NEGOTIATION = self.factory._requireWebSocketSubprotocol

        # handle WebSocket opening handshake
        #
        protocol, headers = websocket.WampWebSocketServerProtocol.onConnect(self, request)

        try:
            self._origin = request.origin

            # transport-level WMAP authentication info
            #
            self._authid = None
            self._authrole = None
            self._authmethod = None
            self._authprovider = None

            # cookie tracking and cookie-based authentication
            #
            self._cbtid = None

            if self.factory._cookiestore:

                # try to parse an already set cookie from HTTP request headers
                self._cbtid = self.factory._cookiestore.parse(request.headers)

                # if no cookie is set, create a new one ..
                if self._cbtid is None:

                    self._cbtid, headers['Set-Cookie'] = self.factory._cookiestore.create()

                    self.log.debug("Setting new cookie: {cookie}",
                                   cookie=headers['Set-Cookie'])
                else:
                    self.log.debug("Cookie already set")

                # add this WebSocket connection to the set of connections
                # associated with the same cookie
                self.factory._cookiestore.addProto(self._cbtid, self)

                self.log.debug("Cookie tracking enabled on WebSocket connection {}".format(self))

                # if cookie-based authentication is enabled, set auth info from cookie store
                #
                if 'auth' in self.factory._config and 'cookie' in self.factory._config['auth']:

                    self._authid, self._authrole, self._authmethod = self.factory._cookiestore.getAuth(self._cbtid)

                    if self._authid:
                        # there is a cookie set, and the cookie was previously successfully authenticated,
                        # so immediately authenticate the client using that information
                        self._authprovider = u'cookie'
                        self.log.debug("Authenticated client via cookie cbtid={cbtid} as authid={authid}, authrole={authrole}, authmethod={authmethod}",
                                       cbtid=self._cbtid, authid=self._authid, authrole=self._authrole, authmethod=self._authmethod)
                    else:
                        # there is a cookie set, but the cookie wasn't authenticated yet using a different auth method
                        self.log.debug("Cookie-based authentication enabled, but cookie isn't authenticated yet")
                else:
                    self.log.debug("Cookie-based authentication disabled")
            else:
                self.log.debug("Cookie tracking disabled on WebSocket connection {}".format(self))

            # remember transport level info for later forwarding in
            # WAMP meta event "wamp.session.on_join"
            #
            self._transport_info = {
                u'type': 'websocket',
                u'protocol': protocol,
                u'peer': self.peer,
                u'http_headers_received': request.headers,
                u'http_headers_sent': headers,
                u'cbtid': self._cbtid
            }

            # accept the WebSocket connection, speaking subprotocol `protocol`
            # and setting HTTP headers `headers`
            #
            return (protocol, headers)

        except Exception:
            # NOTE(review): any failure in the cookie/auth bookkeeping is
            # swallowed here and onConnect implicitly returns None — confirm
            # this is intentional rather than re-raising to reject the client.
            traceback.print_exc()

    def sendServerStatus(self, redirectUrl=None, redirectAfter=0):
        """
        Used to send out server status/version upon receiving a HTTP/GET without
        upgrade to WebSocket header (and option serverStatus is True).
        """
        try:
            page = self.factory._templates.get_template('cb_ws_status.html')
            self.sendHtml(page.render(redirectUrl=redirectUrl,
                                      redirectAfter=redirectAfter,
                                      cbVersion=crossbar.__version__,
                                      wsUri=self.factory.url,
                                      peer=self.peer,
                                      workerPid=os.getpid()))
        except Exception:
            # template rendering must not take down the connection; log only
            self.log.failure("Error rendering WebSocket status page template: {log_failure.value}")

    def onClose(self, wasClean, code, reason):
        """
        Handle WebSocket close: run superclass close handling, then
        de-register this connection from the cookie store.
        """
        super(WampWebSocketServerProtocol, self).onClose(wasClean, code, reason)

        # remove this WebSocket connection from the set of connections
        # associated with the same cookie
        if self._cbtid:
            self.factory._cookiestore.dropProto(self._cbtid, self)
class WampWebSocketServerFactory(websocket.WampWebSocketServerFactory):
    """
    Crossbar.io WAMP-over-WebSocket server factory.

    Configures WAMP serializers, the cookie store and WebSocket options
    from a Crossbar transport configuration dict.
    """
    protocol = WampWebSocketServerProtocol
    log = make_logger()
    def __init__(self, factory, cbdir, config, templates):
        """
        Ctor.
        :param factory: WAMP session factory.
        :type factory: An instance of ..
        :param cbdir: The Crossbar.io node directory.
        :type cbdir: str
        :param config: Crossbar transport configuration.
        :type config: dict
        :param templates: Jinja2 template environment used for the 404/status pages.
        """
        self.debug_traffic = config.get('debug_traffic', False)
        options = config.get('options', {})
        server = "Crossbar/{}".format(crossbar.__version__)
        externalPort = options.get('external_port', None)
        # explicit list of WAMP serializers
        #
        if 'serializers' in config:
            serializers = []
            sers = set(config['serializers'])
            # each serializer is imported lazily; a name is discarded from
            # `sers` only on successful import, so any leftover name triggers
            # the "invalid WAMP serializers" error below
            if 'cbor' in sers:
                # try CBOR WAMP serializer
                try:
                    from autobahn.wamp.serializer import CBORSerializer
                    serializers.append(CBORSerializer(batched=True))
                    serializers.append(CBORSerializer())
                except ImportError:
                    self.log.warn("Warning: could not load WAMP-CBOR serializer")
                else:
                    sers.discard('cbor')
            if 'msgpack' in sers:
                # try MsgPack WAMP serializer
                try:
                    from autobahn.wamp.serializer import MsgPackSerializer
                    serializers.append(MsgPackSerializer(batched=True))
                    serializers.append(MsgPackSerializer())
                except ImportError:
                    self.log.warn("Warning: could not load WAMP-MsgPack serializer")
                else:
                    sers.discard('msgpack')
            if 'ubjson' in sers:
                # try UBJSON WAMP serializer
                try:
                    from autobahn.wamp.serializer import UBJSONSerializer
                    serializers.append(UBJSONSerializer(batched=True))
                    serializers.append(UBJSONSerializer())
                except ImportError:
                    self.log.warn("Warning: could not load WAMP-UBJSON serializer")
                else:
                    sers.discard('ubjson')
            if 'json' in sers:
                # try JSON WAMP serializer
                try:
                    from autobahn.wamp.serializer import JsonSerializer
                    serializers.append(JsonSerializer(batched=True))
                    serializers.append(JsonSerializer())
                except ImportError:
                    self.log.warn("Warning: could not load WAMP-JSON serializer")
                else:
                    sers.discard('json')
            if not serializers:
                raise Exception("no valid WAMP serializers specified")
            if len(sers) > 0:
                raise Exception("invalid WAMP serializers specified (the following were unprocessed) {}".format(sers))
        else:
            # no explicit list: let the base factory use its defaults
            serializers = None
        websocket.WampWebSocketServerFactory.__init__(self,
                                                      factory,
                                                      serializers=serializers,
                                                      url=config.get('url', None),
                                                      server=server,
                                                      externalPort=externalPort)
        # Crossbar.io node directory
        self._cbdir = cbdir
        # transport configuration
        self._config = config
        # Jinja2 templates for 404 etc
        self._templates = templates
        # cookie tracking
        if 'cookie' in config:
            cookie_store_type = config['cookie']['store']['type']
            # ephemeral, memory-backed cookie store
            if cookie_store_type == 'memory':
                self._cookiestore = CookieStoreMemoryBacked(config['cookie'])
                self.log.info("Memory-backed cookie store active.")
            # persistent, file-backed cookie store
            elif cookie_store_type == 'file':
                cookie_store_file = os.path.abspath(os.path.join(self._cbdir, config['cookie']['store']['filename']))
                self._cookiestore = CookieStoreFileBacked(cookie_store_file, config['cookie'])
                self.log.info("File-backed cookie store active {cookie_store_file}", cookie_store_file=cookie_store_file)
            else:
                # should not arrive here as the config should have been checked before
                raise Exception("logic error")
        else:
            self._cookiestore = None
        # set WebSocket options
        set_websocket_options(self, options)
class WampRawSocketServerProtocol(rawsocket.WampRawSocketServerProtocol):
    """
    Crossbar.io WAMP-over-RawSocket server protocol.
    """
    log = make_logger()
    def connectionMade(self):
        """Reset per-connection auth/cookie state and record transport info."""
        rawsocket.WampRawSocketServerProtocol.connectionMade(self)
        # no transport-level authentication has happened yet, and RawSocket
        # carries no cookies, so there is no cookie tracking ID either
        for attr in ('_authid', '_authrole', '_authmethod', '_authprovider', '_cbtid'):
            setattr(self, attr, None)
        # transport level info, forwarded later in the WAMP meta event
        # "wamp.session.on_join"
        self._transport_info = {
            u'type': 'rawsocket',
            u'protocol': None,  # FIXME
            u'peer': self.peer
        }
    def lengthLimitExceeded(self, length):
        """Log and drop the connection when a message exceeds MAX_LENGTH."""
        self.log.error("failing RawSocket connection - message length exceeded: message was {len} bytes, but current maximum is {maxlen} bytes",
                       len=length, maxlen=self.MAX_LENGTH)
        self.transport.loseConnection()
class WampRawSocketServerFactory(rawsocket.WampRawSocketServerFactory):
    """
    Crossbar.io WAMP-over-RawSocket server factory.
    """
    protocol = WampRawSocketServerProtocol
    log = make_logger()
    def __init__(self, factory, config):
        """
        :param factory: WAMP session factory.
        :param config: Crossbar transport configuration (dict). Optional keys:
            'serializers' (explicit list) and 'max_message_size' (bytes).
        """
        # remember transport configuration
        #
        self._config = config
        # explicit list of WAMP serializers
        #
        if u'serializers' in config:
            serializers = []
            sers = set(config['serializers'])
            # names are discarded from `sers` only on successful import,
            # so any leftover name triggers the error below
            if u'cbor' in sers:
                # try CBOR WAMP serializer
                try:
                    from autobahn.wamp.serializer import CBORSerializer
                    serializers.append(CBORSerializer())
                except ImportError:
                    self.log.warn("Warning: could not load WAMP-CBOR serializer")
                else:
                    sers.discard('cbor')
            if u'msgpack' in sers:
                # try MsgPack WAMP serializer
                try:
                    from autobahn.wamp.serializer import MsgPackSerializer
                    serializer = MsgPackSerializer()
                    serializer._serializer.ENABLE_V5 = False  # FIXME
                    serializers.append(serializer)
                except ImportError:
                    self.log.warn("Warning: could not load WAMP-MsgPack serializer")
                else:
                    sers.discard('msgpack')
            if u'json' in sers:
                # try JSON WAMP serializer
                try:
                    from autobahn.wamp.serializer import JsonSerializer
                    serializers.append(JsonSerializer())
                except ImportError:
                    self.log.warn("Warning: could not load WAMP-JSON serializer")
                else:
                    sers.discard('json')
            if not serializers:
                raise Exception("no valid WAMP serializers specified")
            if len(sers) > 0:
                raise Exception("invalid WAMP serializers specified (the following were unprocessed) {}".format(sers))
        else:
            serializers = None
        # Maximum message size
        #
        self._max_message_size = config.get('max_message_size', 128 * 1024)  # default is 128kB
        rawsocket.WampRawSocketServerFactory.__init__(self, factory, serializers)
        self.log.debug("RawSocket transport factory created using {serializers} serializers, max. message size {maxsize}",
                       serializers=serializers, maxsize=self._max_message_size)
    def buildProtocol(self, addr):
        """Build a protocol instance carrying the configured message size cap."""
        p = self.protocol()
        p.factory = self
        p.MAX_LENGTH = self._max_message_size
        return p
class WampWebSocketClientProtocol(websocket.WampWebSocketClientProtocol):
    """
    Crossbar.io WAMP-over-WebSocket client protocol.

    All behavior is inherited unchanged from the Autobahn base class.
    """
class WampWebSocketClientFactory(websocket.WampWebSocketClientFactory):
    """
    Crossbar.io WAMP-over-WebSocket client factory.
    """
    protocol = WampWebSocketClientProtocol
    def buildProtocol(self, addr):
        # keep a reference to the last protocol built; presumably read by
        # callers elsewhere in the codebase -- TODO confirm against callers
        self._proto = websocket.WampWebSocketClientFactory.buildProtocol(self, addr)
        return self._proto
class WampRawSocketClientProtocol(rawsocket.WampRawSocketClientProtocol):
    """
    Crossbar.io WAMP-over-RawSocket client protocol.

    All behavior is inherited unchanged from the Autobahn base class.
    """
class WampRawSocketClientFactory(rawsocket.WampRawSocketClientFactory):
    """
    Crossbar.io WAMP-over-RawSocket client factory.
    """
    protocol = WampRawSocketClientProtocol
    def __init__(self, factory, config):
        """
        :param factory: WAMP session factory.
        :param config: Crossbar transport configuration (dict). Key
            'serializer' selects exactly one of 'json' (default),
            'msgpack' or 'cbor'.
        """
        # transport configuration
        self._config = config
        # WAMP serializer
        #
        serid = config.get(u'serializer', u'json')
        if serid == u'json':
            # try JSON WAMP serializer
            try:
                from autobahn.wamp.serializer import JsonSerializer
                serializer = JsonSerializer()
            except ImportError:
                raise Exception("could not load WAMP-JSON serializer")
        elif serid == u'msgpack':
            # try MessagePack WAMP serializer
            try:
                from autobahn.wamp.serializer import MsgPackSerializer
                serializer = MsgPackSerializer()
                serializer._serializer.ENABLE_V5 = False  # FIXME
            except ImportError:
                raise Exception("could not load WAMP-MessagePack serializer")
        elif serid == u'cbor':
            # try CBOR WAMP serializer
            try:
                from autobahn.wamp.serializer import CBORSerializer
                serializer = CBORSerializer()
            except ImportError:
                raise Exception("could not load WAMP-CBOR serializer")
        else:
            raise Exception("invalid WAMP serializer '{}'".format(serid))
        rawsocket.WampRawSocketClientFactory.__init__(self, factory, serializer)
| agpl-3.0 |
RevanProdigalKnight/sublimetext-codeformatter | codeformatter/lib/htmlbeautifier/bs4/builder/_htmlparser.py | 24 | 9102 | """Use the HTMLParser library to parse HTML files that aren't too bad."""
__all__ = [
'HTMLParserTreeBuilder',
]
from html.parser import HTMLParser
try:
    from html.parser import HTMLParseError
except ImportError as e:
    # HTMLParseError is removed in Python 3.5. Since it can never be
    # thrown in 3.5, we can just define our own class as a placeholder.
    # (the bound name `e` is unused; kept for byte-compatibility)
    class HTMLParseError(Exception):
        pass
import sys
import warnings
# Starting in Python 3.2, the HTMLParser constructor takes a 'strict'
# argument, which we'd like to set to False. Unfortunately,
# http://bugs.python.org/issue13273 makes strict=True a better bet
# before Python 3.2.3.
#
# At the end of this file, we monkeypatch HTMLParser so that
# strict=True works well on Python 3.2.2.
# Feature flags for the running interpreter's HTMLParser constructor:
# - 3.2.3+ (but pre-3.3): constructor accepts strict=...
# - 3.3: strict= is deprecated
# - 3.4+: constructor accepts convert_charrefs=...
major, minor, release = sys.version_info[:3]
CONSTRUCTOR_TAKES_STRICT = major == 3 and minor == 2 and release >= 3
CONSTRUCTOR_STRICT_IS_DEPRECATED = major == 3 and minor == 3
CONSTRUCTOR_TAKES_CONVERT_CHARREFS = major == 3 and minor >= 4
from bs4.element import (
CData,
Comment,
Declaration,
Doctype,
ProcessingInstruction,
)
from bs4.dammit import EntitySubstitution, UnicodeDammit
from bs4.builder import (
HTML,
HTMLTreeBuilder,
STRICT,
)
HTMLPARSER = 'html.parser'
class BeautifulSoupHTMLParser(HTMLParser):
    """Adapter that feeds html.parser events into a BeautifulSoup object.

    The driving tree builder assigns ``self.soup`` before ``feed()`` is
    called; every handler below forwards its parse event to that soup.
    """

    def handle_starttag(self, name, attrs):
        """Forward a start tag, normalizing attribute values.

        `attrs` is a list of (name, value) pairs from html.parser; values
        of None (valueless attributes) become '' for consistency with the
        other tree builders.
        """
        # XXX namespace
        attr_dict = {}
        for key, value in attrs:
            if value is None:
                value = ''
            attr_dict[key] = value
        # fix: removed dead assignment `attrvalue = '""'` (never read)
        self.soup.handle_starttag(name, None, None, attr_dict)

    def handle_endtag(self, name):
        self.soup.handle_endtag(name)

    def handle_data(self, data):
        self.soup.handle_data(data)

    def handle_charref(self, name):
        """Resolve a numeric character reference (&#65; / &#x41;) to text."""
        # XXX workaround for a bug in HTMLParser. Remove this once
        # it's fixed in all supported versions.
        # http://bugs.python.org/issue13633
        if name.startswith('x'):
            real_name = int(name.lstrip('x'), 16)
        elif name.startswith('X'):
            real_name = int(name.lstrip('X'), 16)
        else:
            real_name = int(name)
        try:
            data = chr(real_name)
        except (ValueError, OverflowError):
            # out-of-range code point: substitute U+FFFD
            data = "\N{REPLACEMENT CHARACTER}"
        self.handle_data(data)

    def handle_entityref(self, name):
        """Resolve a named entity; unknown names pass through as '&name;'."""
        character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name)
        if character is not None:
            data = character
        else:
            data = "&%s;" % name
        self.handle_data(data)

    def handle_comment(self, data):
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(Comment)

    def handle_decl(self, data):
        """Forward a <!DOCTYPE ...> declaration, stripping the keyword."""
        self.soup.endData()
        if data.startswith("DOCTYPE "):
            data = data[len("DOCTYPE "):]
        elif data == 'DOCTYPE':
            # i.e. "<!DOCTYPE>"
            data = ''
        self.soup.handle_data(data)
        self.soup.endData(Doctype)

    def unknown_decl(self, data):
        """Forward <![CDATA[...]]> as CData, anything else as Declaration."""
        if data.upper().startswith('CDATA['):
            cls = CData
            data = data[len('CDATA['):]
        else:
            cls = Declaration
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(cls)

    def handle_pi(self, data):
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(ProcessingInstruction)
class HTMLParserTreeBuilder(HTMLTreeBuilder):
    """Tree builder that drives Python's built-in html.parser."""
    is_xml = False
    picklable = True
    NAME = HTMLPARSER
    features = [NAME, HTML, STRICT]
    def __init__(self, *args, **kwargs):
        # Only pass strict=/convert_charrefs= where this interpreter's
        # HTMLParser constructor actually supports them.
        if CONSTRUCTOR_TAKES_STRICT and not CONSTRUCTOR_STRICT_IS_DEPRECATED:
            kwargs['strict'] = False
        if CONSTRUCTOR_TAKES_CONVERT_CHARREFS:
            kwargs['convert_charrefs'] = False
        # stored and replayed for every feed() so each document gets a
        # freshly constructed parser
        self.parser_args = (args, kwargs)
    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None, exclude_encodings=None):
        """
        :return: A 4-tuple (markup, original encoding, encoding
        declared within markup, whether any characters had to be
        replaced with REPLACEMENT CHARACTER).
        """
        # str input needs no decoding step
        if isinstance(markup, str):
            yield (markup, None, None, False)
            return
        try_encodings = [user_specified_encoding, document_declared_encoding]
        dammit = UnicodeDammit(markup, try_encodings, is_html=True,
                               exclude_encodings=exclude_encodings)
        yield (dammit.markup, dammit.original_encoding,
               dammit.declared_html_encoding,
               dammit.contains_replacement_characters)
    def feed(self, markup):
        """Parse `markup` into self.soup using a fresh parser instance."""
        args, kwargs = self.parser_args
        parser = BeautifulSoupHTMLParser(*args, **kwargs)
        parser.soup = self.soup
        try:
            parser.feed(markup)
        except HTMLParseError as e:
            warnings.warn(RuntimeWarning(
                "Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
            raise e
# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some
# 3.2.3 code. This ensures they don't treat markup like <p></p> as a
# string.
#
# XXX This code can be removed once most Python 3 users are on 3.2.3.
if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT:
    import re
    attrfind_tolerant = re.compile(
        r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
        r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
    HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant
    locatestarttagend = re.compile(r"""
  <[a-zA-Z][-.a-zA-Z0-9:_]*          # tag name
  (?:\s+                             # whitespace before attribute name
    (?:[a-zA-Z_][-.:a-zA-Z0-9_]*     # attribute name
      (?:\s*=\s*                     # value indicator
        (?:'[^']*'                   # LITA-enclosed value
          |\"[^\"]*\"                # LIT-enclosed value
          |[^'\">\s]+                # bare value
         )
       )?
     )
   )*
  \s*                                # trailing whitespace
""", re.VERBOSE)
    BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend
    from html.parser import tagfind, attrfind
    # NOTE: these functions are defined at module level and assigned onto the
    # class below, so the double-underscore attribute names are NOT mangled.
    def parse_starttag(self, i):
        """Backported 3.2.3 start-tag parser (tolerant attribute matching)."""
        self.__starttag_text = None
        endpos = self.check_for_whole_start_tag(i)
        if endpos < 0:
            return endpos
        rawdata = self.rawdata
        self.__starttag_text = rawdata[i:endpos]
        # Now parse the data between i+1 and j into a tag and attrs
        attrs = []
        match = tagfind.match(rawdata, i+1)
        assert match, 'unexpected call to parse_starttag()'
        k = match.end()
        self.lasttag = tag = rawdata[i+1:k].lower()
        while k < endpos:
            if self.strict:
                m = attrfind.match(rawdata, k)
            else:
                m = attrfind_tolerant.match(rawdata, k)
            if not m:
                break
            attrname, rest, attrvalue = m.group(1, 2, 3)
            if not rest:
                attrvalue = None
            elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
                 attrvalue[:1] == '"' == attrvalue[-1:]:
                attrvalue = attrvalue[1:-1]
            if attrvalue:
                attrvalue = self.unescape(attrvalue)
            attrs.append((attrname.lower(), attrvalue))
            k = m.end()
        end = rawdata[k:endpos].strip()
        if end not in (">", "/>"):
            lineno, offset = self.getpos()
            if "\n" in self.__starttag_text:
                lineno = lineno + self.__starttag_text.count("\n")
                offset = len(self.__starttag_text) \
                         - self.__starttag_text.rfind("\n")
            else:
                offset = offset + len(self.__starttag_text)
            if self.strict:
                self.error("junk characters in start tag: %r"
                           % (rawdata[k:endpos][:20],))
            self.handle_data(rawdata[i:endpos])
            return endpos
        if end.endswith('/>'):
            # XHTML-style empty tag: <span attr="value" />
            self.handle_startendtag(tag, attrs)
        else:
            self.handle_starttag(tag, attrs)
            if tag in self.CDATA_CONTENT_ELEMENTS:
                self.set_cdata_mode(tag)
        return endpos
    def set_cdata_mode(self, elem):
        """Backported 3.2.3 CDATA-mode switch (whitespace-tolerant end tag)."""
        self.cdata_elem = elem.lower()
        self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)
    BeautifulSoupHTMLParser.parse_starttag = parse_starttag
    BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode
    CONSTRUCTOR_TAKES_STRICT = True
| mit |
CXQERP/ODOOERP | addons/account_voucher/invoice.py | 382 | 2496 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class invoice(osv.osv):
    _inherit = 'account.invoice'
    def invoice_pay_customer(self, cr, uid, ids, context=None):
        """Return an act_window opening the 'Pay Invoice' voucher dialog,
        pre-filled from the first invoice in `ids`.

        Returns an empty list when `ids` is empty.
        """
        if not ids: return []
        dummy, view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_voucher', 'view_vendor_receipt_dialog_form')
        inv = self.browse(cr, uid, ids[0], context=context)
        return {
            'name':_("Pay Invoice"),
            'view_mode': 'form',
            'view_id': view_id,
            'view_type': 'form',
            'res_model': 'account.voucher',
            'type': 'ir.actions.act_window',
            'nodestroy': True,
            'target': 'new',
            'domain': '[]',
            'context': {
                'payment_expected_currency': inv.currency_id.id,
                'default_partner_id': self.pool.get('res.partner')._find_accounting_partner(inv.partner_id).id,
                # refunds are paid out, so their residual is negated
                'default_amount': inv.type in ('out_refund', 'in_refund') and -inv.residual or inv.residual,
                'default_reference': inv.name,
                'close_after_process': True,
                'invoice_type': inv.type,
                'invoice_id': inv.id,
                # customer-side documents produce a receipt, supplier-side a payment
                'default_type': inv.type in ('out_invoice','out_refund') and 'receipt' or 'payment',
                'type': inv.type in ('out_invoice','out_refund') and 'receipt' or 'payment'
            }
        }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
vrv/tensorflow | tensorflow/python/framework/ops_test.py | 3 | 64228 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.framework.ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gc
import weakref
from tensorflow.core.framework import attr_value_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import device as pydev
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_ops
from tensorflow.python.framework import test_ops_2
from tensorflow.python.framework import test_util
from tensorflow.python.framework import versions
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resources
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
import tensorflow.python.ops.gradients # pylint: disable=unused-import
from tensorflow.python.platform import googletest
from tensorflow.python.util import compat
ops._set_call_cpp_shape_fn(common_shapes.call_cpp_shape_fn)
class ResourceTest(test_util.TensorFlowTestCase):
  """Tests resource registration and initialization via the test stub ops."""

  def testBuildGraph(self):
    with self.test_session():
      pt = test_ops.stub_resource_handle_op(container="a", shared_name="b")
      test_ops.resource_create_op(pt).run()

  def testInitialize(self):
    # A registered resource shows up as uninitialized until its create_op runs.
    with self.test_session():
      handle = test_ops.stub_resource_handle_op(container="a", shared_name="b")
      resources.register_resource(
          handle=handle,
          create_op=test_ops.resource_create_op(handle),
          is_initialized_op=test_ops.resource_initialized_op(handle))
      self.assertEquals(
          len(
              resources.report_uninitialized_resources(
                  resources.shared_resources()).eval()), 1)
      resources.initialize_resources(resources.shared_resources()).run()
      self.assertEquals(
          len(
              resources.report_uninitialized_resources(
                  resources.shared_resources()).eval()), 0)
class TensorTest(test_util.TensorFlowTestCase):
  """Tests basic ops.Tensor behavior: shape setting and non-iterability."""

  def testShape(self):
    op = ops.Operation(
        ops._NodeDef("noop", "myop"), ops.Graph(), [], [dtypes.float32])
    t = op.outputs[0]
    self.assertEqual(tensor_shape.unknown_shape(), t.get_shape())
    t.set_shape([1, 2, 3])
    self.assertEqual([1, 2, 3], t.get_shape())

  def testIterable(self):
    op = ops.Operation(
        ops._NodeDef("noop", "myop"), ops.Graph(), [], [dtypes.float32])
    t = op.outputs[0]
    self.assertTrue(isinstance(t, ops.Tensor))
    with self.assertRaisesRegexp(TypeError, "not iterable"):
      for _ in t:
        pass
class IndexedSlicesTest(test_util.TensorFlowTestCase):
  """Tests ops.IndexedSlices: densification, negation and scalar multiply."""

  def testToTensor(self):
    with self.test_session():
      values = constant_op.constant([2, 3, 5, 7], shape=[2, 2])
      indices = constant_op.constant([0, 2])
      dense_shape = constant_op.constant([3, 2])
      x = ops.IndexedSlices(values, indices, dense_shape)
      tensor = ops.convert_to_tensor(x, name="tensor")
      # rows 0 and 2 are populated; the missing row 1 densifies to zeros
      self.assertAllEqual(tensor.eval(), [[2, 3], [0, 0], [5, 7]])

  def testNegation(self):
    with self.test_session():
      values = constant_op.constant([2, 3, 5, 7], shape=[2, 2])
      indices = constant_op.constant([0, 2])
      x = -ops.IndexedSlices(values, indices)
      self.assertAllEqual(x.values.eval(), [[-2, -3], [-5, -7]])
      self.assertAllEqual(x.indices.eval(), [0, 2])

  def testScalarMul(self):
    with self.test_session():
      values = constant_op.constant([2, 3, 5, 7], shape=[2, 2])
      indices = constant_op.constant([0, 2])
      x = math_ops.scalar_mul(-2, ops.IndexedSlices(values, indices))
      self.assertAllEqual(x.values.eval(), [[-4, -6], [-10, -14]])
      self.assertAllEqual(x.indices.eval(), [0, 2])
class NodeDefConstructorTest(test_util.TensorFlowTestCase):
  """Tests the ops._NodeDef helper constructor."""

  def testNoArgs(self):
    nodedef = ops._NodeDef("noop", "bar")
    self.assertProtoEquals("op: 'noop' name: 'bar'", nodedef)

  def testArgs(self):
    # device accepts either a string or a DeviceSpec
    nodedef = ops._NodeDef("foo", "bar", device="/device:baz:*")
    self.assertProtoEquals("op:'foo' name:'bar' device:'/device:baz:*'",
                           nodedef)
    nodedef = ops._NodeDef("foo", "bar", device=pydev.DeviceSpec(job="j"))
    self.assertProtoEquals("op:'foo' name:'bar' device:'/job:j'", nodedef)
# NOTE(mrry): Dummy shape registrations for ops used in the tests, since they
# don't have C++ op registrations on which to attach C++ shape fns.
# All fake op types used by the test cases below resolve to unknown shapes.
ops.RegisterShape("a")(common_shapes.unknown_shape)
ops.RegisterShape("b")(common_shapes.unknown_shape)
ops.RegisterShape("c")(common_shapes.unknown_shape)
ops.RegisterShape("add")(common_shapes.unknown_shape)
ops.RegisterShape("an_op")(common_shapes.unknown_shape)
ops.RegisterShape("const")(common_shapes.unknown_shape)
ops.RegisterShape("copy")(common_shapes.unknown_shape)
ops.RegisterShape("foo")(common_shapes.unknown_shape)
ops.RegisterShape("identity")(common_shapes.unknown_shape)
ops.RegisterShape("mul")(common_shapes.unknown_shape)
ops.RegisterShape("nonrefop")(common_shapes.unknown_shape)
ops.RegisterShape("noop")(common_shapes.unknown_shape)
ops.RegisterShape("refop")(common_shapes.unknown_shape)
def _apply_op(g, *args, **kwargs):
op = g.create_op(*args, **kwargs)
if len(op.outputs) == 1:
return op.outputs[0]
else:
return op.outputs
class OperationTest(test_util.TensorFlowTestCase):
  """Tests ops.Operation construction, input/output wiring, device
  assignment, name validation and tensor conversion."""

  def testNoInputs(self):
    op = ops.Operation(
        ops._NodeDef("noop", "myop"),
        ops.Graph(), [], [dtypes.float32, dtypes.string])
    self.assertEqual(2, len(op.values()))
    self.assertEqual(0, len(op.inputs))
    self.assertEqual("myop", op.name)
    float_t, label_str_t = op.values()
    self.assertEqual(dtypes.float32, float_t.dtype)
    self.assertEqual(op, float_t.op)
    self.assertEqual(0, float_t._value_index)
    self.assertEqual(0, len(float_t._consumers))
    self.assertEqual("myop", float_t._as_node_def_input())
    self.assertEqual(dtypes.string, label_str_t.dtype)
    self.assertEqual(op, label_str_t.op)
    self.assertEqual(1, label_str_t._value_index)
    self.assertEqual(0, len(label_str_t._consumers))
    # non-zero output indices render as "name:index"
    self.assertEqual("myop:1", label_str_t._as_node_def_input())
    self.assertProtoEquals("op:'noop' name:'myop'", op.node_def)

  def testNoOutputs(self):
    g = ops.Graph()
    op1 = ops.Operation(ops._NodeDef("noop", "myop1"), g, [], [dtypes.float32])
    float_t, = op1.values()
    op2 = ops.Operation(ops._NodeDef("reop", "myop2"), g, [float_t], [])
    self.assertEqual(0, len(op2.values()))
    self.assertEqual(1, len(op2.inputs))
    self.assertIs(float_t, op2.inputs[0])
    self.assertEqual(1, len(float_t._consumers))
    self.assertEqual(op2, float_t._consumers[0])
    self.assertProtoEquals("op:'noop' name:'myop1'", op1.node_def)
    self.assertProtoEquals("op:'reop' name:'myop2' input:'myop1'", op2.node_def)

  def testInputsAndOutputs(self):
    g = ops.Graph()
    op1 = ops.Operation(ops._NodeDef("noop", "myop1"), g, [], [dtypes.float32])
    self.assertEqual(1, len(op1.values()))
    float1_t, = op1.values()
    op2 = ops.Operation(
        ops._NodeDef("reop", "myop2"), g, [], [dtypes.float32, dtypes.string])
    self.assertEqual(2, len(op2.values()))
    float2_t, label2_str_t = op2.values()
    # Note that we consume label2_str_t twice here.
    op3 = ops.Operation(
        ops._NodeDef("add", "myop3"), g, [float1_t, label2_str_t, label2_str_t],
        [dtypes.float32, dtypes.int32])
    self.assertEqual(2, len(op3.values()))
    self.assertEqual(1, len(float1_t._consumers))
    self.assertEqual(op3, float1_t._consumers[0])
    self.assertEqual(0, len(float2_t._consumers))
    self.assertEqual(2, len(label2_str_t._consumers))
    self.assertEqual(op3, label2_str_t._consumers[0])
    self.assertEqual(op3, label2_str_t._consumers[1])
    self.assertProtoEquals("""
        op:'add' name:'myop3'
        input:'myop1' input:'myop2:1' input:'myop2:1'
        """, op3.node_def)

  def testDeviceObject(self):
    # _set_device accepts both a device string and a DeviceSpec
    op = ops.Operation(ops._NodeDef("noop", "myop"), ops.Graph(), [], [])
    op._set_device("/job:goo/device:GPU:0")
    self.assertProtoEquals(
        "op:'noop' name:'myop' device:'/job:goo/device:GPU:0' ", op.node_def)
    op = ops.Operation(ops._NodeDef("noop", "op2"), ops.Graph(), [], [])
    op._set_device(
        pydev.DeviceSpec(
            job="muu", device_type="CPU", device_index=0))
    self.assertProtoEquals(
        "op:'noop' name:'op2' device:'/job:muu/device:CPU:0'", op.node_def)

  def testReferenceInput(self):
    g = ops.Graph()
    op1 = ops.Operation(
        ops._NodeDef("noop", "op1"), g, [],
        [dtypes.float32_ref, dtypes.float32])
    self.assertProtoEquals("op:'noop' name:'op1'", op1.node_def)
    ref_t, nonref_t = op1.values()
    # NOTE(mrry): Must specify input_types to preserve ref-typed input.
    op2 = ops.Operation(
        ops._NodeDef("refop", "op2"),
        g, [ref_t, nonref_t], [],
        input_types=[dtypes.float32_ref, dtypes.float32])
    self.assertProtoEquals("op:'refop' name:'op2' input:'op1' input:'op1:1'",
                           op2.node_def)
    op3 = ops.Operation(
        ops._NodeDef("nonrefop", "op3"), g, [ref_t, nonref_t], [])
    self.assertProtoEquals("op:'nonrefop' name:'op3' input:'op1' input:'op1:1'",
                           op3.node_def)

  def testInvalidNames(self):
    g = ops.Graph()
    with self.assertRaises(ValueError):
      ops.Operation(ops._NodeDef("op", ""), g)
    with self.assertRaises(ValueError):
      ops.Operation(ops._NodeDef("op", "_invalid"), g)
    with self.assertRaises(ValueError):
      ops.Operation(ops._NodeDef("op", "-invalid"), g)
    with self.assertRaises(ValueError):
      ops.Operation(ops._NodeDef("op", "/invalid"), g)
    with self.assertRaises(ValueError):
      ops.Operation(ops._NodeDef("op", "invalid:0"), g)

  def testNoShapeFunction(self):
    g = ops.Graph()
    ops.Operation(ops._NodeDef("op", "an_op"), g, output_types=[dtypes.float32])
    self.assertEqual(tensor_shape.unknown_shape(),
                     _apply_op(g, "an_op", [], [dtypes.float32]).get_shape())

  def testConvertToTensorNestedArray(self):
    with self.test_session():
      values = [[2], [3], [5], [7]]
      tensor = ops.convert_to_tensor(values)
      self.assertAllEqual((4, 1), tensor.get_shape().as_list())
      self.assertAllEqual(values, tensor.eval())

  def testConvertToTensorNestedTuple(self):
    with self.test_session():
      values = ((2,), (3,), (5,), (7,))
      tensor = ops.convert_to_tensor(values)
      self.assertAllEqual((4, 1), tensor.get_shape().as_list())
      self.assertAllEqual(values, ops.convert_to_tensor(values).eval())

  def testConvertToTensorNestedTensors(self):
    with self.test_session():
      values = ((2,), (3,), (5,), (7,))
      tensor = ops.convert_to_tensor(
          [constant_op.constant(row) for row in values])
      self.assertAllEqual((4, 1), tensor.get_shape().as_list())
      self.assertAllEqual(values, tensor.eval())
      tensor = ops.convert_to_tensor(
          [[constant_op.constant(v) for v in row] for row in values])
      self.assertAllEqual((4, 1), tensor.get_shape().as_list())
      self.assertAllEqual(values, tensor.eval())

  def testConvertToTensorNestedMix(self):
    with self.test_session():
      values = ([2], (3,), [constant_op.constant(5)], constant_op.constant([7]))
      tensor = ops.convert_to_tensor(values)
      self.assertAllEqual((4, 1), tensor.get_shape().as_list())
      self.assertAllEqual(((2,), (3,), (5,), (7,)), tensor.eval())

  def testConvertToTensorPreferred(self):
    with self.test_session():
      values = [2, 3, 5, 7]
      tensor = ops.convert_to_tensor(values, preferred_dtype=dtypes.float32)
      self.assertEqual(dtypes.float32, tensor.dtype)
    with self.test_session():
      # Convert empty tensor to anything.
      values = []
      tensor = ops.convert_to_tensor(values, preferred_dtype=dtypes.int64)
      self.assertEqual(dtypes.int64, tensor.dtype)
    with self.test_session():
      # The preferred dtype is a type error and will convert to
      # float32 instead.
      values = [1.23]
      tensor = ops.convert_to_tensor(values, preferred_dtype=dtypes.int64)
      self.assertEqual(dtypes.float32, tensor.dtype)

  def testConvertToInvalidTensorType(self):
    with self.assertRaises(TypeError):
      # Forcing an invalid dtype should fail with a type error.
      values = [1.23]
      _ = ops.convert_to_tensor(values, dtype=dtypes.int64)

  def testNoConvert(self):
    # Operation cannot be converted to Tensor.
    op = control_flow_ops.no_op()
    with self.assertRaisesRegexp(TypeError,
                                 r"Can't convert Operation '.*' to Tensor"):
      ops.convert_to_tensor(op)

  def testStr(self):
    node_def = ops._NodeDef("noop", "op1")
    op = ops.Operation(node_def, ops.Graph(), [], [dtypes.float32])
    self.assertEqual(str(node_def), str(op))

  def testRepr(self):
    op = ops.Operation(
        ops._NodeDef("noop", "op1"), ops.Graph(), [], [dtypes.float32])
    self.assertEqual("<tf.Operation 'op1' type=noop>", repr(op))
class CreateOpTest(test_util.TensorFlowTestCase):
  """Tests Graph.create_op: node defs, ref inputs and finalized graphs."""

  def testNodeDefArgs(self):
    g = ops.Graph()
    op1 = g.create_op("const", [], [dtypes.float32], None, name="myop1")
    with g.device("/device:GPU:0"):
      op2 = g.create_op(
          "add", [], [dtypes.float32, dtypes.string], None, name="myop2")
    op3 = g.create_op(
        "foo",
        [list(op1.values())[0], list(op2.values())[1], list(op2.values())[0]],
        [dtypes.float32, dtypes.int32],
        None,
        name="myop3")
    self.assertDeviceEqual(None, op1.device)
    self.assertDeviceEqual("/device:GPU:0", op2.device)
    self.assertDeviceEqual(None, op3.device)
    self.assertProtoEquals("name:'myop1' op:'const'", op1.node_def)
    self.assertProtoEquals("name:'myop2' op:'add' device:'/device:GPU:0'",
                           op2.node_def)
    self.assertProtoEquals(
        "name:'myop3' input:'myop1' input:'myop2:1' input:'myop2' op:'foo'",
        op3.node_def)

  def testReferenceInput(self):
    g = ops.Graph()
    op1 = g.create_op(
        "noop", [], [dtypes.float32_ref, dtypes.float32], name="op1")
    self.assertProtoEquals("op:'noop' name:'op1'", op1.node_def)
    ref_t, nonref_t = op1.values()
    # NOTE(mrry): Must specify input_types to preserve ref-typed input.
    op2 = g.create_op(
        "refop", [ref_t, nonref_t], [],
        input_types=[dtypes.float32_ref, dtypes.float32],
        name="op2")
    self.assertProtoEquals("op:'refop' name:'op2' input:'op1' input:'op1:1'",
                           op2.node_def)
    op3 = g.create_op("nonrefop", [ref_t, nonref_t], [], name="op3")
    self.assertProtoEquals("op:'nonrefop' name:'op3' input:'op1' input:'op1:1'",
                           op3.node_def)

  def testFinalized(self):
    # create_op must fail on a finalized graph and work again after unfinalize
    g = ops.Graph()
    g.finalize()
    with self.assertRaises(RuntimeError):
      g.create_op("const", [], [dtypes.float32], None, name="myop1")
    # Test unfinalize.
    g._unsafe_unfinalize()
    g.create_op("const", [], [dtypes.float32], None, name="myop1")
class ApplyOpTest(test_util.TensorFlowTestCase):
  """Tests the _apply_op helper: returned Tensor(s) and NodeDef wiring."""

  def testNodeDefArgs(self):
    # _apply_op returns a single Tensor for one output and a list for many;
    # the Tensors' _as_node_def_input() strings name the producing op
    # (second output referenced as '<name>:1').
    g = ops.Graph()
    t1 = _apply_op(g, "const", [], [dtypes.float32], name="myop1")
    with g.device("/device:GPU:0"):
      t2 = _apply_op(
          g, "add", [], [dtypes.float32, dtypes.string], name="myop2")
    t3 = _apply_op(
        g,
        "foo", [t1, t2[1], t2[0]], [dtypes.float32, dtypes.int32],
        name="myop3")
    self.assertTrue(isinstance(t1, ops.Tensor))
    self.assertTrue(isinstance(t2, list))
    self.assertTrue(isinstance(t3, list))
    self.assertTrue(isinstance(t3[0], ops.Tensor))
    self.assertEqual("myop1", t1._as_node_def_input())
    self.assertEqual("myop2", t2[0]._as_node_def_input())
    self.assertEqual("myop2:1", t2[1]._as_node_def_input())
    self.assertEqual("myop3", t3[0]._as_node_def_input())
    # Validate that we got the right ops as well
    self.assertProtoEquals("name:'myop1' op:'const'", t1.op.node_def)
    self.assertProtoEquals("name:'myop2' op:'add' device:'/device:GPU:0'",
                           t2[0].op.node_def)
    self.assertProtoEquals(
        "name:'myop3' input:'myop1' input:'myop2:1' input:'myop2' op:'foo'",
        t3[0].op.node_def)

  def testReferenceInput(self):
    # Same ref-input behavior as CreateOpTest.testReferenceInput, via
    # _apply_op instead of create_op.
    g = ops.Graph()
    ref_t, nonref_t = _apply_op(
        g, "noop", [], [dtypes.float32_ref, dtypes.float32], name="op1")
    self.assertProtoEquals("op:'noop' name:'op1'", ref_t.op.node_def)
    # NOTE(mrry): Must specify input_types to preserve ref-typed input.
    out_2 = _apply_op(
        g,
        "refop", [ref_t, nonref_t], [dtypes.int32],
        input_types=[dtypes.float32_ref, dtypes.float32],
        name="op2")
    self.assertProtoEquals("op:'refop' name:'op2' input:'op1' input:'op1:1'",
                           out_2.op.node_def)
    out_3 = _apply_op(
        g, "nonrefop", [ref_t, nonref_t], [dtypes.int32], name="op3")
    self.assertProtoEquals("op:'nonrefop' name:'op3' input:'op1' input:'op1:1'",
                           out_3.op.node_def)
class NameStackTest(test_util.TensorFlowTestCase):
  """Tests Graph.unique_name and the name-scope stack."""

  def testBasics(self):
    # unique_name appends _N suffixes per scope; mark_as_used=False peeks at
    # the next name without consuming it, so peek/take pairs must agree.
    g = ops.Graph()
    self.assertEqual("foo", g.unique_name("foo", mark_as_used=False))
    self.assertEqual("foo", g.unique_name("foo", mark_as_used=False))
    self.assertEqual("foo", g.unique_name("foo"))
    self.assertEqual("foo_1", g.unique_name("foo", mark_as_used=False))
    self.assertEqual("foo_1", g.unique_name("foo"))
    self.assertEqual("foo_2", g.unique_name("foo", mark_as_used=False))
    self.assertEqual("foo_2", g.unique_name("foo"))
    # Derived names like foo_1 get their own counters.
    self.assertEqual("foo_1_1", g.unique_name("foo_1", mark_as_used=False))
    self.assertEqual("foo_1_1", g.unique_name("foo_1"))
    self.assertEqual("foo_1_2", g.unique_name("foo_1", mark_as_used=False))
    self.assertEqual("foo_1_2", g.unique_name("foo_1"))
    self.assertEqual("foo_1_2_1", g.unique_name("foo_1_2", mark_as_used=False))
    self.assertEqual("foo_1_2_1", g.unique_name("foo_1_2"))
    with g.name_scope("bar"):
      self.assertEqual("bar/foo", g.unique_name("foo", mark_as_used=False))
      self.assertEqual("bar/foo", g.unique_name("foo"))
      self.assertEqual("bar/foo_1", g.unique_name("foo", mark_as_used=False))
      self.assertEqual("bar/foo_1", g.unique_name("foo"))
      # name_scope(None) resets to the root scope: counters there continue.
      with g.name_scope(None):
        self.assertEqual("foo_3", g.unique_name("foo", mark_as_used=False))
        self.assertEqual("foo_3", g.unique_name("foo"))
      with g.name_scope("baz"):
        self.assertEqual(
            "bar/baz/foo", g.unique_name(
                "foo", mark_as_used=False))
        self.assertEqual("bar/baz/foo", g.unique_name("foo"))
        self.assertEqual(
            "bar/baz/foo_1", g.unique_name(
                "foo", mark_as_used=False))
        self.assertEqual("bar/baz/foo_1", g.unique_name("foo"))
      # Re-entering "baz" yields a uniquified scope "baz_1".
      with g.name_scope("baz"):
        self.assertEqual(
            "bar/baz_1/foo", g.unique_name(
                "foo", mark_as_used=False))
        self.assertEqual("bar/baz_1/foo", g.unique_name("foo"))
        self.assertEqual(
            "bar/baz_1/foo_1", g.unique_name(
                "foo", mark_as_used=False))
        self.assertEqual("bar/baz_1/foo_1", g.unique_name("foo"))
    with g.name_scope("quux"):
      self.assertEqual("quux/foo", g.unique_name("foo", mark_as_used=False))
      self.assertEqual("quux/foo", g.unique_name("foo"))
    with g.name_scope("bar"):
      with g.name_scope("baz"):
        self.assertEqual(
            "bar_1/baz/foo", g.unique_name(
                "foo", mark_as_used=False))
        self.assertEqual("bar_1/baz/foo", g.unique_name("foo"))
    self.assertEqual("foo_4", g.unique_name("foo", mark_as_used=False))
    self.assertEqual("foo_4", g.unique_name("foo"))
    # Scope names count against the root "bar" counter too.
    self.assertEqual("bar_2", g.unique_name("bar", mark_as_used=False))
    self.assertEqual("bar_2", g.unique_name("bar"))

  def testNameAndVariableScope(self):
    # Graph name scopes and variable scopes compose into one prefix.
    with self.test_session() as sess:
      with sess.graph.name_scope("l0"):
        with variable_scope.variable_scope("l1"):
          with sess.graph.name_scope("l1") as scope:
            self.assertEqual("l0/l1/l1/", scope)
            self.assertEqual(
                "l0/l1/l1/foo",
                sess.graph.unique_name(
                    "foo", mark_as_used=False))
            self.assertEqual("l0/l1/l1/foo", sess.graph.unique_name("foo"))
          with sess.graph.name_scope("l2") as scope:
            self.assertEqual("l0/l1/l2/", scope)
            self.assertEqual(
                "l0/l1/l2/foo",
                sess.graph.unique_name(
                    "foo", mark_as_used=False))
            self.assertEqual("l0/l1/l2/foo", sess.graph.unique_name("foo"))

  def testOutOfOrderUniqueName(self):
    # Requesting "foo_2" first must not break later "foo" uniquification.
    g = ops.Graph()
    self.assertEqual("foo_2", g.unique_name("foo_2"))
    self.assertEqual("foo", g.unique_name("foo"))
    self.assertEqual("foo_1", g.unique_name("foo"))
    self.assertEqual("foo_3", g.unique_name("foo"))

  def testInvalidNameRaisesError(self):
    # Empty names and trailing-slash "absolute" scopes are legal; names with
    # ':' or a leading underscore at the root are rejected.
    g = ops.Graph()
    with g.name_scope(""):  # Should not raise
      pass
    with g.name_scope("foo/"):  # Should not raise
      with g.name_scope("_bar"):  # Should not raise
        pass
    with self.assertRaises(ValueError):
      with g.name_scope("foo:0"):
        pass
    with self.assertRaises(ValueError):
      with g.name_scope("_bar"):
        pass
class NameTest(test_util.TensorFlowTestCase):
  """Tests op/tensor name generation and name_scope interaction."""

  def testGenerateName(self):
    # Unnamed ops get the op type as name, uniquified; outputs get ':<i>'.
    g = ops.Graph()
    op0 = g.create_op("const", [], [dtypes.float32, dtypes.float32])
    self.assertEqual("const", op0.name)
    self.assertEqual("const:0", op0.outputs[0].name)
    self.assertEqual("const:1", op0.outputs[1].name)

    op1 = g.create_op("const", [], [dtypes.float32])
    self.assertEqual("const_1", op1.name)
    self.assertEqual("const_1:0", op1.outputs[0].name)

    op2 = g.create_op("const", [], [dtypes.float32], name="my_op")
    self.assertEqual("my_op", op2.name)
    self.assertEqual("my_op:0", op2.outputs[0].name)

  def testNameScope(self):
    g = ops.Graph()

    with g.name_scope("foo") as foo:
      self.assertEqual("foo/", foo)
      with g.name_scope("foo2") as foo2:
        self.assertEqual("foo/foo2/", foo2)
      # name_scope(None) and name_scope("") both reset to the root scope.
      with g.name_scope(None) as empty1:
        self.assertEqual("", empty1)
        with g.name_scope("foo3") as foo3:
          self.assertEqual("foo3/", foo3)
      with g.name_scope("") as empty2:
        self.assertEqual("", empty2)

    self.assertEqual("const", g.create_op("const", [], [dtypes.float32]).name)
    with g.name_scope("bar") as scope:
      self.assertEqual("bar/const",
                       g.create_op("const", [], [dtypes.float32]).name)
      self.assertEqual("bar/const_1",
                       g.create_op("const", [], [dtypes.float32]).name)
      # If you use the value from "with .. as", that values is used as-is.
      self.assertEqual(
          "bar", g.create_op(
              "const", [], [dtypes.float32], name=scope).name)
    with g.name_scope("baz") as scope:
      with g.name_scope("quux"):
        self.assertEqual("baz/quux/const",
                         g.create_op("const", [], [dtypes.float32]).name)
      # If you use the value from the enclosing "with .. as", nothing is pushed.
      with g.name_scope(scope):
        self.assertEqual("baz/const",
                         g.create_op("const", [], [dtypes.float32]).name)
        self.assertEqual(
            "baz", g.create_op(
                "const", [], [dtypes.float32], name=scope).name)
    # A trailing slash in an op name means "use verbatim, minus the slash".
    self.assertEqual(
        "trailing",
        g.create_op(
            "const", [], [dtypes.float32], name="trailing/").name)
    with g.name_scope("bar"):
      # Scope "bar" already used above, so this becomes "bar_1".
      self.assertEqual("bar_1/const",
                       g.create_op("const", [], [dtypes.float32]).name)
    # "bar/" re-enters the existing "bar" scope verbatim.
    with g.name_scope("bar/"):
      self.assertEqual("bar/const_2",
                       g.create_op("const", [], [dtypes.float32]).name)
class DeviceTest(test_util.TensorFlowTestCase):
  """Tests Graph.device: strings, DeviceSpecs, merging, and overrides."""

  def testNoDevice(self):
    # Without a device scope, the NodeDef carries no device field.
    g = ops.Graph()
    op = g.create_op("an_op", [], [dtypes.float32])
    self.assertDeviceEqual(None, op.device)
    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op" }
    """, gd)

  def testDevicePartialString(self):
    # A partial device string is recorded as-is.
    g = ops.Graph()
    with g.device("/job:worker/replica:2"):
      g.create_op("an_op", [], [dtypes.float32])
    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op" device: "/job:worker/replica:2" }
    """, gd)

  def testDeviceFull(self):
    # A fully-specified DeviceSpec renders as the canonical device string.
    g = ops.Graph()
    with g.device(
        pydev.DeviceSpec(
            job="worker", replica=2, task=0, device_type="CPU",
            device_index=3)):
      g.create_op("an_op", [], [dtypes.float32])
    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op"
             device: "/job:worker/replica:2/task:0/device:CPU:3" }
    """, gd)

  def testNesting(self):
    # An inner device scope applies only inside its with-block; the outer
    # scope is restored afterwards.
    g = ops.Graph()
    with g.device("/job:worker/replica:2"):
      g.create_op("an_op", [], [dtypes.float32])
      with g.device("/job:worker/replica:3/task:0"):
        g.create_op("an_op", [], [dtypes.float32])
      g.create_op("an_op", [], [dtypes.float32])
    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op"
             device: "/job:worker/replica:2" }
      node { name: "an_op_1" op: "an_op"
             device: "/job:worker/replica:3/task:0" }
      node { name: "an_op_2" op: "an_op"
             device: "/job:worker/replica:2" }
    """, gd)

  def testNestingString(self):
    # Same as testNesting, with plain device strings.
    g = ops.Graph()
    with g.device("/job:worker/replica:2"):
      g.create_op("an_op", [], [dtypes.float32])
      with g.device("/job:worker/replica:3/task:0"):
        g.create_op("an_op", [], [dtypes.float32])
      g.create_op("an_op", [], [dtypes.float32])
    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op"
             device: "/job:worker/replica:2" }
      node { name: "an_op_1" op: "an_op"
             device: "/job:worker/replica:3/task:0" }
      node { name: "an_op_2" op: "an_op"
             device: "/job:worker/replica:2" }
    """, gd)

  def testNestingOverrideGpuCpu(self):
    # An inner fully-specified device replaces the outer one wholesale.
    g = ops.Graph()
    with g.device("/job:worker/replica:2/device:CPU:1"):
      g.create_op("an_op", [], [dtypes.float32])
      with g.device("/job:worker/replica:2/device:GPU:2"):
        g.create_op("an_op", [], [dtypes.float32])
      g.create_op("an_op", [], [dtypes.float32])
    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op"
             device: "/job:worker/replica:2/device:CPU:1" }
      node { name: "an_op_1" op: "an_op"
             device: "/job:worker/replica:2/device:GPU:2"  }
      node { name: "an_op_2" op: "an_op"
             device: "/job:worker/replica:2/device:CPU:1" }
    """, gd)

  def testNestingWithMergeDeviceFunction(self):
    # merge_device combines with the enclosing scope field-by-field;
    # merge_device(None) keeps the current merged device.
    g = ops.Graph()

    with g.device(pydev.merge_device("/device:GPU:0")):
      g.create_op("an_op", [], [dtypes.float32])
      with g.device(pydev.merge_device("/job:worker")):
        g.create_op("an_op", [], [dtypes.float32])
        with g.device(pydev.merge_device("/device:CPU:0")):
          g.create_op("an_op", [], [dtypes.float32])
          with g.device(pydev.merge_device("/job:ps")):
            g.create_op("an_op", [], [dtypes.float32])
            with g.device(pydev.merge_device(None)):
              g.create_op("an_op", [], [dtypes.float32])

    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op"
             device: "/device:GPU:0" }
      node { name: "an_op_1" op: "an_op"
             device: "/job:worker/device:GPU:0" }
      node { name: "an_op_2" op: "an_op"
             device: "/job:worker/device:CPU:0" }
      node { name: "an_op_3" op: "an_op"
             device: "/job:ps/device:CPU:0" }
      node { name: "an_op_4" op: "an_op"
             device: "/job:ps/device:CPU:0" }
    """, gd)

  def testNestingWithDeviceStrings(self):
    # Plain strings merge the same way as pydev.merge_device specs;
    # the empty string behaves like merge_device(None).
    g = ops.Graph()

    with g.device("/device:GPU:0"):
      g.create_op("an_op", [], [dtypes.float32])
      with g.device("/job:worker"):
        g.create_op("an_op", [], [dtypes.float32])
        with g.device("/device:CPU:0"):
          g.create_op("an_op", [], [dtypes.float32])
          with g.device("/job:ps"):
            g.create_op("an_op", [], [dtypes.float32])
            with g.device(""):
              g.create_op("an_op", [], [dtypes.float32])

    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op"
             device: "/device:GPU:0" }
      node { name: "an_op_1" op: "an_op"
             device: "/job:worker/device:GPU:0" }
      node { name: "an_op_2" op: "an_op"
             device: "/job:worker/device:CPU:0" }
      node { name: "an_op_3" op: "an_op"
             device: "/job:ps/device:CPU:0" }
      node { name: "an_op_4" op: "an_op"
             device: "/job:ps/device:CPU:0" }
    """, gd)

  def testNestingWithDeviceStringWildcard(self):
    # A wildcard index ('*') merges with a concrete index without clobbering
    # it; a concrete index refines a wildcard.
    g = ops.Graph()

    with g.device("/device:GPU:7"):
      g.create_op("an_op", [], [dtypes.float32])
      with g.device("/device:GPU:*"):
        g.create_op("an_op", [], [dtypes.float32])

    with g.device("/device:CPU:*"):
      g.create_op("an_op", [], [dtypes.float32])
      with g.device("/device:CPU:5"):
        g.create_op("an_op", [], [dtypes.float32])

    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op"
             device: "/device:GPU:7" }
      node { name: "an_op_1" op: "an_op"
             device: "/device:GPU:7" }
      node { name: "an_op_2" op: "an_op"
             device: "/device:CPU:*" }
      node { name: "an_op_3" op: "an_op"
             device: "/device:CPU:5" }
    """, gd)

  def testNoneClearsDefault(self):
    # g.device(None) clears the enclosing device for its block only.
    g = ops.Graph()
    with g.device("/job:worker/replica:2/device:CPU:1"):
      g.create_op("an_op", [], [dtypes.float32])
      with g.device(None):
        g.create_op("an_op", [], [dtypes.float32])
      g.create_op("an_op", [], [dtypes.float32])
    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op"
             device: "/job:worker/replica:2/device:CPU:1" }
      node { name: "an_op_1" op: "an_op" }
      node { name: "an_op_2" op: "an_op"
             device: "/job:worker/replica:2/device:CPU:1" }
    """, gd)

  def testNoneIgnoresOuterDeviceFunction(self):
    # g.device(None) also suspends an enclosing device *function*.
    g = ops.Graph()
    with g.device(lambda op: "/job:worker/replica:2/device:CPU:1"):
      g.create_op("an_op", [], [dtypes.float32])
      with g.device(None):
        g.create_op("an_op", [], [dtypes.float32])
      g.create_op("an_op", [], [dtypes.float32])
    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op"
             device: "/job:worker/replica:2/device:CPU:1" }
      node { name: "an_op_1" op: "an_op" }
      node { name: "an_op_2" op: "an_op"
             device: "/job:worker/replica:2/device:CPU:1" }
    """, gd)

  def _overwritingDeviceFunction(self, unused_op):
    # This device function unconditionally overwrites the device of ops.
    #
    # NOTE(mrry): Writing device functions like this is not
    # recommended. Instead, in most cases you should use
    # `pydev.merge_device("/job:ps")` or simply `"/job:ps"` as the
    # argument to `tf.device()` and the device component will be merged in.
    return "/job:overwrite"

  def testOverwritingBehavior(self):
    # A device function outranks nested string/merge_device scopes until a
    # device(None) block disables it.
    g = ops.Graph()
    with g.device(self._overwritingDeviceFunction):
      g.create_op("an_op", [], [dtypes.float32])
      with g.device("/job:ps"):  # Will be overwritten.
        g.create_op("an_op", [], [dtypes.float32])
      with g.device(pydev.merge_device("/job:ps")):  # Will be overwritten.
        g.create_op("an_op", [], [dtypes.float32])
      with g.device(None):  # Disables overwriting device function
        with g.device("/job:ps"):
          g.create_op("an_op", [], [dtypes.float32])
      with g.device(None):  # Disables overwriting device function
        with g.device(pydev.merge_device("/job:ps")):
          g.create_op("an_op", [], [dtypes.float32])
    gd = g.as_graph_def()
    self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op"
             device: "/job:overwrite" }
      node { name: "an_op_1" op: "an_op"
             device: "/job:overwrite" }
      node { name: "an_op_2" op: "an_op"
             device: "/job:overwrite" }
      node { name: "an_op_3" op: "an_op"
             device: "/job:ps" }
      node { name: "an_op_4" op: "an_op"
             device: "/job:ps" }
    """, gd)
class ObjectWithName(object):
  """Trivial object exposing a read-only ``name`` property for tests."""

  def __init__(self, name):
    # Keep the given name private; it is only exposed via the property.
    self._name = name

  @property
  def name(self):
    """Return the name supplied at construction time."""
    return self._name
class CollectionTest(test_util.TensorFlowTestCase):
  """Tests graph collections: add/get, scope filtering, ref vs snapshot."""

  def test_add_to_collection(self):
    g = ops.Graph()
    g.add_to_collection("key", 12)
    g.add_to_collection("other", "foo")
    g.add_to_collection("key", 34)

    # Note that only blank1 is returned.
    g.add_to_collection("blah", 27)
    blank1 = ObjectWithName("prefix/foo")
    g.add_to_collection("blah", blank1)
    blank2 = ObjectWithName("junk/foo")
    g.add_to_collection("blah", blank2)

    self.assertEqual([12, 34], g.get_collection("key"))
    self.assertEqual([], g.get_collection("nothing"))
    self.assertEqual([27, blank1, blank2], g.get_collection("blah"))
    # The scope argument is a regex matched against each item's .name;
    # items without a name (like 27) never match.
    self.assertEqual([blank1], g.get_collection("blah", "prefix"))
    self.assertEqual([blank1], g.get_collection("blah", ".*x"))

    # Make sure that get_collection() returns a first-level
    # copy of the collection, while get_collection_ref() returns
    # the original list.
    other_collection_snapshot = g.get_collection("other")
    other_collection_ref = g.get_collection_ref("other")
    self.assertEqual(["foo"], other_collection_snapshot)
    self.assertEqual(["foo"], other_collection_ref)
    g.add_to_collection("other", "bar")
    self.assertEqual(["foo"], other_collection_snapshot)
    self.assertEqual(["foo", "bar"], other_collection_ref)
    self.assertEqual(["foo", "bar"], g.get_collection("other"))
    self.assertTrue(other_collection_ref is g.get_collection_ref("other"))

    # Verify that getting an empty collection ref returns a modifiable list.
    empty_coll_ref = g.get_collection_ref("empty")
    self.assertEqual([], empty_coll_ref)
    empty_coll = g.get_collection("empty")
    self.assertEqual([], empty_coll)
    self.assertFalse(empty_coll is empty_coll_ref)
    empty_coll_ref2 = g.get_collection_ref("empty")
    self.assertTrue(empty_coll_ref2 is empty_coll_ref)
    # Add to the collection.
    empty_coll_ref.append("something")
    self.assertEqual(["something"], empty_coll_ref)
    self.assertEqual(["something"], empty_coll_ref2)
    self.assertEqual([], empty_coll)
    self.assertEqual(["something"], g.get_collection("empty"))
    empty_coll_ref3 = g.get_collection_ref("empty")
    self.assertTrue(empty_coll_ref3 is empty_coll_ref)

  def test_add_to_collections_uniquify(self):
    g = ops.Graph()
    g.add_to_collections([1, 2, 1], "key")
    # Make sure "key" is not added twice
    self.assertEqual(["key"], g.get_collection(1))

  def test_add_to_collections_from_list(self):
    g = ops.Graph()
    g.add_to_collections(["abc", "123"], "key")
    self.assertEqual(["key"], g.get_collection("abc"))
    self.assertEqual(["key"], g.get_collection("123"))

  def test_add_to_collections_from_tuple(self):
    g = ops.Graph()
    g.add_to_collections(("abc", "123"), "key")
    self.assertEqual(["key"], g.get_collection("abc"))
    self.assertEqual(["key"], g.get_collection("123"))

  def test_add_to_collections_from_generator(self):
    g = ops.Graph()

    def generator():
      yield "abc"
      yield "123"

    g.add_to_collections(generator(), "key")
    self.assertEqual(["key"], g.get_collection("abc"))
    self.assertEqual(["key"], g.get_collection("123"))

  def test_add_to_collections_from_set(self):
    g = ops.Graph()
    g.add_to_collections(set(["abc", "123"]), "key")
    self.assertEqual(["key"], g.get_collection("abc"))
    self.assertEqual(["key"], g.get_collection("123"))

  def test_add_to_collections_from_string(self):
    # A bare string is treated as a single collection name, not iterated.
    g = ops.Graph()
    g.add_to_collections("abc", "key")
    self.assertEqual(["key"], g.get_collection("abc"))

  def test_default_graph(self):
    with ops.Graph().as_default():
      ops.add_to_collection("key", 90)
      ops.add_to_collection("key", 100)
      # Collections are ordered.
      self.assertEqual([90, 100], ops.get_collection("key"))
def an_op(g):
  """Add a float32-producing "an_op" node to graph ``g`` and return its output."""
  output_types = [dtypes.float32]
  return _apply_op(g, "an_op", [], output_types)
ops.NotDifferentiable("an_op")
def copy_op(x):
  """Apply a "copy" op to tensor ``x`` inside ``x``'s own graph."""
  target_graph = x.graph
  return _apply_op(target_graph, "copy", [x], [x.dtype])
@ops.RegisterGradient("copy")
def _CopyGrad(op, x_grad):  # pylint: disable=invalid-name
  """Gradient for "copy": the identity on the incoming gradient."""
  del op  # Unused: copy's gradient does not depend on the forward op.
  return x_grad
@ops.RegisterGradient("copy_override")
def _CopyOverrideGrad(op, x_grad):  # pylint: disable=invalid-name
  """Alternate registered gradient used to test gradient_override_map."""
  del op  # Unused: identical to _CopyGrad, registered under another name.
  return x_grad
class RegistrationTest(test_util.TensorFlowTestCase):
  """Tests gradient-function registration and gradient_override_map."""

  def testRegisterGradients(self):
    # The function registered for "copy" is returned for a copy op.
    g = ops.Graph()
    x = an_op(g)
    y = copy_op(x)
    fn = ops.get_gradient_function(y.op)
    self.assertEqual(_CopyGrad, fn)

  def testOverrideGradients(self):
    # Inside the override map, "copy" resolves to the "copy_override"
    # registration instead.
    g = ops.Graph()
    x = an_op(g)
    with g.gradient_override_map({"copy": "copy_override"}):
      y = copy_op(x)
    fn = ops.get_gradient_function(y.op)
    self.assertEqual(_CopyOverrideGrad, fn)

  def testNonExistentOverride(self):
    # Overriding with an unregistered name fails at lookup time, not at
    # op-creation time.
    g = ops.Graph()
    x = an_op(g)
    with g.gradient_override_map({"copy": "unknown_override"}):
      y = copy_op(x)
    with self.assertRaisesRegexp(LookupError, "unknown_override"):
      ops.get_gradient_function(y.op)
class ComparisonTest(test_util.TensorFlowTestCase):
  """Checks that Tensors support Python membership operators."""

  def testMembershipAllowed(self):
    graph = ops.Graph()
    first = _apply_op(graph, "const", [], [dtypes.float32], name="myop1")
    second = _apply_op(graph, "const", [], [dtypes.float32], name="myop2")
    # Both results are real Tensor objects ...
    self.assertTrue(isinstance(first, ops.Tensor))
    self.assertTrue(isinstance(second, ops.Tensor))
    # ... and `in` / `not in` over lists of Tensors behaves as expected.
    self.assertTrue(first in [first])
    self.assertTrue(first not in [second])
class ControlDependenciesTest(test_util.TensorFlowTestCase):
  """Tests Graph.control_dependencies nesting, clearing, and pruning."""

  def testBasic(self):
    g = ops.Graph()
    a = _apply_op(g, "const", [], [dtypes.float32])
    b = _apply_op(g, "const", [], [dtypes.float32])
    with g.control_dependencies([a]):
      c = _apply_op(g, "const", [], [dtypes.float32])
      d = _apply_op(g, "identity", [b], [dtypes.float32])
      e = _apply_op(g, "identity", [c], [dtypes.float32])
    self.assertEqual(c.op.control_inputs, [a.op])
    self.assertEqual(d.op.control_inputs, [a.op])
    # e should be dominated by c.
    self.assertEqual(e.op.control_inputs, [])

  def testBasicWithConversion(self):
    # Objects with _as_graph_element() are accepted as dependencies.
    g = ops.Graph()
    a = _apply_op(g, "const", [], [dtypes.float32])

    class ConvertibleObj(object):

      def _as_graph_element(self):
        return a

    with g.control_dependencies([ConvertibleObj()]):
      c = _apply_op(g, "const", [], [dtypes.float32])
    self.assertEqual(c.op.control_inputs, [a.op])

  def testNested(self):
    # One combined scope and four nested scopes produce the same deps.
    g = ops.Graph()
    a_1 = _apply_op(g, "const", [], [dtypes.float32])
    a_2 = _apply_op(g, "const", [], [dtypes.float32])
    a_3 = _apply_op(g, "const", [], [dtypes.float32])
    a_4 = _apply_op(g, "const", [], [dtypes.float32])
    with g.control_dependencies([a_1, a_2, a_3, a_4]):
      b_1 = _apply_op(g, "const", [], [dtypes.float32])
    with g.control_dependencies([a_1]):
      with g.control_dependencies([a_2]):
        with g.control_dependencies([a_3]):
          with g.control_dependencies([a_4]):
            b_2 = _apply_op(g, "const", [], [dtypes.float32])
    self.assertItemsEqual([a_1.op, a_2.op, a_3.op, a_4.op],
                          b_1.op.control_inputs)
    self.assertItemsEqual(b_1.op.control_inputs, b_2.op.control_inputs)

  def testClear(self):
    # control_dependencies(None) clears accumulated deps for its block.
    g = ops.Graph()
    a_1 = _apply_op(g, "const", [], [dtypes.float32])
    a_2 = _apply_op(g, "const", [], [dtypes.float32])
    a_3 = _apply_op(g, "const", [], [dtypes.float32])
    a_4 = _apply_op(g, "const", [], [dtypes.float32])
    with g.control_dependencies([a_1]):
      with g.control_dependencies([a_2]):
        with g.control_dependencies(None):
          with g.control_dependencies([a_3]):
            with g.control_dependencies([a_4]):
              # deps [a_3, a_4]
              b_3_4 = _apply_op(g, "const", [], [dtypes.float32])
            # deps = [a_3]
            b_3 = _apply_op(g, "const", [], [dtypes.float32])
          # deps back to None
          b_none = _apply_op(g, "const", [], [dtypes.float32])
        # deps back to [a_1, a_2]
        b_1_2 = _apply_op(g, "const", [], [dtypes.float32])
      # deps back to [a_1]
      b_1 = _apply_op(g, "const", [], [dtypes.float32])
      with g.control_dependencies(None):
        # deps are None again
        b_none2 = _apply_op(g, "const", [], [dtypes.float32])
    self.assertItemsEqual([a_3.op, a_4.op], b_3_4.op.control_inputs)
    self.assertItemsEqual([a_3.op], b_3.op.control_inputs)
    self.assertItemsEqual([], b_none.op.control_inputs)
    self.assertItemsEqual([a_1.op, a_2.op], b_1_2.op.control_inputs)
    self.assertItemsEqual([a_1.op], b_1.op.control_inputs)
    self.assertItemsEqual([], b_none2.op.control_inputs)

  def testComplex(self):
    g = ops.Graph()
    # Usage pattern:
    # * Nodes a_i are constants defined at the outermost scope, and are used
    #   as control inputs for the ith nested scope.
    # * Nodes b_i are defined as Mul(a_3, a_4) at each scope.
    # * Nodes c_i are defined as Mul(a_1, b_1) at each scope.
    # * Nodes d_i are defined as Mul(b_i, c_i) at each scope.
    # * Nodes e_i are defined as Mul(e_i-1, e_i-1) at each scope i > 1.
    a_1 = _apply_op(g, "const", [], [dtypes.float32])
    a_2 = _apply_op(g, "const", [], [dtypes.float32])
    a_3 = _apply_op(g, "const", [], [dtypes.float32])
    a_4 = _apply_op(g, "const", [], [dtypes.float32])
    with g.control_dependencies([a_1]):
      b_1 = _apply_op(g, "mul", [a_3, a_4], [dtypes.float32])
      c_1 = _apply_op(g, "mul", [a_1, b_1], [dtypes.float32])
      d_1 = _apply_op(g, "mul", [b_1, c_1], [dtypes.float32])
      e_1 = _apply_op(g, "const", [], [dtypes.float32])
      with g.control_dependencies([a_2]):
        b_2 = _apply_op(g, "mul", [a_3, a_4], [dtypes.float32])
        c_2 = _apply_op(g, "mul", [a_1, b_1], [dtypes.float32])
        d_2 = _apply_op(g, "mul", [b_2, c_2], [dtypes.float32])
        e_2 = _apply_op(g, "mul", [e_1, e_1], [dtypes.float32])
        with g.control_dependencies([a_3]):
          b_3 = _apply_op(g, "mul", [a_3, a_4], [dtypes.float32])
          c_3 = _apply_op(g, "mul", [a_1, b_1], [dtypes.float32])
          d_3 = _apply_op(g, "mul", [b_3, c_3], [dtypes.float32])
          e_3 = _apply_op(g, "mul", [e_2, e_2], [dtypes.float32])
          with g.control_dependencies([a_4]):
            b_4 = _apply_op(g, "mul", [a_3, a_4], [dtypes.float32])
            c_4 = _apply_op(g, "mul", [a_1, b_1], [dtypes.float32])
            d_4 = _apply_op(g, "mul", [b_4, c_4], [dtypes.float32])
            e_4 = _apply_op(g, "mul", [e_3, e_3], [dtypes.float32])
    # Deps already carried by a data input (directly or transitively) are
    # pruned from control_inputs.
    self.assertItemsEqual([a_1.op], b_1.op.control_inputs)
    self.assertItemsEqual([a_1.op, a_2.op], b_2.op.control_inputs)
    self.assertItemsEqual([a_1.op, a_2.op], b_3.op.control_inputs)
    self.assertItemsEqual([a_1.op, a_2.op], b_4.op.control_inputs)
    self.assertItemsEqual([], c_1.op.control_inputs)
    self.assertItemsEqual([a_2.op], c_2.op.control_inputs)
    self.assertItemsEqual([a_2.op, a_3.op], c_3.op.control_inputs)
    self.assertItemsEqual([a_2.op, a_3.op, a_4.op], c_4.op.control_inputs)
    self.assertItemsEqual([], d_1.op.control_inputs)
    self.assertItemsEqual([], d_2.op.control_inputs)
    self.assertItemsEqual([], d_3.op.control_inputs)
    self.assertItemsEqual([], d_4.op.control_inputs)
    self.assertItemsEqual([a_1.op], e_1.op.control_inputs)
    self.assertItemsEqual([a_2.op], e_2.op.control_inputs)
    self.assertItemsEqual([a_3.op], e_3.op.control_inputs)
    self.assertItemsEqual([a_4.op], e_4.op.control_inputs)

  def testRepeatedDependency(self):
    # Two outputs of one op count as a single control dependency on that op.
    g = ops.Graph()
    a = g.create_op("foo", [], [dtypes.float32, dtypes.float32])
    a_0, a_1 = a.outputs
    with g.control_dependencies([a_0]):
      b = _apply_op(g, "const", [], [dtypes.float32])
      with g.control_dependencies([a_1]):
        c = _apply_op(g, "const", [], [dtypes.float32])

    self.assertEqual(b.op.control_inputs, [a])
    self.assertEqual(c.op.control_inputs, [a])

  def testNoControlDependencyWithDataDependency(self):
    # A control dep that is also a data input is redundant and dropped.
    g = ops.Graph()
    a = _apply_op(g, "const", [], [dtypes.float32])
    with g.control_dependencies([a]):
      b = _apply_op(g, "identity", [a], [dtypes.float32])

    self.assertEqual(b.op.control_inputs, [])
class OpScopeTest(test_util.TensorFlowTestCase):
  """Tests ops.name_scope(name, default_name, values) argument handling."""

  def testNoScopeName(self):
    # With neither name nor default_name, values alone are not enough.
    g0 = ops.Graph()
    values = [
        g0.create_op("a", [], [dtypes.float32]),
        g0.create_op("b", [], [dtypes.float32])
    ]
    with self.assertRaises(ValueError):
      with ops.name_scope(None, values=values):
        pass
    with self.assertRaises(ValueError):
      with ops.name_scope(None, None, values):
        pass

  def testEmptyScopeName(self):
    # An explicit empty name yields the root scope and still installs the
    # values' graph as default (empty name beats default_name).
    g0 = ops.Graph()
    a = g0.create_op("a", [], [dtypes.float32])
    b = g0.create_op("b", [], [dtypes.float32])
    with ops.name_scope("", values=[a, b]) as scope:
      self.assertEqual("", scope)
      self.assertEqual(g0, ops.get_default_graph())
    with ops.name_scope("", "my_default_scope", [a, b]) as scope:
      self.assertEqual("", scope)
      self.assertEqual(g0, ops.get_default_graph())

  def testDefaultScopeName(self):
    # default_name is used only when name is None.
    g0 = ops.Graph()
    a = g0.create_op("a", [], [dtypes.float32])
    b = g0.create_op("b", [], [dtypes.float32])
    scope_name = "my_scope"
    default_scope_name = "my_default_scope"
    with ops.name_scope(scope_name, default_scope_name, [a, b]) as scope:
      self.assertEqual("%s/" % scope_name, scope)
      self.assertEqual(g0, ops.get_default_graph())
    with ops.name_scope(None, default_scope_name, [a, b]) as scope:
      self.assertEqual("%s/" % default_scope_name, scope)
      self.assertEqual(g0, ops.get_default_graph())

  def _testGraphElements(self, graph_elements):
    # Helper: values must all come from one graph; mixing graphs raises.
    scope_name = "my_scope"
    with ops.name_scope(scope_name, values=graph_elements) as scope:
      self.assertEqual("%s/" % scope_name, scope)
      self.assertEqual(graph_elements[0].graph, ops.get_default_graph())
    g1 = ops.Graph()
    c = g1.create_op("c", [], [dtypes.float32])
    with self.assertRaises(ValueError):
      with ops.name_scope(scope_name, values=graph_elements + [c]):
        pass

  def testTensor(self):
    g0 = ops.Graph()
    a = g0.create_op("a", [], [dtypes.float32])
    b = g0.create_op("b", [], [dtypes.float32])
    self._testGraphElements([a, b])

  def testSparseTensor(self):
    # SparseTensors are unwrapped to their component tensors' graph.
    g0 = ops.Graph()
    a = g0.create_op("a", [], [dtypes.float32])
    b = g0.create_op("b", [], [dtypes.float32])
    sparse = sparse_tensor.SparseTensor(
        _apply_op(g0, "const", [], [dtypes.int64]),
        _apply_op(g0, "const", [], [dtypes.float32]),
        _apply_op(g0, "const", [], [dtypes.int64]))
    self._testGraphElements([a, sparse, b])

  def testVariable(self):
    # Variables also resolve to their defining graph.
    g0 = ops.Graph()
    with g0.as_default():
      variable = variables.Variable([1.0])
    a = g0.create_op("a", [], [dtypes.float32])
    b = g0.create_op("b", [], [dtypes.float32])
    self._testGraphElements([a, variable, b])
class GraphTest(test_util.TensorFlowTestCase):
  """Tests default-graph stacking, element conversion, and gc behavior."""

  def setUp(self):
    ops.reset_default_graph()

  def _AssertDefault(self, expected):
    self.assertIs(expected, ops.get_default_graph())

  def testGraphContextManager(self):
    g0 = ops.Graph()
    with g0.as_default() as g1:
      self.assertIs(g0, g1)

  def testDefaultGraph(self):
    # Creating a Graph or its context manager does not change the default;
    # only entering the manager pushes it, and exiting restores the outer.
    orig = ops.get_default_graph()
    self._AssertDefault(orig)
    g0 = ops.Graph()
    self._AssertDefault(orig)
    context_manager_0 = g0.as_default()
    self._AssertDefault(orig)
    with context_manager_0 as g0:
      self._AssertDefault(g0)
      with ops.Graph().as_default() as g1:
        self._AssertDefault(g1)
      self._AssertDefault(g0)
    self._AssertDefault(orig)

  def testAsGraphElementConversions(self):
    # as_graph_element accepts anything with _as_graph_element() and
    # rejects everything else with TypeError.
    class ConvertibleObj(object):

      def _as_graph_element(self):
        return "const:0"

    class NonConvertibleObj(object):
      pass

    g = ops.Graph()
    a = _apply_op(g, "const", [], [dtypes.float32])
    self.assertEqual(a, g.as_graph_element(ConvertibleObj()))
    with self.assertRaises(TypeError):
      g.as_graph_element(NonConvertibleObj())

  # Regression test against creating custom __del__ functions in classes
  # involved in cyclic references, e.g. Graph and Operation. (Python won't gc
  # cycles that require calling a __del__ method, because the __del__ method can
  # theoretically increase the object's refcount to "save" it from gc, and any
  # already-deleted objects in the cycle would have be to restored.)
  def testGarbageCollected(self):
    # Create a graph we can delete and a weak reference to monitor if it's gc'd
    g = ops.Graph()
    g_ref = weakref.ref(g)
    # Create some ops
    with g.as_default():
      a = constant_op.constant(2.0)
      b = constant_op.constant(3.0)
      c = math_ops.add(a, b)
    # Create a session we can delete
    with session.Session(graph=g) as sess:
      sess.run(c)
    # Delete all references and trigger gc
    del g
    del a
    del b
    del c
    del sess
    gc.collect()
    self.assertIsNone(g_ref())
class AttrScopeTest(test_util.TensorFlowTestCase):
  """Tests Graph._attr_scope: setting and clearing hidden op attrs."""

  def _get_test_attrs(self):
    """Create a no_op and return its ("_A", "_B") attr values.

    Returns:
      A (a, b) tuple of the op's "_A" and "_B" attrs as text, with None
      for an attr that is not set (get_attr raises ValueError for those).
    """
    x = control_flow_ops.no_op()
    try:
      a = compat.as_text(x.get_attr("_A"))
    except ValueError:
      a = None
    try:
      b = compat.as_text(x.get_attr("_B"))
    except ValueError:
      b = None
    # Fixed: removed a stray debug `print(a, b)` that wrote to stdout on
    # every call and polluted test output.
    return (a, b)

  def testNoLabel(self):
    with self.test_session():
      self.assertAllEqual((None, None), self._get_test_attrs())

  def testLabelMap(self):
    # Nested _attr_scope maps stack: a key mapped to None clears it for the
    # inner scope, and exiting a scope restores the enclosing mapping.
    with self.test_session() as sess:
      a1 = self._get_test_attrs()
      with sess.graph._attr_scope({
          "_A": attr_value_pb2.AttrValue(s=compat.as_bytes("foo"))
      }):
        a2 = self._get_test_attrs()
        with sess.graph._attr_scope({
            "_A": None,
            "_B": attr_value_pb2.AttrValue(s=compat.as_bytes("bar"))
        }):
          a3 = self._get_test_attrs()
          with sess.graph._attr_scope({
              "_A": attr_value_pb2.AttrValue(s=compat.as_bytes("baz"))
          }):
            a4 = self._get_test_attrs()
          a5 = self._get_test_attrs()
        a6 = self._get_test_attrs()
      a7 = self._get_test_attrs()

      self.assertAllEqual((None, None), a1)
      self.assertAllEqual(("foo", None), a2)
      self.assertAllEqual((None, "bar"), a3)
      self.assertAllEqual(("baz", "bar"), a4)
      self.assertAllEqual((None, "bar"), a5)
      self.assertAllEqual(("foo", None), a6)
      self.assertAllEqual((None, None), a7)
ops.RegisterShape("KernelLabel")(common_shapes.scalar_shape)
class KernelLabelTest(test_util.TensorFlowTestCase):
  """Tests Graph._kernel_label_map, which selects labelled kernel overloads."""

  def testNoLabel(self):
    """Without a label map the default kernel runs."""
    with self.test_session():
      self.assertAllEqual(b"My label is: default",
                          test_ops.kernel_label().eval())

  def testLabelMap(self):
    """Nested label maps override and restore; "" re-selects the default."""
    with self.test_session() as sess:
      default_1 = test_ops.kernel_label()
      # pylint: disable=protected-access
      with sess.graph._kernel_label_map({"KernelLabel": "overload_1"}):
        overload_1_1 = test_ops.kernel_label()
        with sess.graph._kernel_label_map({"KernelLabel": "overload_2"}):
          overload_2 = test_ops.kernel_label()
          with sess.graph._kernel_label_map({"KernelLabel": ""}):
            # An empty label explicitly restores the default kernel.
            default_2 = test_ops.kernel_label()
        overload_1_2 = test_ops.kernel_label()
      # pylint: enable=protected-access
      default_3 = test_ops.kernel_label()
      self.assertAllEqual(b"My label is: default", default_1.eval())
      self.assertAllEqual(b"My label is: default", default_2.eval())
      self.assertAllEqual(b"My label is: default", default_3.eval())
      self.assertAllEqual(b"My label is: overload_1", overload_1_1.eval())
      self.assertAllEqual(b"My label is: overload_1", overload_1_2.eval())
      self.assertAllEqual(b"My label is: overload_2", overload_2.eval())
class AsGraphDefTest(test_util.TensorFlowTestCase):
  """Tests for Graph.as_graph_def serialization options."""

  def testGraphDefVersion(self):
    """Test that the graphdef version is plumbed through to kernels."""
    for version in range(versions.GRAPH_DEF_VERSION_MIN_PRODUCER,
                         versions.GRAPH_DEF_VERSION + 2):
      with ops.Graph().as_default() as g:
        g.graph_def_versions.producer = version
        with self.test_session(graph=g):
          v = test_ops.graph_def_version().eval()
          self.assertEqual(version, v)

  def testAddShapes(self):
    """as_graph_def(add_shapes=True) serializes each output's shape.

    Covers unknown rank, scalar, partially-known and fully-known shapes;
    an unknown dimension is encoded as size -1 in the proto.
    """
    with ops.Graph().as_default() as g:
      t1, t2, t3, t4, t5 = _apply_op(g, "an_op", [], [dtypes.float32] * 5)
      t1.set_shape(None)
      t2.set_shape([])
      t3.set_shape([None])
      t4.set_shape([43, 37])
      t5.set_shape([43, None])
      gd = g.as_graph_def(add_shapes=True)
      self.assertProtoEqualsVersion("""
      node { name: "an_op" op: "an_op"
        attr {
          key: "_output_shapes"
          value {
            list {
              shape { unknown_rank: true }
              shape { }
              shape { dim { size: -1 } }
              shape { dim { size: 43 } dim { size: 37 } }
              shape { dim { size: 43 } dim { size: -1 } }
            }
          }
        }
      }
      """, gd)
# Register a "flops" statistic for the test-only "a" op type; exercised by
# StatisticsTest below.
@ops.RegisterStatistics("a", "flops")
def _calc_a_forward_flops(unused_graph, unused_node):
  """Return a constant 20-flop estimate for any "a" node."""
  return ops.OpStats("flops", 20)
class StatisticsTest(test_util.TensorFlowTestCase):
  """Tests the op-statistics registry (RegisterStatistics / OpStats)."""

  def testRegisteredNode(self):
    """A registered statistic ("flops" for op type "a") reports its value."""
    graph = ops.Graph()
    node = ops._NodeDef("a", "an_a")
    flops = ops.get_stats_for_node_def(graph, node, "flops")
    self.assertEqual(20, flops.value)
    # A statistic type that was never registered yields a None value.
    # (assertIsNone gives a clearer failure message than assertEqual(None, x).)
    missing_stat = ops.get_stats_for_node_def(graph, node, "missing_stat")
    self.assertIsNone(missing_stat.value)

  def testUnregisteredNode(self):
    """An op type with no registered statistics yields a None value."""
    graph = ops.Graph()
    node = ops._NodeDef("b", "a_b")
    weight_params = ops.get_stats_for_node_def(graph, node, "weight_params")
    self.assertIsNone(weight_params.value)

  def testAccumulateStatistics(self):
    """An OpStats with a None value acts as zero when accumulated with +=."""
    flops_total = ops.OpStats("flops")
    self.assertIsNone(flops_total.value)
    second_flops = ops.OpStats("flops", 3)
    flops_total += second_flops
    self.assertEqual(3, flops_total.value)
class ColocationGroupTest(test_util.TensorFlowTestCase):
  """Tests ops.colocate_with and colocation-group bookkeeping."""

  def testBasic(self):
    """Ops created inside colocate_with join the anchor op's group."""
    a = constant_op.constant([2.0], name="a")
    with ops.colocate_with(a.op):
      b = constant_op.constant(3.0)
    c = constant_op.constant(4.0)
    self.assertEqual([b"loc:@a"], a.op.colocation_groups())
    self.assertEqual([b"loc:@a"], b.op.colocation_groups())
    # 'c' was created outside the scope, so it has no "_class" attr at all.
    with self.assertRaises(ValueError):
      c.op.get_attr("_class")

  def testColocationDeviceInteraction(self):
    """colocate_with overrides an enclosing device scope."""
    with ops.device("/cpu:0"):
      with ops.device("/gpu:0"):
        a = constant_op.constant([2.0], name="a")
      with ops.colocate_with(a.op):
        # 'b' is created in the scope of /cpu:0, but it is
        # colocated with 'a', which is on '/gpu:0'. colocate_with
        # overrides devices because it is a stronger constraint.
        b = constant_op.constant(3.0)
    self.assertEqual([b"loc:@a"], b.op.colocation_groups())
    self.assertEqual(a.op.device, b.op.device)

  def testLocationOverrides(self):
    """Redundant colocation is still recorded, and device stacks restore."""
    with ops.device("/cpu:0"):
      with ops.device("/gpu:0"):
        a = constant_op.constant([2.0], name="a")
        # Note that this colocation is "redundant", since we are
        # within the scope of "/gpu:0". However, we would like to
        # preserve in the GraphDef that these two ops should be
        # colocated in a portable way.
        with ops.colocate_with(a.op):
          b = constant_op.constant(3.0)
        c = constant_op.constant(4.0)
      d = constant_op.constant(5.0)
    self.assertEqual([b"loc:@a"], b.op.colocation_groups())
    self.assertEqual("/device:GPU:0", a.op.device)
    self.assertEqual(a.op.device, b.op.device)
    # Test that device function stack is restored.
    self.assertEqual("/device:GPU:0", c.op.device)
    self.assertEqual("/device:CPU:0", d.op.device)

  def testNestedColocateWith(self):
    """Colocating with an already-colocated op reuses the root group."""
    a = constant_op.constant([2.0], name="a")
    with ops.colocate_with(a.op):
      b = constant_op.constant(3.0)
      with ops.colocate_with(b.op):
        c = constant_op.constant(4.0)
    self.assertEqual([b"loc:@a"], b.op.colocation_groups())
    self.assertEqual([b"loc:@a"], c.op.colocation_groups())

  def testMultiColocationGroups(self):
    """Nested scopes on distinct anchors accumulate multiple groups."""
    a = constant_op.constant([2.0], name="a")
    b = constant_op.constant(3.0, name="b")
    with ops.colocate_with(a.op):
      with ops.colocate_with(b.op):
        c = constant_op.constant(4.0)
    self.assertEqual(set([b"loc:@a", b"loc:@b"]), set(c.op.colocation_groups()))

  def testColocationIgnoreStack(self):
    """ignore_existing=True discards groups from enclosing scopes."""
    a = constant_op.constant([2.0], name="a")
    b = constant_op.constant(3.0, name="b")
    with ops.colocate_with(a.op):
      with ops.colocate_with(b.op, ignore_existing=True):
        c = constant_op.constant(4.0)
    self.assertEqual(set([b"loc:@b"]), set(c.op.colocation_groups()))

  def testColocateWithReset(self):
    """colocate_with(None, ignore_existing=True) clears the current stack."""
    a = constant_op.constant([2.0], name="a")
    with ops.colocate_with(a.op):
      b = constant_op.constant(3.0, name="b")
      with ops.colocate_with(None, ignore_existing=True):
        c = constant_op.constant(4.0, name="c")
    self.assertEqual([b"loc:@a"], b.op.colocation_groups())
    # With no active group, 'c' anchors its own group.
    self.assertEqual([b"loc:@c"], c.op.colocation_groups())

  def testColocateWithInitialNoneThenNested(self):
    """A reset scope can itself host fresh nested colocation."""
    a = constant_op.constant([2.0], name="a")
    with ops.colocate_with(a.op):
      with ops.colocate_with(None, ignore_existing=True):
        b = constant_op.constant(3.0, name="b")
        with ops.colocate_with(b.op):
          c = constant_op.constant(4.0, name="c")
    self.assertEqual([b"loc:@b"], b.op.colocation_groups())
    self.assertEqual([b"loc:@b"], c.op.colocation_groups())

  def testColocateVariables(self):
    """colocate_with also applies to Variable ops."""
    a = variables.Variable([2.0], name="a")
    with ops.colocate_with(a.op):
      b = variables.Variable([3.0], name="b")
    self.assertEqual([b"loc:@a"], b.op.colocation_groups())

  def testInconsistentDeviceWithinColocate(self):
    """An inner explicit device wins over colocation (legacy behavior)."""
    with ops.device("/gpu:0"):
      a = constant_op.constant([2.0], name="a")
      with ops.colocate_with(a.op):
        # This is allowed due to legacy but clearly wrong, since we
        # should really be colocating with 'a'. We allow devices to
        # override colocate_with, but we log warnings to suggest that
        # this is probably unintentional or misguided.
        with ops.device("/cpu:0"):
          b = constant_op.constant([3.0], name="b")
    self.assertEqual("/device:CPU:0", b.device)
class DeprecatedTest(test_util.TensorFlowTestCase):
  """Tests GraphDef-version gating of deprecated ops (test op "Old")."""

  def testSuccess(self):
    """Producer version 7 predates Old's removal (8), so the op still runs."""
    with ops.Graph().as_default() as g:
      g.graph_def_versions.producer = 7
      old = test_ops.old()
      with self.test_session(graph=g):
        old.run()

  def _error(self):
    # Expected error text when Old is used at the current GraphDef version.
    return ((r"Op Old is not available in GraphDef version %d\. "
             r"It has been removed in version 8\. For reasons\.") %
            versions.GRAPH_DEF_VERSION)

  def testGraphConstructionFail(self):
    """At the current version, graph construction itself rejects the op."""
    with ops.Graph().as_default():
      with self.assertRaisesRegexp(NotImplementedError, self._error()):
        test_ops.old()

  def testGraphExecutionFail(self):
    """Build while Old is allowed, then bump the version so execution fails."""
    with ops.Graph().as_default() as g:
      g.graph_def_versions.producer = 7
      old = test_ops.old()
      g.graph_def_versions.producer = versions.GRAPH_DEF_VERSION
      with self.test_session(graph=g):
        with self.assertRaisesRegexp(errors.UnimplementedError, self._error()):
          old.run()
class DenseTensorLikeTypeTest(test_util.TensorFlowTestCase):
  """Tests ops.register_dense_tensor_like_type and its interface checks."""

  def testSuccess(self):
    """Tensor and Variable both satisfy the dense-tensor-like interface."""
    op = ops.Operation(
        ops._NodeDef("noop", "myop"), ops.Graph(), [], [dtypes.float32])
    t = op.outputs[0]
    self.assertTrue(ops.is_dense_tensor_like(t))
    v = variables.Variable([17])
    self.assertTrue(ops.is_dense_tensor_like(v))

  # Each Bad* class below is deliberately missing part of the required
  # interface (a `name` property and a `dtype` property).
  class BadClassNoName(object):
    # No `name` at all.
    pass

  class BadClassBadName(object):
    def name(self):
      # `name` is a method, not a property.
      pass

  class BadClassNoDtype(object):
    @property
    def name(self):
      pass

  class BadClassBadDtype(object):
    @property
    def name(self):
      pass
    def dtype(self):
      # `dtype` is a method, not a property.
      pass

  def testBadClass(self):
    """Registration rejects classes with a malformed name/dtype interface."""
    with self.assertRaisesRegexp(TypeError, "`name`"):
      ops.register_dense_tensor_like_type(
          DenseTensorLikeTypeTest.BadClassNoName)
    with self.assertRaisesRegexp(TypeError, "`name`"):
      ops.register_dense_tensor_like_type(
          DenseTensorLikeTypeTest.BadClassBadName)
    with self.assertRaisesRegexp(TypeError, "`dtype`"):
      ops.register_dense_tensor_like_type(
          DenseTensorLikeTypeTest.BadClassNoDtype)
    with self.assertRaisesRegexp(TypeError, "`dtype`"):
      ops.register_dense_tensor_like_type(
          DenseTensorLikeTypeTest.BadClassBadDtype)
class NameScopeTest(test_util.TensorFlowTestCase):
  """Tests ops.strip_name_scope / prepend_name_scope and Graph.get_name_scope."""

  def testStripAndPrependScope(self):
    """Stripping removes one leading scope; prepending adds one.

    Covers plain names, control-input (^) and colocation (loc:@) prefixes,
    names that merely resemble the scope, and names equal to the scope.
    """
    names = [
        "hidden1/hidden1/weights",  # Same prefix. Should strip.
        "hidden1///hidden1/weights",  # Extra "/". Should strip.
        "^hidden1/hidden1/weights",  # Same prefix. Should strip.
        "loc:@hidden1/hidden1/weights",  # Same prefix. Should strip.
        "hhidden1/hidden1/weights",  # Different prefix. Should keep.
        "hidden1",  # Not a prefix. Should keep.
    ]
    expected_stripped = [
        "hidden1/weights", "hidden1/weights", "^hidden1/weights",
        "loc:@hidden1/weights", "hhidden1/hidden1/weights", "hidden1"
    ]
    expected_prepended = [
        "hidden2/hidden1/weights", "hidden2/hidden1/weights",
        "^hidden2/hidden1/weights", "loc:@hidden2/hidden1/weights",
        "hidden2/hhidden1/hidden1/weights", "hidden2/hidden1"
    ]
    scope_to_strip = "hidden1"
    scope_to_add = "hidden2"
    for want_stripped, want_prepended, name in zip(
        expected_stripped, expected_prepended, names):
      stripped = ops.strip_name_scope(name, scope_to_strip)
      self.assertEqual(want_stripped, stripped)
      self.assertEqual(want_prepended,
                       ops.prepend_name_scope(stripped, scope_to_add))

  def testGetNameScope(self):
    """get_name_scope reflects the current nesting of name_scope blocks."""
    with ops.Graph().as_default() as graph:
      with ops.name_scope("scope1"):
        with ops.name_scope("scope2"):
          with ops.name_scope("scope3"):
            self.assertEqual("scope1/scope2/scope3", graph.get_name_scope())
          self.assertEqual("scope1/scope2", graph.get_name_scope())
        self.assertEqual("scope1", graph.get_name_scope())
      self.assertEqual("", graph.get_name_scope())
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  googletest.main()
| apache-2.0 |
rg3/youtube-dl | youtube_dl/extractor/traileraddict.py | 90 | 2692 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
class TrailerAddictIE(InfoExtractor):
    """Extractor for traileraddict.com trailer and clip pages."""
    _WORKING = False
    _VALID_URL = r'(?:https?://)?(?:www\.)?traileraddict\.com/(?:trailer|clip)/(?P<movie>.+?)/(?P<trailer_name>.+)'
    _TEST = {
        'url': 'http://www.traileraddict.com/trailer/prince-avalanche/trailer',
        'md5': '41365557f3c8c397d091da510e73ceb4',
        'info_dict': {
            'id': '76184',
            'ext': 'mp4',
            'title': 'Prince Avalanche Trailer',
            'description': 'Trailer for Prince Avalanche.\n\nTwo highway road workers spend the summer of 1988 away from their city lives. The isolated landscape becomes a place of misadventure as the men find themselves at odds with each other and the women they left behind.',
        }
    }

    def _real_extract(self, url):
        """Extract id, title, description and the final video URL for *url*."""
        mobj = re.match(self._VALID_URL, url)
        name = mobj.group('movie') + '/' + mobj.group('trailer_name')
        webpage = self._download_webpage(url, name)
        # The page <title> ends with " - Trailer Addict"; strip that suffix.
        title = self._search_regex(r'<title>(.+?)</title>',
                                   webpage, 'video title').replace(' - Trailer Addict', '')
        view_count_str = self._search_regex(
            r'<span class="views_n">([0-9,.]+)</span>',
            webpage, 'view count', fatal=False)
        # View counts are rendered with thousands separators, e.g. "1,234".
        view_count = (
            None if view_count_str is None
            else int(view_count_str.replace(',', '')))
        video_id = self._search_regex(
            r'<param\s+name="movie"\s+value="/emb/([0-9]+)"\s*/>',
            webpage, 'video id')

        # Presence of (no)watchplus function indicates HD quality is available.
        # BUGFIX: the parentheses must be escaped. The previous pattern
        # r'function (no)?watchplus()' ended in "()", an *empty capture group*
        # that matches the empty string, so the HD branch was taken whenever
        # "function watchplus" appeared at all, parens or not.
        if re.search(r'function (no)?watchplus\(\)', webpage):
            fvar = 'fvarhd'
        else:
            fvar = 'fvar'

        info_url = 'http://www.traileraddict.com/%s.php?tid=%s' % (fvar, str(video_id))
        info_webpage = self._download_webpage(info_url, video_id, 'Downloading the info webpage')
        # The info endpoint URL-encodes "?" in the file URL as %3F.
        final_url = self._search_regex(r'&fileurl=(.+)',
                                       info_webpage, 'Download url').replace('%3F', '?')
        thumbnail_url = self._search_regex(r'&image=(.+?)&',
                                           info_webpage, 'thumbnail url')
        description = self._html_search_regex(
            r'(?s)<div class="synopsis">.*?<div class="movie_label_info"[^>]*>(.*?)</div>',
            webpage, 'description', fatal=False)
        return {
            'id': video_id,
            'url': final_url,
            'title': title,
            'thumbnail': thumbnail_url,
            'description': description,
            'view_count': view_count,
        }
| unlicense |
sonaht/ansible | lib/ansible/modules/files/template.py | 10 | 5474 | # this is a virtual module that is entirely implemented server side
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: template
version_added: historical
short_description: Templates a file out to a remote server
description:
- Templates are processed by the Jinja2 templating language
(U(http://jinja.pocoo.org/docs/)) - documentation on the template
formatting can be found in the Template Designer Documentation
(U(http://jinja.pocoo.org/docs/templates/)).
- "Six additional variables can be used in templates:
C(ansible_managed) (configurable via the C(defaults) section of C(ansible.cfg)) contains a string which can be used to
describe the template name, host, modification time of the template file and the owner uid.
C(template_host) contains the node name of the template's machine.
C(template_uid) the numeric user id of the owner.
C(template_path) the path of the template.
C(template_fullpath) is the absolute path of the template.
C(template_run_date) is the date that the template was rendered."
options:
src:
description:
- Path of a Jinja2 formatted template on the Ansible controller. This can be a relative or absolute path.
required: true
dest:
description:
- Location to render the template to on the remote machine.
required: true
backup:
description:
- Create a backup file including the timestamp information so you can get
the original file back if you somehow clobbered it incorrectly.
type: bool
default: 'no'
newline_sequence:
description:
- Specify the newline sequence to use for templating files.
choices: [ '\n', '\r', '\r\n' ]
default: '\n'
version_added: '2.4'
block_start_string:
description:
- The string marking the beginning of a block.
default: '{%'
version_added: '2.4'
block_end_string:
description:
- The string marking the end of a block.
default: '%}'
version_added: '2.4'
variable_start_string:
description:
- The string marking the beginning of a print statement.
default: '{{'
version_added: '2.4'
variable_end_string:
description:
- The string marking the end of a print statement.
default: '}}'
version_added: '2.4'
trim_blocks:
description:
- If this is set to True the first newline after a block is removed (block, not variable tag!).
type: bool
default: 'no'
version_added: '2.4'
force:
description:
- the default is C(yes), which will replace the remote file when contents
are different than the source. If C(no), the file will only be transferred
if the destination does not exist.
type: bool
default: 'yes'
notes:
- For Windows you can use M(win_template) which uses '\r\n' as C(newline_sequence).
- Including a string that uses a date in the template will result in the template being marked 'changed' each time
- "Since Ansible version 0.9, templates are loaded with C(trim_blocks=True)."
- "Also, you can override jinja2 settings by adding a special header to template file.
i.e. C(#jinja2:variable_start_string:'[%', variable_end_string:'%]', trim_blocks: False)
which changes the variable interpolation markers to [% var %] instead of {{ var }}.
This is the best way to prevent evaluation of things that look like, but should not be Jinja2.
raw/endraw in Jinja2 will not work as you expect because templates in Ansible are recursively evaluated."
- You can use the C(copy) module with the C(content:) option if you prefer the template inline,
as part of the playbook.
author:
- Ansible Core Team
- Michael DeHaan
extends_documentation_fragment:
- files
- validate
'''
EXAMPLES = r'''
# Example from Ansible Playbooks
- template:
src: /mytemplates/foo.j2
dest: /etc/file.conf
owner: bin
group: wheel
mode: 0644
# The same example, but using symbolic modes equivalent to 0644
- template:
src: /mytemplates/foo.j2
dest: /etc/file.conf
owner: bin
group: wheel
mode: "u=rw,g=r,o=r"
# Create a DOS-style text file from a template
- template:
src: config.ini.j2
dest: /share/windows/config.ini
newline_sequence: '\r\n'
# Copy a new "sudoers" file into place, after passing validation with visudo
- template:
src: /mine/sudoers
dest: /etc/sudoers
validate: 'visudo -cf %s'
# Update sshd configuration safely, avoid locking yourself out
- template:
src: etc/ssh/sshd_config.j2
dest: /etc/ssh/sshd_config
owner: root
group: root
mode: '0600'
validate: /usr/sbin/sshd -t -f %s
backup: yes
'''
| gpl-3.0 |
california-civic-data-coalition/first-python-notebook | docs/conf.py | 1 | 5717 | # -*- coding: utf-8 -*-
#
# First Python Notebook documentation build configuration file, created by
# sphinx-quickstart on Sat Feb 11 12:41:58 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# "ext.video" is a project-local extension; conf.py's directory is on
# sys.path above, so it is importable as a package from the docs tree.
extensions = [
    'ext.video',
]
# NOTE(review): pdf_documents is only consumed by the rst2pdf builder, which
# is not listed in `extensions` -- confirm whether this is still needed.
pdf_documents = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'First Python Notebook'
copyright = u'2020, Ben Welsh and the California Civic Data Coalition'
author = u'Ben Welsh and the California Civic Data Coalition'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# Read the Docs exports READTHEDOCS=True in its build environment; when
# building anywhere else, fall back to a locally installed copy of the theme.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    # The theme to use for HTML and HTML Help pages. See the documentation for
    # a list of builtin themes.
    html_theme = 'sphinx_rtd_theme'
    # Add any paths that contain custom themes here, relative to this directory.
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
    html_style = 'css/custom.css'
else:
    # On RTD the theme CSS is served from their CDN; only layer the custom
    # stylesheet on top.
    html_context = {
        'css_files': [
            'https://media.readthedocs.org/css/sphinx_rtd_theme.css',
            'https://media.readthedocs.org/css/readthedocs-doc-embed.css',
            '_static/css/custom.css',
        ],
    }
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
    'display_version': False,
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'FirstPythonNotebookdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'FirstPythonNotebook.tex', u'First Python Notebook Documentation',
     u'Ben Welsh and the California Civic Data Coalition', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'firstpythonnotebook', u'First Python Notebook Documentation',
     [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'FirstPythonNotebook', u'First Python Notebook Documentation',
     author, 'FirstPythonNotebook', 'One line description of project.',
     'Miscellaneous'),
]
| mit |
2ndy/RaspIM | usr/lib/python2.7/ast.py | 255 | 11805 | # -*- coding: utf-8 -*-
"""
ast
~~~
The `ast` module helps Python applications to process trees of the Python
abstract syntax grammar. The abstract syntax itself might change with
each Python release; this module helps to find out programmatically what
the current grammar looks like and allows modifications of it.
An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
a flag to the `compile()` builtin function or by using the `parse()`
function from this module. The result will be a tree of objects whose
classes all inherit from `ast.AST`.
A modified abstract syntax tree can be compiled into a Python code object
using the built-in `compile()` function.
Additionally various helper functions are provided that make working with
the trees simpler. The main intention of the helper functions and this
module in general is to provide an easy to use interface for libraries
that work tightly with the python syntax (template engines for example).
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
from _ast import *
from _ast import __version__
def parse(source, filename='<unknown>', mode='exec'):
    """Parse *source* and return the corresponding AST node.

    This is shorthand for ``compile(source, filename, mode, PyCF_ONLY_AST)``.
    """
    ast_flags = PyCF_ONLY_AST
    return compile(source, filename, mode, ast_flags)
def literal_eval(node_or_string):
    """
    Safely evaluate an expression node or a string containing a Python
    expression. The string or node provided may only consist of the following
    Python literal structures: strings, numbers, tuples, lists, dicts, booleans,
    and None.
    """
    # NOTE: this is the Python 2 implementation -- `basestring` and `long`
    # below do not exist on Python 3.
    # Bare identifiers allowed in the input (Python 2 parses the singletons
    # as plain Name nodes, not keyword constants).
    _safe_names = {'None': None, 'True': True, 'False': False}
    # Accept either a source string (parse it first) or an existing node.
    if isinstance(node_or_string, basestring):
        node_or_string = parse(node_or_string, mode='eval')
    if isinstance(node_or_string, Expression):
        node_or_string = node_or_string.body
    def _convert(node):
        # Recursively convert the whitelisted literal node types; anything
        # else falls through to the ValueError at the bottom.
        if isinstance(node, Str):
            return node.s
        elif isinstance(node, Num):
            return node.n
        elif isinstance(node, Tuple):
            return tuple(map(_convert, node.elts))
        elif isinstance(node, List):
            return list(map(_convert, node.elts))
        elif isinstance(node, Dict):
            return dict((_convert(k), _convert(v)) for k, v
                        in zip(node.keys, node.values))
        elif isinstance(node, Name):
            if node.id in _safe_names:
                return _safe_names[node.id]
        # Special case: complex constants written as `real + imagj` or
        # `real - imagj` parse as a BinOp, which must still be accepted.
        elif isinstance(node, BinOp) and \
             isinstance(node.op, (Add, Sub)) and \
             isinstance(node.right, Num) and \
             isinstance(node.right.n, complex) and \
             isinstance(node.left, Num) and \
             isinstance(node.left.n, (int, long, float)):
            left = node.left.n
            right = node.right.n
            if isinstance(node.op, Add):
                return left + right
            else:
                return left - right
        raise ValueError('malformed string')
    return _convert(node_or_string)
def dump(node, annotate_fields=True, include_attributes=False):
    """
    Return a formatted dump of the tree in *node*. This is mainly useful for
    debugging purposes. The returned string will show the names and the values
    for fields. This makes the code impossible to evaluate, so if evaluation is
    wanted *annotate_fields* must be set to False. Attributes such as line
    numbers and column offsets are not dumped by default. If this is wanted,
    *include_attributes* can be set to True.
    """
    def _format(node):
        if isinstance(node, AST):
            # Render each present field, either as "name=value" pairs or as
            # bare positional values depending on annotate_fields.
            fields = [(a, _format(b)) for a, b in iter_fields(node)]
            rv = '%s(%s' % (node.__class__.__name__, ', '.join(
                ('%s=%s' % field for field in fields)
                if annotate_fields else
                (b for a, b in fields)
            ))
            if include_attributes and node._attributes:
                # Separate attributes from fields with ", ", or just " "
                # when the argument list is otherwise empty.
                rv += fields and ', ' or ' '
                rv += ', '.join('%s=%s' % (a, _format(getattr(node, a)))
                                for a in node._attributes)
            return rv + ')'
        elif isinstance(node, list):
            return '[%s]' % ', '.join(_format(x) for x in node)
        # Leaves (strings, numbers, None) are shown via repr().
        return repr(node)
    if not isinstance(node, AST):
        raise TypeError('expected AST, got %r' % node.__class__.__name__)
    return _format(node)
def copy_location(new_node, old_node):
    """Copy ``lineno`` and ``col_offset`` from *old_node* to *new_node*.

    Each attribute is copied only when both nodes support it and *old_node*
    actually has a value for it.  Returns *new_node*.
    """
    for attr in ('lineno', 'col_offset'):
        both_support = (attr in old_node._attributes
                        and attr in new_node._attributes)
        if both_support and hasattr(old_node, attr):
            setattr(new_node, attr, getattr(old_node, attr))
    return new_node
def fix_missing_locations(node):
    """
    When you compile a node tree with compile(), the compiler expects lineno and
    col_offset attributes for every node that supports them. This is rather
    tedious to fill in for generated nodes, so this helper adds these attributes
    recursively where not already set, by setting them to the values of the
    parent node. It works recursively starting at *node*.
    """
    def _fix(node, lineno, col_offset):
        # For each location attribute the node supports: inherit the parent's
        # value when it is missing, otherwise propagate this node's own
        # position down to its children.
        if 'lineno' in node._attributes:
            if not hasattr(node, 'lineno'):
                node.lineno = lineno
            else:
                lineno = node.lineno
        if 'col_offset' in node._attributes:
            if not hasattr(node, 'col_offset'):
                node.col_offset = col_offset
            else:
                col_offset = node.col_offset
        for child in iter_child_nodes(node):
            _fix(child, lineno, col_offset)
    # The root of the tree defaults to line 1, column 0.
    _fix(node, 1, 0)
    return node
def increment_lineno(node, n=1):
    """Add *n* to the line number of *node* and every descendant node.

    Useful to "move" generated code to a different location in a file.
    Returns *node*.
    """
    for descendant in walk(node):
        if 'lineno' in descendant._attributes:
            descendant.lineno = getattr(descendant, 'lineno', 0) + n
    return node
def iter_fields(node):
    """Yield ``(fieldname, value)`` for each field of *node* that is present.

    Fields listed in ``node._fields`` but absent from the instance are
    silently skipped.
    """
    for name in node._fields:
        try:
            value = getattr(node, name)
        except AttributeError:
            continue
        yield name, value
def iter_child_nodes(node):
    """Yield every direct child node of *node*.

    Covers fields that are themselves nodes as well as the items of fields
    that are lists of nodes.
    """
    for _, value in iter_fields(node):
        if isinstance(value, AST):
            yield value
        elif isinstance(value, list):
            for element in value:
                if isinstance(element, AST):
                    yield element
def get_docstring(node, clean=True):
    """
    Return the docstring for the given node or None if no docstring can
    be found. If the node provided does not have docstrings a TypeError
    will be raised.
    """
    # Only modules, classes and functions can carry a docstring.
    if not isinstance(node, (FunctionDef, ClassDef, Module)):
        raise TypeError("%r can't have docstrings" % node.__class__.__name__)
    # A docstring is a bare string expression as the first body statement.
    if node.body and isinstance(node.body[0], Expr) and \
       isinstance(node.body[0].value, Str):
        if clean:
            # Normalize indentation the same way help() does.
            import inspect
            return inspect.cleandoc(node.body[0].value.s)
        return node.body[0].value.s
def walk(node):
    """Yield *node* and all of its descendants, in no specified order.

    Handy when you only want to inspect or mutate nodes in place and do not
    care about their surrounding context.
    """
    from collections import deque
    pending = deque([node])
    while pending:
        current = pending.popleft()
        pending.extend(iter_child_nodes(current))
        yield current
class NodeVisitor(object):
    """Walk an abstract syntax tree, calling a visitor method per node.

    Subclasses add methods named ``visit_<ClassName>`` (e.g. a `TryFinally`
    node is handled by ``visit_TryFinally``); :meth:`visit` dispatches to
    them and forwards their return value.  Nodes without a dedicated method
    fall back to :meth:`generic_visit`, which simply recurses into children.
    This dispatch behavior can be changed by overriding :meth:`visit`.

    Don't use `NodeVisitor` if you want to apply changes to nodes during
    traversal -- use :class:`NodeTransformer` for that.
    """

    def visit(self, node):
        """Visit a node, dispatching on its class name."""
        visitor = getattr(self, 'visit_' + node.__class__.__name__,
                          self.generic_visit)
        return visitor(node)

    def generic_visit(self, node):
        """Fallback visitor: recurse into every child node."""
        for _, value in iter_fields(node):
            if isinstance(value, AST):
                self.visit(value)
            elif isinstance(value, list):
                for item in value:
                    if isinstance(item, AST):
                        self.visit(item)
class NodeTransformer(NodeVisitor):
    """
    A :class:`NodeVisitor` subclass that walks the abstract syntax tree and
    allows modification of nodes.
    The `NodeTransformer` will walk the AST and use the return value of the
    visitor methods to replace or remove the old node. If the return value of
    the visitor method is ``None``, the node will be removed from its location,
    otherwise it is replaced with the return value. The return value may be the
    original node in which case no replacement takes place.
    Here is an example transformer that rewrites all occurrences of name lookups
    (``foo``) to ``data['foo']``::
        class RewriteName(NodeTransformer):
            def visit_Name(self, node):
                return copy_location(Subscript(
                    value=Name(id='data', ctx=Load()),
                    slice=Index(value=Str(s=node.id)),
                    ctx=node.ctx
                ), node)
    Keep in mind that if the node you're operating on has child nodes you must
    either transform the child nodes yourself or call the :meth:`generic_visit`
    method for the node first.
    For nodes that were part of a collection of statements (that applies to all
    statement nodes), the visitor may also return a list of nodes rather than
    just a single node.
    Usually you use the transformer like this::
        node = YourTransformer().visit(node)
    """
    def generic_visit(self, node):
        for field, old_value in iter_fields(node):
            # The value from iter_fields is immediately re-fetched with a
            # None default; redundant, but kept as-is.
            old_value = getattr(node, field, None)
            if isinstance(old_value, list):
                new_values = []
                for value in old_value:
                    if isinstance(value, AST):
                        value = self.visit(value)
                        if value is None:
                            # Visitor returned None: drop this node.
                            continue
                        elif not isinstance(value, AST):
                            # Visitor returned a list of nodes: splice it in.
                            new_values.extend(value)
                            continue
                    new_values.append(value)
                # Replace the list contents in place.
                old_value[:] = new_values
            elif isinstance(old_value, AST):
                new_node = self.visit(old_value)
                if new_node is None:
                    # Visitor returned None: remove the field entirely.
                    delattr(node, field)
                else:
                    setattr(node, field, new_node)
        return node
| gpl-2.0 |
uthaipon/SkillsWorkshop2017 | Week03/PCA_aplied_to_ComputerHardware_data_set.py | 2 | 4589 | # -*- coding: utf-8 -*-
"""
Created on Tue Aug 1 13:28:49 2017
@author: Aster
"""
#=========================================================================
# Preparing the Dataset
#=========================================================================
import pandas as pd

# Load the UCI "Computer Hardware" data set directly from the repository.
df = pd.read_csv(
    filepath_or_buffer='https://archive.ics.uci.edu/ml/machine-learning-databases/cpu-performance/machine.data',
    header=None,
    sep=',')
df.columns = ['vendor_name', 'Model_Name', 'MYCT', 'MMIN', 'MMAX', 'CACH',
              'CHMIN', 'CHMAX', 'PRP', 'ERP']
df.dropna(how="all", inplace=True)  # drops the empty line at file-end
df.tail()

# Split data table into data X (the 7 numeric feature columns) and class
# labels y (vendor name).
# BUGFIX: DataFrame.ix was deprecated and removed in pandas >= 1.0; iloc is
# the positional equivalent. The original 3:12 slice silently clipped to the
# available columns, so 3:10 is the explicit form (7 features).
X = df.iloc[:, 3:10].values
y = df.iloc[:, 0].values

# -------------------------------------
# Standardizing
# -------------------------------------
from sklearn.preprocessing import StandardScaler
X_std = StandardScaler().fit_transform(X)

# =========================================================================
# Eigendecomposition - Computing Eigenvectors and Eigenvalues
# =========================================================================
# -------------------------------------
# Covariance Matrix
# -------------------------------------
import numpy as np
mean_vec = np.mean(X_std, axis=0)
cov_mat = (X_std - mean_vec).T.dot((X_std - mean_vec)) / (X_std.shape[0] - 1)
# or, equivalently:
cov_mat = np.cov(X_std.T)
print('Covariance matrix: \n%s' % cov_mat)

eig_vals, eig_vecs = np.linalg.eig(cov_mat)
print('Eigenvectors \n%s' % eig_vecs)
print('\nEigenvalues \n%s' % eig_vals)

# -------------------------------------
# Correlation Matrix
# -------------------------------------
# The eigendecomposition of the covariance matrix (if the input data was
# standardized) yields the same results as an eigendecomposition on the
# correlation matrix, since the correlation matrix can be understood as the
# normalized covariance matrix.

# Eigendecomposition of the standardized data based on the correlation matrix:
cor_mat1 = np.corrcoef(X_std.T)
eig_vals, eig_vecs = np.linalg.eig(cor_mat1)
print('Eigenvectors \n%s' % eig_vecs)
print('\nEigenvalues \n%s' % eig_vals)

# Eigendecomposition of the raw data based on the correlation matrix:
cor_mat2 = np.corrcoef(X.T)
eig_vals, eig_vecs = np.linalg.eig(cor_mat2)
print('Eigenvectors \n%s' % eig_vecs)
print('\nEigenvalues \n%s' % eig_vals)

# -------------------------------------
# Singular Vector Decomposition
# -------------------------------------
# Most PCA implementations perform a Singular Vector Decomposition (SVD) to
# improve the computational efficiency.
u, s, v = np.linalg.svd(X_std.T)
u

# =========================================================================
# Selecting Principal Components
# =========================================================================
# Every eigenvector should have unit length.
for ev in eig_vecs:
    np.testing.assert_array_almost_equal(1.0, np.linalg.norm(ev))
print('Everything ok!')

# Make a list of (eigenvalue, eigenvector) tuples
eig_pairs = [(np.abs(eig_vals[i]), eig_vecs[:, i]) for i in range(len(eig_vals))]

# Sort the (eigenvalue, eigenvector) tuples from high to low.
# BUGFIX: a plain sort() falls back to comparing the ndarray second elements
# on equal eigenvalues, which raises on Python 3; sort on the eigenvalue only.
eig_pairs.sort(key=lambda pair: pair[0], reverse=True)

# Visually confirm that the list is correctly sorted by decreasing eigenvalues
print('Eigenvalues in descending order:')
for pair in eig_pairs:
    print(pair[0])
print(eig_pairs)

# Keep the two leading principal components.
# BUGFIX: np.hstack requires a sequence of arrays; passing a generator
# expression raises on modern NumPy, so build a list instead.
matrix_w = np.hstack([eig_pairs[i][1].reshape(7, 1) for i in (0, 1)])
print('Matrix W:\n', matrix_w)

# =========================================================================
# Projection Onto the New Feature Space
# =========================================================================
Y = X_std.dot(matrix_w)

# =========================================================================
# Clustering
# =========================================================================
from sklearn.cluster import KMeans
import matplotlib.pyplot as plt
from matplotlib import style

kmeans = KMeans(n_clusters=2).fit(Y)
centroid = kmeans.cluster_centers_
labels = kmeans.labels_

colors = ["g.", "r.", "c.", "y.", "m.", "k."]

# Raw scatter of the projected data.
plt.figure(figsize=(15, 10))
for i in range(len(Y)):
    plt.plot(Y[i, 0], Y[i, 1], "k.", markersize=10)
plt.show()

# Same scatter, colored by k-means cluster, with centroids marked.
plt.figure(figsize=(15, 10))
for i in range(len(Y)):
    plt.plot(Y[i, 0], Y[i, 1], colors[labels[i]], markersize=10)
plt.scatter(centroid[:, 0], centroid[:, 1], marker="x", s=150, linewidths=5, zorder=10)
plt.show() | bsd-3-clause |
hunch/hunch-gift-app | django/db/models/sql/query.py | 9 | 79872 | """
Create SQL statements for QuerySets.
The code in here encapsulates all of the SQL construction so that QuerySets
themselves do not have to (and could be backed by things other than SQL
databases). The abstraction barrier only works one way: this module has to know
all about the internals of models in order to get the information it needs.
"""
from django.utils.copycompat import deepcopy
from django.utils.tree import Node
from django.utils.datastructures import SortedDict
from django.utils.encoding import force_unicode
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.models import signals
from django.db.models.fields import FieldDoesNotExist
from django.db.models.query_utils import select_related_descend, InvalidQuery
from django.db.models.sql import aggregates as base_aggregates_module
from django.db.models.sql.constants import *
from django.db.models.sql.datastructures import EmptyResultSet, Empty, MultiJoin
from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.sql.where import (WhereNode, Constraint, EverythingNode,
ExtraWhere, AND, OR)
from django.core.exceptions import FieldError
__all__ = ['Query', 'RawQuery']
class RawQuery(object):
    """
    A single raw SQL query.
    """

    def __init__(self, sql, using, params=None):
        self.validate_sql(sql)
        self.sql = sql
        self.using = using
        self.params = params or ()
        self.cursor = None

        # Mirror some properties of a normal query so that
        # the compiler can be used to process results.
        self.low_mark = 0
        self.high_mark = None  # Used for offset/limit
        self.extra_select = {}
        self.aggregate_select = {}

    def clone(self, using):
        """Return a copy of this query bound to the *using* database alias."""
        return RawQuery(self.sql, using, params=self.params)

    def convert_values(self, value, field, connection):
        """Convert the database-returned value into a type that is consistent
        across database backends.

        By default, this defers to the underlying backend operations, but
        it can be overridden by Query classes for specific backends.
        """
        return connection.ops.convert_values(value, field)

    def get_columns(self):
        """Return the backend-normalised column names of the result set."""
        if self.cursor is None:
            self._execute_query()
        converter = connections[self.using].introspection.table_name_converter
        return [converter(column_meta[0]) for column_meta in self.cursor.description]

    def validate_sql(self, sql):
        """Reject anything that is not a SELECT statement."""
        if not sql.lower().strip().startswith('select'):
            raise InvalidQuery('Raw queries are limited to SELECT queries. Use '
                               'connection.cursor directly for other types of queries.')

    def __iter__(self):
        # Always execute a new query for a new iterator.
        # This could be optimized with a cache at the expense of RAM.
        self._execute_query()
        if connections[self.using].features.can_use_chunked_reads:
            result = self.cursor
        else:
            # If the database can't use chunked reads we need to make sure we
            # evaluate the entire query up front.
            result = list(self.cursor)
        return iter(result)

    def __repr__(self):
        return "<RawQuery: %r>" % (self.sql % self.params)

    def _execute_query(self):
        # Lazily open a cursor on first use and run the raw statement.
        self.cursor = connections[self.using].cursor()
        self.cursor.execute(self.sql, self.params)
class Query(object):
    """
    A single SQL query.
    """
    # SQL join types. These are part of the class because their string forms
    # vary from database to database and can be customised by a subclass.
    INNER = 'INNER JOIN'
    LOUTER = 'LEFT OUTER JOIN'

    alias_prefix = 'T'
    query_terms = QUERY_TERMS
    aggregates_module = base_aggregates_module
    compiler = 'SQLCompiler'

    def __init__(self, model, where=WhereNode):
        # 'model' is the model class this query selects from; 'where' is the
        # node class used to build the (initially empty) WHERE/HAVING trees.
        self.model = model
        self.alias_refcount = {}  # Maps alias -> number of active references.
        self.alias_map = {}     # Maps alias to join information
        self.table_map = {}     # Maps table names to list of aliases.
        self.join_map = {}      # Maps join tuples to lists of aliases.
        self.rev_join_map = {}  # Reverse of join_map.
        self.quote_cache = {}   # Caches quoted identifier names.
        self.default_cols = True
        self.default_ordering = True
        self.standard_ordering = True
        self.ordering_aliases = []
        self.select_fields = []
        self.related_select_fields = []
        self.dupe_avoidance = {}
        self.used_aliases = set()
        self.filter_is_sticky = False
        self.included_inherited_models = {}

        # SQL-related attributes
        self.select = []
        self.tables = []    # Aliases in the order they are created.
        self.where = where()
        self.where_class = where
        self.group_by = None
        self.having = where()
        self.order_by = []
        self.low_mark, self.high_mark = 0, None  # Used for offset/limit
        self.distinct = False
        self.select_related = False
        self.related_select_cols = []

        # SQL aggregate-related attributes
        self.aggregates = SortedDict()  # Maps alias -> SQL aggregate function
        self.aggregate_select_mask = None
        self._aggregate_select_cache = None

        # Arbitrary maximum limit for select_related. Prevents infinite
        # recursion. Can be changed by the depth parameter to select_related().
        self.max_depth = 5

        # These are for extensions. The contents are more or less appended
        # verbatim to the appropriate clause.
        self.extra = SortedDict()  # Maps col_alias -> (col_sql, params).
        self.extra_select_mask = None
        self._extra_select_cache = None

        self.extra_tables = ()
        self.extra_order_by = ()

        # A tuple that is a set of model field names and either True, if these
        # are the fields to defer, or False if these are the only fields to
        # load.
        self.deferred_loading = (set(), True)
def __str__(self):
    """
    Returns the query as a string of SQL with the parameter values
    substituted in.

    Parameter values won't necessarily be quoted correctly, since that is
    done by the database interface at execution time.
    """
    compiled_sql, compiled_params = self.get_compiler(DEFAULT_DB_ALIAS).as_sql()
    return compiled_sql % compiled_params
def __deepcopy__(self, memo):
    """Deep-copy support: delegate to clone() and record the copy in *memo*
    so cyclic references resolve to the same object."""
    copied = self.clone(memo=memo)
    memo[id(self)] = copied
    return copied
def __getstate__(self):
    """
    Pickling support.
    """
    obj_dict = self.__dict__.copy()
    # Related-selection state is rebuilt on demand, so drop it.
    obj_dict['related_select_fields'] = []
    obj_dict['related_select_cols'] = []

    # Fields can't be pickled, so if a field list has been specified, we
    # pickle the list of field names instead. None is also a possible
    # value; that can pass as-is.
    field_names = []
    for f in obj_dict['select_fields']:
        field_names.append(f is not None and f.name or None)
    obj_dict['select_fields'] = field_names
    return obj_dict
def __setstate__(self, obj_dict):
    """
    Unpickling support.
    """
    # Rebuild the list of field instances from the pickled field names
    # (the inverse of what __getstate__ did).
    rebuilt = []
    for name in obj_dict['select_fields']:
        rebuilt.append(
            name is not None and obj_dict['model']._meta.get_field(name) or None)
    obj_dict['select_fields'] = rebuilt

    self.__dict__.update(obj_dict)
def prepare(self):
    """Hook for subclasses; the base query needs no preparation."""
    return self
def get_compiler(self, using=None, connection=None):
    """Return a compiler instance for this query.

    Exactly one of ``using`` (a connection alias) or ``connection`` (an
    open connection) must be supplied; raises ValueError otherwise.
    """
    if using is None and connection is None:
        raise ValueError("Need either using or connection")
    if using:
        connection = connections[using]

    # Check that the compiler will be able to execute the query.
    for aggregate in self.aggregate_select.values():
        connection.ops.check_aggregate_support(aggregate)

    return connection.ops.compiler(self.compiler)(self, connection, using)
def get_meta(self):
    """
    Returns the Options instance (the model._meta) from which to start
    processing. Normally, this is self.model._meta, but it can be changed
    by subclasses.
    """
    return self.model._meta
def clone(self, klass=None, memo=None, **kwargs):
    """
    Creates a copy of the current instance. The 'kwargs' parameter can be
    used by clients to update attributes after copying has taken place.
    """
    # Build the copy without running __init__ (Empty is a bare shell whose
    # class is swapped in), then transplant every attribute by hand.
    obj = Empty()
    obj.__class__ = klass or self.__class__
    obj.model = self.model
    obj.alias_refcount = self.alias_refcount.copy()
    obj.alias_map = self.alias_map.copy()
    obj.table_map = self.table_map.copy()
    obj.join_map = self.join_map.copy()
    obj.rev_join_map = self.rev_join_map.copy()
    obj.quote_cache = {}
    obj.default_cols = self.default_cols
    obj.default_ordering = self.default_ordering
    obj.standard_ordering = self.standard_ordering
    obj.included_inherited_models = self.included_inherited_models.copy()
    obj.ordering_aliases = []
    obj.select_fields = self.select_fields[:]
    obj.related_select_fields = self.related_select_fields[:]
    obj.dupe_avoidance = self.dupe_avoidance.copy()
    obj.select = self.select[:]
    obj.tables = self.tables[:]
    obj.where = deepcopy(self.where, memo=memo)
    obj.where_class = self.where_class
    if self.group_by is None:
        obj.group_by = None
    else:
        obj.group_by = self.group_by[:]
    obj.having = deepcopy(self.having, memo=memo)
    obj.order_by = self.order_by[:]
    obj.low_mark, obj.high_mark = self.low_mark, self.high_mark
    obj.distinct = self.distinct
    obj.select_related = self.select_related
    obj.related_select_cols = []
    obj.aggregates = deepcopy(self.aggregates, memo=memo)
    if self.aggregate_select_mask is None:
        obj.aggregate_select_mask = None
    else:
        obj.aggregate_select_mask = self.aggregate_select_mask.copy()
    # _aggregate_select_cache cannot be copied, as doing so breaks the
    # (necessary) state in which both aggregates and
    # _aggregate_select_cache point to the same underlying objects.
    # It will get re-populated in the cloned queryset the next time it's
    # used.
    obj._aggregate_select_cache = None
    obj.max_depth = self.max_depth
    obj.extra = self.extra.copy()
    if self.extra_select_mask is None:
        obj.extra_select_mask = None
    else:
        obj.extra_select_mask = self.extra_select_mask.copy()
    if self._extra_select_cache is None:
        obj._extra_select_cache = None
    else:
        obj._extra_select_cache = self._extra_select_cache.copy()
    obj.extra_tables = self.extra_tables
    obj.extra_order_by = self.extra_order_by
    obj.deferred_loading = deepcopy(self.deferred_loading, memo=memo)
    # Sticky filters keep their used_aliases so follow-up filters reuse the
    # same joins; otherwise the clone starts with a fresh alias set.
    if self.filter_is_sticky and self.used_aliases:
        obj.used_aliases = self.used_aliases.copy()
    else:
        obj.used_aliases = set()
    obj.filter_is_sticky = False
    obj.__dict__.update(kwargs)
    # Give subclasses (e.g. DeleteQuery) a chance to finish their setup.
    if hasattr(obj, '_setup_query'):
        obj._setup_query()
    return obj
def convert_values(self, value, field, connection):
    """Convert the database-returned value into a type that is consistent
    across database backends.

    By default, this defers to the underlying backend operations, but
    it can be overridden by Query classes for specific backends.
    """
    ops = connection.ops
    return ops.convert_values(value, field)
def resolve_aggregate(self, value, aggregate, connection):
    """Resolve the value of aggregates returned by the database to
    consistent (and reasonable) types.

    This is required because of the predisposition of certain backends
    to return Decimal and long types when they are not needed.
    """
    if value is None:
        # An ordinal aggregate (e.g. COUNT) over no rows is 0; any other
        # aggregate of no rows stays None.
        return 0 if aggregate.is_ordinal else value
    if aggregate.is_ordinal:
        # Any ordinal aggregate (e.g., count) returns an int.
        return int(value)
    if aggregate.is_computed:
        # Any computed aggregate (e.g., avg) returns a float.
        return float(value)
    # Return value depends on the type of the field being processed.
    return self.convert_values(value, aggregate.field, connection)
def get_aggregation(self, using):
    """
    Returns the dictionary with the values of the existing aggregations.
    """
    if not self.aggregate_select:
        return {}

    # If there is a group by clause, aggregating does not add useful
    # information but retrieves only the first row. Aggregate
    # over the subquery instead.
    if self.group_by is not None:
        from subqueries import AggregateQuery
        query = AggregateQuery(self.model)

        obj = self.clone()

        # Remove any aggregates marked for reduction from the subquery
        # and move them to the outer AggregateQuery.
        for alias, aggregate in self.aggregate_select.items():
            if aggregate.is_summary:
                query.aggregate_select[alias] = aggregate
                del obj.aggregate_select[alias]

        query.add_subquery(obj, using)
    else:
        # No grouping: aggregate over this query directly. Note this
        # mutates self (columns/extra are dropped) rather than a clone.
        query = self
        self.select = []
        self.default_cols = False
        self.extra = {}
        self.remove_inherited_models()

    # Ordering, limits and select_related cannot affect the aggregate
    # values, so strip them before executing.
    query.clear_ordering(True)
    query.clear_limits()
    query.select_related = False
    query.related_select_cols = []
    query.related_select_fields = []

    result = query.get_compiler(using).execute_sql(SINGLE)
    if result is None:
        # No rows at all: resolve each aggregate from a None value.
        result = [None for q in query.aggregate_select.items()]

    return dict([
        (alias, self.resolve_aggregate(val, aggregate, connection=connections[using]))
        for (alias, aggregate), val
        in zip(query.aggregate_select.items(), result)
    ])
def get_count(self, using):
    """
    Performs a COUNT() query using the current filter constraints.
    """
    obj = self.clone()
    if len(self.select) > 1 or self.aggregate_select:
        # If a select clause exists, then the query has already started to
        # specify the columns that are to be returned.
        # In this case, we need to use a subquery to evaluate the count.
        from subqueries import AggregateQuery
        subquery = obj
        subquery.clear_ordering(True)
        subquery.clear_limits()

        obj = AggregateQuery(obj.model)
        obj.add_subquery(subquery, using=using)

    obj.add_count_column()
    # add_count_column() registers the COUNT under the None alias.
    number = obj.get_aggregation(using=using)[None]

    # Apply offset and limit constraints manually, since using LIMIT/OFFSET
    # in SQL (in variants that provide them) doesn't change the COUNT
    # output.
    number = max(0, number - self.low_mark)
    if self.high_mark is not None:
        number = min(number, self.high_mark - self.low_mark)

    return number
def has_results(self, using):
    """Return whether executing this query would yield at least one row.

    Works on a stripped-down clone limited to a single row, so the check
    is as cheap as the backend allows.
    """
    probe = self.clone()
    # Drop everything that doesn't affect row existence.
    probe.select = []
    probe.select_fields = []
    probe.default_cols = False
    probe.select_related = False
    probe.set_aggregate_mask(())
    probe.clear_ordering(True)
    probe.set_limits(high=1)
    return probe.get_compiler(using=using).has_results()
def combine(self, rhs, connector):
    """
    Merge the 'rhs' query into the current one (with any 'rhs' effects
    being applied *after* (that is, "to the right of") anything in the
    current query. 'rhs' is not modified during a call to this function.

    The 'connector' parameter describes how to connect filters from the
    'rhs' query.
    """
    assert self.model == rhs.model, \
        "Cannot combine queries on two different base models."
    assert self.can_filter(), \
        "Cannot combine queries once a slice has been taken."
    assert self.distinct == rhs.distinct, \
        "Cannot combine a unique query with a non-unique query."

    self.remove_inherited_models()
    # Work out how to relabel the rhs aliases, if necessary.
    change_map = {}
    used = set()
    conjunction = (connector == AND)
    first = True
    for alias in rhs.tables:
        if not rhs.alias_refcount[alias]:
            # An unused alias.
            continue
        # Preserve outer-join-ness when copying the join across.
        promote = (rhs.alias_map[alias][JOIN_TYPE] == self.LOUTER)
        new_alias = self.join(rhs.rev_join_map[alias],
                (conjunction and not first), used, promote, not conjunction)
        used.add(new_alias)
        change_map[alias] = new_alias
        first = False

    # So that we don't exclude valid results in an "or" query combination,
    # the first join that is exclusive to the lhs (self) must be converted
    # to an outer join.
    if not conjunction:
        for alias in self.tables[1:]:
            if self.alias_refcount[alias] == 1:
                self.promote_alias(alias, True)
                break

    # Now relabel a copy of the rhs where-clause and add it to the current
    # one.
    if rhs.where:
        w = deepcopy(rhs.where)
        w.relabel_aliases(change_map)
        if not self.where:
            # Since 'self' matches everything, add an explicit "include
            # everything" where-constraint so that connections between the
            # where clauses won't exclude valid results.
            self.where.add(EverythingNode(), AND)
    elif self.where:
        # rhs has an empty where clause.
        w = self.where_class()
        w.add(EverythingNode(), AND)
    else:
        w = self.where_class()
    self.where.add(w, connector)

    # Selection columns and extra extensions are those provided by 'rhs'.
    self.select = []
    for col in rhs.select:
        if isinstance(col, (list, tuple)):
            # (alias, column) pair: remap the alias.
            self.select.append((change_map.get(col[0], col[0]), col[1]))
        else:
            # Expression object: relabel a private copy of it.
            item = deepcopy(col)
            item.relabel_aliases(change_map)
            self.select.append(item)
    self.select_fields = rhs.select_fields[:]

    if connector == OR:
        # It would be nice to be able to handle this, but the queries don't
        # really make sense (or return consistent value sets). Not worth
        # the extra complexity when you can write a real query instead.
        if self.extra and rhs.extra:
            raise ValueError("When merging querysets using 'or', you "
                    "cannot have extra(select=...) on both sides.")
    self.extra.update(rhs.extra)
    extra_select_mask = set()
    if self.extra_select_mask is not None:
        extra_select_mask.update(self.extra_select_mask)
    if rhs.extra_select_mask is not None:
        extra_select_mask.update(rhs.extra_select_mask)
    if extra_select_mask:
        self.set_extra_mask(extra_select_mask)
    self.extra_tables += rhs.extra_tables

    # Ordering uses the 'rhs' ordering, unless it has none, in which case
    # the current ordering is used.
    self.order_by = rhs.order_by and rhs.order_by[:] or self.order_by
    self.extra_order_by = rhs.extra_order_by or self.extra_order_by
def deferred_to_data(self, target, callback):
    """
    Converts the self.deferred_loading data structure to an alternate data
    structure, describing the field that *will* be loaded. This is used to
    compute the columns to select from the database and also by the
    QuerySet class to work out which fields are being initialised on each
    model. Models that have all their fields included aren't mentioned in
    the result, only those that have field restrictions in place.

    The "target" parameter is the instance that is populated (in place).
    The "callback" is a function that is called whenever a (model, field)
    pair need to be added to "target". It accepts three parameters:
    "target", and the model and list of fields being added for that model.
    """
    field_names, defer = self.deferred_loading
    if not field_names:
        return

    columns = set()  # NOTE(review): appears unused in this method.
    orig_opts = self.model._meta
    seen = {}
    # The pk of the base model is always loaded.
    must_include = {self.model: set([orig_opts.pk])}
    for field_name in field_names:
        parts = field_name.split(LOOKUP_SEP)
        cur_model = self.model
        opts = orig_opts
        # Walk the relation chain for "foo__bar__baz"-style names.
        for name in parts[:-1]:
            old_model = cur_model
            source = opts.get_field_by_name(name)[0]
            cur_model = opts.get_field_by_name(name)[0].rel.to
            opts = cur_model._meta
            # Even if we're "just passing through" this model, we must add
            # both the current model's pk and the related reference field
            # to the things we select.
            must_include[old_model].add(source)
            add_to_dict(must_include, cur_model, opts.pk)
        field, model, _, _ = opts.get_field_by_name(parts[-1])
        if model is None:
            model = cur_model
        add_to_dict(seen, model, field)

    if defer:
        # We need to load all fields for each model, except those that
        # appear in "seen" (for all models that appear in "seen"). The only
        # slight complexity here is handling fields that exist on parent
        # models.
        workset = {}
        for model, values in seen.iteritems():
            for field, m in model._meta.get_fields_with_model():
                if field in values:
                    continue
                add_to_dict(workset, m or model, field)
        for model, values in must_include.iteritems():
            # If we haven't included a model in workset, we don't add the
            # corresponding must_include fields for that model, since an
            # empty set means "include all fields". That's why there's no
            # "else" branch here.
            if model in workset:
                workset[model].update(values)
        for model, values in workset.iteritems():
            callback(target, model, values)
    else:
        for model, values in must_include.iteritems():
            if model in seen:
                seen[model].update(values)
            else:
                # As we've passed through this model, but not explicitly
                # included any fields, we have to make sure it's mentioned
                # so that only the "must include" fields are pulled in.
                seen[model] = values
        # Now ensure that every model in the inheritance chain is mentioned
        # in the parent list. Again, it must be mentioned to ensure that
        # only "must include" fields are pulled in.
        for model in orig_opts.get_parent_list():
            if model not in seen:
                seen[model] = set()
        for model, values in seen.iteritems():
            callback(target, model, values)
def deferred_to_columns_cb(self, target, model, fields):
    """
    Callback used by deferred_to_columns(). The "target" parameter should
    be a set instance.
    """
    table = model._meta.db_table
    # Create the per-table column set on first sight of the table.
    columns = target.setdefault(table, set())
    for field in fields:
        columns.add(field.column)
def table_alias(self, table_name, create=False):
    """
    Returns a table alias for the given table_name and whether this is a
    new alias or not.

    If 'create' is true, a new alias is always created. Otherwise, the
    most recently created alias for the table (if one exists) is reused.
    """
    existing = self.table_map.get(table_name)
    if existing and not create:
        # Reuse an existing alias and bump its reference count.
        alias = existing[0]
        self.alias_refcount[alias] += 1
        return alias, False

    # Create a new alias for this table.
    if existing:
        alias = '%s%d' % (self.alias_prefix, len(self.alias_map) + 1)
        existing.append(alias)
    else:
        # The first occurence of a table uses the table name directly.
        alias = table_name
        self.table_map[alias] = [alias]
    self.alias_refcount[alias] = 1
    self.tables.append(alias)
    return alias, True
def ref_alias(self, alias):
    """Increases the reference count for this alias."""
    self.alias_refcount[alias] = self.alias_refcount[alias] + 1
def unref_alias(self, alias):
    """Decreases the reference count for this alias."""
    self.alias_refcount[alias] = self.alias_refcount[alias] - 1
def promote_alias(self, alias, unconditional=False):
    """
    Promotes the join type of an alias to an outer join if it's possible
    for the join to contain NULL values on the left. If 'unconditional' is
    False, the join is only promoted if it is nullable, otherwise it is
    always promoted.

    Returns True if the join was promoted.
    """
    join_info = self.alias_map[alias]
    if not (unconditional or join_info[NULLABLE]):
        # Not eligible for promotion.
        return False
    if join_info[JOIN_TYPE] == self.LOUTER:
        # Already an outer join; nothing to do.
        return False
    promoted = list(join_info)
    promoted[JOIN_TYPE] = self.LOUTER
    self.alias_map[alias] = tuple(promoted)
    return True
def promote_alias_chain(self, chain, must_promote=False):
    """
    Walks along a chain of aliases, promoting the first nullable join and
    any joins following that. If 'must_promote' is True, all the aliases in
    the chain are promoted.
    """
    promote_rest = must_promote
    for alias in chain:
        # Once one alias is promoted, everything after it is promoted
        # unconditionally.
        if self.promote_alias(alias, promote_rest):
            promote_rest = True
def promote_unused_aliases(self, initial_refcounts, used_aliases):
    """
    Given a "before" copy of the alias_refcounts dictionary (as
    'initial_refcounts') and a collection of aliases that may have been
    changed or created, works out which aliases have been created since
    then and which ones haven't been used and promotes all of those
    aliases, plus any children of theirs in the alias tree, to outer joins.
    """
    # FIXME: There's some (a lot of!) overlap with the similar OR promotion
    # in add_filter(). It's not quite identical, but is very similar. So
    # pulling out the common bits is something for later.
    considered = {}
    for alias in self.tables:
        if alias not in used_aliases:
            continue
        if (alias not in initial_refcounts or
                self.alias_refcount[alias] == initial_refcounts[alias]):
            # The alias is either new or its refcount never moved, i.e. it
            # wasn't referenced by the filter; promote it, and force
            # promotion of its children if the parent was promoted.
            parent = self.alias_map[alias][LHS_ALIAS]
            must_promote = considered.get(parent, False)
            promoted = self.promote_alias(alias, must_promote)
            considered[alias] = must_promote or promoted
def change_aliases(self, change_map):
    """
    Changes the aliases in change_map (which maps old-alias -> new-alias),
    relabelling any references to them in select columns and the where
    clause.
    """
    # No alias may appear on both sides of the mapping, or a later rename
    # would clobber an earlier one.
    assert set(change_map.keys()).intersection(set(change_map.values())) == set()

    # 1. Update references in "select" (normal columns plus aliases),
    # "group by", "where" and "having".
    self.where.relabel_aliases(change_map)
    self.having.relabel_aliases(change_map)
    for columns in (self.select, self.aggregates.values(), self.group_by or []):
        for pos, col in enumerate(columns):
            if isinstance(col, (list, tuple)):
                old_alias = col[0]
                columns[pos] = (change_map.get(old_alias, old_alias), col[1])
            else:
                col.relabel_aliases(change_map)

    # 2. Rename the alias in the internal table/alias datastructures.
    for old_alias, new_alias in change_map.iteritems():
        alias_data = list(self.alias_map[old_alias])
        alias_data[RHS_ALIAS] = new_alias

        t = self.rev_join_map[old_alias]
        data = list(self.join_map[t])
        data[data.index(old_alias)] = new_alias
        self.join_map[t] = tuple(data)
        self.rev_join_map[new_alias] = t
        del self.rev_join_map[old_alias]
        self.alias_refcount[new_alias] = self.alias_refcount[old_alias]
        del self.alias_refcount[old_alias]
        self.alias_map[new_alias] = tuple(alias_data)
        del self.alias_map[old_alias]

        table_aliases = self.table_map[alias_data[TABLE_NAME]]
        for pos, alias in enumerate(table_aliases):
            if alias == old_alias:
                table_aliases[pos] = new_alias
                break
        for pos, alias in enumerate(self.tables):
            if alias == old_alias:
                self.tables[pos] = new_alias
                break
    for key, alias in self.included_inherited_models.items():
        if alias in change_map:
            self.included_inherited_models[key] = change_map[alias]

    # 3. Update any joins that refer to the old alias.
    for alias, data in self.alias_map.iteritems():
        lhs = data[LHS_ALIAS]
        if lhs in change_map:
            data = list(data)
            data[LHS_ALIAS] = change_map[lhs]
            self.alias_map[alias] = tuple(data)
def bump_prefix(self, exceptions=()):
    """
    Changes the alias prefix to the next letter in the alphabet and
    relabels all the aliases. Even tables that previously had no alias will
    get an alias after this call (it's mostly used for nested queries and
    the outer query will already be using the non-aliased table name).

    Subclasses who create their own prefix should override this method to
    produce a similar result (a new prefix and relabelled aliases).

    The 'exceptions' parameter is a container that holds alias names which
    should not be changed.
    """
    assert ord(self.alias_prefix) < ord('Z')
    new_prefix = chr(ord(self.alias_prefix) + 1)
    self.alias_prefix = new_prefix

    change_map = {}
    for pos, alias in enumerate(self.tables):
        if alias in exceptions:
            continue
        renamed = '%s%d' % (new_prefix, pos)
        change_map[alias] = renamed
        self.tables[pos] = renamed
    self.change_aliases(change_map)
def get_initial_alias(self):
    """
    Returns the first alias for this query, after increasing its reference
    count.
    """
    if not self.tables:
        # No tables yet: create the base join for the model's own table.
        return self.join((None, self.model._meta.db_table, None, None))
    alias = self.tables[0]
    self.ref_alias(alias)
    return alias
def count_active_tables(self):
    """
    Returns the number of tables in this query with a non-zero reference
    count.
    """
    active = 0
    for count in self.alias_refcount.itervalues():
        if count:
            active += 1
    return active
    def join(self, connection, always_create=False, exclusions=(),
            promote=False, outer_if_first=False, nullable=False, reuse=None):
        """
        Returns an alias for the join in 'connection', either reusing an
        existing alias for that join or creating a new one. 'connection' is a
        tuple (lhs, table, lhs_col, col) where 'lhs' is either an existing
        table alias or a table name. The join corresponds to the SQL equivalent
        of::

            lhs.lhs_col = table.col

        If 'always_create' is True and 'reuse' is None, a new alias is always
        created, regardless of whether one already exists or not. If
        'always_create' is True and 'reuse' is a set, an alias in 'reuse' that
        matches the connection will be returned, if possible. If
        'always_create' is False, the first existing alias that matches the
        'connection' is returned, if any. Otherwise a new join is created.

        If 'exclusions' is specified, it is something satisfying the container
        protocol ("foo in exclusions" must work) and specifies a list of
        aliases that should not be returned, even if they satisfy the join.

        If 'promote' is True, the join type for the alias will be LOUTER (if
        the alias previously existed, the join type will be promoted from INNER
        to LOUTER, if necessary).

        If 'outer_if_first' is True and a new join is created, it will have the
        LOUTER join type. This is used when joining certain types of querysets
        and Q-objects together.

        If 'nullable' is True, the join can potentially involve NULL values and
        is a candidate for promotion (to "left outer") when combining querysets.
        """
        lhs, table, lhs_col, col = connection
        # Resolve 'lhs' to a real table name if it is an alias we already know.
        if lhs in self.alias_map:
            lhs_table = self.alias_map[lhs][TABLE_NAME]
        else:
            lhs_table = lhs

        if reuse and always_create and table in self.table_map:
            # Convert the 'reuse' to case to be "exclude everything but the
            # reusable set, minus exclusions, for this table".
            exclusions = set(self.table_map[table]).difference(reuse).union(set(exclusions))
            always_create = False
        t_ident = (lhs_table, table, lhs_col, col)
        if not always_create:
            # Look for an existing, non-excluded alias for this exact join.
            for alias in self.join_map.get(t_ident, ()):
                if alias not in exclusions:
                    if lhs_table and not self.alias_refcount[self.alias_map[alias][LHS_ALIAS]]:
                        # The LHS of this join tuple is no longer part of the
                        # query, so skip this possibility.
                        continue
                    if self.alias_map[alias][LHS_ALIAS] != lhs:
                        continue
                    self.ref_alias(alias)
                    if promote:
                        self.promote_alias(alias)
                    return alias

        # No reuse is possible, so we need a new alias.
        alias, _ = self.table_alias(table, True)
        if not lhs:
            # Not all tables need to be joined to anything. No join type
            # means the later columns are ignored.
            join_type = None
        elif promote or outer_if_first:
            join_type = self.LOUTER
        else:
            join_type = self.INNER
        join = (table, alias, join_type, lhs, lhs_col, col, nullable)
        # Register the new join in both the forward and the reverse mappings.
        self.alias_map[alias] = join
        if t_ident in self.join_map:
            self.join_map[t_ident] += (alias,)
        else:
            self.join_map[t_ident] = (alias,)
        self.rev_join_map[alias] = t_ident
        return alias
    def setup_inherited_models(self):
        """
        If the model that is the basis for this QuerySet inherits other models,
        we need to ensure that those other models have their tables included in
        the query.

        We do this as a separate step so that subclasses know which
        tables are going to be active in the query, without needing to compute
        all the select columns (this method is called from pre_sql_setup(),
        whereas column determination is a later part, and side-effect, of
        as_sql()).
        """
        opts = self.model._meta
        root_alias = self.tables[0]
        # Maps each parent model class to the table alias that serves it;
        # the None key marks the root table.
        seen = {None: root_alias}

        # Skip all proxy to the root proxied model
        proxied_model = get_proxied_model(opts)

        for field, model in opts.get_fields_with_model():
            if model not in seen:
                if model is proxied_model:
                    # A proxy adds no table of its own; reuse the root alias.
                    seen[model] = root_alias
                else:
                    # Join via the implicit one-to-one link to the ancestor.
                    link_field = opts.get_ancestor_link(model)
                    seen[model] = self.join((root_alias, model._meta.db_table,
                            link_field.column, model._meta.pk.column))
        self.included_inherited_models = seen
def remove_inherited_models(self):
"""
Undoes the effects of setup_inherited_models(). Should be called
whenever select columns (self.select) are set explicitly.
"""
for key, alias in self.included_inherited_models.items():
if key:
self.unref_alias(alias)
self.included_inherited_models = {}
    def add_aggregate(self, aggregate, model, alias, is_summary):
        """
        Adds a single aggregate expression to the Query.

        'aggregate' is the aggregate object, 'model' the model it applies to,
        'alias' the name under which the result will be exposed, and
        'is_summary' indicates a terminal (whole-queryset) aggregate rather
        than a per-row annotation.
        """
        opts = model._meta
        field_list = aggregate.lookup.split(LOOKUP_SEP)
        if (len(field_list) == 1 and
            aggregate.lookup in self.aggregates.keys()):
            # Aggregate is over an annotation
            field_name = field_list[0]
            col = field_name
            source = self.aggregates[field_name]
            if not is_summary:
                # Annotating an aggregate over another aggregate isn't allowed.
                raise FieldError("Cannot compute %s('%s'): '%s' is an aggregate" % (
                    aggregate.name, field_name, field_name))
        elif ((len(field_list) > 1) or
            (field_list[0] not in [i.name for i in opts.fields]) or
            self.group_by is None or
            not is_summary):
            # If:
            # - the field descriptor has more than one part (foo__bar), or
            # - the field descriptor is referencing an m2m/m2o field, or
            # - this is a reference to a model field (possibly inherited), or
            # - this is an annotation over a model field
            # then we need to explore the joins that are required.

            field, source, opts, join_list, last, _ = self.setup_joins(
                field_list, opts, self.get_initial_alias(), False)

            # Process the join chain to see if it can be trimmed
            col, _, join_list = self.trim_joins(source, join_list, last, False)

            # If the aggregate references a model or field that requires a join,
            # those joins must be LEFT OUTER - empty join rows must be returned
            # in order for zeros to be returned for those aggregates.
            for column_alias in join_list:
                self.promote_alias(column_alias, unconditional=True)

            col = (join_list[-1], col)
        else:
            # The simplest cases. No joins required -
            # just reference the provided column alias.
            field_name = field_list[0]
            source = opts.get_field(field_name)
            col = field_name

        # Add the aggregate to the query
        aggregate.add_to_query(self, alias, col=col, source=source, is_summary=is_summary)
    def add_filter(self, filter_expr, connector=AND, negate=False, trim=False,
            can_reuse=None, process_extras=True):
        """
        Add a single filter to the query. The 'filter_expr' is a pair:
        (filter_string, value). E.g. ('name__contains', 'fred')

        If 'negate' is True, this is an exclude() filter. It's important to
        note that this method does not negate anything in the where-clause
        object when inserting the filter constraints. This is because negated
        filters often require multiple calls to add_filter() and the negation
        should only happen once. So the caller is responsible for this (the
        caller will normally be add_q(), for example).

        If 'trim' is True, we automatically trim the final join group (used
        internally when constructing nested queries).

        If 'can_reuse' is a set, we are processing a component of a
        multi-component filter (e.g. filter(Q1, Q2)). In this case, 'can_reuse'
        will be a set of table aliases that can be reused in this filter, even
        if we would otherwise force the creation of new aliases for a join
        (needed for nested Q-filters). The set is updated by this method.

        If 'process_extras' is set, any extra filters returned from the table
        joining process will be processed. This parameter is set to False
        during the processing of extra filters to avoid infinite recursion.
        """
        arg, value = filter_expr
        parts = arg.split(LOOKUP_SEP)
        if not parts:
            raise FieldError("Cannot parse keyword query %r" % arg)

        # Work out the lookup type and remove it from 'parts', if necessary.
        if len(parts) == 1 or parts[-1] not in self.query_terms:
            lookup_type = 'exact'
        else:
            lookup_type = parts.pop()

        # By default, this is a WHERE clause. If an aggregate is referenced
        # in the value, the filter will be promoted to a HAVING
        having_clause = False

        # Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all
        # uses of None as a query value.
        if value is None:
            if lookup_type != 'exact':
                raise ValueError("Cannot use None as a query value")
            lookup_type = 'isnull'
            value = True
        elif callable(value):
            value = value()
        elif hasattr(value, 'evaluate'):
            # If value is a query expression, evaluate it
            value = SQLEvaluator(value, self)
            having_clause = value.contains_aggregate

        # A filter naming an existing annotation goes into HAVING directly.
        for alias, aggregate in self.aggregates.items():
            if alias == parts[0]:
                entry = self.where_class()
                entry.add((aggregate, lookup_type, value), AND)
                if negate:
                    entry.negate()
                self.having.add(entry, AND)
                return

        opts = self.get_meta()
        alias = self.get_initial_alias()
        allow_many = trim or not negate

        try:
            field, target, opts, join_list, last, extra_filters = self.setup_joins(
                    parts, opts, alias, True, allow_many, can_reuse=can_reuse,
                    negate=negate, process_extras=process_extras)
        except MultiJoin, e:
            # Excluding across a many-valued relation needs a subquery.
            self.split_exclude(filter_expr, LOOKUP_SEP.join(parts[:e.level]),
                    can_reuse)
            return

        if (lookup_type == 'isnull' and value is True and not negate and
                len(join_list) > 1):
            # If the comparison is against NULL, we may need to use some left
            # outer joins when creating the join chain. This is only done when
            # needed, as it's less efficient at the database level.
            self.promote_alias_chain(join_list)

        # Process the join list to see if we can remove any inner joins from
        # the far end (fewer tables in a query is better).
        col, alias, join_list = self.trim_joins(target, join_list, last, trim)

        if connector == OR:
            # Some joins may need to be promoted when adding a new filter to a
            # disjunction. We walk the list of new joins and where it diverges
            # from any previous joins (ref count is 1 in the table list), we
            # make the new additions (and any existing ones not used in the new
            # join list) an outer join.
            join_it = iter(join_list)
            table_it = iter(self.tables)
            # Skip the initial (root) alias on both iterators.
            join_it.next(), table_it.next()
            table_promote = False
            join_promote = False
            for join in join_it:
                table = table_it.next()
                if join == table and self.alias_refcount[join] > 1:
                    continue
                join_promote = self.promote_alias(join)
                if table != join:
                    table_promote = self.promote_alias(table)
                break
            self.promote_alias_chain(join_it, join_promote)
            self.promote_alias_chain(table_it, table_promote)

        if having_clause:
            self.having.add((Constraint(alias, col, field), lookup_type, value),
                    connector)
        else:
            self.where.add((Constraint(alias, col, field), lookup_type, value),
                    connector)

        if negate:
            # An exclude() must not drop rows where the relation is NULL, so
            # promote the whole chain and add explicit NULL handling.
            self.promote_alias_chain(join_list)
            if lookup_type != 'isnull':
                if len(join_list) > 1:
                    for alias in join_list:
                        if self.alias_map[alias][JOIN_TYPE] == self.LOUTER:
                            j_col = self.alias_map[alias][RHS_JOIN_COL]
                            entry = self.where_class()
                            entry.add((Constraint(alias, j_col, None), 'isnull', True), AND)
                            entry.negate()
                            self.where.add(entry, AND)
                            break
                elif not (lookup_type == 'in'
                            and not hasattr(value, 'as_sql')
                            and not hasattr(value, '_as_sql')
                            and not value) and field.null:
                    # Leaky abstraction artifact: We have to specifically
                    # exclude the "foo__in=[]" case from this handling, because
                    # it's short-circuited in the Where class.
                    # We also need to handle the case where a subquery is provided
                    self.where.add((Constraint(alias, col, None), 'isnull', False), AND)

        if can_reuse is not None:
            can_reuse.update(join_list)
        if process_extras:
            for filter in extra_filters:
                self.add_filter(filter, negate=negate, can_reuse=can_reuse,
                        process_extras=False)
    def add_q(self, q_object, used_aliases=None):
        """
        Adds a Q-object to the current filter.

        Can also be used to add anything that has an 'add_to_query()' method.
        """
        if used_aliases is None:
            used_aliases = self.used_aliases
        if hasattr(q_object, 'add_to_query'):
            # Complex custom objects are responsible for adding themselves.
            q_object.add_to_query(self, used_aliases)
        else:
            if self.where and q_object.connector != AND and len(q_object) > 1:
                # Bracket the existing WHERE contents so the incoming
                # disjunction does not bleed into conditions already added.
                self.where.start_subtree(AND)
                subtree = True
            else:
                subtree = False
            connector = AND
            for child in q_object.children:
                if connector == OR:
                    # Snapshot refcounts so aliases introduced by this child
                    # can be detected (and promoted) afterwards.
                    refcounts_before = self.alias_refcount.copy()
                self.where.start_subtree(connector)
                if isinstance(child, Node):
                    self.add_q(child, used_aliases)
                else:
                    self.add_filter(child, connector, q_object.negated,
                            can_reuse=used_aliases)
                self.where.end_subtree()
                if connector == OR:
                    # Aliases that were newly added or not used at all need to
                    # be promoted to outer joins if they are nullable relations.
                    # (they shouldn't turn the whole conditional into the empty
                    # set just because they don't match anything).
                    self.promote_unused_aliases(refcounts_before, used_aliases)
                connector = q_object.connector
            if q_object.negated:
                self.where.negate()
            if subtree:
                self.where.end_subtree()
        if self.filter_is_sticky:
            self.used_aliases = used_aliases
    def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
            allow_explicit_fk=False, can_reuse=None, negate=False,
            process_extras=True):
        """
        Compute the necessary table joins for the passage through the fields
        given in 'names'. 'opts' is the Options class for the current model
        (which gives the table we are joining to), 'alias' is the alias for the
        table we are joining to. If dupe_multis is True, any many-to-many or
        many-to-one joins will always create a new alias (necessary for
        disjunctive filters). If can_reuse is not None, it's a list of aliases
        that can be reused in these joins (nothing else can be reused in this
        case). Finally, 'negate' is used in the same sense as for add_filter()
        -- it indicates an exclude() filter, or something similar. It is only
        passed in here so that it can be passed to a field's extra_filter() for
        customised behaviour.

        Returns the final field involved in the join, the target database
        column (used for any 'where' constraint), the final 'opts' value and the
        list of tables joined.
        """
        joins = [alias]
        last = [0]
        dupe_set = set()
        exclusions = set()
        extra_filters = []
        for pos, name in enumerate(names):
            # 'int_alias' is only bound once an m2m join has created an
            # intermediary table; NameError signals "no such join yet".
            try:
                exclusions.add(int_alias)
            except NameError:
                pass
            exclusions.add(alias)
            last.append(len(joins))
            if name == 'pk':
                name = opts.pk.name

            try:
                field, model, direct, m2m = opts.get_field_by_name(name)
            except FieldDoesNotExist:
                for f in opts.fields:
                    if allow_explicit_fk and name == f.attname:
                        # XXX: A hack to allow foo_id to work in values() for
                        # backwards compatibility purposes. If we dropped that
                        # feature, this could be removed.
                        field, model, direct, m2m = opts.get_field_by_name(f.name)
                        break
                else:
                    names = opts.get_all_field_names() + self.aggregate_select.keys()
                    raise FieldError("Cannot resolve keyword %r into field. "
                            "Choices are: %s" % (name, ", ".join(names)))

            if not allow_many and (m2m or not direct):
                # A many-valued relation where it isn't allowed: release the
                # references taken so far and let the caller restructure.
                for alias in joins:
                    self.unref_alias(alias)
                raise MultiJoin(pos + 1)
            if model:
                # The field lives on a base class of the current model.
                # Skip the chain of proxy to the concrete proxied model
                proxied_model = get_proxied_model(opts)

                for int_model in opts.get_base_chain(model):
                    if int_model is proxied_model:
                        opts = int_model._meta
                    else:
                        lhs_col = opts.parents[int_model].column
                        dedupe = lhs_col in opts.duplicate_targets
                        if dedupe:
                            exclusions.update(self.dupe_avoidance.get(
                                (id(opts), lhs_col), ()))
                            dupe_set.add((opts, lhs_col))
                        opts = int_model._meta
                        alias = self.join((alias, opts.db_table, lhs_col,
                                opts.pk.column), exclusions=exclusions)
                        joins.append(alias)
                        exclusions.add(alias)
                        for (dupe_opts, dupe_col) in dupe_set:
                            self.update_dupe_avoidance(dupe_opts, dupe_col,
                                    alias)
            cached_data = opts._join_cache.get(name)
            orig_opts = opts
            dupe_col = direct and field.column or field.field.column
            dedupe = dupe_col in opts.duplicate_targets
            if dupe_set or dedupe:
                if dedupe:
                    dupe_set.add((opts, dupe_col))
                exclusions.update(self.dupe_avoidance.get((id(opts), dupe_col),
                        ()))

            if process_extras and hasattr(field, 'extra_filters'):
                extra_filters.extend(field.extra_filters(names, pos, negate))
            if direct:
                if m2m:
                    # Many-to-many field defined on the current model.
                    if cached_data:
                        (table1, from_col1, to_col1, table2, from_col2,
                                to_col2, opts, target) = cached_data
                    else:
                        table1 = field.m2m_db_table()
                        from_col1 = opts.pk.column
                        to_col1 = field.m2m_column_name()
                        opts = field.rel.to._meta
                        table2 = opts.db_table
                        from_col2 = field.m2m_reverse_name()
                        to_col2 = opts.pk.column
                        target = opts.pk
                        orig_opts._join_cache[name] = (table1, from_col1,
                                to_col1, table2, from_col2, to_col2, opts,
                                target)

                    int_alias = self.join((alias, table1, from_col1, to_col1),
                            dupe_multis, exclusions, nullable=True,
                            reuse=can_reuse)
                    if int_alias == table2 and from_col2 == to_col2:
                        joins.append(int_alias)
                        alias = int_alias
                    else:
                        alias = self.join(
                                (int_alias, table2, from_col2, to_col2),
                                dupe_multis, exclusions, nullable=True,
                                reuse=can_reuse)
                        joins.extend([int_alias, alias])
                elif field.rel:
                    # One-to-one or many-to-one field
                    if cached_data:
                        (table, from_col, to_col, opts, target) = cached_data
                    else:
                        opts = field.rel.to._meta
                        target = field.rel.get_related_field()
                        table = opts.db_table
                        from_col = field.column
                        to_col = target.column
                        orig_opts._join_cache[name] = (table, from_col, to_col,
                                opts, target)

                    alias = self.join((alias, table, from_col, to_col),
                            exclusions=exclusions, nullable=field.null)
                    joins.append(alias)
                else:
                    # Non-relation fields.
                    target = field
                    break
            else:
                orig_field = field
                field = field.field
                if m2m:
                    # Many-to-many field defined on the target model.
                    if cached_data:
                        (table1, from_col1, to_col1, table2, from_col2,
                                to_col2, opts, target) = cached_data
                    else:
                        table1 = field.m2m_db_table()
                        from_col1 = opts.pk.column
                        to_col1 = field.m2m_reverse_name()
                        opts = orig_field.opts
                        table2 = opts.db_table
                        from_col2 = field.m2m_column_name()
                        to_col2 = opts.pk.column
                        target = opts.pk
                        orig_opts._join_cache[name] = (table1, from_col1,
                                to_col1, table2, from_col2, to_col2, opts,
                                target)

                    int_alias = self.join((alias, table1, from_col1, to_col1),
                            dupe_multis, exclusions, nullable=True,
                            reuse=can_reuse)
                    alias = self.join((int_alias, table2, from_col2, to_col2),
                            dupe_multis, exclusions, nullable=True,
                            reuse=can_reuse)
                    joins.extend([int_alias, alias])
                else:
                    # One-to-many field (ForeignKey defined on the target model)
                    if cached_data:
                        (table, from_col, to_col, opts, target) = cached_data
                    else:
                        local_field = opts.get_field_by_name(
                                field.rel.field_name)[0]
                        opts = orig_field.opts
                        table = opts.db_table
                        from_col = local_field.column
                        to_col = field.column
                        target = opts.pk
                        orig_opts._join_cache[name] = (table, from_col, to_col,
                                opts, target)

                    alias = self.join((alias, table, from_col, to_col),
                            dupe_multis, exclusions, nullable=True,
                            reuse=can_reuse)
                    joins.append(alias)

            for (dupe_opts, dupe_col) in dupe_set:
                # Prefer the m2m intermediary alias when one exists for this
                # step; otherwise fall back to the final alias.
                try:
                    self.update_dupe_avoidance(dupe_opts, dupe_col, int_alias)
                except NameError:
                    self.update_dupe_avoidance(dupe_opts, dupe_col, alias)

        if pos != len(names) - 1:
            # The loop 'break'-ed on a non-relation field before consuming
            # every name part -- the remaining part isn't traversable.
            if pos == len(names) - 2:
                raise FieldError("Join on field %r not permitted. Did you misspell %r for the lookup type?" % (name, names[pos + 1]))
            else:
                raise FieldError("Join on field %r not permitted." % name)

        return field, target, opts, joins, last, extra_filters
    def trim_joins(self, target, join_list, last, trim):
        """
        Sometimes joins at the end of a multi-table sequence can be trimmed. If
        the final join is against the same column as we are comparing against,
        and is an inner join, we can go back one step in a join chain and
        compare against the LHS of the join instead (and then repeat the
        optimization). The result, potentially, involves less table joins.

        The 'target' parameter is the final field being joined to, 'join_list'
        is the full list of join aliases.

        The 'last' list contains offsets into 'join_list', corresponding to
        each component of the filter. Many-to-many relations, for example, add
        two tables to the join list and we want to deal with both tables the
        same way, so 'last' has an entry for the first of the two tables and
        then the table immediately after the second table, in that case.

        The 'trim' parameter forces the final piece of the join list to be
        trimmed before anything. See the documentation of add_filter() for
        details about this.

        Returns the final active column and table alias and the new active
        join_list.
        """
        final = len(join_list)
        penultimate = last.pop()
        if penultimate == final:
            penultimate = last.pop()
        if trim and len(join_list) > 1:
            # Forced trim: drop the final filter component's joins entirely
            # and compare against the LHS column of the first dropped join.
            extra = join_list[penultimate:]
            join_list = join_list[:penultimate]
            final = penultimate
            penultimate = last.pop()
            col = self.alias_map[extra[0]][LHS_JOIN_COL]
            for alias in extra:
                self.unref_alias(alias)
        else:
            col = target.column
        alias = join_list[-1]
        while final > 1:
            join = self.alias_map[alias]
            # Only trim inner joins whose RHS column is the one compared
            # against; anything else would change the query's meaning.
            if col != join[RHS_JOIN_COL] or join[JOIN_TYPE] != self.INNER:
                break
            self.unref_alias(alias)
            alias = join[LHS_ALIAS]
            col = join[LHS_JOIN_COL]
            join_list = join_list[:-1]
            final -= 1
            if final == penultimate:
                penultimate = last.pop()
        return col, alias, join_list
def update_dupe_avoidance(self, opts, col, alias):
"""
For a column that is one of multiple pointing to the same table, update
the internal data structures to note that this alias shouldn't be used
for those other columns.
"""
ident = id(opts)
for name in opts.duplicate_targets[col]:
try:
self.dupe_avoidance[ident, name].add(alias)
except KeyError:
self.dupe_avoidance[ident, name] = set([alias])
    def split_exclude(self, filter_expr, prefix, can_reuse):
        """
        When doing an exclude against any kind of N-to-many relation, we need
        to use a subquery. This method constructs the nested query, given the
        original exclude filter (filter_expr) and the portion up to the first
        N-to-many relation field.
        """
        query = Query(self.model)
        query.add_filter(filter_expr, can_reuse=can_reuse)
        # Give the inner query its own alias namespace so its aliases cannot
        # clash with the outer query's.
        query.bump_prefix()
        query.clear_ordering(True)
        query.set_start(prefix)
        self.add_filter(('%s__in' % prefix, query), negate=True, trim=True,
                can_reuse=can_reuse)

        # If there's more than one join in the inner query (before any initial
        # bits were trimmed -- which means the last active table is more than
        # two places into the alias list), we need to also handle the
        # possibility that the earlier joins don't match anything by adding a
        # comparison to NULL (e.g. in
        # Tag.objects.exclude(parent__parent__name='t1'), a tag with no parent
        # would otherwise be overlooked).
        # NOTE(review): this relies on alias_refcount iterating in alias
        # creation order -- verify on dict implementations without ordered
        # iteration.
        active_positions = [pos for (pos, count) in
                enumerate(query.alias_refcount.itervalues()) if count]
        if active_positions[-1] > 1:
            self.add_filter(('%s__isnull' % prefix, False), negate=True,
                    trim=True, can_reuse=can_reuse)
def set_limits(self, low=None, high=None):
"""
Adjusts the limits on the rows retrieved. We use low/high to set these,
as it makes it more Pythonic to read and write. When the SQL query is
created, they are converted to the appropriate offset and limit values.
Any limits passed in here are applied relative to the existing
constraints. So low is added to the current low value and both will be
clamped to any existing high value.
"""
if high is not None:
if self.high_mark is not None:
self.high_mark = min(self.high_mark, self.low_mark + high)
else:
self.high_mark = self.low_mark + high
if low is not None:
if self.high_mark is not None:
self.low_mark = min(self.high_mark, self.low_mark + low)
else:
self.low_mark = self.low_mark + low
def clear_limits(self):
"""
Clears any existing limits.
"""
self.low_mark, self.high_mark = 0, None
def can_filter(self):
"""
Returns True if adding filters to this instance is still possible.
Typically, this means no limits or offsets have been put on the results.
"""
return not self.low_mark and self.high_mark is None
def clear_select_fields(self):
"""
Clears the list of fields to select (but not extra_select columns).
Some queryset types completely replace any existing list of select
columns.
"""
self.select = []
self.select_fields = []
    def add_fields(self, field_names, allow_m2m=True):
        """
        Adds the given (model) fields to the select set. The field names are
        added in the order specified.
        """
        alias = self.get_initial_alias()
        opts = self.get_meta()

        try:
            for name in field_names:
                field, target, u2, joins, u3, u4 = self.setup_joins(
                        name.split(LOOKUP_SEP), opts, alias, False, allow_m2m,
                        True)
                final_alias = joins[-1]
                col = target.column
                if len(joins) > 1:
                    # If the last join is an identity join on the selected
                    # column, step back one table (cheap manual trim).
                    join = self.alias_map[final_alias]
                    if col == join[RHS_JOIN_COL]:
                        self.unref_alias(final_alias)
                        final_alias = join[LHS_ALIAS]
                        col = join[LHS_JOIN_COL]
                        joins = joins[:-1]
                self.promote_alias_chain(joins[1:])
                self.select.append((final_alias, col))
                self.select_fields.append(field)
        except MultiJoin:
            # 'name' here is the field from the loop iteration that raised.
            raise FieldError("Invalid field name: '%s'" % name)
        except FieldError:
            names = opts.get_all_field_names() + self.extra.keys() + self.aggregate_select.keys()
            names.sort()
            raise FieldError("Cannot resolve keyword %r into field. "
                    "Choices are: %s" % (name, ", ".join(names)))

        self.remove_inherited_models()
def add_ordering(self, *ordering):
"""
Adds items from the 'ordering' sequence to the query's "order by"
clause. These items are either field names (not column names) --
possibly with a direction prefix ('-' or '?') -- or ordinals,
corresponding to column positions in the 'select' list.
If 'ordering' is empty, all ordering is cleared from the query.
"""
errors = []
for item in ordering:
if not ORDER_PATTERN.match(item):
errors.append(item)
if errors:
raise FieldError('Invalid order_by arguments: %s' % errors)
if ordering:
self.order_by.extend(ordering)
else:
self.default_ordering = False
def clear_ordering(self, force_empty=False):
"""
Removes any ordering settings. If 'force_empty' is True, there will be
no ordering in the resulting query (not even the model's default).
"""
self.order_by = []
self.extra_order_by = ()
if force_empty:
self.default_ordering = False
def set_group_by(self):
"""
Expands the GROUP BY clause required by the query.
This will usually be the set of all non-aggregate fields in the
return data. If the database backend supports grouping by the
primary key, and the query would be equivalent, the optimization
will be made automatically.
"""
self.group_by = []
for sel in self.select:
self.group_by.append(sel)
    def add_count_column(self):
        """
        Converts the query to do count(...) or count(distinct(pk)) in order to
        get its size.
        """
        if not self.distinct:
            if not self.select:
                # Nothing selected: count every row.
                count = self.aggregates_module.Count('*', is_summary=True)
            else:
                assert len(self.select) == 1, \
                        "Cannot add count col with multiple cols in 'select': %r" % self.select
                count = self.aggregates_module.Count(self.select[0])
        else:
            opts = self.model._meta
            if not self.select:
                # count(distinct pk) over the model's own table.
                count = self.aggregates_module.Count((self.join((None, opts.db_table, None, None)), opts.pk.column),
                                         is_summary=True, distinct=True)
            else:
                # Because of SQL portability issues, multi-column, distinct
                # counts need a sub-query -- see get_count() for details.
                assert len(self.select) == 1, \
                        "Cannot add count col with multiple cols in 'select'."

                count = self.aggregates_module.Count(self.select[0], distinct=True)
            # Distinct handling is done in Count(), so don't do it at this
            # level.
            self.distinct = False

        # Set only aggregate to be the count column.
        # Clear out the select cache to reflect the new unmasked aggregates.
        self.aggregates = {None: count}
        self.set_aggregate_mask(None)
        self.group_by = None
def add_select_related(self, fields):
"""
Sets up the select_related data structure so that we only select
certain related models (as opposed to all models, when
self.select_related=True).
"""
field_dict = {}
for field in fields:
d = field_dict
for part in field.split(LOOKUP_SEP):
d = d.setdefault(part, {})
self.select_related = field_dict
self.related_select_cols = []
self.related_select_fields = []
    def add_extra(self, select, select_params, where, params, tables, order_by):
        """
        Adds data to the various extra_* attributes for user-created additions
        to the query.
        """
        if select:
            # We need to pair any placeholder markers in the 'select'
            # dictionary with their parameters in 'select_params' so that
            # subsequent updates to the select dictionary also adjust the
            # parameters appropriately.
            select_pairs = SortedDict()
            if select_params:
                param_iter = iter(select_params)
            else:
                param_iter = iter([])
            for name, entry in select.items():
                entry = force_unicode(entry)
                entry_params = []
                # Consume one parameter for every "%s" placeholder found in
                # this entry's SQL snippet.
                pos = entry.find("%s")
                while pos != -1:
                    entry_params.append(param_iter.next())
                    pos = entry.find("%s", pos + 2)
                select_pairs[name] = (entry, entry_params)
            # This is order preserving, since self.extra_select is a SortedDict.
            self.extra.update(select_pairs)
        if where or params:
            self.where.add(ExtraWhere(where, params), AND)
        if tables:
            self.extra_tables += tuple(tables)
        if order_by:
            self.extra_order_by = order_by
def clear_deferred_loading(self):
"""
Remove any fields from the deferred loading set.
"""
self.deferred_loading = (set(), True)
def add_deferred_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
exclude from loading from the database when automatic column selection
is done. The new field names are added to any existing field names that
are deferred (or removed from any existing field names that are marked
as the only ones for immediate loading).
"""
# Fields on related models are stored in the literal double-underscore
# format, so that we can use a set datastructure. We do the foo__bar
# splitting and handling when computing the SQL colum names (as part of
# get_columns()).
existing, defer = self.deferred_loading
if defer:
# Add to existing deferred names.
self.deferred_loading = existing.union(field_names), True
else:
# Remove names from the set of any existing "immediate load" names.
self.deferred_loading = existing.difference(field_names), False
def add_immediate_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
retrieve when the SQL is executed ("immediate loading" fields). The
field names replace any existing immediate loading field names. If
there are field names already specified for deferred loading, those
names are removed from the new field_names before storing the new names
for immediate loading. (That is, immediate loading overrides any
existing immediate values, but respects existing deferrals.)
"""
existing, defer = self.deferred_loading
if defer:
# Remove any existing deferred names from the current set before
# setting the new names.
self.deferred_loading = set(field_names).difference(existing), False
else:
# Replace any existing "immediate load" field names.
self.deferred_loading = set(field_names), False
def get_loaded_field_names(self):
"""
If any fields are marked to be deferred, returns a dictionary mapping
models to a set of names in those fields that will be loaded. If a
model is not in the returned dictionary, none of it's fields are
deferred.
If no fields are marked for deferral, returns an empty dictionary.
"""
collection = {}
self.deferred_to_data(collection, self.get_loaded_field_names_cb)
return collection
def get_loaded_field_names_cb(self, target, model, fields):
"""
Callback used by get_deferred_field_names().
"""
target[model] = set([f.name for f in fields])
def set_aggregate_mask(self, names):
"Set the mask of aggregates that will actually be returned by the SELECT"
if names is None:
self.aggregate_select_mask = None
else:
self.aggregate_select_mask = set(names)
self._aggregate_select_cache = None
def set_extra_mask(self, names):
"""
Set the mask of extra select items that will be returned by SELECT,
we don't actually remove them from the Query since they might be used
later
"""
if names is None:
self.extra_select_mask = None
else:
self.extra_select_mask = set(names)
self._extra_select_cache = None
    def _aggregate_select(self):
        """The SortedDict of aggregate columns that are not masked, and should
        be used in the SELECT clause.

        This result is cached for optimization purposes.
        """
        if self._aggregate_select_cache is not None:
            # Cache is valid until set_aggregate_mask() clears it.
            return self._aggregate_select_cache
        elif self.aggregate_select_mask is not None:
            self._aggregate_select_cache = SortedDict([
                (k,v) for k,v in self.aggregates.items()
                if k in self.aggregate_select_mask
            ])
            return self._aggregate_select_cache
        else:
            # No mask set: every aggregate is selected; nothing to cache.
            return self.aggregates
    aggregate_select = property(_aggregate_select)
    def _extra_select(self):
        """The SortedDict of extra-select entries that are not masked out by
        set_extra_mask(); the result is cached until the mask changes."""
        if self._extra_select_cache is not None:
            return self._extra_select_cache
        elif self.extra_select_mask is not None:
            self._extra_select_cache = SortedDict([
                (k,v) for k,v in self.extra.items()
                if k in self.extra_select_mask
            ])
            return self._extra_select_cache
        else:
            # No mask set: every extra entry is selected; nothing to cache.
            return self.extra
    extra_select = property(_extra_select)
def set_start(self, start):
    """
    Sets the table from which to start joining. The start position is
    specified by the related attribute from the base model. This will
    automatically set to the select column to be the column linked from the
    previous table.

    This method is primarily for internal use and the error checking isn't
    as friendly as add_filter(). Mostly useful for querying directly
    against the join table of many-to-many relation in a subquery.
    """
    opts = self.model._meta
    alias = self.get_initial_alias()
    field, col, opts, joins, last, extra = self.setup_joins(
        start.split(LOOKUP_SEP), opts, alias, False)
    # joins[1] is the first joined table; its LHS join column is what we
    # ultimately want to select from the starting table.
    select_col = self.alias_map[joins[1]][LHS_JOIN_COL]
    select_alias = alias

    # The call to setup_joins added an extra reference to everything in
    # joins. Reverse that.
    for alias in joins:
        self.unref_alias(alias)

    # We might be able to trim some joins from the front of this query,
    # providing that we only traverse "always equal" connections (i.e. rhs
    # is *always* the same value as lhs).
    for alias in joins[1:]:
        join_info = self.alias_map[alias]
        if (join_info[LHS_JOIN_COL] != select_col
                or join_info[JOIN_TYPE] != self.INNER):
            break
        # Safe to trim: move the selection one join to the right.
        self.unref_alias(select_alias)
        select_alias = join_info[RHS_ALIAS]
        select_col = join_info[RHS_JOIN_COL]
    self.select = [(select_alias, select_col)]
    self.remove_inherited_models()
def get_order_dir(field, default='ASC'):
    """
    Returns the field name and direction for an order specification. For
    example, '-foo' is returned as ('foo', 'DESC').

    The 'default' param is used to indicate which way no prefix (or a '+'
    prefix) should sort. The '-' prefix always sorts the opposite way.
    """
    # ORDER_DIR maps the default direction to a (same, opposite) pair.
    directions = ORDER_DIR[default]
    descending = field[0] == '-'
    return (field[1:], directions[1]) if descending else (field, directions[0])
def setup_join_cache(sender, **kwargs):
    """
    The information needed to join between model fields is something that is
    invariant over the life of the model, so we cache it in the model's Options
    class, rather than recomputing it all the time.

    This method initialises the (empty) cache when the model is created.
    """
    sender._meta._join_cache = {}

# Initialise the per-model join cache as soon as each model class is prepared.
signals.class_prepared.connect(setup_join_cache)
def add_to_dict(data, key, value):
    """
    A helper function to add "value" to the set of values for "key", whether or
    not "key" already exists.
    """
    # setdefault() creates the set on first use and returns the existing one
    # afterwards, so a single statement replaces the explicit membership test.
    data.setdefault(key, set()).add(value)
def get_proxied_model(opts):
    """Follow the proxy chain starting at ``opts``.

    Returns the final non-proxy model, or None when ``opts`` does not
    belong to a proxy model at all.
    """
    proxied = None
    current = opts
    while current.proxy:
        proxied = current.proxy_for_model
        current = proxied._meta
    return proxied
| mit |
Timus1712/boto | boto/vpc/customergateway.py | 3 | 1955 | # Copyright (c) 2009-2010 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents a Customer Gateway
"""
from boto.ec2.ec2object import TaggedEC2Object
class CustomerGateway(TaggedEC2Object):
    """A VPC customer gateway, populated from the EC2 XML API response."""

    def __init__(self, connection=None):
        TaggedEC2Object.__init__(self, connection)
        self.id = None
        self.type = None
        self.state = None
        self.ip_address = None
        self.bgp_asn = None

    def __repr__(self):
        return 'CustomerGateway:%s' % self.id

    def endElement(self, name, value, connection):
        # Translate the EC2 XML element names to our attribute names;
        # any unrecognised element is stored verbatim under its own name.
        renamed = {
            'customerGatewayId': 'id',
            'ipAddress': 'ip_address',
            'type': 'type',
            'state': 'state',
            'bgpAsn': 'bgp_asn',
        }
        setattr(self, renamed.get(name, name), value)
| mit |
wizztjh/three.js | utils/exporters/blender/addons/io_three/exporter/material.py | 124 | 3600 | from .. import constants, logger
from . import base_classes, utilities, api
class Material(base_classes.BaseNode):
    """Class that wraps material nodes"""
    def __init__(self, node, parent):
        logger.debug("Material().__init__(%s)", node)
        base_classes.BaseNode.__init__(self, node, parent,
                                       constants.MATERIAL)
        # Common attributes first; they also determine the material type,
        # which decides whether the phong-only attributes are needed.
        self._common_attributes()
        if self[constants.TYPE] == constants.THREE_PHONG:
            self._phong_attributes()

        # Texture maps are only exported when the exporter options ask for them.
        textures = self.parent.options.get(constants.MAPS)
        if textures:
            self._update_maps()

    def _common_attributes(self):
        """Parse the common material attributes"""
        logger.debug('Material()._common_attributes()')
        # Map Blender shader types to the corresponding three.js material type.
        dispatch = {
            constants.PHONG: constants.THREE_PHONG,
            constants.LAMBERT: constants.THREE_LAMBERT,
            constants.BASIC: constants.THREE_BASIC
        }
        shader_type = api.material.type(self.node)
        self[constants.TYPE] = dispatch[shader_type]

        # Colours are packed into single integers for the three.js JSON format.
        diffuse = api.material.diffuse_color(self.node)
        self[constants.COLOR] = utilities.rgb2int(diffuse)

        # Basic materials have no ambient/emissive components.
        if self[constants.TYPE] != constants.THREE_BASIC:
            ambient = api.material.ambient_color(self.node)
            self[constants.AMBIENT] = utilities.rgb2int(ambient)

            emissive = api.material.emissive_color(self.node)
            self[constants.EMISSIVE] = utilities.rgb2int(emissive)

        vertex_color = api.material.use_vertex_colors(self.node)
        self[constants.VERTEX_COLORS] = vertex_color

        self[constants.BLENDING] = api.material.blending(self.node)
        self[constants.DEPTH_TEST] = api.material.depth_test(self.node)
        self[constants.DEPTH_WRITE] = api.material.depth_write(self.node)

    def _phong_attributes(self):
        """Parse phong specific attributes"""
        logger.debug("Material()._phong_attributes()")
        specular = api.material.specular_color(self.node)
        self[constants.SPECULAR] = utilities.rgb2int(specular)
        self[constants.SHININESS] = api.material.specular_coef(self.node)

    def _update_maps(self):
        """Parses maps/textures and updates the textures array
        with any new nodes found.
        """
        logger.debug("Material()._update_maps()")
        # Simple maps: each entry is (lookup function, output key).
        mapping = (
            (api.material.diffuse_map, constants.MAP),
            (api.material.specular_map, constants.SPECULAR_MAP),
            (api.material.light_map, constants.LIGHT_MAP)
        )

        for func, key in mapping:
            map_node = func(self.node)
            if map_node:
                logger.info('Found map node %s for %s', map_node, key)
                # scene.texture() registers the texture if it is new and
                # returns its instance; only the UUID is stored here.
                tex_inst = self.scene.texture(map_node.name)
                self[key] = tex_inst[constants.UUID]

        if self[constants.TYPE] == constants.THREE_PHONG:
            # Phong-only maps additionally carry a scale value.
            mapping = (
                (api.material.bump_map, constants.BUMP_MAP,
                 constants.BUMP_SCALE, api.material.bump_scale),
                (api.material.normal_map, constants.NORMAL_MAP,
                 constants.NORMAL_SCALE, api.material.normal_scale)
            )

            for func, map_key, scale_key, scale_func in mapping:
                map_node = func(self.node)
                if not map_node:
                    continue
                logger.info("Found map node %s for %s", map_node, map_key)
                tex_inst = self.scene.texture(map_node.name)
                self[map_key] = tex_inst[constants.UUID]
                self[scale_key] = scale_func(self.node)
| mit |
amith01994/intellij-community | python/lib/Lib/re.py | 115 | 12726 | #
# Secret Labs' Regular Expression Engine
#
# re-compatible interface for the sre matching engine
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# This version of the SRE library can be redistributed under CNRI's
# Python 1.6 license. For any other use, please contact Secret Labs
# AB (info@pythonware.com).
#
# Portions of this engine have been developed in cooperation with
# CNRI. Hewlett-Packard provided funding for 1.6 integration and
# other compatibility work.
#
r"""Support for regular expressions (RE).
This module provides regular expression matching operations similar to
those found in Perl. It supports both 8-bit and Unicode strings; both
the pattern and the strings being processed can contain null bytes and
characters outside the US ASCII range.
Regular expressions can contain both special and ordinary characters.
Most ordinary characters, like "A", "a", or "0", are the simplest
regular expressions; they simply match themselves. You can
concatenate ordinary characters, so last matches the string 'last'.
The special characters are:
"." Matches any character except a newline.
"^" Matches the start of the string.
"$" Matches the end of the string or just before the newline at
the end of the string.
"*" Matches 0 or more (greedy) repetitions of the preceding RE.
Greedy means that it will match as many repetitions as possible.
"+" Matches 1 or more (greedy) repetitions of the preceding RE.
"?" Matches 0 or 1 (greedy) of the preceding RE.
*?,+?,?? Non-greedy versions of the previous three special characters.
{m,n} Matches from m to n repetitions of the preceding RE.
{m,n}? Non-greedy version of the above.
"\\" Either escapes special characters or signals a special sequence.
[] Indicates a set of characters.
A "^" as the first character indicates a complementing set.
"|" A|B, creates an RE that will match either A or B.
(...) Matches the RE inside the parentheses.
The contents can be retrieved or matched later in the string.
(?iLmsux) Set the I, L, M, S, U, or X flag for the RE (see below).
(?:...) Non-grouping version of regular parentheses.
(?P<name>...) The substring matched by the group is accessible by name.
(?P=name) Matches the text matched earlier by the group named name.
(?#...) A comment; ignored.
(?=...) Matches if ... matches next, but doesn't consume the string.
(?!...) Matches if ... doesn't match next.
(?<=...) Matches if preceded by ... (must be fixed length).
(?<!...) Matches if not preceded by ... (must be fixed length).
(?(id/name)yes|no) Matches yes pattern if the group with id/name matched,
the (optional) no pattern otherwise.
The special sequences consist of "\\" and a character from the list
below. If the ordinary character is not on the list, then the
resulting RE will match the second character.
\number Matches the contents of the group of the same number.
\A Matches only at the start of the string.
\Z Matches only at the end of the string.
\b Matches the empty string, but only at the start or end of a word.
\B Matches the empty string, but not at the start or end of a word.
\d Matches any decimal digit; equivalent to the set [0-9].
\D Matches any non-digit character; equivalent to the set [^0-9].
\s Matches any whitespace character; equivalent to [ \t\n\r\f\v].
\S Matches any non-whitespace character; equiv. to [^ \t\n\r\f\v].
\w Matches any alphanumeric character; equivalent to [a-zA-Z0-9_].
With LOCALE, it will match the set [0-9_] plus characters defined
as letters for the current locale.
\W Matches the complement of \w.
\\ Matches a literal backslash.
This module exports the following functions:
match Match a regular expression pattern to the beginning of a string.
search Search a string for the presence of a pattern.
sub Substitute occurrences of a pattern found in a string.
subn Same as sub, but also return the number of substitutions made.
split Split a string by the occurrences of a pattern.
findall Find all occurrences of a pattern in a string.
finditer Return an iterator yielding a match object for each match.
compile Compile a pattern into a RegexObject.
purge Clear the regular expression cache.
escape Backslash all non-alphanumerics in a string.
Some of the functions in this module takes flags as optional parameters:
I IGNORECASE Perform case-insensitive matching.
L LOCALE Make \w, \W, \b, \B, dependent on the current locale.
M MULTILINE "^" matches the beginning of lines (after a newline)
as well as the string.
"$" matches the end of lines (before a newline) as well
as the end of the string.
S DOTALL "." matches any character at all, including the newline.
X VERBOSE Ignore whitespace and comments for nicer looking RE's.
U UNICODE Make \w, \W, \b, \B, dependent on the Unicode locale.
This module also defines an exception 'error'.
"""
import sys
import sre_compile
import sre_parse
# public symbols
__all__ = [ "match", "search", "sub", "subn", "split", "findall",
    "compile", "purge", "template", "escape", "I", "L", "M", "S", "X",
    "U", "IGNORECASE", "LOCALE", "MULTILINE", "DOTALL", "VERBOSE",
    "UNICODE", "error" ]

# Version string kept in step with the SRE engine this module wraps.
__version__ = "2.2.1"

# flags (each exposed under both its short and long name)
I = IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE # ignore case
L = LOCALE = sre_compile.SRE_FLAG_LOCALE # assume current 8-bit locale
U = UNICODE = sre_compile.SRE_FLAG_UNICODE # assume unicode locale
M = MULTILINE = sre_compile.SRE_FLAG_MULTILINE # make anchors look for newline
S = DOTALL = sre_compile.SRE_FLAG_DOTALL # make dot match newline
X = VERBOSE = sre_compile.SRE_FLAG_VERBOSE # ignore whitespace and comments

# sre extensions (experimental, don't rely on these)
T = TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE # disable backtracking
DEBUG = sre_compile.SRE_FLAG_DEBUG # dump pattern after compilation

# sre exception
error = sre_compile.error
# --------------------------------------------------------------------
# public interface
def match(pattern, string, flags=0):
    """Try to apply the pattern at the start of the string, returning
    a match object, or None if no match was found."""
    compiled = _compile(pattern, flags)
    return compiled.match(string)
def search(pattern, string, flags=0):
    """Scan through string looking for a match to the pattern, returning
    a match object, or None if no match was found."""
    compiled = _compile(pattern, flags)
    return compiled.search(string)
def sub(pattern, repl, string, count=0):
    """Return the string obtained by replacing the leftmost
    non-overlapping occurrences of the pattern in string by the
    replacement repl.  repl can be either a string or a callable;
    if a callable, it's passed the match object and must return
    a replacement string to be used."""
    compiled = _compile(pattern, 0)
    return compiled.sub(repl, string, count)
def subn(pattern, repl, string, count=0):
    """Return a 2-tuple containing (new_string, number).
    new_string is the string obtained by replacing the leftmost
    non-overlapping occurrences of the pattern in the source
    string by the replacement repl.  number is the number of
    substitutions that were made. repl can be either a string or a
    callable; if a callable, it's passed the match object and must
    return a replacement string to be used."""
    compiled = _compile(pattern, 0)
    return compiled.subn(repl, string, count)
def split(pattern, string, maxsplit=0):
    """Split the source string by the occurrences of the pattern,
    returning a list containing the resulting substrings."""
    compiled = _compile(pattern, 0)
    return compiled.split(string, maxsplit)
def findall(pattern, string, flags=0):
    """Return a list of all non-overlapping matches in the string.

    If one or more groups are present in the pattern, return a
    list of groups; this will be a list of tuples if the pattern
    has more than one group.

    Empty matches are included in the result."""
    compiled = _compile(pattern, flags)
    return compiled.findall(string)
# finditer() relies on iterator support, which was introduced in Python 2.2
# (hex version 0x02020000), so it is only exported on new enough runtimes.
if sys.hexversion >= 0x02020000:
    __all__.append("finditer")
    def finditer(pattern, string, flags=0):
        """Return an iterator over all non-overlapping matches in the
        string.  For each match, the iterator returns a match object.

        Empty matches are included in the result."""
        return _compile(pattern, flags).finditer(string)
def compile(pattern, flags=0):
    "Compile a regular expression pattern, returning a pattern object."
    # Thin public wrapper around the caching _compile() helper.
    pattern_object = _compile(pattern, flags)
    return pattern_object
def purge():
    "Clear the regular expression cache"
    # Both the compiled-pattern and replacement-template caches are dropped.
    for cache in (_cache, _cache_repl):
        cache.clear()
def template(pattern, flags=0):
    "Compile a template pattern, returning a pattern object"
    # A template pattern is an ordinary pattern with the T flag forced on.
    template_flags = flags | T
    return _compile(pattern, template_flags)
# Characters that never need escaping; membership is tested per character.
_alphanum = dict.fromkeys(
    'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890', 1)

def escape(pattern):
    "Escape all non-alphanumeric characters in pattern."
    chars = list(pattern)
    for index, ch in enumerate(chars):
        if ch not in _alphanum:
            # NUL must be spelled out as an octal escape; a literal
            # backslash-NUL would terminate the pattern in some engines.
            chars[index] = "\\000" if ch == "\000" else "\\" + ch
    # pattern[:0] keeps the result the same string type as the input.
    return pattern[:0].join(chars)
# --------------------------------------------------------------------
# internals

# Caches for compiled patterns and parsed replacement templates,
# both bounded (crudely) by _MAXCACHE below.
_cache = {}
_cache_repl = {}

# The compiled-pattern type has no public name; capture it by compiling
# an empty pattern once.
_pattern_type = type(sre_compile.compile("", 0))

_MAXCACHE = 100
def _compile(*key):
    # internal: compile pattern
    # The cache key includes the pattern's type so str and unicode patterns
    # with equal values do not collide.
    cachekey = (type(key[0]),) + key
    p = _cache.get(cachekey)
    if p is not None:
        return p
    pattern, flags = key
    # Already-compiled patterns pass straight through (flags are ignored).
    if isinstance(pattern, _pattern_type):
        return pattern
    if not sre_compile.isstring(pattern):
        raise TypeError, "first argument must be string or compiled pattern"
    try:
        p = sre_compile.compile(pattern, flags)
    except error, v:
        raise error, v # invalid expression
    if len(_cache) >= _MAXCACHE:
        # Crude eviction: drop the whole cache instead of tracking LRU order.
        _cache.clear()
    _cache[cachekey] = p
    return p
def _compile_repl(*key):
    # internal: compile replacement pattern
    # key is (repl, pattern); the parsed template is cached per pair.
    p = _cache_repl.get(key)
    if p is not None:
        return p
    repl, pattern = key
    try:
        p = sre_parse.parse_template(repl, pattern)
    except error, v:
        raise error, v # invalid expression
    if len(_cache_repl) >= _MAXCACHE:
        # Same crude whole-cache eviction as _compile().
        _cache_repl.clear()
    _cache_repl[key] = p
    return p
def _expand(pattern, match, template):
# internal: match.expand implementation hook
template = sre_parse.parse_template(template, pattern)
return sre_parse.expand_template(template, match)
def _subx(pattern, template):
    # internal: pattern.sub/subn implementation helper
    # Returns either a literal replacement string (fast path) or a callable
    # that expands the parsed template for each match.
    template = _compile_repl(template, pattern)
    if not template[0] and len(template[1]) == 1:
        # literal replacement
        return template[1][0]
    # Renamed from `filter` to avoid shadowing the builtin of the same name.
    def _sub_filter(match, template=template):
        return sre_parse.expand_template(template, match)
    return _sub_filter
# register myself for pickling
import copy_reg

def _pickle(p):
    # Pickle a compiled pattern as "recompile from (source, flags)".
    return _compile, (p.pattern, p.flags)

copy_reg.pickle(_pattern_type, _pickle, _compile)
# --------------------------------------------------------------------
# experimental stuff (see python-dev discussions for details)
class Scanner:
    """Experimental lexical scanner built on the sre engine.

    ``lexicon`` is a sequence of (pattern, action) pairs.  All patterns are
    combined into a single alternation so one match call selects the first
    rule that matches at the current position.
    """
    def __init__(self, lexicon, flags=0):
        from sre_constants import BRANCH, SUBPATTERN
        self.lexicon = lexicon
        # combine phrases into a compound pattern
        p = []
        s = sre_parse.Pattern()
        s.flags = flags
        for phrase, action in lexicon:
            # Each rule gets its own numbered group so that m.lastindex
            # identifies which rule matched.
            p.append(sre_parse.SubPattern(s, [
                (SUBPATTERN, (len(p)+1, sre_parse.parse(phrase, flags))),
                ]))
        s.groups = len(p)+1
        p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
        self.scanner = sre_compile.compile(p)
    def scan(self, string):
        """Tokenise ``string``; return (results, unmatched remainder)."""
        result = []
        append = result.append
        match = self.scanner.scanner(string).match
        i = 0
        while 1:
            m = match()
            if not m:
                break
            j = m.end()
            if i == j:
                # A zero-length match would loop forever; stop scanning.
                break
            action = self.lexicon[m.lastindex-1][1]
            if callable(action):
                # Expose the match object to the action via self.match.
                self.match = m
                action = action(self, m.group())
            if action is not None:
                append(action)
            i = j
        return result, string[i:]
| apache-2.0 |
xasos/3DSnap | 3ds-shim/recv.py | 1 | 1744 | from flask import Flask, send_file
import requests
from PIL import Image
# Flask application serving converted stereo snaps to the 3DS client.
app = Flask(__name__)

# Base URL of the 3DSnap backend API.
api = 'http://159.203.98.104:3000/'
@app.route('/recv/<username>')
def recv(username):
    """Fetch the user's latest stereo snap and serve it as a raw .bin file.

    Downloads the left/right images advertised by the backend, strips any
    alpha channel, and concatenates the raw RGB bytes (left then right)
    into ``<username>.bin``.
    """
    # Ask the backend for the latest snap links for this user.
    r = requests.post(api + 'get', data={'username': username, 'client': '3ds'})
    j = r.json()
    print(j)
    left_img_name = download_file(j['link']['link_left'])
    right_img_name = download_file(j['link']['link_right'])
    left_img = Image.open(left_img_name)
    right_img = Image.open(right_img_name)
    # RGB has 3 bytes per pixel; anything else is assumed to carry an
    # alpha byte (RGBA) — presumably no other modes occur; TODO confirm.
    if left_img.mode == "RGB":
        pixelSize = 3
    else:
        pixelSize = 4
    (width, height) = left_img.size
    left_pixels = left_img.tobytes()
    left_pixels_post = ''
    # NOTE(review): chr(ord(...)) round-trips each byte; this looks like
    # Python 2 code where tobytes() yields a str of single-char bytes.
    for i in range(0, len(left_pixels) - 1, pixelSize):
        left_pixels_post += chr(ord(left_pixels[i+0]))
        left_pixels_post += chr(ord(left_pixels[i+1]))
        left_pixels_post += chr(ord(left_pixels[i+2]))
    right_pixels = right_img.tobytes()
    right_pixels_post = ''
    for i in range(0, len(right_pixels) - 1, pixelSize):
        right_pixels_post += chr(ord(right_pixels[i+0]))
        right_pixels_post += chr(ord(right_pixels[i+1]))
        right_pixels_post += chr(ord(right_pixels[i+2]))
    # Left eye first, then right eye, as one contiguous blob.
    with open(username + '.bin', 'wb') as f:
        f.write(left_pixels_post)
        f.write(right_pixels_post)
    return send_file(username + '.bin')
def download_file(url):
    """Stream ``url`` into a file in the current directory.

    The local filename is the last path component of the URL; it is
    returned so the caller can open the downloaded file.
    """
    local_filename = url.split('/')[-1]
    response = requests.get(url, stream=True)
    with open(local_filename, 'wb') as out:
        for chunk in response.iter_content(chunk_size=1024):
            if not chunk:  # skip keep-alive chunks
                continue
            out.write(chunk)
    return local_filename
if __name__ == '__main__':
    # Listen on all interfaces; debug mode is for development use only.
    app.run(host='0.0.0.0', port=6001, debug=True)
| mit |
s0lst1c3/eaphammer | local/hostapd-eaphammer/tests/hwsim/test_owe.py | 1 | 24088 | # Test cases for Opportunistic Wireless Encryption (OWE)
# Copyright (c) 2017, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import binascii
import logging
logger = logging.getLogger()
import time
import os
import struct
import hostapd
from wpasupplicant import WpaSupplicant
import hwsim_utils
from tshark import run_tshark
from utils import HwsimSkip, fail_test, alloc_fail, wait_fail_trigger
def test_owe(dev, apdev):
    """Opportunistic Wireless Encryption"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    params = {"ssid": "owe",
              "wpa": "2",
              "ieee80211w": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()

    # The scan result must advertise the OWE AKM before association.
    dev[0].scan_for_bss(bssid, freq="2412")
    bss = dev[0].get_bss(bssid)
    if "[WPA2-OWE-CCMP]" not in bss['flags']:
        raise Exception("OWE AKM not recognized: " + bss['flags'])

    dev[0].connect("owe", key_mgmt="OWE", ieee80211w="2",
                   scan_freq="2412")
    hwsim_utils.test_connectivity(dev[0], hapd)
    # Confirm the negotiated key management really was OWE.
    val = dev[0].get_status_field("key_mgmt")
    if val != "OWE":
        raise Exception("Unexpected key_mgmt: " + val)
def test_owe_groups(dev, apdev):
    """Opportunistic Wireless Encryption - DH groups"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    params = {"ssid": "owe",
              "wpa": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()

    dev[0].scan_for_bss(bssid, freq="2412")
    # Connect once per supported DH group (19=P-256, 20=P-384, 21=P-521).
    for group in [19, 20, 21]:
        dev[0].connect("owe", key_mgmt="OWE", owe_group=str(group))
        hwsim_utils.test_connectivity(dev[0], hapd)
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
        dev[0].dump_monitor()
def test_owe_pmksa_caching(dev, apdev):
    """Opportunistic Wireless Encryption and PMKSA caching"""
    run_owe_pmksa_caching(dev, apdev)

def test_owe_pmksa_caching_connect_cmd(dev, apdev):
    """Opportunistic Wireless Encryption and PMKSA caching using cfg80211 connect command"""
    # Same scenario, but driven through a wpa_supplicant instance that
    # forces the driver's connect command path.
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
    run_owe_pmksa_caching([wpas], apdev)
def run_owe_pmksa_caching(dev, apdev):
    """Shared body: verify PMKSA caching behavior with OWE."""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    params = {"ssid": "owe",
              "wpa": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()

    # Initial association establishes the first PMKSA entry.
    dev[0].scan_for_bss(bssid, freq="2412")
    id = dev[0].connect("owe", key_mgmt="OWE")
    hwsim_utils.test_connectivity(dev[0], hapd)
    pmksa = dev[0].get_pmksa(bssid)

    # Reconnect: the cached PMKSA should be reused (same PMKID).
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    dev[0].dump_monitor()
    dev[0].select_network(id, 2412)
    dev[0].wait_connected()
    hwsim_utils.test_connectivity(dev[0], hapd)
    pmksa2 = dev[0].get_pmksa(bssid)

    # Flush the AP side cache: the next association must derive a new PMK.
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    dev[0].dump_monitor()
    if "OK" not in hapd.request("PMKSA_FLUSH"):
        raise Exception("PMKSA_FLUSH failed")
    dev[0].select_network(id, 2412)
    dev[0].wait_connected()
    hwsim_utils.test_connectivity(dev[0], hapd)
    pmksa3 = dev[0].get_pmksa(bssid)

    if pmksa is None or pmksa2 is None or pmksa3 is None:
        raise Exception("PMKSA entry missing")
    if pmksa['pmkid'] != pmksa2['pmkid']:
        raise Exception("Unexpected PMKID change when using PMKSA caching")
    if pmksa['pmkid'] == pmksa3['pmkid']:
        raise Exception("PMKID did not change after PMKSA cache flush")
def test_owe_and_psk(dev, apdev):
    """Opportunistic Wireless Encryption and WPA2-PSK enabled"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    # AP advertises both OWE and WPA-PSK on the same BSS.
    params = {"ssid": "owe+psk",
              "wpa": "2",
              "wpa_key_mgmt": "OWE WPA-PSK",
              "rsn_pairwise": "CCMP",
              "wpa_passphrase": "12345678"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()

    # One station uses PSK, the other OWE, against the same AP.
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].connect("owe+psk", psk="12345678")
    hwsim_utils.test_connectivity(dev[0], hapd)

    dev[1].scan_for_bss(bssid, freq="2412")
    dev[1].connect("owe+psk", key_mgmt="OWE")
    hwsim_utils.test_connectivity(dev[1], hapd)
def test_owe_transition_mode(dev, apdev):
    """Opportunistic Wireless Encryption transition mode"""
    run_owe_transition_mode(dev, apdev)

def test_owe_transition_mode_connect_cmd(dev, apdev):
    """Opportunistic Wireless Encryption transition mode using cfg80211 connect command"""
    # Same scenario, but forcing the driver connect command path.
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
    run_owe_transition_mode([wpas], apdev)
def run_owe_transition_mode(dev, apdev):
    """Shared body: OWE transition mode with a hidden OWE BSS + open BSS pair."""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    dev[0].flush_scan_cache()
    # Hidden OWE BSS; its transition element points at the open BSS.
    params = {"ssid": "owe-random",
              "wpa": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP",
              "ieee80211w": "2",
              "owe_transition_bssid": apdev[1]['bssid'],
              "owe_transition_ssid": '"owe-test"',
              "ignore_broadcast_ssid": "1"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()

    # Open BSS; its transition element points back at the OWE BSS.
    params = {"ssid": "owe-test",
              "owe_transition_bssid": apdev[0]['bssid'],
              "owe_transition_ssid": '"owe-random"'}
    hapd2 = hostapd.add_ap(apdev[1], params)
    bssid2 = hapd2.own_addr()

    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].scan_for_bss(bssid2, freq="2412")

    bss = dev[0].get_bss(bssid)
    if "[WPA2-OWE-CCMP]" not in bss['flags']:
        raise Exception("OWE AKM not recognized: " + bss['flags'])
    if "[OWE-TRANS]" not in bss['flags']:
        raise Exception("OWE transition not recognized: " + bss['flags'])

    bss = dev[0].get_bss(bssid2)
    if "[OWE-TRANS-OPEN]" not in bss['flags']:
        raise Exception("OWE transition (open) not recognized: " + bss['flags'])

    # Connecting to the open SSID with OWE should end up on the OWE BSS.
    id = dev[0].connect("owe-test", key_mgmt="OWE", ieee80211w="2",
                        scan_freq="2412")
    hwsim_utils.test_connectivity(dev[0], hapd)
    val = dev[0].get_status_field("key_mgmt")
    if val != "OWE":
        raise Exception("Unexpected key_mgmt: " + val)

    logger.info("Move to OWE only mode (disable transition mode)")

    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    dev[0].dump_monitor()

    # Reconfigure the OWE BSS as a normal (non-hidden, non-transition) AP
    # and verify the saved profile can still reconnect.
    hapd2.disable()
    hapd.disable()
    dev[0].flush_scan_cache()
    hapd.set("owe_transition_bssid", "00:00:00:00:00:00")
    hapd.set("ignore_broadcast_ssid", '0')
    hapd.set("ssid", 'owe-test')
    hapd.enable()

    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].select_network(id, 2412)
    dev[0].wait_connected()
    hwsim_utils.test_connectivity(dev[0], hapd)
def test_owe_transition_mode_open_only_ap(dev, apdev):
    """Opportunistic Wireless Encryption transition mode connect to open-only AP"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    dev[0].flush_scan_cache()
    params = {"ssid": "owe-test-open"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()

    dev[0].scan_for_bss(bssid, freq="2412")
    bss = dev[0].get_bss(bssid)

    # With only an open AP available, the OWE profile must fall back to
    # an unencrypted connection (key_mgmt NONE).
    id = dev[0].connect("owe-test-open", key_mgmt="OWE", ieee80211w="2",
                        scan_freq="2412")
    hwsim_utils.test_connectivity(dev[0], hapd)
    val = dev[0].get_status_field("key_mgmt")
    if val != "NONE":
        raise Exception("Unexpected key_mgmt: " + val)
def test_owe_transition_mode_open_multiple_scans(dev, apdev):
    """Opportunistic Wireless Encryption transition mode and need for multiple scans"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    dev[0].flush_scan_cache()
    # Start with only the open half of the transition pair on the air.
    params = {"ssid": "owe-test",
              "owe_transition_bssid": apdev[0]['bssid'],
              "owe_transition_ssid": '"owe-random"'}
    hapd2 = hostapd.add_ap(apdev[1], params)
    bssid2 = hapd2.own_addr()

    dev[0].scan_for_bss(bssid2, freq="2412")

    dev[0].dump_monitor()
    id = dev[0].connect("owe-test", key_mgmt="OWE", ieee80211w="2",
                        scan_freq="2412", wait_connect=False)

    ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=1)

    # Bring up the hidden OWE BSS only after the first scan has completed,
    # forcing the station to find it in a subsequent scan.
    params = {"ssid": "owe-random",
              "wpa": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP",
              "ieee80211w": "2",
              "owe_transition_bssid": apdev[1]['bssid'],
              "owe_transition_ssid": '"owe-test"',
              "ignore_broadcast_ssid": "1"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()

    dev[0].wait_connected()

    val = dev[0].get_status_field("key_mgmt")
    if val != "OWE":
        raise Exception("Unexpected key_mgmt: " + val)
def test_owe_transition_mode_multi_bss(dev, apdev):
    """Opportunistic Wireless Encryption transition mode (multi BSS)"""
    try:
        run_owe_transition_mode_multi_bss(dev, apdev)
    finally:
        # Restore the default scan interval changed by the helper.
        dev[0].request("SCAN_INTERVAL 5")

def run_owe_transition_mode_multi_bss(dev, apdev):
    """Shared body: transition pair hosted as two BSSs on one radio."""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    ifname1 = apdev[0]['ifname']
    ifname2 = apdev[0]['ifname'] + '-2'
    hapd1 = hostapd.add_bss(apdev[0], ifname1, 'owe-bss-1.conf')
    hapd2 = hostapd.add_bss(apdev[0], ifname2, 'owe-bss-2.conf')
    hapd2.bssidx = 1

    bssid = hapd1.own_addr()
    bssid2 = hapd2.own_addr()

    # Beaconing with the OWE Transition Mode element can start only once both
    # BSSs are enabled, so the very first Beacon frame may go out without this
    # element. Wait a bit to avoid getting incomplete scan results.
    time.sleep(0.1)

    dev[0].request("SCAN_INTERVAL 1")
    dev[0].scan_for_bss(bssid2, freq="2412")
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].connect("transition-mode-open", key_mgmt="OWE")
    # The station must have moved to the OWE BSS of the pair.
    val = dev[0].get_status_field("bssid")
    if val != bssid2:
        raise Exception("Unexpected bssid: " + val)
    val = dev[0].get_status_field("key_mgmt")
    if val != "OWE":
        raise Exception("Unexpected key_mgmt: " + val)
    hwsim_utils.test_connectivity(dev[0], hapd2)
def test_owe_unsupported_group(dev, apdev):
    """Opportunistic Wireless Encryption and unsupported group"""
    try:
        run_owe_unsupported_group(dev, apdev)
    finally:
        # Remove the vendor element override installed by the helper.
        dev[0].request("VENDOR_ELEM_REMOVE 13 *")

def test_owe_unsupported_group_connect_cmd(dev, apdev):
    """Opportunistic Wireless Encryption and unsupported group using cfg80211 connect command"""
    try:
        wpas = None
        wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
        wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
        run_owe_unsupported_group([wpas], apdev)
    finally:
        if wpas:
            wpas.request("VENDOR_ELEM_REMOVE 13 *")
def run_owe_unsupported_group(dev, apdev):
    """Shared body: AP must reject an OWE association using an invalid group."""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")

    # Override OWE Dh Parameters element with a payload that uses invalid group
    # 0 (and actual group 19 data) to make the AP reject this with the specific
    # status code 77.
    dev[0].request("VENDOR_ELEM_ADD 13 ff23200000783590fb7440e03d5b3b33911f86affdcc6b4411b707846ac4ff08ddc8831ccd")

    params = {"ssid": "owe",
              "wpa": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()

    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].connect("owe", key_mgmt="OWE", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
    dev[0].request("DISCONNECT")
    if ev is None:
        raise Exception("Association not rejected")
    if "status_code=77" not in ev:
        raise Exception("Unexpected rejection reason: " + ev)
def test_owe_limited_group_set(dev, apdev):
    """Opportunistic Wireless Encryption and limited group set"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    # AP only accepts DH groups 20 and 21; group 19 must be rejected.
    params = {"ssid": "owe",
              "wpa": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP",
              "owe_groups": "20 21"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()

    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].connect("owe", key_mgmt="OWE", owe_group="19", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
    dev[0].request("DISCONNECT")
    if ev is None:
        raise Exception("Association not rejected")
    if "status_code=77" not in ev:
        raise Exception("Unexpected rejection reason: " + ev)
    dev[0].dump_monitor()

    # Both of the allowed groups must work.
    for group in [20, 21]:
        dev[0].connect("owe", key_mgmt="OWE", owe_group=str(group))
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
        dev[0].dump_monitor()
def test_owe_limited_group_set_pmf(dev, apdev, params):
    """Opportunistic Wireless Encryption and limited group set (PMF)"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    pcapng = os.path.join(params['logdir'], "hwsim0.pcapng")
    # NOTE: the framework 'params' argument is deliberately shadowed below by
    # the hostapd configuration, once the log directory has been extracted.
    params = {"ssid": "owe",
              "wpa": "2",
              "ieee80211w": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP",
              "owe_groups": "21"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    dev[0].scan_for_bss(bssid, freq="2412")
    # Groups 19 and 20 are not enabled on the AP --> expect status code 77.
    dev[0].connect("owe", key_mgmt="OWE", owe_group="19", ieee80211w="2",
                   scan_freq="2412", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
    dev[0].request("DISCONNECT")
    if ev is None:
        raise Exception("Association not rejected")
    if "status_code=77" not in ev:
        raise Exception("Unexpected rejection reason: " + ev)
    dev[0].dump_monitor()
    dev[0].connect("owe", key_mgmt="OWE", owe_group="20", ieee80211w="2",
                   scan_freq="2412", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
    dev[0].request("DISCONNECT")
    if ev is None:
        raise Exception("Association not rejected (2)")
    if "status_code=77" not in ev:
        raise Exception("Unexpected rejection reason (2): " + ev)
    dev[0].dump_monitor()
    # Group 21 is in the allowed set and must connect successfully.
    dev[0].connect("owe", key_mgmt="OWE", owe_group="21", ieee80211w="2",
                   scan_freq="2412")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()
    dev[0].dump_monitor()
    # Verify on the air: exactly two Association Response frames with status
    # 77 followed by one successful (status 0) response.
    out = run_tshark(pcapng,
                     "wlan.fc.type_subtype == 1",
                     display=['wlan_mgt.fixed.status_code'])
    status = out.splitlines()
    logger.info("Association Response frame status codes: " + str(status))
    if len(status) != 3:
        raise Exception("Unexpected number of Association Response frames")
    if int(status[0]) != 77 or int(status[1]) != 77 or int(status[2]) != 0:
        raise Exception("Unexpected Association Response frame status code")
def test_owe_group_negotiation(dev, apdev):
    """Opportunistic Wireless Encryption and group negotiation"""
    # Shared logic lives in run_owe_group_negotiation().
    run_owe_group_negotiation(dev[0], apdev)
def test_owe_group_negotiation_connect_cmd(dev, apdev):
    """Opportunistic Wireless Encryption and group negotiation (connect command)"""
    # Same negotiation flow, but through a driver instance that is forced to
    # use the connect command (force_connect_cmd=1).
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
    run_owe_group_negotiation(wpas, apdev)
def run_owe_group_negotiation(dev, apdev):
    """Shared helper: connect to an AP that only allows OWE group 21.

    No owe_group is specified on the station side, so connect() completing
    successfully is the pass condition — presumably the supplicant retries
    with other groups after the initial rejection (confirm against the
    supplicant implementation).
    """
    if "OWE" not in dev.get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    params = {"ssid": "owe",
              "wpa": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP",
              "owe_groups": "21"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    dev.scan_for_bss(bssid, freq="2412")
    dev.connect("owe", key_mgmt="OWE")
def test_owe_assoc_reject(dev, apdev):
    """Opportunistic Wireless Encryption association rejection handling"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    params = {"ssid": "owe",
              "require_ht": "1",
              "wpa": "2",
              "ieee80211w": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP",
              "owe_groups": "19"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    # First, reject two associations with HT-required (i.e., not OWE related)
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].connect("owe", key_mgmt="OWE", ieee80211w="2",
                   disable_ht="1", scan_freq="2412", wait_connect=False)
    for i in range(0, 2):
        ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
        if ev is None:
            raise Exception("Association rejection not reported")
    # Then, verify that STA tries OWE with the default group (19) on the next
    # attempt instead of having moved to testing another group.
    hapd.set("require_ht", "0")
    for i in range(0, 2):
        ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT",
                                "CTRL-EVENT-CONNECTED"], timeout=10)
        if ev is None:
            raise Exception("Association result not reported")
        if "CTRL-EVENT-CONNECTED" in ev:
            break
        # status_code=77 here would mean the supplicant had incorrectly
        # moved on to an unsupported group after the HT rejections.
        if "status_code=77" in ev:
            raise Exception("Unexpected unsupport group rejection")
    if "CTRL-EVENT-CONNECTED" not in ev:
        raise Exception("Did not connect successfully")
def test_owe_local_errors(dev, apdev):
    """Opportunistic Wireless Encryption - local errors on supplicant"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    params = {"ssid": "owe",
              "wpa": "2",
              "ieee80211w": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    dev[0].scan_for_bss(bssid, freq="2412")

    # Allocation failures while building the association request: the
    # attempt cannot proceed, so wait for the failure trigger and clean up.
    tests = [(1, "crypto_ecdh_init;owe_build_assoc_req"),
             (1, "crypto_ecdh_get_pubkey;owe_build_assoc_req"),
             (1, "wpabuf_alloc;owe_build_assoc_req")]
    for count, func in tests:
        with alloc_fail(dev[0], count, func):
            dev[0].connect("owe", key_mgmt="OWE", owe_group="20",
                           ieee80211w="2",
                           scan_freq="2412", wait_connect=False)
            wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
        dev[0].request("REMOVE_NETWORK all")
        dev[0].dump_monitor()

    # Allocation failures while processing the association response are
    # expected to end in disconnection.
    tests = [(1, "crypto_ecdh_set_peerkey;owe_process_assoc_resp"),
             (1, "crypto_ecdh_get_pubkey;owe_process_assoc_resp"),
             (1, "wpabuf_alloc;=owe_process_assoc_resp")]
    for count, func in tests:
        with alloc_fail(dev[0], count, func):
            dev[0].connect("owe", key_mgmt="OWE", owe_group="20",
                           ieee80211w="2",
                           scan_freq="2412", wait_connect=False)
            dev[0].wait_disconnected()
        dev[0].request("REMOVE_NETWORK all")
        dev[0].dump_monitor()

    # Forced failures in the hash/KDF steps, per group.
    tests = [(1, "hmac_sha256;owe_process_assoc_resp", 19),
             (1, "hmac_sha256_kdf;owe_process_assoc_resp", 19),
             (1, "hmac_sha384;owe_process_assoc_resp", 20),
             (1, "hmac_sha384_kdf;owe_process_assoc_resp", 20),
             (1, "hmac_sha512;owe_process_assoc_resp", 21),
             (1, "hmac_sha512_kdf;owe_process_assoc_resp", 21)]
    for count, func, group in tests:
        with fail_test(dev[0], count, func):
            dev[0].connect("owe", key_mgmt="OWE", owe_group=str(group),
                           ieee80211w="2",
                           scan_freq="2412", wait_connect=False)
            dev[0].wait_disconnected()
        dev[0].request("REMOVE_NETWORK all")
        dev[0].dump_monitor()

    # Group 18 is not supported locally: authentication is attempted but no
    # connection can complete.
    dev[0].connect("owe", key_mgmt="OWE", owe_group="18",
                   ieee80211w="2",
                   scan_freq="2412", wait_connect=False)
    ev = dev[0].wait_event(["SME: Trying to authenticate"], timeout=5)
    if ev is None:
        raise Exception("No authentication attempt")
    time.sleep(0.5)
    dev[0].request("REMOVE_NETWORK all")
    dev[0].dump_monitor()
def hapd_auth(hapd):
    """Wait for an Authentication frame (subtype 11) on hapd and answer it.

    The reply mirrors the requester's addresses and carries three
    little-endian 16-bit fields (0, 2, 0), i.e. a successful second
    authentication transaction.
    """
    auth_req = None
    for _ in range(10):
        frame = hapd.mgmt_rx()
        if frame is None:
            raise Exception("MGMT RX wait timed out")
        if frame['subtype'] == 11:
            auth_req = frame
            break
    if not auth_req:
        raise Exception("Authentication frame not received")
    reply = {'fc': auth_req['fc'],
             'da': auth_req['sa'],
             'sa': auth_req['da'],
             'bssid': auth_req['bssid'],
             'payload': struct.pack('<HHH', 0, 2, 0)}
    hapd.mgmt_tx(reply)
def hapd_assoc(hapd, extra):
    """Wait for an Association Request frame (subtype 0) on hapd and answer.

    The response uses fc=0x0010 (Association Response), mirrors the
    requester's addresses, and carries a fixed header (capabilities 0x0411,
    status 0, AID 0xc001) plus a basic Supported Rates element, followed by
    the caller-provided 'extra' bytes.
    """
    assoc_req = None
    for _ in range(10):
        frame = hapd.mgmt_rx()
        if frame is None:
            raise Exception("MGMT RX wait timed out")
        if frame['subtype'] == 0:
            assoc_req = frame
            break
    if not assoc_req:
        raise Exception("Association Request frame not received")
    body = struct.pack('<HHH', 0x0411, 0, 0xc001)
    body += binascii.unhexlify("010882848b960c121824")
    hapd.mgmt_tx({'fc': 0x0010,
                  'da': assoc_req['sa'],
                  'sa': assoc_req['da'],
                  'bssid': assoc_req['bssid'],
                  'payload': body + extra})
def test_owe_invalid_assoc_resp(dev, apdev):
    """Opportunistic Wireless Encryption - invalid Association Response frame"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    params = {"ssid": "owe",
              "wpa": "2",
              "ieee80211w": "2",
              "wpa_key_mgmt": "OWE",
              "rsn_pairwise": "CCMP"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    dev[0].scan_for_bss(bssid, freq="2412")
    # Take over management frame processing so hand-crafted (broken)
    # Association Response frames can be injected via hapd_auth/hapd_assoc.
    hapd.set("ext_mgmt_frame_handling", "1")
    # Each entry below is the extra payload appended to the response; the
    # comments name the supplicant error it is expected to provoke.
    # OWE: No Diffie-Hellman Parameter element found in Association Response frame
    tests = [b'']
    # No room for group --> no DH Params
    tests += [binascii.unhexlify('ff0120')]
    # OWE: Unexpected Diffie-Hellman group in response: 18
    tests += [binascii.unhexlify('ff03201200')]
    # OWE: Invalid peer DH public key
    tests += [binascii.unhexlify('ff23201300' + 31*'00' + '01')]
    # OWE: Invalid peer DH public key
    tests += [binascii.unhexlify('ff24201300' + 33*'ee')]
    for extra in tests:
        dev[0].connect("owe", key_mgmt="OWE", owe_group="19", ieee80211w="2",
                       scan_freq="2412", wait_connect=False)
        hapd_auth(hapd)
        hapd_assoc(hapd, extra)
        dev[0].wait_disconnected()
        dev[0].request("REMOVE_NETWORK all")
        dev[0].dump_monitor()
    # OWE: Empty public key (this ends up getting padded to a valid point)
    dev[0].connect("owe", key_mgmt="OWE", owe_group="19", ieee80211w="2",
                   scan_freq="2412", wait_connect=False)
    hapd_auth(hapd)
    hapd_assoc(hapd, binascii.unhexlify('ff03201300'))
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED", "PMKSA-CACHE-ADDED"],
                           timeout=5)
    if ev is None:
        raise Exception("No result reported for empty public key")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].dump_monitor()
| gpl-3.0 |
jeanparpaillon/pOCCI | pOCCI/render_uri.py | 3 | 2743 | import occi
from render_base import Renderer, check_url
class URIListRenderer(Renderer):
    """OCCI renderer for the text/uri-list format.

    Only location lists can be produced or consumed in this format, so every
    category/resource oriented method refuses to operate.  Rendering always
    returns an empty header array alongside the body.
    """

    def render_categories(self, categories):
        """Not available for URI lists; always raises occi.RenderError.

        :param occi.Category categories[]: OCCI Category array
        """
        raise occi.RenderError('This method can\'t be used with URI list rendering.')

    def render_resource(self, categories, links=None, attributes=None):
        """Not available for URI lists; always raises occi.RenderError.

        :param occi.Category categories[]: OCCI Category array
        :param occi.Link links[]: OCCI Link array
        :param occi.Attribute attributes[]: OCCI Attribute array
        """
        raise occi.RenderError('This method can\'t be used with URI list rendering.')

    def render_locations(self, locations):
        """Render location URIs, one per line.

        :param string locations[]: location URIs
        :return: rendered body and (always empty) headers
        :rtype: [string, string[]]
        """
        body = '\n'.join(locations)
        return [body, []]

    def parse_categories(self, body, headers):
        """Not available for URI lists; always raises occi.ParseError.

        :param string body[]: text to parse
        :param string headers[]: headers to parse
        """
        raise occi.ParseError('This method can\'t be used with URI list rendering.')

    def parse_locations(self, body, headers):
        """Parse a URI list body into a list of locations.

        :param string body[]: lines to parse (one URI per entry)
        :param string headers[]: ignored for this format
        :return: location URIs
        :rtype: string[]
        :raises occi.ParseError: on a URI without a scheme or host
        """
        locations = []
        for uri in body:
            if check_url(uri, scheme=True, host=True):
                locations.append(uri)
            else:
                raise occi.ParseError('Invalid URI in OCCI Entity collection', uri)
        return locations

    def parse_resource(self, body, header):
        """Not available for URI lists; always raises occi.ParseError.

        :param string body[]: text to parse
        :param string header[]: headers to parse
        """
        raise occi.ParseError('This method can\'t be used with URI list rendering.')
| mit |
thejordan95/Groovebot2 | plugins/snopes.py | 8 | 1065 | import re
from util import hook, http
search_url = "http://search.atomz.com/search/?sp_a=00062d45-sp00000000"
@hook.command
def snopes(inp):
    """snopes <topic> -- Searches snopes for an urban legend about <topic>."""
    # Query the site search and collect result links.
    result_links = http.get_html(search_url, sp_q=inp, sp_c="1").xpath(
        "//a[@target='_self']/@href")
    if not result_links:
        return "no matching pages found"

    page_url = result_links[0]
    page_text = http.get_html(page_url).text_content()

    claim = re.search(r"Claim: .*", page_text).group(0).strip()
    status_match = re.search(r"Status: .*", page_text)
    if status_match is not None:
        status = status_match.group(0).strip()
    else:  # new-style statuses
        verdict = re.search(r"FALSE|TRUE|MIXTURE|UNDETERMINED",
                            page_text).group(0)
        status = "Status: %s." % verdict.title()

    claim = re.sub(r"[\s\xa0]+", " ", claim)  # compress whitespace
    status = re.sub(r"[\s\xa0]+", " ", status)
    return "{} {} {}".format(claim, status, page_url)
| gpl-3.0 |
texastribune/the-dp | tx_highered/instachart/models.py | 1 | 3596 | from django.db import models
NULL_DISPLAY = "–"
class ChartCell(object):
    """A header cell for a chart table, derived from a model class and a
    field name.  ChartBodyCell subclasses this to render instance values.
    """
    # Class-level defaults; __init__ replaces head_attrs/body_attrs with
    # per-instance copies when the model class provides chart_head_attrs /
    # chart_body_attrs.
    head_attrs = {}
    body_attrs = {}
    label = ""
    format = "%s"
    text = ""
    raw_text = ""

    def __init__(self, cls, fieldname, format=None):
        self.label = fieldname
        try:
            self.field = cls._meta.get_field(fieldname)
            self.text = self.field.verbose_name
        except models.FieldDoesNotExist:
            # Not a model field: fall back to an attribute's verbose_name,
            # then to the raw field name.
            try:
                self.text = getattr(cls, fieldname).verbose_name
            except AttributeError:
                self.text = fieldname
        self.raw_text = self.text
        if format is not None:
            self.format = format
        if hasattr(cls, 'chart_head_attrs'):
            self.head_attrs = dict(cls.chart_head_attrs)
        if hasattr(cls, 'chart_body_attrs'):
            self.body_attrs = dict(cls.chart_body_attrs)

    def apply_format(self, template):
        # Interpolate raw_text into template; a template with no matching
        # placeholder raises TypeError, which is used as the signal to
        # return the template unchanged.
        try:
            return template % self.raw_text
        except TypeError:
            return template

    def build_attrs(self, attrs, label):
        # Render the HTML attribute string configured for this column;
        # accepts either a single string or an iterable of strings.
        if label not in attrs:
            return ""
        attr = attrs[label]
        if isinstance(attr, basestring):
            return self.apply_format(attr)
        return u" ".join(map(self.apply_format, attr))

    def as_th(self):
        # TODO get mark_safe to work
        # from django.utils.safestring import mark_safe
        if self.head_attrs and self.label in self.head_attrs:
            return u"<th %s>%s</th>" % (self.build_attrs(self.head_attrs, self.label), self.text)
        return u"<th>%s</th>" % self.text

    def as_td(self):
        if self.body_attrs and self.label in self.body_attrs:
            return u"<td %s>%s</td>" % (self.build_attrs(self.body_attrs, self.label), self.text)
        return u"<td>%s</td>" % self.text

    def as_text(self):
        return self.text

    def __repr__(self):
        return self.as_th()
class ChartBodyCell(ChartCell):
    """A body cell: like ChartCell, but its displayed value comes from a
    model instance attribute (or the result of calling it) rather than
    field metadata; None renders as NULL_DISPLAY.
    """

    def __init__(self, obj, fieldname, format=None):
        super(ChartBodyCell, self).__init__(obj, fieldname, format)
        value = getattr(obj, fieldname)
        if callable(value):
            value = value()
        self.raw_text = value
        self.text = NULL_DISPLAY if value is None else self.format % value

    def as_td_data(self):
        """Render as a <td> carrying the raw value in a data-value attribute."""
        return u"<td data-value=\"%s\">%s</td>" % (self.raw_text, self.text)
class SimpleChart(models.Model):
    """ Model mixin that enables quick dumps via a template tag """
    # Optional explicit column spec: entries are either a field-name string
    # or a (field_name, format) tuple; empty means "all model fields".
    chart_series = []
    # Field names that are never rendered.
    chart_excluded_fields = ('id',)

    class Meta:
        abstract = True

    @classmethod
    def get_chart_field_names(cls):
        # All concrete model field names minus the excluded ones.
        return [x.name for x in cls._meta.fields if x.name not in cls.chart_excluded_fields]

    @classmethod
    def get_chart_series(cls):
        # Normalized series: always a list of tuples suitable for *-unpacking
        # into ChartCell/ChartBodyCell.
        if cls.chart_series:
            # normalize, wrap in tuple if a series was defined simple
            return [(x,) if isinstance(x, basestring) else x for x in cls.chart_series]
        return [(x, "%s") for x in cls.get_chart_field_names()]

    @classmethod
    def get_chart_header(cls):
        # One header cell per series entry.
        return [ChartCell(cls, *x) for x in cls.get_chart_series()]

    @staticmethod
    def chart_set(obj):
        # TODO pep-0378, needs python 2.7
        # Build the body cells for one instance; fall back to the raw model
        # field list when obj does not provide get_chart_series().
        try:
            cells = [ChartBodyCell(obj, *x) for x in obj.get_chart_series()]
        except AttributeError:
            fields = [x.name for x in obj._meta.fields]
            cells = [ChartBodyCell(obj, field) for field in fields]
        return cells
| apache-2.0 |
morpheby/levelup-by | common/djangoapps/student/management/commands/pearson_transfer.py | 5 | 7607 | from optparse import make_option
import os
from stat import S_ISDIR
import boto
from dogapi import dog_http_api, dog_stats_api
import paramiko
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand, CommandError
import django_startup
django_startup.autostartup()
class Command(BaseCommand):
    help = """
    This command handles the importing and exporting of student records for
    Pearson. It uses some other Django commands to export and import the
    files and then uploads over SFTP to Pearson and stuffs the entry in an
    S3 bucket for archive purposes.
    Usage: ./manage.py pearson-transfer --mode [import|export|both]
    """

    option_list = BaseCommand.option_list + (
        make_option('--mode',
                    action='store',
                    dest='mode',
                    default='both',
                    choices=('import', 'export', 'both'),
                    help='mode is import, export, or both'),
    )

    def handle(self, **options):
        """Validate the PEARSON/AWS settings needed for the requested mode,
        then run the export and/or import pipeline (SFTP transfer plus S3
        archival)."""
        if not hasattr(settings, 'PEARSON'):
            raise CommandError('No PEARSON entries in auth/env.json.')

        # check settings needed for either import or export:
        for value in ['SFTP_HOSTNAME', 'SFTP_USERNAME', 'SFTP_PASSWORD', 'S3_BUCKET']:
            if value not in settings.PEARSON:
                raise CommandError('No entry in the PEARSON settings'
                                   '(env/auth.json) for {0}'.format(value))
        for value in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY']:
            if not hasattr(settings, value):
                raise CommandError('No entry in the AWS settings'
                                   '(env/auth.json) for {0}'.format(value))

        # check additional required settings for import and export:
        if options['mode'] in ('export', 'both'):
            for value in ['LOCAL_EXPORT', 'SFTP_EXPORT']:
                if value not in settings.PEARSON:
                    raise CommandError('No entry in the PEARSON settings'
                                       '(env/auth.json) for {0}'.format(value))
            # make sure that the import directory exists or can be created:
            source_dir = settings.PEARSON['LOCAL_EXPORT']
            if not os.path.isdir(source_dir):
                os.makedirs(source_dir)
        if options['mode'] in ('import', 'both'):
            for value in ['LOCAL_IMPORT', 'SFTP_IMPORT']:
                if value not in settings.PEARSON:
                    raise CommandError('No entry in the PEARSON settings'
                                       '(env/auth.json) for {0}'.format(value))
            # make sure that the import directory exists or can be created:
            dest_dir = settings.PEARSON['LOCAL_IMPORT']
            if not os.path.isdir(dest_dir):
                os.makedirs(dest_dir)

        def sftp(files_from, files_to, mode, deleteAfterCopy=False):
            """Transfer files between a local directory and Pearson's SFTP
            server; 'export' pushes local files up, anything else pulls
            remote files down."""
            with dog_stats_api.timer('pearson.{0}'.format(mode), tags='sftp'):
                # FIX: initialize before the try block -- if Transport() or
                # connect() raised, the original finally clause hit a
                # NameError on sftp/t that masked the real error.
                t = None
                sftp = None
                try:
                    t = paramiko.Transport((settings.PEARSON['SFTP_HOSTNAME'], 22))
                    t.connect(username=settings.PEARSON['SFTP_USERNAME'],
                              password=settings.PEARSON['SFTP_PASSWORD'])
                    sftp = paramiko.SFTPClient.from_transport(t)
                    if mode == 'export':
                        try:
                            sftp.chdir(files_to)
                        except IOError:
                            raise CommandError('SFTP destination path does not exist: {}'.format(files_to))
                        for filename in os.listdir(files_from):
                            sftp.put(files_from + '/' + filename, filename)
                            if deleteAfterCopy:
                                os.remove(os.path.join(files_from, filename))
                    else:
                        try:
                            sftp.chdir(files_from)
                        except IOError:
                            raise CommandError('SFTP source path does not exist: {}'.format(files_from))
                        for filename in sftp.listdir('.'):
                            # skip subdirectories
                            if not S_ISDIR(sftp.stat(filename).st_mode):
                                sftp.get(filename, files_to + '/' + filename)
                                # delete files from sftp server once they are
                                # successfully pulled off:
                                if deleteAfterCopy:
                                    sftp.remove(filename)
                except Exception:
                    # Narrowed from a bare 'except:'; still re-raises after
                    # reporting the failure to datadog.
                    dog_http_api.event('pearson {0}'.format(mode),
                                       'sftp uploading failed',
                                       alert_type='error')
                    raise
                finally:
                    if sftp is not None:
                        sftp.close()
                    if t is not None:
                        t.close()

        def s3(files_from, bucket, mode, deleteAfterCopy=False):
            """Archive every file in files_from into the S3 bucket under a
            '<mode>/' prefix."""
            with dog_stats_api.timer('pearson.{0}'.format(mode), tags='s3'):
                try:
                    for filename in os.listdir(files_from):
                        source_file = os.path.join(files_from, filename)
                        # use mode as name of directory into which to write files
                        dest_file = os.path.join(mode, filename)
                        upload_file_to_s3(bucket, source_file, dest_file)
                        if deleteAfterCopy:
                            os.remove(files_from + '/' + filename)
                except Exception:
                    dog_http_api.event('pearson {0}'.format(mode),
                                       's3 archiving failed')
                    raise

        def upload_file_to_s3(bucket, source_file, dest_file):
            """
            Upload file to S3
            """
            s3 = boto.connect_s3(settings.AWS_ACCESS_KEY_ID,
                                 settings.AWS_SECRET_ACCESS_KEY)
            from boto.s3.key import Key
            b = s3.get_bucket(bucket)
            k = Key(b)
            # FIX: was "(unknown)".format(filename=dest_file), which ignores
            # the keyword argument (no placeholder in the template) and
            # stored every archive under the literal key "(unknown)",
            # overwriting earlier uploads.
            k.key = dest_file
            k.set_contents_from_filename(source_file)

        def export_pearson():
            """Run the CDD/EAD export commands, push results to Pearson via
            SFTP, then archive them to S3 (removing the local copies)."""
            options = {'dest-from-settings': True}
            call_command('pearson_export_cdd', **options)
            call_command('pearson_export_ead', **options)
            mode = 'export'
            sftp(settings.PEARSON['LOCAL_EXPORT'], settings.PEARSON['SFTP_EXPORT'], mode, deleteAfterCopy=False)
            s3(settings.PEARSON['LOCAL_EXPORT'], settings.PEARSON['S3_BUCKET'], mode, deleteAfterCopy=True)

        def import_pearson():
            """Pull confirmation files from Pearson's SFTP server, archive
            them to S3, then feed each one to the import command."""
            mode = 'import'
            try:
                sftp(settings.PEARSON['SFTP_IMPORT'], settings.PEARSON['LOCAL_IMPORT'], mode, deleteAfterCopy=True)
                s3(settings.PEARSON['LOCAL_IMPORT'], settings.PEARSON['S3_BUCKET'], mode, deleteAfterCopy=False)
            except Exception as e:
                dog_http_api.event('Pearson Import failure', str(e))
                # FIX: bare 'raise' re-raises with the original traceback
                # (Python 2 'raise e' discarded it).
                raise
            else:
                for filename in os.listdir(settings.PEARSON['LOCAL_IMPORT']):
                    filepath = os.path.join(settings.PEARSON['LOCAL_IMPORT'], filename)
                    call_command('pearson_import_conf_zip', filepath)
                    os.remove(filepath)

        # actually do the work!
        if options['mode'] in ('export', 'both'):
            export_pearson()
        if options['mode'] in ('import', 'both'):
            import_pearson()
| agpl-3.0 |
onceuponatimeforever/oh-mainline | vendor/packages/jsmin/jsmin/test.py | 17 | 7967 | import unittest
import jsmin
import sys
class JsTests(unittest.TestCase):
    """Behavioral tests for jsmin.jsmin covering comment stripping, regex
    literals, string handling and whitespace compression.

    NOTE(review): the original in-string indentation of the multi-line
    fixtures was lost in transit; the string contents are reproduced here
    exactly as recovered.
    """

    def _minify(self, js):
        # Thin convenience wrapper around the module-level entry point.
        return jsmin.jsmin(js)

    def assertEqual(self, thing1, thing2):
        # NOTE: deliberately shadows unittest.TestCase.assertEqual with a
        # simpler variant that prints both values before failing.
        if thing1 != thing2:
            print(repr(thing1), repr(thing2))
            raise AssertionError
        return True

    def assertMinified(self, js_input, expected):
        # Primary helper: minify js_input and compare with expected output.
        minified = jsmin.jsmin(js_input)
        assert minified == expected, "%r != %r" % (minified, expected)

    def testQuoted(self):
        # Escape sequences inside quoted strings must survive untouched.
        js = r'''
Object.extend(String, {
interpret: function(value) {
return value == null ? '' : String(value);
},
specialChar: {
'\b': '\\b',
'\t': '\\t',
'\n': '\\n',
'\f': '\\f',
'\r': '\\r',
'\\': '\\\\'
}
});
'''
        expected = r"""Object.extend(String,{interpret:function(value){return value==null?'':String(value);},specialChar:{'\b':'\\b','\t':'\\t','\n':'\\n','\f':'\\f','\r':'\\r','\\':'\\\\'}});"""
        self.assertMinified(js, expected)

    def testSingleComment(self):
        # // comments are removed, including ones following code.
        js = r'''// use native browser JS 1.6 implementation if available
if (Object.isFunction(Array.prototype.forEach))
Array.prototype._each = Array.prototype.forEach;
if (!Array.prototype.indexOf) Array.prototype.indexOf = function(item, i) {
// hey there
function() {// testing comment
foo;
//something something
location = 'http://foo.com;'; // goodbye
}
//bye
'''
        expected = r"""
if(Object.isFunction(Array.prototype.forEach))
Array.prototype._each=Array.prototype.forEach;if(!Array.prototype.indexOf)Array.prototype.indexOf=function(item,i){ function(){ foo; location='http://foo.com;';}"""
        # print expected
        self.assertMinified(js, expected)

    def testEmpty(self):
        # Whitespace-only input minifies to the empty string.
        self.assertMinified('', '')
        self.assertMinified(' ', '')
        self.assertMinified('\n', '')
        self.assertMinified('\r\n', '')
        self.assertMinified('\t', '')

    def testMultiComment(self):
        # /* ... */ comments are removed entirely.
        js = r"""
function foo() {
print('hey');
}
/*
if(this.options.zindex) {
this.originalZ = parseInt(Element.getStyle(this.element,'z-index') || 0);
this.element.style.zIndex = this.options.zindex;
}
*/
another thing;
"""
        expected = r"""function foo(){print('hey');}
another thing;"""
        self.assertMinified(js, expected)

    def testLeadingComment(self):
        # A comment before any code is dropped.
        js = r"""/* here is a comment at the top
it ends here */
function foo() {
alert('crud');
}
"""
        expected = r"""function foo(){alert('crud');}"""
        self.assertMinified(js, expected)

    def testJustAComment(self):
        self.assertMinified(' // a comment', '')

    def testRe(self):
        # Regex literals must not be confused with division operators.
        js = r'''
var str = this.replace(/\\./g, '@').replace(/"[^"\\\n\r]*"/g, '');
return (/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str);
});'''
        expected = r"""var str=this.replace(/\\./g,'@').replace(/"[^"\\\n\r]*"/g,'');return(/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str);});"""
        self.assertMinified(js, expected)

    def testIgnoreComment(self):
        js = r"""
var options_for_droppable = {
overlap: options.overlap,
containment: options.containment,
tree: options.tree,
hoverclass: options.hoverclass,
onHover: Sortable.onHover
}
var options_for_tree = {
onHover: Sortable.onEmptyHover,
overlap: options.overlap,
containment: options.containment,
hoverclass: options.hoverclass
}
// fix for gecko engine
Element.cleanWhitespace(element);
"""
        expected = r"""var options_for_droppable={overlap:options.overlap,containment:options.containment,tree:options.tree,hoverclass:options.hoverclass,onHover:Sortable.onHover}
var options_for_tree={onHover:Sortable.onEmptyHover,overlap:options.overlap,containment:options.containment,hoverclass:options.hoverclass}
Element.cleanWhitespace(element);"""
        self.assertMinified(js, expected)

    def testHairyRe(self):
        # Mixed quotes, escapes and regex literals in close proximity.
        js = r"""
inspect: function(useDoubleQuotes) {
var escapedString = this.gsub(/[\x00-\x1f\\]/, function(match) {
var character = String.specialChar[match[0]];
return character ? character : '\\u00' + match[0].charCodeAt().toPaddedString(2, 16);
});
if (useDoubleQuotes) return '"' + escapedString.replace(/"/g, '\\"') + '"';
return "'" + escapedString.replace(/'/g, '\\\'') + "'";
},
toJSON: function() {
return this.inspect(true);
},
unfilterJSON: function(filter) {
return this.sub(filter || Prototype.JSONFilter, '#{1}');
},
"""
        expected = r"""inspect:function(useDoubleQuotes){var escapedString=this.gsub(/[\x00-\x1f\\]/,function(match){var character=String.specialChar[match[0]];return character?character:'\\u00'+match[0].charCodeAt().toPaddedString(2,16);});if(useDoubleQuotes)return'"'+escapedString.replace(/"/g,'\\"')+'"';return"'"+escapedString.replace(/'/g,'\\\'')+"'";},toJSON:function(){return this.inspect(true);},unfilterJSON:function(filter){return this.sub(filter||Prototype.JSONFilter,'#{1}');},"""
        self.assertMinified(js, expected)

    def testNoBracesWithComment(self):
        js = r"""
onSuccess: function(transport) {
var js = transport.responseText.strip();
if (!/^\[.*\]$/.test(js)) // TODO: improve sanity check
throw 'Server returned an invalid collection representation.';
this._collection = eval(js);
this.checkForExternalText();
}.bind(this),
onFailure: this.onFailure
});
"""
        expected = r"""onSuccess:function(transport){var js=transport.responseText.strip();if(!/^\[.*\]$/.test(js))
throw'Server returned an invalid collection representation.';this._collection=eval(js);this.checkForExternalText();}.bind(this),onFailure:this.onFailure});"""
        self.assertMinified(js, expected)

    def testSpaceInRe(self):
        # A literal space inside a regex must be preserved.
        js = r"""
num = num.replace(/ /g,'');
"""
        self.assertMinified(js, "num=num.replace(/ /g,'');")

    def testEmptyString(self):
        js = r'''
function foo('') {
}
'''
        self.assertMinified(js, "function foo(''){}")

    def testDoubleSpace(self):
        js = r'''
var foo = "hey";
'''
        self.assertMinified(js, 'var foo="hey";')

    def testLeadingRegex(self):
        js = r'/[d]+/g '
        self.assertMinified(js, js.strip())

    def testLeadingString(self):
        js = r"'a string in the middle of nowhere'; // and a comment"
        self.assertMinified(js, "'a string in the middle of nowhere';")

    def testSingleCommentEnd(self):
        js = r'// a comment\n'
        self.assertMinified(js, '')

    def testInputStream(self):
        # The stream-based API must agree with the string-based one.
        try:
            from StringIO import StringIO
        except ImportError:
            from io import StringIO
        ins = StringIO(r'''
function foo('') {
}
''')
        outs = StringIO()
        m = jsmin.JavascriptMinify()
        m.minify(ins, outs)
        output = outs.getvalue()
        assert output == "function foo(''){}"

    def testUnicode(self):
        # Non-ASCII input passes through, with the comment removed.
        instr = u'\u4000 //foo'
        expected = u'\u4000'
        output = jsmin.jsmin(instr)
        self.assertEqual(output, expected)
if __name__ == '__main__':
unittest.main() | agpl-3.0 |
mcardillo55/django | django/middleware/locale.py | 358 | 2983 | "This is the locale selecting middleware that will look at accept headers"
from django.conf import settings
from django.core.urlresolvers import (
LocaleRegexURLResolver, get_resolver, get_script_prefix, is_valid_path,
)
from django.http import HttpResponseRedirect
from django.utils import translation
from django.utils.cache import patch_vary_headers
from django.utils.functional import cached_property
class LocaleMiddleware(object):
    """
    Middleware that selects a translation for each request (from the URL
    language prefix when i18n URL patterns are used, otherwise from the
    request itself) and annotates the response accordingly, redirecting
    404s to a language-prefixed URL when that would resolve.
    """
    response_redirect_class = HttpResponseRedirect

    def process_request(self, request):
        # Activate the chosen language for this thread and expose it on the
        # request for application code.
        lang = translation.get_language_from_request(
            request, check_path=self.is_language_prefix_patterns_used)
        translation.activate(lang)
        request.LANGUAGE_CODE = translation.get_language()

    def process_response(self, request, response):
        lang = translation.get_language()
        lang_in_path = translation.get_language_from_path(request.path_info)
        prefix_patterns = self.is_language_prefix_patterns_used
        if response.status_code == 404 and not lang_in_path and prefix_patterns:
            # A 404 on an unprefixed URL may resolve once the language
            # prefix (and possibly a trailing slash) is added.
            urlconf = getattr(request, 'urlconf', None)
            prefixed_path = '/%s%s' % (lang, request.path_info)
            found = is_valid_path(prefixed_path, urlconf)
            needs_slash = (
                not found and
                settings.APPEND_SLASH and
                not prefixed_path.endswith('/') and
                is_valid_path('%s/' % prefixed_path, urlconf)
            )
            if found or needs_slash:
                # Insert the language after the script prefix and before the
                # rest of the URL.
                script_prefix = get_script_prefix()
                redirect_to = request.get_full_path(
                    force_append_slash=needs_slash
                ).replace(script_prefix, '%s%s/' % (script_prefix, lang), 1)
                return self.response_redirect_class(redirect_to)
        if not (prefix_patterns and lang_in_path):
            patch_vary_headers(response, ('Accept-Language',))
        if 'Content-Language' not in response:
            response['Content-Language'] = lang
        return response

    @cached_property
    def is_language_prefix_patterns_used(self):
        """
        True when a `LocaleRegexURLResolver` sits at the root level of the
        urlpatterns, i.e. i18n_patterns() is in use; False otherwise.
        """
        return any(isinstance(url_pattern, LocaleRegexURLResolver)
                   for url_pattern in get_resolver(None).url_patterns)
intgr/django-cms | cms/tests/nested_plugins.py | 12 | 46802 | # -*- coding: utf-8 -*-
from __future__ import with_statement
import json
from djangocms_text_ckeditor.models import Text
from cms.api import create_page, add_plugin
from cms.constants import PLUGIN_MOVE_ACTION
from cms.models import Page
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.tests.plugins import PluginsTestBaseCase
from cms.test_utils.util.context_managers import SettingsOverride
from cms.utils.copy_plugins import copy_plugins_to
from cms.utils.compat.tests import UnittestCompatMixin
URL_CMS_MOVE_PLUGIN = u'/en/admin/cms/page/%d/move-plugin/'
class NestedPluginsTestCase(PluginsTestBaseCase, UnittestCompatMixin):
def copy_placeholders_and_check_results(self, placeholders):
    """
    This function is not itself a test; rather, it can be used by any test
    that has created placeholders. It will check that whatever the plugin
    structure in the placeholder, it will be copied accurately when they are
    copied.

    placeholders is a list of placeholders; the last copied placeholder is
    returned for callers that need it.
    """
    for original_placeholder in placeholders:
        # get the plugins
        original_plugins = original_placeholder.get_plugins()
        # copy them to a new placeholder
        copied_placeholder = Placeholder.objects.create(slot=original_placeholder.slot)
        copy_plugins_to(
            original_placeholder.get_plugins(),
            copied_placeholder
        )
        copied_plugins = copied_placeholder.get_plugins()
        # we should find the same number of plugins in both placeholders
        self.assertEqual(
            original_plugins.count(),
            copied_plugins.count()
        )
        # quick check: make sure the two querysets match:
        for original, copy in zip(original_plugins, copied_plugins):
            self.assertEqual(
                Text.objects.get(id=original.id).body,
                Text.objects.get(id=copy.id).body
            )
        # Now build a *tree* of the plugins, and match those - it's not
        # enough just to compare querysets as above; we should *also* check
        # that when we build a tree, the various nodes are assembled as we
        # would expect. We will pump the trees into a pair of lists:
        original_plugins_list = []
        copied_plugins_list = []

        # This function builds the tree of plugins, starting from its roots.
        # In that respect it's like many of the plugin tree-building
        # routines elsewhere in the system.
        def plugin_list_from_tree(roots, plugin_list):
            for plugin in roots:
                plugin_list.append(plugin)
                # recurse over the set of nodes
                plugin_list_from_tree(plugin.get_children(), plugin_list)

        # build the tree for each set of plugins
        plugin_list_from_tree(original_plugins.filter(level=0), original_plugins_list)
        plugin_list_from_tree(copied_plugins.filter(level=0), copied_plugins_list)

        self.assertEqual(len(original_plugins_list), original_plugins.count())
        self.assertEqual(len(copied_plugins_list), copied_plugins.count())

        # Check that each pair of items in the two lists match, in lots of
        # different ways
        for original, copy in zip(original_plugins_list, copied_plugins_list):
            original_text_plugin = Text.objects.get(id=original.id)
            copied_text_plugin = Text.objects.get(id=copy.id)
            # This first one is a sanity test, just to prove that we aren't
            # simply comparing *exactly the same items* in all these tests.
            # It could happen...
            # FIX: assertNotEquals is a deprecated alias of assertNotEqual.
            self.assertNotEqual(original.id, copy.id)
            self.assertEqual(
                original_text_plugin.body,
                copied_text_plugin.body
            )
            self.assertEqual(
                original_text_plugin.level,
                copied_text_plugin.level
            )
            self.assertEqual(
                original_text_plugin.position,
                copied_text_plugin.position
            )
            self.assertEqual(
                original_text_plugin.rght,
                copied_text_plugin.rght
            )
            self.assertEqual(
                original_text_plugin.lft,
                copied_text_plugin.lft
            )
            self.assertEqual(
                original_text_plugin.get_descendant_count(),
                copied_text_plugin.get_descendant_count()
            )
            self.assertEqual(
                original_text_plugin.get_ancestors().count(),
                copied_text_plugin.get_ancestors().count()
            )
    # just in case the test method that called us wants it:
    return copied_placeholder
def test_plugin_deep_nesting_and_copying(self):
    """
    Create a deeply-nested plugin structure, tests its properties, and tests
    that it is copied accurately when the placeholder containing them is
    copied.

    The structure below isn't arbitrary, but has been designed to test
    various conditions, including:

    * nodes four levels deep
    * multiple successive level increases
    * multiple successive level decreases
    * successive nodes on the same level followed by level changes
    * multiple level decreases between successive nodes
    * siblings with and without children
    * nodes and branches added to the tree out of sequence

    First we create the structure (roots: 11, 1, 5, 14 — as asserted
    against ``CMSPlugin.objects.filter(level=0)`` below):

        11
        1
            2
                12
                4
                    10
                8
            3
                9
        5
            6
            7
            13
        14

    and then we move it all around.
    """
    placeholder = Placeholder(slot=u"some_slot")
    placeholder.save() # a good idea, if not strictly necessary
    # plugin in placeholder
    plugin_1 = add_plugin(placeholder, u"TextPlugin", u"en",
                          body=u"01")
    plugin_1.save()
    # IMPORTANT: plugins must be reloaded, before they can be assigned
    # as a parent. Otherwise, the MPTT structure doesn't seem to rebuild
    # properly.
    # child of plugin_1
    plugin_2 = add_plugin(placeholder, u"TextPlugin", u"en",
                          body=u"02",
                          )
    plugin_1 = self.reload(plugin_1)
    plugin_2.parent = plugin_1
    plugin_2.save()
    # plugin_2 should be plugin_1's only child
    # for a single item we use assertSequenceEqual
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_1.pk).get_children(),
        [CMSPlugin.objects.get(id=plugin_2.pk)])
    # create a second child of plugin_1
    plugin_3 = add_plugin(placeholder, u"TextPlugin", u"en",
                          body=u"03",
                          )
    plugin_1 = self.reload(plugin_1)
    plugin_3.parent = plugin_1
    plugin_3.save()
    # plugin_2 & plugin_3 should be plugin_1's children;
    # assertSequenceEqual also checks the ordering of the children
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_1.pk).get_children(),
        [
            CMSPlugin.objects.get(id=plugin_2.pk),
            CMSPlugin.objects.get(id=plugin_3.pk),
        ])
    # child of plugin_2
    plugin_4 = add_plugin(placeholder, u"TextPlugin", u"en",
                          body=u"04",
                          )
    plugin_2 = self.reload(plugin_2)
    plugin_4.parent = plugin_2
    plugin_4.save()
    # plugin_4 should be plugin_2's child
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_2.pk).get_children(),
        [CMSPlugin.objects.get(id=plugin_4.pk)])
    # 2,3 & 4 should be descendants of 1
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_1.pk).get_descendants(),
        [
            # note tree_id ordering of MPTT reflected here:
            CMSPlugin.objects.get(id=plugin_2.pk),
            CMSPlugin.objects.get(id=plugin_4.pk),
            CMSPlugin.objects.get(id=plugin_3.pk),
        ],
    )
    # create a second root plugin
    plugin_5 = add_plugin(placeholder, u"TextPlugin", u"en",
                          # force this to first-child, to make the tree more challenging
                          position='first-child',
                          body=u"05",
                          )
    plugin_5.save()
    # child of plugin_5
    plugin_6 = add_plugin(placeholder, u"TextPlugin", u"en",
                          body=u"06",
                          )
    plugin_5 = self.reload(plugin_5)
    plugin_6.parent = plugin_5
    plugin_6.save()
    # plugin_6 should be plugin_5's child
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_5.pk).get_children(),
        [CMSPlugin.objects.get(id=plugin_6.pk)])
    # second child of plugin_5 (NOTE: despite the body "07" following "06",
    # the parent assigned below is plugin_5, not plugin_6 — see the
    # children assertion that follows)
    plugin_7 = add_plugin(placeholder, u"TextPlugin", u"en",
                          body=u"07",
                          )
    plugin_5 = self.reload(plugin_5)
    plugin_7.parent = plugin_5
    plugin_7.save()
    # plugin_7 should be plugin_5's child
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_5.pk).get_children(),
        [
            CMSPlugin.objects.get(id=plugin_6.pk),
            CMSPlugin.objects.get(id=plugin_7.pk)
        ])
    # 6 & 7 should be descendants of 5
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_5.pk).get_descendants(),
        [
            CMSPlugin.objects.get(id=plugin_6.pk),
            CMSPlugin.objects.get(id=plugin_7.pk),
        ])
    # another child of plugin_2
    plugin_8 = add_plugin(placeholder, u"TextPlugin", u"en",
                          body=u"08",
                          )
    plugin_2 = self.reload(plugin_2)
    plugin_8.parent = plugin_2
    plugin_8.save()
    # plugin_4 and plugin_8 should be plugin_2's children
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_2.pk).get_children(),
        [
            CMSPlugin.objects.get(id=plugin_4.pk),
            CMSPlugin.objects.get(id=plugin_8.pk),
        ])
    # child of plugin_3
    plugin_9 = add_plugin(placeholder, u"TextPlugin", u"en",
                          body=u"09",
                          )
    plugin_3 = self.reload(plugin_3)
    plugin_9.parent = plugin_3
    plugin_9.save()
    # plugin_9 should be plugin_3's child
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_3.pk).get_children(),
        [CMSPlugin.objects.get(id=plugin_9.pk)])
    # child of plugin_4
    plugin_10 = add_plugin(placeholder, u"TextPlugin", u"en",
                           body=u"10",
                           )
    plugin_4 = self.reload(plugin_4)
    plugin_10.parent = plugin_4
    plugin_10.save()
    # plugin_10 should be plugin_4's child
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_4.pk).get_children(),
        [CMSPlugin.objects.get(id=plugin_10.pk)])
    original_plugins = placeholder.get_plugins()
    self.assertEqual(original_plugins.count(), 10)
    # elder sibling of plugin_1
    plugin_1 = self.reload(plugin_1)
    plugin_11 = add_plugin(placeholder, u"TextPlugin", u"en",
                           body=u"11",
                           target=plugin_1,
                           position="left"
                           )
    plugin_11.save()
    # plugin_1's children are unaffected by the new sibling
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_1.pk).get_children(),
        [
            CMSPlugin.objects.get(id=plugin_2.pk),
            CMSPlugin.objects.get(id=plugin_3.pk)
        ])
    # elder sibling of plugin_4
    plugin_4 = self.reload(plugin_4)
    plugin_12 = add_plugin(placeholder, u"TextPlugin", u"en",
                           body=u"12",
                           target=plugin_4,
                           position="left"
                           )
    plugin_12.save()
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_2.pk).get_children(),
        [
            CMSPlugin.objects.get(id=plugin_12.pk),
            CMSPlugin.objects.get(id=plugin_4.pk),
            CMSPlugin.objects.get(id=plugin_8.pk)
        ])
    # younger sibling of plugin_7
    plugin_7 = self.reload(plugin_7)
    plugin_13 = add_plugin(placeholder, u"TextPlugin", u"en",
                           body=u"13",
                           target=plugin_7,
                           position="right"
                           )
    plugin_13.save()
    self.assertSequenceEqual(
        CMSPlugin.objects.get(id=plugin_5.pk).get_children(),
        [
            CMSPlugin.objects.get(id=plugin_6.pk),
            CMSPlugin.objects.get(id=plugin_7.pk),
            CMSPlugin.objects.get(id=plugin_13.pk)
        ])
    # new sibling of plugin_5 (no target: appended as a new root)
    plugin_5 = self.reload(plugin_5)
    plugin_14 = add_plugin(placeholder, u"TextPlugin", u"en",
                           body=u"14"
                           )
    plugin_14.save()
    self.assertSequenceEqual(
        CMSPlugin.objects.filter(level=0),
        [
            CMSPlugin.objects.get(id=plugin_11.pk),
            CMSPlugin.objects.get(id=plugin_1.pk),
            CMSPlugin.objects.get(id=plugin_5.pk),
            CMSPlugin.objects.get(id=plugin_14.pk)
        ])
    self.assertEqual(CMSPlugin.objects.get(id=plugin_11.pk).tree_id, 1)
    self.copy_placeholders_and_check_results([placeholder])
    # now let's move plugins around in the tree
    # move plugin_2 before plugin_11
    plugin_2 = self.reload(plugin_2)
    plugin_2.move_to(target=plugin_1, position="left")
    plugin_2.save()
    self.assertEqual(CMSPlugin.objects.get(id=plugin_2.pk).tree_id, 1)
    self.copy_placeholders_and_check_results([placeholder])
    # move plugin_6 after plugin_7
    plugin_6 = self.reload(plugin_6)
    plugin_7 = self.reload(plugin_7)
    plugin_6.move_to(target=plugin_7, position="right")
    plugin_6.save()
    self.copy_placeholders_and_check_results([placeholder])
    # move plugin_3 before plugin_2
    plugin_2 = self.reload(plugin_2)
    plugin_3 = self.reload(plugin_3)
    plugin_3.move_to(target=plugin_2, position="left")
    plugin_3.save()
    self.copy_placeholders_and_check_results([placeholder])
    # make plugin_3 plugin_2's first-child
    plugin_2 = self.reload(plugin_2)
    plugin_3 = self.reload(plugin_3)
    plugin_3.move_to(target=plugin_2, position="first-child")
    plugin_3.save()
    self.copy_placeholders_and_check_results([placeholder])
    # move plugin_7 to the right of plugin_3, making it a child of plugin_2
    # (the original comment said "make plugin_7 plugin_2's first-child",
    # which does not match the move_to() call below)
    # NOTE(review): the return value of this reload is discarded;
    # plugin_2 is not rebound here.
    self.reload(plugin_2)
    plugin_7 = self.reload(plugin_7)
    plugin_7.move_to(target=plugin_3, position="right")
    plugin_7.save()
    self.copy_placeholders_and_check_results([placeholder, ])
def test_nested_plugin_on_page(self):
    """
    Validate that a text plugin with a nested link plugin gets correct
    MPTT values, i.e. that ``lft``/``rght``/``level`` correctly express
    the parent/child relationship of the nested plugin.
    """
    with SettingsOverride(CMS_PERMISSION=False):
        # setup page 1
        page_one = create_page(u"Three Placeholder", u"col_three.html", u"en",
                               position=u"last-child", published=True, in_navigation=True)
        page_one_ph_two = page_one.placeholders.get(slot=u"col_left")

        # add a plugin
        pre_nesting_body = u"<p>the nested text plugin with a link inside</p>"
        text_plugin = add_plugin(page_one_ph_two, u"TextPlugin", u"en", body=pre_nesting_body)
        # prepare nesting plugin
        page_one_ph_two = self.reload(page_one_ph_two)
        text_plugin = self.reload(text_plugin)
        link_plugin = add_plugin(page_one_ph_two, u"LinkPlugin", u"en", target=text_plugin)
        link_plugin.name = u"django-cms Link"
        link_plugin.url = u"https://www.django-cms.org"
        # for some reason mptt does not update the parent/child
        # relationship in the add_plugin method when a target is present,
        # so set the parent explicitly (not the topic of this test)
        link_plugin.parent = text_plugin
        link_plugin.save()
        # reloading needs to be done after every save
        link_plugin = self.reload(link_plugin)
        text_plugin = self.reload(text_plugin)
        # mptt related insertion correct?
        msg = u"parent plugin right is not updated, child not inserted correctly"
        self.assertTrue(text_plugin.rght > link_plugin.rght, msg=msg)
        msg = u"link has no parent"
        # assertIsNotNone instead of assertFalse(x == None): clearer
        # intent and a better failure message
        self.assertIsNotNone(link_plugin.parent, msg=msg)
        msg = u"parent plugin left is not updated, child not inserted correctly"
        self.assertTrue(text_plugin.lft < link_plugin.lft, msg=msg)
        msg = u"child level is not bigger than parent level"
        self.assertTrue(text_plugin.level < link_plugin.level, msg=msg)
        # add the link plugin to the body
        # emulate the editor in admin that adds some txt for the nested plugin
        in_txt = u"""<img id="plugin_obj_%s" title="Link" alt="Link" src="/static/cms/img/icons/plugins/link.png">"""
        nesting_body = u"%s<p>%s</p>" % (text_plugin.body, (in_txt % (link_plugin.id)))
        text_plugin.body = nesting_body
        text_plugin.save()
        text_plugin = self.reload(text_plugin)
        # none of the descendants should have a placeholder other than my own one
        self.assertEqual(text_plugin.get_descendants().exclude(placeholder=text_plugin.placeholder).count(), 0)
        post_add_plugin_count = CMSPlugin.objects.count()
        self.assertEqual(post_add_plugin_count, 2)
def test_copy_page_nested_plugin(self):
    """
    Test to verify that page copy with a nested plugin works.

    page one - 3 placeholders:
        col_sidebar: 1 text plugin
        col_left:    1 text plugin with nested link plugin
        col_right:   no plugin
    page two (copy target)

    Verify copied page, placeholders, plugins and body text.
    """
    with SettingsOverride(CMS_PERMISSION=False):
        # setup page 1
        page_one = create_page(u"Three Placeholder", u"col_three.html", u"en",
                               position=u"last-child", published=True, in_navigation=True)
        page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar")
        page_one_ph_two = page_one.placeholders.get(slot=u"col_left")
        page_one.placeholders.get(slot=u"col_right")
        # add the text plugin to placeholder one
        text_plugin_en = add_plugin(page_one_ph_one, u"TextPlugin", u"en", body="Hello World")
        self.assertEqual(text_plugin_en.id, CMSPlugin.objects.all()[0].id)
        self.assertEqual(text_plugin_en.get_children().count(), 0)
        pre_add_plugin_count = CMSPlugin.objects.count()
        self.assertEqual(pre_add_plugin_count, 1)
        ###
        # add a plugin to placeholder two
        ###
        pre_nesting_body = u"<p>the nested text plugin with a link inside</p>"
        text_plugin_two = add_plugin(page_one_ph_two, u"TextPlugin", u"en", body=pre_nesting_body)
        text_plugin_two = self.reload(text_plugin_two)
        # prepare nesting plugin
        page_one_ph_two = self.reload(page_one_ph_two)
        text_plugin_two = self.reload(text_plugin_two)
        link_plugin = add_plugin(page_one_ph_two, u"LinkPlugin", u"en", target=text_plugin_two)
        link_plugin.name = u"django-cms Link"
        link_plugin.url = u"https://www.django-cms.org"
        # attach the link as a child of the text plugin explicitly
        link_plugin.parent = text_plugin_two
        link_plugin.save()
        # reloading needs to be done after every save
        link_plugin = self.reload(link_plugin)
        text_plugin_two = self.reload(text_plugin_two)
        in_txt = """<img id="plugin_obj_%s" title="Link" alt="Link" src="/static/cms/img/icons/plugins/link.png">"""
        nesting_body = "%s<p>%s</p>" % (text_plugin_two.body, (in_txt % (link_plugin.id)))
        # emulate the editor in admin that adds some txt for the nested plugin
        text_plugin_two.body = nesting_body
        text_plugin_two.save()
        text_plugin_two = self.reload(text_plugin_two)
        # the link is attached as a child?
        self.assertEqual(text_plugin_two.get_children().count(), 1)
        # only count plugins on draft pages from here on
        post_add_plugin_count = CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=True).count()
        self.assertEqual(post_add_plugin_count, 3)
        page_one.save()
        # get the plugins from the original page
        page_one = self.reload(page_one)
        page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar")
        page_one_ph_two = page_one.placeholders.get(slot=u"col_left")
        page_one_ph_three = page_one.placeholders.get(slot=u"col_right")
        # verify that the plugins got created
        org_placeholder_one_plugins = page_one_ph_one.get_plugins()
        self.assertEqual(len(org_placeholder_one_plugins), 1)
        org_placeholder_two_plugins = page_one_ph_two.get_plugins()
        self.assertEqual(len(org_placeholder_two_plugins), 2)
        org_placeholder_three_plugins = page_one_ph_three.get_plugins()
        self.assertEqual(len(org_placeholder_three_plugins), 0)
        self.assertEqual(page_one.placeholders.count(), 3)
        placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count()
        self.assertEqual(placeholder_count, 3)
        self.assertEqual(CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=True).count(), 3)
        ##
        # setup page_copy_target page
        ##
        page_copy_target = create_page("Three Placeholder - page copy target", "col_three.html", "en",
                                       position="last-child", published=True, in_navigation=True)
        all_page_count = Page.objects.drafts().count()
        pre_copy_placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count()
        self.assertEqual(pre_copy_placeholder_count, 6)
        # copy the page (requires an authenticated admin client)
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            page_two = self.copy_page(page_one, page_copy_target)
        # validate the expected pages,placeholders,plugins,pluginbodies
        after_copy_page_plugin_count = CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=True).count()
        self.assertEqual(after_copy_page_plugin_count, 6)
        # check the amount of copied stuff
        after_copy_page_count = Page.objects.drafts().count()
        after_copy_placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count()
        self.assertGreater(after_copy_page_count, all_page_count, u"no new page after copy")
        self.assertGreater(after_copy_page_plugin_count, post_add_plugin_count, u"plugin count is not grown")
        self.assertGreater(after_copy_placeholder_count, pre_copy_placeholder_count,
                           u"placeholder count is not grown")
        self.assertEqual(after_copy_page_count, 3, u"no new page after copy")
        # original placeholder
        page_one = self.reload(page_one)
        page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar")
        page_one_ph_two = page_one.placeholders.get(slot=u"col_left")
        page_one_ph_three = page_one.placeholders.get(slot=u"col_right")
        # check if there are multiple pages assigned to this placeholders
        found_page = page_one_ph_one.page if page_one_ph_one else None
        self.assertEqual(found_page, page_one)
        found_page = page_one_ph_two.page if page_one_ph_two else None
        self.assertEqual(found_page, page_one)
        found_page = page_one_ph_three.page if page_one_ph_three else None
        self.assertEqual(found_page, page_one)
        page_two = self.reload(page_two)
        page_two_ph_one = page_two.placeholders.get(slot=u"col_sidebar")
        page_two_ph_two = page_two.placeholders.get(slot=u"col_left")
        page_two_ph_three = page_two.placeholders.get(slot=u"col_right")
        # check if there are multiple pages assigned to this placeholders
        found_page = page_two_ph_one.page if page_two_ph_one else None
        self.assertEqual(found_page, page_two)
        found_page = page_two_ph_two.page if page_two_ph_two else None
        self.assertEqual(found_page, page_two)
        found_page = page_two_ph_three.page if page_two_ph_three else None
        self.assertEqual(found_page, page_two)
        # check the stored placeholders org vs copy
        msg = 'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % (
            page_two_ph_one.pk, page_one_ph_one.pk, page_two.pk)
        self.assertNotEquals(page_two_ph_one.pk, page_one_ph_one.pk, msg)
        msg = 'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % (
            page_two_ph_two.pk, page_one_ph_two.pk, page_two.pk)
        self.assertNotEquals(page_two_ph_two.pk, page_one_ph_two.pk, msg)
        msg = 'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % (
            page_two_ph_three.pk, page_one_ph_three.pk, page_two.pk)
        self.assertNotEquals(page_two_ph_three.pk, page_one_ph_three.pk, msg)
        # get the plugins from the original page
        org_placeholder_one_plugins = page_one_ph_one.get_plugins()
        self.assertEqual(len(org_placeholder_one_plugins), 1)
        org_placeholder_two_plugins = page_one_ph_two.get_plugins()
        self.assertEqual(len(org_placeholder_two_plugins), 2)
        org_placeholder_three_plugins = page_one_ph_three.get_plugins()
        self.assertEqual(len(org_placeholder_three_plugins), 0)
        # get the plugins from the copied page
        copied_placeholder_one_plugins = page_two_ph_one.get_plugins()
        self.assertEqual(len(copied_placeholder_one_plugins), 1)
        copied_placeholder_two_plugins = page_two_ph_two.get_plugins()
        self.assertEqual(len(copied_placeholder_two_plugins), 2)
        copied_placeholder_three_plugins = page_two_ph_three.get_plugins()
        self.assertEqual(len(copied_placeholder_three_plugins), 0)
        # verify the plugins got copied
        # placeholder 1
        count_plugins_copied = len(copied_placeholder_one_plugins)
        count_plugins_org = len(org_placeholder_one_plugins)
        msg = u"plugin count %s %s for placeholder one not equal" % (count_plugins_copied, count_plugins_org)
        self.assertEqual(count_plugins_copied, count_plugins_org, msg)
        # placeholder 2
        count_plugins_copied = len(copied_placeholder_two_plugins)
        count_plugins_org = len(org_placeholder_two_plugins)
        msg = u"plugin count %s %s for placeholder two not equal" % (count_plugins_copied, count_plugins_org)
        self.assertEqual(count_plugins_copied, count_plugins_org, msg)
        # placeholder 3
        count_plugins_copied = len(copied_placeholder_three_plugins)
        count_plugins_org = len(org_placeholder_three_plugins)
        msg = u"plugin count %s %s for placeholder three not equal" % (count_plugins_copied, count_plugins_org)
        self.assertEqual(count_plugins_copied, count_plugins_org, msg)
        # verify the body of text plugin with nested link plugin
        # org to copied
        org_nested_text_plugin = None
        # do this iteration to find the real text plugin with the attached link
        # the inheritance mechanism for the cmsplugins works through
        # (tuple)get_plugin_instance()
        for x in org_placeholder_two_plugins:
            if x.plugin_type == u"TextPlugin":
                instance = x.get_plugin_instance()[0]
                if instance.body.startswith(pre_nesting_body):
                    org_nested_text_plugin = instance
                    break
        copied_nested_text_plugin = None
        for x in copied_placeholder_two_plugins:
            if x.plugin_type == u"TextPlugin":
                instance = x.get_plugin_instance()[0]
                if instance.body.startswith(pre_nesting_body):
                    copied_nested_text_plugin = instance
                    break
        msg = u"orginal nested text plugin not found"
        self.assertNotEquals(org_nested_text_plugin, None, msg=msg)
        msg = u"copied nested text plugin not found"
        self.assertNotEquals(copied_nested_text_plugin, None, msg=msg)
        # get the children ids of the texplugin with a nested link
        # to check if the body of the text is genrated correctly
        org_link_child_plugin = org_nested_text_plugin.get_children()[0]
        copied_link_child_plugin = copied_nested_text_plugin.get_children()[0]
        # validate the textplugin body texts
        msg = u"org plugin and copied plugin are the same"
        self.assertTrue(org_link_child_plugin.id != copied_link_child_plugin.id, msg)
        needle = u"plugin_obj_%s"
        msg = u"child plugin id differs to parent in body plugin_obj_id"
        # linked child is in body
        self.assertTrue(org_nested_text_plugin.body.find(needle % (org_link_child_plugin.id)) != -1, msg)
        msg = u"copy: child plugin id differs to parent in body plugin_obj_id"
        self.assertTrue(copied_nested_text_plugin.body.find(needle % (copied_link_child_plugin.id)) != -1, msg)
        # really nothing else: each body must reference only its OWN child id
        msg = u"child link plugin id differs to parent body plugin_obj_id"
        self.assertTrue(org_nested_text_plugin.body.find(needle % (copied_link_child_plugin.id)) == -1, msg)
        msg = u"copy: child link plugin id differs to parent body plugin_obj_id"
        self.assertTrue(copied_nested_text_plugin.body.find(needle % (org_link_child_plugin.id)) == -1, msg)
        # now reverse lookup the placeholders from the plugins
        org_placeholder = org_link_child_plugin.placeholder
        copied_placeholder = copied_link_child_plugin.placeholder
        msg = u"placeholder of the orginal plugin and copied plugin are the same"
        ok = ((org_placeholder.id != copied_placeholder.id))
        self.assertTrue(ok, msg)
def test_copy_page_nested_plugin_moved_parent_plugin(self):
    """
    Test to verify that page copy with a nested plugin works
    when a plugin with child got moved to another placeholder.

    page one - 3 placeholders:
        col_sidebar: 1 text plugin
        col_left:    1 text plugin with nested link plugin
        col_right:   no plugin
    page two (copy target)

    step2: move the col_left text plugin to col_right:
        col_sidebar: 1 text plugin
        col_left:    no plugin
        col_right:   1 text plugin with nested link plugin

    verify the copied page structure
    """
    with SettingsOverride(CMS_PERMISSION=False):
        # setup page 1
        page_one = create_page(u"Three Placeholder", u"col_three.html", u"en",
                               position=u"last-child", published=True, in_navigation=True)
        page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar")
        page_one_ph_two = page_one.placeholders.get(slot=u"col_left")
        page_one.placeholders.get(slot=u"col_right")
        # add the text plugin to placeholder one
        text_plugin_en = add_plugin(page_one_ph_one, u"TextPlugin", u"en", body=u"Hello World")
        self.assertEqual(text_plugin_en.id, CMSPlugin.objects.all()[0].id)
        self.assertEqual(text_plugin_en.get_children().count(), 0)
        pre_add_plugin_count = CMSPlugin.objects.count()
        self.assertEqual(pre_add_plugin_count, 1)
        # add a plugin to placeholder two
        pre_nesting_body = u"<p>the nested text plugin with a link inside</p>"
        text_plugin_two = add_plugin(page_one_ph_two, u"TextPlugin", u"en", body=pre_nesting_body)
        text_plugin_two = self.reload(text_plugin_two)
        # prepare nesting plugin
        page_one_ph_two = self.reload(page_one_ph_two)
        text_plugin_two = self.reload(text_plugin_two)
        link_plugin = add_plugin(page_one_ph_two, u"LinkPlugin", u"en", target=text_plugin_two)
        link_plugin.name = u"django-cms Link"
        link_plugin.url = u"https://www.django-cms.org"
        link_plugin.parent = text_plugin_two
        link_plugin.save()
        # reload after every save
        link_plugin = self.reload(link_plugin)
        text_plugin_two = self.reload(text_plugin_two)
        in_txt = u"""<img id="plugin_obj_%s" title="Link" alt="Link" src="/static/cms/img/icons/plugins/link.png">"""
        nesting_body = "%s<p>%s</p>" % (text_plugin_two.body, (in_txt % (link_plugin.id)))
        # emulate the editor in admin that adds some txt for the nested plugin
        text_plugin_two.body = nesting_body
        text_plugin_two.save()
        text_plugin_two = self.reload(text_plugin_two)
        # the link is attached as a child?
        self.assertEqual(text_plugin_two.get_children().count(), 1)
        post_add_plugin_count = CMSPlugin.objects.count()
        self.assertEqual(post_add_plugin_count, 3)
        page_one.save()
        # get the plugins from the original page
        page_one = self.reload(page_one)
        page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar")
        page_one_ph_two = page_one.placeholders.get(slot=u"col_left")
        page_one_ph_three = page_one.placeholders.get(slot=u"col_right")
        # verify the plugins got created
        org_placeholder_one_plugins = page_one_ph_one.get_plugins()
        self.assertEqual(len(org_placeholder_one_plugins), 1)
        org_placeholder_two_plugins = page_one_ph_two.get_plugins()
        self.assertEqual(len(org_placeholder_two_plugins), 2)
        org_placeholder_three_plugins = page_one_ph_three.get_plugins()
        self.assertEqual(len(org_placeholder_three_plugins), 0)
        self.assertEqual(page_one.placeholders.count(), 3)
        placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count()
        self.assertEqual(placeholder_count, 3)
        self.assertEqual(CMSPlugin.objects.count(), 3)
        # setup page_copy_target
        page_copy_target = create_page("Three Placeholder - page copy target", "col_three.html", "en",
                                       position="last-child", published=True, in_navigation=True)
        all_page_count = Page.objects.drafts().count()
        pre_copy_placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count()
        self.assertEqual(pre_copy_placeholder_count, 6)
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            # now move the parent text plugin to another placeholder
            # via the admin move-plugin endpoint (child must follow)
            post_data = {
                'placeholder_id': page_one_ph_three.id,
                'plugin_id': text_plugin_two.id,
                'plugin_language': 'en',
                'plugin_parent': '',
            }
            plugin_class = text_plugin_two.get_plugin_class_instance()
            expected = {'reload': plugin_class.requires_reload(PLUGIN_MOVE_ACTION)}
            edit_url = URL_CMS_MOVE_PLUGIN % page_one.id
            response = self.client.post(edit_url, post_data)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(json.loads(response.content.decode('utf8')), expected)
            # check if the plugin got moved
            page_one = self.reload(page_one)
            # NOTE(review): the return value of this reload is discarded;
            # text_plugin_two is not rebound here.
            self.reload(text_plugin_two)
            page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar")
            page_one_ph_two = page_one.placeholders.get(slot=u"col_left")
            page_one_ph_three = page_one.placeholders.get(slot=u"col_right")
            org_placeholder_one_plugins = page_one_ph_one.get_plugins()
            self.assertEqual(len(org_placeholder_one_plugins), 1)
            org_placeholder_two_plugins = page_one_ph_two.get_plugins()
            # the plugin got moved and child got moved
            self.assertEqual(len(org_placeholder_two_plugins), 0)
            org_placeholder_three_plugins = page_one_ph_three.get_plugins()
            self.assertEqual(len(org_placeholder_three_plugins), 2)
            # copy the page
            page_two = self.copy_page(page_one, page_copy_target)
        # validate the expected pages,placeholders,plugins,pluginbodies
        after_copy_page_plugin_count = CMSPlugin.objects.count()
        self.assertEqual(after_copy_page_plugin_count, 6)
        after_copy_page_count = Page.objects.drafts().count()
        after_copy_placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count()
        self.assertGreater(after_copy_page_count, all_page_count, u"no new page after copy")
        self.assertGreater(after_copy_page_plugin_count, post_add_plugin_count, u"plugin count is not grown")
        self.assertGreater(after_copy_placeholder_count, pre_copy_placeholder_count,
                           u"placeholder count is not grown")
        self.assertEqual(after_copy_page_count, 3, u"no new page after copy")
        # validate the structure
        # orginal placeholder
        page_one = self.reload(page_one)
        page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar")
        page_one_ph_two = page_one.placeholders.get(slot=u"col_left")
        page_one_ph_three = page_one.placeholders.get(slot=u"col_right")
        # check if there are multiple pages assigned to this placeholders
        found_page = page_one_ph_one.page if page_one_ph_one else None
        self.assertEqual(found_page, page_one)
        found_page = page_one_ph_two.page if page_one_ph_two else None
        self.assertEqual(found_page, page_one)
        found_page = page_one_ph_three.page if page_one_ph_three else None
        self.assertEqual(found_page, page_one)
        page_two = self.reload(page_two)
        page_two_ph_one = page_two.placeholders.get(slot=u"col_sidebar")
        page_two_ph_two = page_two.placeholders.get(slot=u"col_left")
        page_two_ph_three = page_two.placeholders.get(slot=u"col_right")
        # check if there are multiple pages assigned to this placeholders
        found_page = page_two_ph_one.page if page_two_ph_one else None
        self.assertEqual(found_page, page_two)
        found_page = page_two_ph_two.page if page_two_ph_two else None
        self.assertEqual(found_page, page_two)
        found_page = page_two_ph_three.page if page_two_ph_three else None
        self.assertEqual(found_page, page_two)
        # check the stored placeholders org vs copy
        msg = u'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % (
            page_two_ph_one.pk, page_one_ph_one.pk, page_two.pk)
        self.assertNotEquals(page_two_ph_one.pk, page_one_ph_one.pk, msg)
        msg = u'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % (
            page_two_ph_two.pk, page_one_ph_two.pk, page_two.pk)
        self.assertNotEquals(page_two_ph_two.pk, page_one_ph_two.pk, msg)
        msg = u'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % (
            page_two_ph_three.pk, page_one_ph_three.pk, page_two.pk)
        self.assertNotEquals(page_two_ph_three.pk, page_one_ph_three.pk, msg)
        # get the plugins from the original page
        org_placeholder_one_plugins = page_one_ph_one.get_plugins()
        self.assertEqual(len(org_placeholder_one_plugins), 1)
        org_placeholder_two_plugins = page_one_ph_two.get_plugins()
        self.assertEqual(len(org_placeholder_two_plugins), 0)
        org_placeholder_three_plugins = page_one_ph_three.get_plugins()
        self.assertEqual(len(org_placeholder_three_plugins), 2)
        # get the plugins from the copied page
        copied_placeholder_one_plugins = page_two_ph_one.get_plugins()
        self.assertEqual(len(copied_placeholder_one_plugins), 1)
        copied_placeholder_two_plugins = page_two_ph_two.get_plugins()
        self.assertEqual(len(copied_placeholder_two_plugins), 0)
        copied_placeholder_three_plugins = page_two_ph_three.get_plugins()
        self.assertEqual(len(copied_placeholder_three_plugins), 2)
        # verify the plugins got copied
        # placeholder 1
        count_plugins_copied = len(copied_placeholder_one_plugins)
        count_plugins_org = len(org_placeholder_one_plugins)
        msg = u"plugin count %s %s for placeholder one not equal" % (count_plugins_copied, count_plugins_org)
        self.assertEqual(count_plugins_copied, count_plugins_org, msg)
        # placeholder 2
        count_plugins_copied = len(copied_placeholder_two_plugins)
        count_plugins_org = len(org_placeholder_two_plugins)
        msg = u"plugin count %s %s for placeholder two not equal" % (count_plugins_copied, count_plugins_org)
        self.assertEqual(count_plugins_copied, count_plugins_org, msg)
        # placeholder 3
        count_plugins_copied = len(copied_placeholder_three_plugins)
        count_plugins_org = len(org_placeholder_three_plugins)
        msg = u"plugin count %s %s for placeholder three not equal" % (count_plugins_copied, count_plugins_org)
        self.assertEqual(count_plugins_copied, count_plugins_org, msg)
        # verify the body of text plugin with nested link plugin
        # org to copied
        org_nested_text_plugin = None
        # do this iteration to find the real text plugin with the attached link
        # the inheritance mechanism for the cmsplugins works through
        # (tuple)get_plugin_instance()
        for x in org_placeholder_three_plugins:
            if x.plugin_type == u"TextPlugin":
                instance = x.get_plugin_instance()[0]
                if instance.body.startswith(pre_nesting_body):
                    org_nested_text_plugin = instance
                    break
        copied_nested_text_plugin = None
        for x in copied_placeholder_three_plugins:
            if x.plugin_type == u"TextPlugin":
                instance = x.get_plugin_instance()[0]
                if instance.body.startswith(pre_nesting_body):
                    copied_nested_text_plugin = instance
                    break
        msg = u"orginal nested text plugin not found"
        self.assertNotEquals(org_nested_text_plugin, None, msg=msg)
        msg = u"copied nested text plugin not found"
        self.assertNotEquals(copied_nested_text_plugin, None, msg=msg)
        # get the children ids of the texplugin with a nested link
        # to check if the body of the text is generated correctly
        org_link_child_plugin = org_nested_text_plugin.get_children()[0]
        copied_link_child_plugin = copied_nested_text_plugin.get_children()[0]
        # validate the textplugin body texts
        msg = u"org plugin and copied plugin are the same"
        self.assertNotEqual(org_link_child_plugin.id, copied_link_child_plugin.id, msg)
        needle = u"plugin_obj_%s"
        msg = u"child plugin id differs to parent in body plugin_obj_id"
        # linked child is in body
        self.assertTrue(org_nested_text_plugin.body.find(needle % (org_link_child_plugin.id)) != -1, msg)
        msg = u"copy: child plugin id differs to parent in body plugin_obj_id"
        self.assertTrue(copied_nested_text_plugin.body.find(needle % (copied_link_child_plugin.id)) != -1, msg)
        # really nothing else: each body must reference only its OWN child id
        msg = u"child link plugin id differs to parent body plugin_obj_id"
        self.assertTrue(org_nested_text_plugin.body.find(needle % (copied_link_child_plugin.id)) == -1, msg)
        msg = u"copy: child link plugin id differs to parent body plugin_obj_id"
        self.assertTrue(copied_nested_text_plugin.body.find(needle % (org_link_child_plugin.id)) == -1, msg)
        # now reverse lookup the placeholders from the plugins
        org_placeholder = org_link_child_plugin.placeholder
        copied_placeholder = copied_link_child_plugin.placeholder
        msg = u"placeholder of the orginal plugin and copied plugin are the same"
        self.assertNotEqual(org_placeholder.id, copied_placeholder.id, msg)
| bsd-3-clause |
pratikmallya/hue | desktop/core/ext-py/Django-1.6.10/django/contrib/sessions/backends/signed_cookies.py | 288 | 2798 | from django.conf import settings
from django.core import signing
from django.contrib.sessions.backends.base import SessionBase
class SessionStore(SessionBase):

    def load(self):
        """
        Deserialize the session data straight out of the signed cookie
        value itself -- the inverse of _get_session_key(). A tampered or
        otherwise invalid cookie yields a fresh, empty session instead
        of an error.
        """
        try:
            return signing.loads(
                self.session_key,
                serializer=self.serializer,
                # This doesn't handle non-default expiry dates, see #19201
                max_age=settings.SESSION_COOKIE_AGE,
                salt='django.contrib.sessions.backends.signed_cookies')
        except (signing.BadSignature, ValueError):
            self.create()
            return {}

    def create(self):
        """
        There is no server-side storage to allocate; flagging the session
        as modified is enough to make the response set a cookie on the
        client for the current request.
        """
        self.modified = True

    def save(self, must_create=False):
        """
        Serialize and sign the current session data into the session key,
        then flag the session as modified so the cookie is (re)written on
        the client for the current request.
        """
        self._session_key = self._get_session_key()
        self.modified = True

    def exists(self, session_key=None):
        """
        Existence checks only make sense against a shared backing store;
        with the data living in the client's cookie there is nothing to
        look up, so this always reports False.
        """
        return False

    def delete(self, session_key=None):
        """
        Drop the key and the cached data structure, and flag the session
        as modified so the (now empty) cookie is pushed back to the
        client for the current request.
        """
        self._session_key = ''
        self._session_cache = {}
        self.modified = True

    def cycle_key(self):
        """
        Keep the same data under a new key: calling ``save()`` recomputes
        the signed key from the data, which is all "cycling" means for
        this backend, and a cookie with the new key goes out at the end
        of the request.
        """
        self.save()

    def _get_session_key(self):
        """
        Unlike other backends, the session key here is not a random
        string: it is the session data itself, compressed, serialized
        and signed into a url-safe base64 string.
        """
        data = getattr(self, '_session_cache', {})
        return signing.dumps(data, compress=True,
                             salt='django.contrib.sessions.backends.signed_cookies',
                             serializer=self.serializer)

    @classmethod
    def clear_expired(cls):
        """Expiry is enforced by max_age at load time; nothing to purge."""
        pass
| apache-2.0 |
jamslevy/gsoc | app/django/contrib/gis/gdal/prototypes/generation.py | 8 | 3695 | """
This module contains functions that generate ctypes prototypes for the
GDAL routines.
"""
from ctypes import c_char_p, c_double, c_int, c_void_p
from django.contrib.gis.gdal.prototypes.errcheck import \
check_arg_errcode, check_errcode, check_geom, check_geom_offset, \
check_pointer, check_srs, check_str_arg, check_string, check_const_string
def double_output(func, argtypes, errcheck=False, strarg=False):
    """Set up *func* as a ctypes prototype that returns a C double."""
    func.argtypes = argtypes
    func.restype = c_double
    if errcheck:
        func.errcheck = check_arg_errcode
    if strarg:
        # NOTE: when both flags are given, the string-argument check wins.
        func.errcheck = check_str_arg
    return func
def geom_output(func, argtypes, offset=None):
    """
    Configure *func* to produce an OGR geometry.

    Without an *offset* the geometry pointer is the direct return value;
    with an *offset* the routine returns an error code and the geometry
    is retrieved by reference from the argument at that offset.
    """
    func.argtypes = argtypes
    if offset:
        # Error code returned; geometry comes back through an out-parameter.
        func.restype = c_int

        def geomerrcheck(result, func, cargs):
            return check_geom_offset(result, func, cargs, offset)
        func.errcheck = geomerrcheck
    else:
        # The geometry pointer is returned directly.
        func.restype = c_void_p
        func.errcheck = check_geom
    return func
def int_output(func, argtypes):
    """Set up *func* as a ctypes prototype that returns a C int."""
    func.argtypes = argtypes
    func.restype = c_int
    return func
def srs_output(func, argtypes):
    """
    Set up *func* to return a pointer to an OGR Spatial Reference System,
    passed through the check_srs error handler.
    """
    func.argtypes = argtypes
    func.restype = c_void_p
    func.errcheck = check_srs
    return func
def const_string_output(func, argtypes, offset=None):
    """
    Set up *func* to yield a constant string: directly as c_char_p when
    no *offset* is given, otherwise via the argument at *offset* with an
    integer status code as the direct return value.
    """
    func.argtypes = argtypes
    func.restype = c_int if offset else c_char_p

    # Closure binds this prototype's offset for the error-check helper.
    def _check_const(result, func, cargs):
        return check_const_string(result, func, cargs, offset=offset)
    func.errcheck = _check_const
    return func
def string_output(func, argtypes, offset=-1, str_result=False):
    """
    Set up *func* to produce a Python string from a GDAL pointer.

    When *str_result* is true the string itself is the result and the
    restype is left untouched so the raw pointer can be captured; when
    false the routine returns an error code and the string is retrieved
    from the argument at *offset*. Memory handling of the returned
    pointer is delegated to the check_string helper.
    """
    func.argtypes = argtypes
    if not str_result:
        # An error code comes back directly.
        func.restype = c_int

    # Closure binds this prototype's offset/str_result settings.
    def _check_str(result, func, cargs):
        return check_string(result, func, cargs,
                            offset=offset, str_result=str_result)
    func.errcheck = _check_str
    return func
def void_output(func, argtypes, errcheck=True):
    """
    Set up *func* for routines with no meaningful return value.

    With *errcheck* enabled the routine is treated as returning a status
    code that is validated; with it disabled the return value is ignored
    entirely (a true void routine).
    """
    if argtypes:
        func.argtypes = argtypes
    if errcheck:
        # A status code is returned and must be examined.
        func.restype = c_int
        func.errcheck = check_errcode
    else:
        # Genuinely void; nothing to check.
        func.restype = None
    return func
def voidptr_output(func, argtypes):
    """Set up *func* to return a c_void_p, validated by check_pointer."""
    func.argtypes = argtypes
    func.restype = c_void_p
    func.errcheck = check_pointer
    return func
| apache-2.0 |
kbroughton/ansible-modules-core | packaging/language/easy_install.py | 198 | 6956 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Matt Wright <matt@nobien.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import tempfile
import os.path
DOCUMENTATION = '''
---
module: easy_install
short_description: Installs Python libraries
description:
- Installs Python libraries, optionally in a I(virtualenv)
version_added: "0.7"
options:
name:
description:
- A Python library name
required: true
default: null
aliases: []
virtualenv:
description:
- an optional I(virtualenv) directory path to install into. If the
I(virtualenv) does not exist, it is created automatically
required: false
default: null
virtualenv_site_packages:
version_added: "1.1"
description:
- Whether the virtual environment will inherit packages from the
global site-packages directory. Note that if this setting is
changed on an already existing virtual environment it will not
have any effect, the environment must be deleted and newly
created.
required: false
default: "no"
choices: [ "yes", "no" ]
virtualenv_command:
version_added: "1.1"
description:
- The command to create the virtual environment with. For example
C(pyvenv), C(virtualenv), C(virtualenv2).
required: false
default: virtualenv
executable:
description:
- The explicit executable or a pathname to the executable to be used to
run easy_install for a specific version of Python installed in the
system. For example C(easy_install-3.3), if there are both Python 2.7
and 3.3 installations in the system and you want to run easy_install
for the Python 3.3 installation.
version_added: "1.3"
required: false
default: null
state:
version_added: "2.0"
description:
- The desired state of the library. C(latest) ensures that the latest version is installed.
required: false
choices: [present, latest]
default: present
notes:
- Please note that the M(easy_install) module can only install Python
libraries. Thus this module is not able to remove libraries. It is
generally recommended to use the M(pip) module which you can first install
using M(easy_install).
- Also note that I(virtualenv) must be installed on the remote host if the
C(virtualenv) parameter is specified.
requirements: [ "virtualenv" ]
author: "Matt Wright (@mattupstate)"
'''
EXAMPLES = '''
# Examples from Ansible Playbooks
- easy_install: name=pip state=latest
# Install Bottle into the specified virtualenv.
- easy_install: name=bottle virtualenv=/webapps/myapp/venv
'''
def _is_package_installed(module, name, easy_install, executable_arguments):
executable_arguments = executable_arguments + ['--dry-run']
cmd = '%s %s %s' % (easy_install, ' '.join(executable_arguments), name)
rc, status_stdout, status_stderr = module.run_command(cmd)
return not ('Reading' in status_stdout or 'Downloading' in status_stdout)
def _get_easy_install(module, env=None, executable=None):
candidate_easy_inst_basenames = ['easy_install']
easy_install = None
if executable is not None:
if os.path.isabs(executable):
easy_install = executable
else:
candidate_easy_inst_basenames.insert(0, executable)
if easy_install is None:
if env is None:
opt_dirs = []
else:
# Try easy_install with the virtualenv directory first.
opt_dirs = ['%s/bin' % env]
for basename in candidate_easy_inst_basenames:
easy_install = module.get_bin_path(basename, False, opt_dirs)
if easy_install is not None:
break
# easy_install should have been found by now. The final call to
# get_bin_path will trigger fail_json.
if easy_install is None:
basename = candidate_easy_inst_basenames[0]
easy_install = module.get_bin_path(basename, True, opt_dirs)
return easy_install
def main():
    """Entry point of the easy_install Ansible module.

    Parses the module parameters, optionally creates the requested
    virtualenv, locates the easy_install executable and installs the
    named package unless it is already present. Always exits through
    module.exit_json()/fail_json().
    """
    arg_spec = dict(
        name=dict(required=True),
        state=dict(required=False,
                   default='present',
                   choices=['present','latest'],
                   type='str'),
        virtualenv=dict(default=None, required=False),
        virtualenv_site_packages=dict(default='no', type='bool'),
        virtualenv_command=dict(default='virtualenv', required=False),
        executable=dict(default='easy_install', required=False),
    )
    module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
    name = module.params['name']
    env = module.params['virtualenv']
    executable = module.params['executable']
    site_packages = module.params['virtualenv_site_packages']
    virtualenv_command = module.params['virtualenv_command']
    executable_arguments = []
    # state=latest maps onto easy_install's --upgrade flag.
    if module.params['state'] == 'latest':
        executable_arguments.append('--upgrade')
    # Accumulated exit status and output of every command run below.
    rc = 0
    err = ''
    out = ''
    if env:
        # Create the virtualenv first if it does not exist yet (detected
        # via its bin/activate script).
        virtualenv = module.get_bin_path(virtualenv_command, True)
        if not os.path.exists(os.path.join(env, 'bin', 'activate')):
            if module.check_mode:
                module.exit_json(changed=True)
            command = '%s %s' % (virtualenv, env)
            if site_packages:
                command += ' --system-site-packages'
            cwd = tempfile.gettempdir()
            rc_venv, out_venv, err_venv = module.run_command(command, cwd=cwd)
            rc += rc_venv
            out += out_venv
            err += err_venv
    easy_install = _get_easy_install(module, env, executable)
    cmd = None
    changed = False
    # Only run the real install when a --dry-run probe says the package
    # is missing; otherwise report no change.
    installed = _is_package_installed(module, name, easy_install, executable_arguments)
    if not installed:
        if module.check_mode:
            module.exit_json(changed=True)
        cmd = '%s %s %s' % (easy_install, ' '.join(executable_arguments), name)
        rc_easy_inst, out_easy_inst, err_easy_inst = module.run_command(cmd)
        rc += rc_easy_inst
        out += out_easy_inst
        err += err_easy_inst
        changed = True
    # Any non-zero status from virtualenv creation or the install fails
    # the module with the collected stderr.
    if rc != 0:
        module.fail_json(msg=err, cmd=cmd)
    module.exit_json(changed=changed, binary=easy_install,
                     name=name, virtualenv=env)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
SchrodingersGat/kicad-footprint-generator | scripts/Packages/TO_SOT_THT/TO_SOT_THT_generate.py | 2 | 54660 | #usr/bin/env python
import sys
import os
import math
import time
# ensure that the kicad-footprint-generator directory is available
#sys.path.append(os.environ.get('KIFOOTPRINTGENERATOR')) # enable package import from parent directory
#sys.path.append("D:\hardware\KiCAD\kicad-footprint-generator") # enable package import from parent directory
sys.path.append(os.path.join(sys.path[0],"..","..","..","kicad_mod")) # load kicad_mod path
sys.path.append(os.path.join(sys.path[0],"..","..","..")) # load kicad_mod path
from KicadModTree import * # NOQA
from tools import *
from TO_THT_packages import *
# vertical symbols for rectangular transistors
def makeVERT(lib_name, pck, has3d=False, x_3d=[0, 0, 0], s_3d=[1,1,1], lptext="_LargePads", r_3d=[0, 0, 0]):
    """Generate a vertical (upright) THT footprint for a rectangular
    TO/SOT package and write it to '<footprint_name>.kicad_mod'.

    lib_name: footprint library name; used to build the 3D model path
        '<lib_name>.3dshapes/<footprint_name>.wrl'.
    pck: package description object (see TO_THT_packages); its fields
        (pad, drill, rm, pins, plastic, metal, staggered_*, ...) drive
        all geometry. Dimensions are in mm.
    has3d / x_3d / s_3d / r_3d: whether to reference a 3D model, and its
        offset, scale and rotation.
    lptext: name suffix appended for large-pad variants.

    Side effects: prints the footprint name and writes the .kicad_mod
    file into the current directory.

    NOTE(review): x_3d/s_3d/r_3d are mutable default arguments; they are
    only passed through to Model() here, but tuples would be safer.
    """
    # Naming scheme of the locals below: *_fabp = plastic body outline on
    # the fab layer, *_fabm = metal tab, *_slk* = silkscreen (body plus
    # clearance), *_crt = courtyard; l/t/w/h = left/top/width/height.
    # Pin 1 sits at the footprint origin.
    padsize=pck.pad
    l_fabp = -pck.pin_offset_x
    t_fabp = -pck.pin_offset_z
    if pck.staggered_type >0:
        t_fabp=-pck.staggered_pin_offset_z
    w_fabp = pck.plastic[0]
    h_fabp = pck.plastic[2]
    w_fabm = pck.metal[0]
    h_fabm = pck.metal[2]
    l_slkp = l_fabp - slk_offset
    t_slkp = t_fabp - slk_offset
    w_slkp = w_fabp + 2 * slk_offset
    h_slkp = h_fabp + 2 * slk_offset
    w_slkm = w_fabm + 2 * slk_offset
    h_slkm = h_fabm + 2 * slk_offset
    l_mounth = l_fabp + pck.mounting_hole_pos[0]
    txt_x = l_slkp + max(w_slkp, w_slkm) / 2
    # calculate pad positions; staggered_type 1/2 selects whether the
    # odd or the even pins are shifted down by staggered_rm[0]
    pads=[]
    yshift=0
    y1=0
    y2=0
    maxpiny=0
    if pck.staggered_type == 1:
        y1 = pck.staggered_rm[0]
        yshift = -pck.staggered_rm[0]
        y2 = 0
        maxpiny = pck.staggered_rm[0]
        if len(pck.staggered_pad)>0:
            padsize=pck.staggered_pad
    elif pck.staggered_type == 2:
        y1 = 0
        yshift = 0
        y2=pck.staggered_rm[0]
        maxpiny = pck.staggered_rm[0]
        if len(pck.staggered_pad) > 0:
            padsize = pck.staggered_pad
    # total pin-row width: uniform pitch pck.rm, or per-pin pitches from
    # pck.rm_list when that override is present
    pinwid = (pck.pins - 1) * pck.rm
    if len(pck.rm_list) > 0:
        pinwid = 0
        for rm in pck.rm_list:
            pinwid = pinwid + rm
    l_crt = min(-padsize[0] / 2, l_fabp) - crt_offset
    t_crt = min(-padsize[1] / 2, t_fabp) - crt_offset
    w_crt = max(max(w_fabp, w_fabm), pinwid + padsize[0]) + 2 * crt_offset
    h_crt = max(t_fabp+max(h_fabp, h_fabm)+ crt_offset-t_crt, -t_crt + maxpiny + padsize[1] / 2 + crt_offset)
    y=y1
    x = 0
    for p in range(1, pck.pins + 1):
        if (p % 2) == 1:
            y = y1
        else:
            y = y2
        pads.append([x, y])
        if len(pck.rm_list)>0 and p<=len(pck.rm_list):
            x = x + pck.rm_list[p-1]
        else:
            x = x + pck.rm
    # assemble footprint name / description / tags from the package data
    tag_items = ["Vertical", "RM {0}mm".format(pck.rm)]
    footprint_name = pck.name
    for t in pck.more_packnames:
        footprint_name = footprint_name + "_" + t
    footprint_name = footprint_name + "_Vertical"
    for t in pck.fpnametags:
        footprint_name = footprint_name + "_" + t
    if pck.staggered_type>0:
        footprint_name = footprint_name + "_Py{0}mm".format(pck.staggered_rm[0],3)
    if pck.largepads:
        tag_items.append("large Pads")
        footprint_name = footprint_name + lptext
    print(footprint_name)
    description = pck.name
    tags = pck.name
    for t in tag_items:
        description = description + ", " + t
        tags = tags + " " + t
    for t in pck.tags:
        description = description + ", " + t
        tags = tags + " " + t
    if len(pck.webpage)>0:
        description = description + ", see " + pck.webpage
    # init kicad footprint; everything pad-relative is appended to the
    # Translation node so staggered rows stay centered on pin 1
    kicad_mod = Footprint(footprint_name)
    kicad_mod.setDescription(description)
    kicad_mod.setTags(tags)
    kicad_modt = Translation(0, yshift)
    kicad_mod.append(kicad_modt)
    # set general values
    kicad_modt.append(Text(type='reference', text='REF**', at=[txt_x, t_slkp - txt_offset], layer='F.SilkS'))
    kicad_modt.append(Text(type='user', text='%R', at=[txt_x, t_slkp - txt_offset], layer='F.Fab'))
    kicad_modt.append(
        Text(type='value', text=footprint_name, at=[txt_x, t_slkp + max(h_slkm, h_slkp, -t_slkp+h_crt+t_crt) + txt_offset], layer='F.Fab'))
    # create FAB-layer: body outline, metal-tab line and mounting-hole
    # witness lines (package is viewed from the front, pins downwards)
    kicad_modt.append(
        RectLine(start=[l_fabp, t_fabp], end=[l_fabp + w_fabp, t_fabp + h_fabp], layer='F.Fab', width=lw_fab))
    if (pck.metal[2] > 0):
        kicad_modt.append(
            Line(start=[l_fabp, t_fabp + h_fabm], end=[l_fabp + w_fabp, t_fabp + h_fabm], layer='F.Fab', width=lw_fab))
        if pck.mounting_hole_diameter > 0:
            kicad_modt.append(Line(start=[l_mounth - pck.mounting_hole_diameter / 2, t_fabp],
                                   end=[l_mounth - pck.mounting_hole_diameter / 2, t_fabp + h_fabm], layer='F.Fab',
                                   width=lw_fab))
            kicad_modt.append(Line(start=[l_mounth + pck.mounting_hole_diameter / 2, t_fabp],
                                   end=[l_mounth + pck.mounting_hole_diameter / 2, t_fabp + h_fabm], layer='F.Fab',
                                   width=lw_fab))
    else:
        if pck.mounting_hole_diameter > 0:
            kicad_modt.append(Line(start=[l_mounth - pck.mounting_hole_diameter / 2, t_fabp],
                                   end=[l_mounth - pck.mounting_hole_diameter / 2, t_fabp + h_fabp], layer='F.Fab',
                                   width=lw_fab))
            kicad_modt.append(Line(start=[l_mounth + pck.mounting_hole_diameter / 2, t_fabp],
                                   end=[l_mounth + pck.mounting_hole_diameter / 2, t_fabp + h_fabp], layer='F.Fab',
                                   width=lw_fab))
    # draw the visible pin stubs between body edge and staggered pads
    for p in range(0, len(pads)):
        yl1=t_fabp + h_fabp
        yl2=pads[p][1]
        if yl2>yl1:
            kicad_modt.append(Line(start=[pads[p][0], yl1], end=[pads[p][0], yl2], layer='F.Fab', width=lw_fab))
    # create SILKSCREEN-layer; keepouts blank out silk where it would
    # cross a pad (rectangular keepout for pin 1, round for the rest)
    keepouts = []
    for p in range(0,len(pads)):
        if p==0:
            keepouts=keepouts+addKeepoutRect(pads[p][0],pads[p][1],padsize[0]+2*slk_dist,padsize[1]+2*slk_dist)
        else:
            keepouts=keepouts+addKeepoutRound(pads[p][0],pads[p][1],padsize[0]+2*slk_dist,padsize[1]+2*slk_dist)
    #for ko in keepouts:
    #    kicad_modt.append(
    #        RectLine(start=[ko[0],ko[2]], end=[ko[1],ko[3]], layer='B.Fab', width=lw_fab))
    addHLineWithKeepout(kicad_modt, l_slkp, l_slkp + w_slkp, t_slkp, 'F.SilkS', lw_slk, keepouts)
    addHLineWithKeepout(kicad_modt, l_slkp, l_slkp + w_slkp, t_slkp + h_slkp, 'F.SilkS', lw_slk, keepouts)
    addVLineWithKeepout(kicad_modt, l_slkp, t_slkp, t_slkp + h_slkp, 'F.SilkS', lw_slk, keepouts)
    addVLineWithKeepout(kicad_modt, l_slkp + w_slkp, t_slkp, t_slkp + h_slkp, 'F.SilkS', lw_slk, keepouts)
    if (pck.metal[2] > 0):
        addHLineWithKeepout(kicad_modt, l_slkp, l_slkp + w_slkp, t_slkp + h_slkm, 'F.SilkS', lw_slk, keepouts)
        if pck.mounting_hole_diameter > 0:
            addVLineWithKeepout(kicad_modt, l_mounth - pck.mounting_hole_diameter / 2, t_slkp, t_slkp + h_slkm,
                                'F.SilkS', lw_slk, keepouts)
            addVLineWithKeepout(kicad_modt, l_mounth + pck.mounting_hole_diameter / 2, t_slkp, t_slkp + h_slkm,
                                'F.SilkS', lw_slk, keepouts)
    else:
        if pck.mounting_hole_diameter > 0:
            addVLineWithKeepout(kicad_modt, l_mounth - pck.mounting_hole_diameter / 2, t_slkp, t_slkp + h_slkp,
                                'F.SilkS', lw_slk, keepouts)
            addVLineWithKeepout(kicad_modt, l_mounth + pck.mounting_hole_diameter / 2, t_slkp, t_slkp + h_slkp,
                                'F.SilkS', lw_slk, keepouts)
    for p in range(0, len(pads)):
        yl1 = t_slkp + h_slkp
        yl2 = pads[p][1]
        if yl2>yl1:
            addVLineWithKeepout(kicad_modt,pads[p][0], yl1, yl2, 'F.SilkS', lw_slk, keepouts)
    # create courtyard (appended to kicad_mod, not the translation, so
    # yshift is folded into the coordinates explicitly)
    kicad_mod.append(
        RectLine(start=[roundCrt(l_crt), roundCrt(t_crt+yshift)], end=[roundCrt(l_crt + w_crt), roundCrt(t_crt + h_crt+yshift)],
                 layer='F.CrtYd', width=lw_crt))
    # create pads; pin 1 gets a rectangular pad, the others oval ones
    for p in range(0,len(pads)):
        if p==0:
            kicad_modt.append(
                Pad(number=p+1, type=Pad.TYPE_THT, shape=Pad.SHAPE_RECT, at=pads[p], size=padsize, drill=pck.drill,
                    layers=['*.Cu', '*.Mask']))
        else:
            kicad_modt.append(
                Pad(number=p+1, type=Pad.TYPE_THT, shape=Pad.SHAPE_OVAL, at=pads[p], size=padsize, drill=pck.drill,
                    layers=['*.Cu', '*.Mask']))
    # add model
    if (has3d):
        kicad_modt.append(
            Model(filename=lib_name + ".3dshapes/" + footprint_name + ".wrl", at=x_3d, scale=s_3d, rotate=r_3d))
    # print render tree
    # print(kicad_mod.getRenderTree())
    # print(kicad_mod.getCompleteRenderTree())
    # write file
    file_handler = KicadFileHandler(kicad_mod)
    file_handler.writeFile(footprint_name + '.kicad_mod')
# horizontal symbols for rectangular transistors
def makeHOR(lib_name, pck, has3d=False, x_3d=[0, 0, 0], s_3d=[1,1,1], lptext="_LargePads", r_3d=[0, 0, 0]):
    """Generate a horizontal (laid down, tab towards the board) THT
    footprint for a rectangular TO/SOT package and write it to
    '<footprint_name>.kicad_mod'.

    lib_name: footprint library name; used to build the 3D model path
        '<lib_name>.3dshapes/<footprint_name>.wrl'.
    pck: package description object (see TO_THT_packages); its fields
        drive all geometry, including an optional NPTH mounting hole
        (mounting_hole_drill) and an optional SMT tab pad
        (additional_pin_pad_size), which adds a '-1EP' name suffix.
    has3d / x_3d / s_3d / r_3d: whether to reference a 3D model, and its
        offset, scale and rotation.
    lptext: name suffix appended for large-pad variants.

    Side effects: prints the footprint name and writes the .kicad_mod
    file into the current directory.

    NOTE(review): x_3d/s_3d/r_3d are mutable default arguments; they are
    only passed through to Model() here, but tuples would be safer.
    """
    # Local naming: *_fabp = plastic body, *_fabm = metal tab, *_slk* =
    # silkscreen, *_crt = courtyard; l/t/w/h = left/top/width/height.
    # Pin 1 is at the origin; the body extends upwards (negative y).
    padsize = pck.pad
    l_fabp = -pck.pin_offset_x
    t_fabp = -pck.pin_minlength
    if pck.staggered_type >0:
        t_fabp=-pck.staggered_pin_minlength
    w_fabp = pck.plastic[0]
    h_fabp = pck.plastic[1]
    w_fabm = pck.metal[0]
    h_fabm = pck.metal[1]
    l_mounth = l_fabp + pck.mounting_hole_pos[0]
    t_mounth = t_fabp - pck.mounting_hole_pos[1]
    # calculate pad positions; staggered_type 1/2 selects whether the
    # odd or the even pins are shifted down by staggered_rm[1]
    pads = []
    yshift = 0
    y1 = 0
    y2 = 0
    maxpiny = 0
    if pck.staggered_type == 1:
        y1 = pck.staggered_rm[1]
        yshift = -pck.staggered_rm[1]
        y2 = 0
        maxpiny = pck.staggered_rm[1]
        if len(pck.staggered_pad) > 0:
            padsize = pck.staggered_pad
    elif pck.staggered_type == 2:
        y1 = 0
        yshift = 0
        y2 = pck.staggered_rm[1]
        maxpiny = pck.staggered_rm[1]
        if len(pck.staggered_pad) > 0:
            padsize = pck.staggered_pad
    y=y1
    x = 0
    for p in range(1, pck.pins + 1):
        if (p % 2) == 1:
            y = y1
        else:
            y = y2
        pads.append([x, y])
        if len(pck.rm_list)>0 and p<=len(pck.rm_list):
            x = x + pck.rm_list[p-1]
        else:
            x = x + pck.rm
    # total pin-row width: uniform pitch pck.rm, or per-pin pitches from
    # pck.rm_list when that override is present
    pinwid = (pck.pins - 1) * pck.rm
    if len(pck.rm_list) > 0:
        pinwid = 0
        for rm in pck.rm_list:
            pinwid = pinwid + rm
    l_slkp = l_fabp - slk_offset
    t_slkp = t_fabp + slk_offset
    w_slkp = w_fabp + 2 * slk_offset
    h_slkp = h_fabp + 2 * slk_offset
    w_slkm = w_fabm + 2 * slk_offset
    h_slkm = h_fabm + 2 * slk_offset
    addpad = 0
    # courtyard; enlarged upwards when the exposed tab pad is present
    l_crt = min(-padsize[0] / 2, l_fabp) - crt_offset
    t_crt = t_fabp - max(h_fabp, h_fabm) - crt_offset
    h_crt = (-t_crt + maxpiny+padsize[1] / 2) + crt_offset
    if len(pck.additional_pin_pad_size) > 0:
        h_crt = h_crt + (pck.additional_pin_pad[1] + pck.additional_pin_pad_size[1] / 2 - h_fabm)
        t_crt = t_crt - (pck.additional_pin_pad[1] + pck.additional_pin_pad_size[1] / 2 - h_fabm)
        addpad = pck.additional_pin_pad_size[0]
        addpadx = l_fabp + pck.additional_pin_pad[0]
        addpady = t_fabp - pck.additional_pin_pad[1]
    w_crt = max(max(max(w_fabp, w_fabm), pinwid + padsize[0]), addpad) + 2 * crt_offset
    txt_x = l_slkp + max(w_slkp, w_slkm) / 2
    txt_t = (t_slkp - max(h_slkm, h_slkp)) - txt_offset
    txt_b = maxpiny+padsize[1] / 2 + txt_offset
    if len(pck.additional_pin_pad_size) > 0:
        txt_t = txt_t - (pck.additional_pin_pad[1] + pck.additional_pin_pad_size[1] / 2 - h_fabm)
    # assemble footprint name / description / tags from the package data
    tag_items = ["Horizontal", "RM {0}mm".format(pck.rm)]
    footprint_name = pck.name
    if len(pck.additional_pin_pad_size) > 0:
        footprint_name = footprint_name + "-1EP"
    for t in pck.more_packnames:
        footprint_name = footprint_name + "_" + t
    footprint_name = footprint_name + "_Horizontal_TabDown"
    for t in pck.fpnametags:
        footprint_name = footprint_name + "_" + t
    if pck.staggered_type>0:
        footprint_name = footprint_name + "_Py{0}mm".format(pck.staggered_rm[1],3)
    if pck.largepads:
        tag_items.append("large Pads")
        footprint_name = footprint_name + lptext
    print(footprint_name)
    description = pck.name
    tags = pck.name
    for t in tag_items:
        description = description + ", " + t
        tags = tags + " " + t
    for t in pck.tags:
        description = description + ", " + t
        tags = tags + " " + t
    if len(pck.webpage)>0:
        description = description + ", see " + pck.webpage
    # init kicad footprint; everything pad-relative is appended to the
    # Translation node so staggered rows stay centered on pin 1
    kicad_mod = Footprint(footprint_name)
    kicad_mod.setDescription(description)
    kicad_mod.setTags(tags)
    kicad_modt = Translation(0, yshift)
    kicad_mod.append(kicad_modt)
    # set general values
    kicad_modt.append(Text(type='reference', text='REF**', at=[txt_x, txt_t], layer='F.SilkS'))
    kicad_modt.append(Text(type='user', text='%R', at=[txt_x, txt_t], layer='F.Fab'))
    kicad_modt.append(Text(type='value', text=footprint_name, at=[txt_x, txt_b], layer='F.Fab'))
    # create FAB-layer: metal tab (straight or with angled corners),
    # plastic body, mounting-hole circle and pin stubs (top view)
    if (h_fabm > 0):
        if len(pck.plastic_angled)>0:
            if len(pck.metal_angled) > 0:
                addRectAngledTopNoBottom(kicad_modt, [l_fabp + pck.metal_offset_x, t_fabp - h_fabp+pck.plastic_angled[1]],
                                         [l_fabp + pck.metal_offset_x + w_fabm, t_fabp - h_fabm], pck.metal_angled, 'F.Fab',
                                         lw_fab)
            else:
                kicad_modt.append(RectLine(start=[l_fabp + pck.metal_offset_x, t_fabp - h_fabp-pck.plastic_angled[1]],
                                           end=[l_fabp + pck.metal_offset_x + w_fabm, t_fabp - h_fabm], layer='F.Fab',
                                           width=lw_fab))
        else:
            if len(pck.metal_angled)>0:
                addRectAngledTop(kicad_modt, [l_fabp + pck.metal_offset_x, t_fabp - h_fabp], [l_fabp + pck.metal_offset_x + w_fabm, t_fabp - h_fabm], pck.metal_angled, 'F.Fab', lw_fab)
            else:
                kicad_modt.append(RectLine(start=[l_fabp + pck.metal_offset_x, t_fabp - h_fabp],
                                           end=[l_fabp + pck.metal_offset_x + w_fabm, t_fabp - h_fabm], layer='F.Fab',
                                           width=lw_fab))
    if len(pck.plastic_angled)>0:
        addRectAngledTop(kicad_modt, [l_fabp, t_fabp],
                         [l_fabp + w_fabp, t_fabp - h_fabp], pck.plastic_angled, 'F.Fab', lw_fab)
    else:
        kicad_modt.append(
            RectLine(start=[l_fabp, t_fabp], end=[l_fabp + w_fabp, t_fabp - h_fabp], layer='F.Fab', width=lw_fab))
    if pck.mounting_hole_diameter > 0:
        kicad_modt.append(
            Circle(center=[l_mounth, t_mounth], radius=pck.mounting_hole_diameter / 2, layer='F.Fab', width=lw_fab))
    for p in range(0, len(pads)):
        kicad_modt.append(Line(start=[pads[p][0], t_fabp], end=[pads[p][0], pads[p][1]], layer='F.Fab', width=lw_fab))
    # create SILKSCREEN-layer; keepouts blank out silk where it would
    # cross a pad (rectangular for pin 1, round for the rest, plus the
    # optional exposed tab pad)
    keepouts = []
    for p in range(0,len(pads)):
        if p==0:
            keepouts=keepouts+addKeepoutRect(pads[p][0],pads[p][1],padsize[0]+2*slk_dist,padsize[1]+2*slk_dist)
        else:
            keepouts=keepouts+addKeepoutRound(pads[p][0],pads[p][1],padsize[0]+2*slk_dist,padsize[1]+2*slk_dist)
    if len(pck.additional_pin_pad_size) > 0:
        keepouts.append([addpadx - pck.additional_pin_pad_size[0] / 2 - slk_dist,
                         addpadx + pck.additional_pin_pad_size[0] / 2 + slk_dist,
                         addpady - pck.additional_pin_pad_size[1] / 2 - slk_dist,
                         addpady + pck.additional_pin_pad_size[1] / 2 + slk_dist])
    addHLineWithKeepout(kicad_modt, l_slkp, l_slkp + w_slkp, t_slkp, 'F.SilkS', lw_slk, keepouts)
    if h_fabm > 0:
        addHLineWithKeepout(kicad_modt, l_slkp, l_slkp + w_slkp, t_slkp - h_slkm, 'F.SilkS', lw_slk, keepouts)
        addVLineWithKeepout(kicad_modt, l_slkp, t_slkp, t_slkp - h_slkm, 'F.SilkS', lw_slk, keepouts)
        addVLineWithKeepout(kicad_modt, l_slkp + w_slkp, t_slkp, t_slkp - h_slkm, 'F.SilkS', lw_slk, keepouts)
    else:
        addHLineWithKeepout(kicad_modt, l_slkp, l_slkp + w_slkp, t_slkp - h_slkp, 'F.SilkS', lw_slk, keepouts)
        addVLineWithKeepout(kicad_modt, l_slkp, t_slkp, t_slkp - h_slkp, 'F.SilkS', lw_slk, keepouts)
        addVLineWithKeepout(kicad_modt, l_slkp + w_slkp, t_slkp, t_slkp - h_slkp, 'F.SilkS', lw_slk, keepouts)
    for p in range(0, len(pads)):
        addVLineWithKeepout(kicad_modt, pads[p][0], t_slkp, pads[p][1], 'F.SilkS', lw_slk, keepouts)
    # create courtyard (appended to kicad_mod, not the translation, so
    # yshift is folded into the coordinates explicitly)
    kicad_mod.append(
        RectLine(start=[roundCrt(l_crt), roundCrt(t_crt+yshift)], end=[roundCrt(l_crt + w_crt), roundCrt(t_crt + h_crt+yshift)],
                 layer='F.CrtYd', width=lw_crt))
    # create mounting hole (plated-free NPTH) and optional exposed tab pad
    if pck.mounting_hole_drill > 0:
        kicad_modt.append(Pad(type=Pad.TYPE_NPTH, shape=Pad.SHAPE_OVAL, at=[l_mounth, t_mounth],
                              size=[pck.mounting_hole_drill, pck.mounting_hole_drill], drill=pck.mounting_hole_drill,
                              layers=['*.Cu', '*.Mask']))
    if len(pck.additional_pin_pad_size) > 0:
        kicad_modt.append(Pad(number=pck.pins + 1, type=Pad.TYPE_SMT, shape=Pad.SHAPE_RECT, at=[addpadx, addpady],
                              size=pck.additional_pin_pad_size, drill=0, layers=['F.Cu', 'F.Mask', 'F.Paste']))
    # create pads; pin 1 gets a rectangular pad, the others oval ones
    for p in range(0,len(pads)):
        if p==0:
            kicad_modt.append(
                Pad(number=p+1, type=Pad.TYPE_THT, shape=Pad.SHAPE_RECT, at=pads[p], size=padsize, drill=pck.drill,
                    layers=['*.Cu', '*.Mask']))
        else:
            kicad_modt.append(
                Pad(number=p+1, type=Pad.TYPE_THT, shape=Pad.SHAPE_OVAL, at=pads[p], size=padsize, drill=pck.drill,
                    layers=['*.Cu', '*.Mask']))
    # add model
    if (has3d):
        kicad_modt.append(
            Model(filename=lib_name + ".3dshapes/" + footprint_name + ".wrl", at=x_3d, scale=s_3d, rotate=r_3d))
    # print render tree
    # print(kicad_mod.getRenderTree())
    # print(kicad_mod.getCompleteRenderTree())
    # write file
    file_handler = KicadFileHandler(kicad_mod)
    file_handler.writeFile(footprint_name + '.kicad_mod')
# vertical, mounted-from-Lowerside symbols for rectangular transistors
def makeVERTLS(lib_name, pck, has3d=False, x_3d=[0, 0, 0], s_3d=[1,1,1], lptext="_LargePads", r_3d=[0, 0, 0]):
l_fabp = -pck.pin_offset_x
t_fabp = -pck.pin_offset_z
w_fabp = pck.plastic[0]
h_fabp = pck.plastic[2]
w_fabm = pck.metal[0]
h_fabm = pck.metal[2]
pinwid=(pck.pins - 1) * pck.rm
if len(pck.rm_list)>0:
pinwid=0
for rm in pck.rm_list:
pinwid=pinwid+rm
l_slkp = l_fabp - slk_offset
t_slkp = t_fabp - slk_offset
w_slkp = w_fabp + 2 * slk_offset
h_slkp = h_fabp + 2 * slk_offset
w_slkm = w_fabm + 2 * slk_offset
h_slkm = h_fabm + 2 * slk_offset
l_crt = min(-pck.pad[0] / 2, l_fabp) - crt_offset
t_crt = min(-pck.pad[1] / 2, t_fabp) - crt_offset
w_crt = max(max(w_fabp, w_fabm), pinwid + pck.pad[0]) + 2 * crt_offset
h_crt = max(t_fabp+max(h_fabp, h_fabm) + crt_offset-t_crt, -t_crt + pck.pad[1] / 2+crt_offset)
l_mounth = l_fabp + pck.mounting_hole_pos[0]
txt_x = l_slkp + max(w_slkp, w_slkm) / 2
tag_items = ["Vertical", "RM {0}mm".format(pck.rm), "mount on lower-side of PCB"]
footprint_name = pck.name
for t in pck.more_packnames:
footprint_name = footprint_name + "_" + t
footprint_name = footprint_name + "_Vertical"
for t in pck.fpnametags:
footprint_name = footprint_name + "_" + t
footprint_name = footprint_name + "_MountFromLS"
if pck.largepads:
tag_items.append("large Pads")
footprint_name = footprint_name + lptext
print(footprint_name)
description = pck.name
tags = pck.name
for t in tag_items:
description = description + ", " + t
tags = tags + " " + t
for t in pck.tags:
description = description + ", " + t
tags = tags + " " + t
if len(pck.webpage)>0:
description = description + ", see " + pck.webpage
# init kicad footprint
kicad_mod = Footprint(footprint_name)
kicad_mod.setDescription(description)
kicad_mod.setTags(tags)
kicad_modt = Translation(-pinwid, 0)
kicad_mod.append(kicad_modt)
# set general values
kicad_modt.append(Text(type='reference', text='REF**', at=[txt_x, t_slkp - txt_offset], layer='F.SilkS'))
kicad_modt.append(Text(type='user', text='%R', at=[txt_x, t_slkp - txt_offset], layer='B.Fab'))
kicad_modt.append(
Text(type='value', text=footprint_name, at=[txt_x, t_slkp + max(h_slkm, h_slkp) + txt_offset], layer='B.Fab'))
# create FAB-layer
kicad_modt.append(
RectLine(start=[l_fabp, t_fabp], end=[l_fabp + w_fabp, t_fabp + h_fabp], layer='B.Fab', width=lw_fab))
if (pck.metal[2] > 0):
kicad_modt.append(
Line(start=[l_fabp, t_fabp + h_fabm], end=[l_fabp + w_fabp, t_fabp + h_fabm], layer='B.Fab', width=lw_fab))
if pck.mounting_hole_diameter > 0:
kicad_modt.append(Line(start=[l_mounth - pck.mounting_hole_diameter / 2, t_fabp],
end=[l_mounth - pck.mounting_hole_diameter / 2, t_fabp + h_fabm], layer='B.Fab',
width=lw_fab))
kicad_modt.append(Line(start=[l_mounth + pck.mounting_hole_diameter / 2, t_fabp],
end=[l_mounth + pck.mounting_hole_diameter / 2, t_fabp + h_fabm], layer='B.Fab',
width=lw_fab))
else:
if pck.mounting_hole_diameter > 0:
kicad_modt.append(Line(start=[l_mounth - pck.mounting_hole_diameter / 2, t_fabp],
end=[l_mounth - pck.mounting_hole_diameter / 2, t_fabp + h_fabp], layer='B.Fab',
width=lw_fab))
kicad_modt.append(Line(start=[l_mounth + pck.mounting_hole_diameter / 2, t_fabp],
end=[l_mounth + pck.mounting_hole_diameter / 2, t_fabp + h_fabp], layer='B.Fab',
width=lw_fab))
# create SILKSCREEN-layer
keepouts = []
x = pinwid
for p in range(1, pck.pins + 1):
if p == 1:
keepouts = keepouts + addKeepoutRect(x, 0, pck.pad[0] + 2 * slk_dist, pck.pad[1] + 2 * slk_dist)
else:
keepouts = keepouts + addKeepoutRound(x, 0, pck.pad[0] + 2 * slk_dist, pck.pad[1] + 2 * slk_dist)
if len(pck.rm_list)>0 and p<=len(pck.rm_list):
x = x - pck.rm_list[p-1]
else:
x = x - pck.rm
#for ko in keepouts:
# kicad_modt.append(
# RectLine(start=[ko[0], ko[2]],
# end=[ko[1], ko[3]],
# layer='F.CrtYd', width=0.01))
addHDLineWithKeepout(kicad_modt, l_slkp, 3 * lw_slk, l_slkp + w_slkp, t_slkp, 'F.SilkS', lw_slk, keepouts)
addHDLineWithKeepout(kicad_modt, l_slkp, 3 * lw_slk, l_slkp + w_slkp, t_slkp + h_slkp, 'F.SilkS', lw_slk, keepouts)
addVDLineWithKeepout(kicad_modt, l_slkp, t_slkp, 3 * lw_slk, t_slkp + h_slkp, 'F.SilkS', lw_slk, keepouts)
addVDLineWithKeepout(kicad_modt, l_slkp + w_slkp, t_slkp, 3 * lw_slk, t_slkp + h_slkp, 'F.SilkS', lw_slk, keepouts)
if (pck.metal[2] > 0):
addHDLineWithKeepout(kicad_modt, l_slkp, 3 * lw_slk, l_slkp + w_slkp, t_slkp + h_slkm, 'F.SilkS', lw_slk,
keepouts)
if pck.mounting_hole_diameter > 0:
addVDLineWithKeepout(kicad_modt, l_mounth - pck.mounting_hole_diameter / 2, t_slkp, 3 * lw_slk,
t_slkp + h_slkm,
'F.SilkS', lw_slk, keepouts)
addVDLineWithKeepout(kicad_modt, l_mounth + pck.mounting_hole_diameter / 2, t_slkp, 3 * lw_slk,
t_slkp + h_slkm,
'F.SilkS', lw_slk, keepouts)
else:
if pck.mounting_hole_diameter > 0:
addVDLineWithKeepout(kicad_modt, l_mounth - pck.mounting_hole_diameter / 2, t_slkp, 3 * lw_slk,
t_slkp + h_slkp,
'F.SilkS', lw_slk, keepouts)
addVDLineWithKeepout(kicad_modt, l_mounth + pck.mounting_hole_diameter / 2, t_slkp, 3 * lw_slk,
t_slkp + h_slkp,
'F.SilkS', lw_slk, keepouts)
# create courtyard
kicad_mod.append(
RectLine(start=[roundCrt(l_crt-pinwid), roundCrt(t_crt)], end=[roundCrt(l_crt + w_crt-pinwid), roundCrt(t_crt + h_crt)],
layer='B.CrtYd', width=lw_crt))
# create pads
x = pinwid
for p in range(1, pck.pins + 1):
if (p == 1):
kicad_modt.append(
Pad(number=p, type=Pad.TYPE_THT, shape=Pad.SHAPE_RECT, at=[x, 0], size=pck.pad, drill=pck.drill,
layers=['*.Cu', '*.Mask']))
else:
kicad_modt.append(
Pad(number=p, type=Pad.TYPE_THT, shape=Pad.SHAPE_OVAL, at=[x, 0], size=pck.pad, drill=pck.drill,
layers=['*.Cu', '*.Mask']))
if len(pck.rm_list)>0 and p<=len(pck.rm_list):
x = x - pck.rm_list[p-1]
else:
x = x - pck.rm
# add model
if (has3d):
kicad_modt.append(
Model(filename=lib_name + ".3dshapes/" + footprint_name + ".wrl", at=x_3d, scale=s_3d, rotate=r_3d))
# print render tree
# print(kicad_mod.getRenderTree())
# print(kicad_mod.getCompleteRenderTree())
# write file
file_handler = KicadFileHandler(kicad_mod)
file_handler.writeFile(footprint_name + '.kicad_mod')
# horizontal, mounted-from-Lowerside symbols for rectangular transistors
def makeHORLS(lib_name, pck, has3d=False, x_3d=[0, 0, 0], s_3d=[1,1,1], lptext="_LargePads", r_3d=[0, 0, 0]):
    """Generate a horizontal, mounted-from-lower-side (tab-up) THT footprint.

    Writes "<footprint_name>.kicad_mod" into the current working directory.

    lib_name -- library prefix used to build the 3D model path
    pck      -- package descriptor (pins, rm/rm_list, pad, drill, plastic,
                metal, mounting hole, additional pin pad, names, tags, ...)
    has3d    -- when True, attach a reference to "<footprint_name>.wrl"
    x_3d, s_3d, r_3d -- 3D model offset / scale / rotation
    lptext   -- name suffix appended when pck.largepads is set

    NOTE(review): x_3d/s_3d/r_3d are shared mutable list defaults; harmless
    here because they are never mutated, but worth confirming upstream.
    """
    # Geometry variables: the *_fab* values are drawn on the Fab layer,
    # *_slk* on the silkscreen (Fab outline grown by slk_offset) and
    # *_crt* is the courtyard bounding box.
    l_fabp = -pck.pin_offset_x
    t_fabp = -pck.pin_minlength
    w_fabp = pck.plastic[0]
    h_fabp = pck.plastic[1]
    w_fabm = pck.metal[0]
    h_fabm = pck.metal[1]
    l_slkp = l_fabp - slk_offset
    t_slkp = t_fabp + slk_offset
    w_slkp = w_fabp + 2 * slk_offset
    h_slkp = h_fabp + 2 * slk_offset
    w_slkm = w_fabm + 2 * slk_offset
    h_slkm = h_fabm + 2 * slk_offset
    # total pin-row width: uniform pitch, or the sum of the per-gap pitches
    # when pck.rm_list is given
    pinwid = (pck.pins - 1) * pck.rm
    if len(pck.rm_list) > 0:
        pinwid = 0
        for rm in pck.rm_list:
            pinwid = pinwid + rm
    l_crt = min(-pck.pad[0] / 2, l_fabp) - crt_offset
    t_crt = t_fabp - max(h_fabp, h_fabm) - crt_offset
    h_crt = (-t_crt + pck.pad[1] / 2) + crt_offset
    addpad = 0
    # an "additional pin pad" (exposed pad / tab pad) enlarges the courtyard
    if len(pck.additional_pin_pad_size) > 0:
        h_crt = h_crt + (pck.additional_pin_pad[1] + pck.additional_pin_pad_size[1] / 2 - h_fabm)
        t_crt = t_crt - (pck.additional_pin_pad[1] + pck.additional_pin_pad_size[1] / 2 - h_fabm)
        addpad = pck.additional_pin_pad_size[0]
        addpadx = l_fabp + pck.additional_pin_pad[0]
        addpady = t_fabp - pck.additional_pin_pad[1]
    w_crt = max(max(max(w_fabp, w_fabm), pinwid + pck.pad[0]), addpad) + 2 * crt_offset
    l_mounth = l_fabp + pck.mounting_hole_pos[0]
    t_mounth = t_fabp - pck.mounting_hole_pos[1]
    # text anchor positions (reference above the body, value below the pads)
    txt_x = l_slkp + max(w_slkp, w_slkm) / 2
    txt_t = (t_slkp - max(h_slkm, h_slkp)) - txt_offset
    txt_b = pck.pad[1] / 2 + txt_offset
    if len(pck.additional_pin_pad_size) > 0:
        txt_t = txt_t - (pck.additional_pin_pad[1] + pck.additional_pin_pad_size[1] / 2 - h_fabm)
    # assemble footprint name, description and tag strings
    tag_items = ["Horizontal", "RM {0}mm".format(pck.rm), "mount on lower-side of PCB", "mount with cooling pad pointing away from PCB", "Reversed"]
    footprint_name = pck.name
    if len(pck.additional_pin_pad_size) > 0:
        footprint_name = footprint_name + "-1EP"
    for t in pck.more_packnames:
        footprint_name = footprint_name + "_" + t
    footprint_name = footprint_name + "_Horizontal"
    for t in pck.fpnametags:
        footprint_name = footprint_name + "_" + t
    footprint_name = footprint_name + "_TabUp_MountFromLS"
    if pck.largepads:
        tag_items.append("large Pads")
        footprint_name = footprint_name + lptext
    print(footprint_name)
    description = pck.name
    tags = pck.name
    for t in tag_items:
        description = description + ", " + t
        tags = tags + " " + t
    for t in pck.tags:
        description = description + ", " + t
        tags = tags + " " + t
    if len(pck.webpage)>0:
        description = description + ", see " + pck.webpage
    # init kicad footprint
    kicad_mod = Footprint(footprint_name)
    kicad_mod.setDescription(description)
    kicad_mod.setTags(tags)
    # all graphic items are appended to this (identity) translation node
    kicad_modt=Translation(0,0)#-(pck.pins - 1) * pck.rm,0)
    kicad_mod.append(kicad_modt)
    # set general values
    kicad_modt.append(Text(type='reference', text='REF**', at=[txt_x, txt_t], layer='F.SilkS'))
    kicad_modt.append(Text(type='user', text='%R', at=[txt_x, txt_t], layer='B.Fab'))
    kicad_modt.append(Text(type='value', text=footprint_name, at=[txt_x, txt_b], layer='B.Fab'))
    # create FAB-layer (body drawn on B.Fab because the part sits on the
    # lower side of the board)
    if (h_fabm > 0):
        if len(pck.metal_angled) > 0:
            addRectAngledTop(kicad_modt, [l_fabp + pck.metal_offset_x, t_fabp - h_fabp],
                             [l_fabp + pck.metal_offset_x + w_fabm, t_fabp - h_fabm], pck.metal_angled, 'B.Fab', lw_fab)
        else:
            kicad_modt.append(RectLine(start=[l_fabp + pck.metal_offset_x, t_fabp - h_fabp],
                                       end=[l_fabp + pck.metal_offset_x + w_fabm, t_fabp - h_fabm], layer='B.Fab',
                                       width=lw_fab))
    if len(pck.plastic_angled) > 0:
        addRectAngledTop(kicad_modt, [l_fabp, t_fabp],
                         [l_fabp + w_fabp, t_fabp - h_fabp], pck.plastic_angled, 'B.Fab', lw_fab)
    else:
        kicad_modt.append(
            RectLine(start=[l_fabp, t_fabp], end=[l_fabp + w_fabp, t_fabp - h_fabp], layer='B.Fab', width=lw_fab))
    if pck.mounting_hole_diameter > 0:
        kicad_modt.append(
            Circle(center=[l_mounth, t_mounth], radius=pck.mounting_hole_diameter / 2, layer='B.Fab', width=lw_fab))
    # pin stubs from the body edge down to the pad row (y = 0)
    x = 0
    for p in range(1, pck.pins + 1):
        kicad_modt.append(Line(start=[x, t_fabp], end=[x, 0], layer='B.Fab', width=lw_fab))
        if len(pck.rm_list)>0 and p<=len(pck.rm_list):
            x = x + pck.rm_list[p-1]
        else:
            x = x + pck.rm
    # create SILKSCREEN-layer
    # keepouts: areas around each pad where silkscreen must not be drawn
    keepouts = []
    x = 0
    for p in range(1, pck.pins + 1):
        if p == 1:
            keepouts = keepouts + addKeepoutRect(x, 0, pck.pad[0] + 2 * slk_dist, pck.pad[1] + 2 * slk_dist)
        else:
            keepouts = keepouts + addKeepoutRound(x, 0, pck.pad[0] + 2 * slk_dist, pck.pad[1] + 2 * slk_dist)
        if len(pck.rm_list)>0 and p<=len(pck.rm_list):
            x = x + pck.rm_list[p-1]
        else:
            x = x + pck.rm
    if len(pck.additional_pin_pad_size) > 0:
        keepouts.append([addpadx - pck.additional_pin_pad_size[0] / 2 - slk_dist,
                         addpadx + pck.additional_pin_pad_size[0] / 2 + slk_dist,
                         addpady - pck.additional_pin_pad_size[1] / 2 - slk_dist,
                         addpady + pck.additional_pin_pad_size[1] / 2 + slk_dist])
    # dashed outline (3*lw_slk dash pitch) interrupted by the pad keepouts
    addHDLineWithKeepout(kicad_modt, l_slkp, 3 * lw_slk, l_slkp + w_slkp, t_slkp, 'F.SilkS', lw_slk, keepouts)
    if h_fabm > 0:
        addHDLineWithKeepout(kicad_modt, l_slkp, 3 * lw_slk, l_slkp + w_slkp, t_slkp - h_slkm, 'F.SilkS', lw_slk, keepouts)
        addHDLineWithKeepout(kicad_modt, l_slkp, 3 * lw_slk, l_slkp + w_slkp, t_fabp - h_fabp, 'F.SilkS', lw_slk, keepouts)
        addVDLineWithKeepout(kicad_modt, l_slkp, t_slkp, 3 * lw_slk, t_slkp - h_slkm, 'F.SilkS', lw_slk, keepouts)
        addVDLineWithKeepout(kicad_modt, l_slkp + w_slkp, t_slkp, 3 * lw_slk, t_slkp - h_slkm, 'F.SilkS', lw_slk, keepouts)
    else:
        addHDLineWithKeepout(kicad_modt, l_slkp, 3 * lw_slk, l_slkp + w_slkp, t_slkp - h_slkp, 'F.SilkS', lw_slk, keepouts)
        addVDLineWithKeepout(kicad_modt, l_slkp, t_slkp, 3 * lw_slk, t_slkp - h_slkp, 'F.SilkS', lw_slk, keepouts)
        addVDLineWithKeepout(kicad_modt, l_slkp + w_slkp, t_slkp, 3 * lw_slk, t_slkp - h_slkp, 'F.SilkS', lw_slk, keepouts)
    # dashed pin stubs on the silkscreen
    x = 0
    for p in range(1, pck.pins + 1):
        addVDLineWithKeepout(kicad_modt, x, t_slkp, 3 * lw_slk, -(pck.pad[1]/2+slk_dist), 'F.SilkS', lw_slk, keepouts)
        if len(pck.rm_list)>0 and p<=len(pck.rm_list):
            x = x + pck.rm_list[p-1]
        else:
            x = x + pck.rm
    # create courtyard
    kicad_modt.append(
        RectLine(start=[roundCrt(l_crt), roundCrt(t_crt)], end=[roundCrt(l_crt + w_crt), roundCrt(t_crt + h_crt)],
                 layer='B.CrtYd', width=lw_crt))
    # create mounting hole
    if pck.mounting_hole_drill > 0:
        kicad_modt.append(Pad(type=Pad.TYPE_NPTH, shape=Pad.SHAPE_OVAL, at=[l_mounth, t_mounth],
                              size=[pck.mounting_hole_drill, pck.mounting_hole_drill], drill=pck.mounting_hole_drill,
                              layers=['*.Cu', '*.Mask']))
    # SMT tab pad on the bottom copper (part body is on the bottom side)
    if len(pck.additional_pin_pad_size) > 0:
        kicad_modt.append(Pad(number=pck.pins + 1, type=Pad.TYPE_SMT, shape=Pad.SHAPE_RECT, at=[addpadx, addpady],
                              size=pck.additional_pin_pad_size, drill=0, layers=['B.Cu', 'F.Mask', 'B.Paste']))
    # create pads (pin 1 rectangular, all others oval)
    x = 0
    for p in range(1, pck.pins + 1):
        if (p == 1):
            kicad_modt.append(
                Pad(number=p, type=Pad.TYPE_THT, shape=Pad.SHAPE_RECT, at=[x, 0], size=pck.pad, drill=pck.drill,
                    layers=['*.Cu', '*.Mask']))
        else:
            kicad_modt.append(
                Pad(number=p, type=Pad.TYPE_THT, shape=Pad.SHAPE_OVAL, at=[x, 0], size=pck.pad, drill=pck.drill,
                    layers=['*.Cu', '*.Mask']))
        if len(pck.rm_list)>0 and p<=len(pck.rm_list):
            x = x + pck.rm_list[p-1]
        else:
            x = x + pck.rm
    # add model
    if (has3d):
        kicad_modt.append(
            Model(filename=lib_name + ".3dshapes/" + footprint_name + ".wrl", at=x_3d, scale=s_3d, rotate=r_3d))
    # print render tree
    # print(kicad_mod.getRenderTree())
    # print(kicad_mod.getCompleteRenderTree())
    # write file
    file_handler = KicadFileHandler(kicad_mod)
    file_handler.writeFile(footprint_name + '.kicad_mod')
# horizontal reversed symbols for rectangular transistors
def makeHORREV(lib_name, pck, has3d=False, x_3d=[0, 0, 0], s_3d=[1 ,1,1], lptext="_LargePads", r_3d=[0, 0, 0]):
    """Generate a horizontal, tab-up (reversed) THT footprint.

    Writes "<footprint_name>.kicad_mod" into the current working directory.

    lib_name -- library prefix used to build the 3D model path
    pck      -- package descriptor (pins, rm/rm_list, pad, drill, plastic,
                metal, mounting hole, names, tags, ...)
    has3d    -- when True, attach a reference to "<footprint_name>.wrl"
    x_3d, s_3d, r_3d -- 3D model offset / scale / rotation
    lptext   -- name suffix appended when pck.largepads is set
    """
    # geometry: *_fab* = fabrication outline, *_slk* = silkscreen
    # (outline grown by slk_offset), *_crt* = courtyard bounding box
    l_fabp = -pck.pin_offset_x
    t_fabp = pck.pin_minlength
    w_fabp = pck.plastic[0]
    h_fabp = pck.plastic[1]
    w_fabm = pck.metal[0]
    h_fabm = pck.metal[1]
    l_slkp = l_fabp - slk_offset
    t_slkp = t_fabp - slk_offset
    w_slkp = w_fabp + 2 * slk_offset
    h_slkp = h_fabp + 2 * slk_offset
    w_slkm = w_fabm + 2 * slk_offset
    h_slkm = h_fabm + 2 * slk_offset
    # total pin-row width: uniform pitch, or sum of per-gap pitches
    pinwid = (pck.pins - 1) * pck.rm
    if len(pck.rm_list) > 0:
        pinwid = 0
        for rm in pck.rm_list:
            pinwid = pinwid + rm
    l_crt = min(-pck.pad[0] / 2, l_fabp) - crt_offset
    t_crt = -pck.pad[1] / 2 - crt_offset
    w_crt = max(max(w_fabp, w_fabm), pinwid + pck.pad[0]) + 2 * crt_offset
    h_crt = -t_crt + t_fabp + max(h_fabp, h_fabm) + crt_offset
    l_mounth = l_fabp + pck.mounting_hole_pos[0]
    t_mounth = t_fabp + pck.mounting_hole_pos[1]
    # text anchors: reference below the body, value above the pads
    txt_x = l_slkp + max(w_slkp, w_slkm) / 2
    txt_t = (t_slkp + max(h_slkm, h_slkp)) + txt_offset
    txt_b = -pck.pad[1] / 2 - txt_offset
    # assemble footprint name, description and tag strings
    tag_items = ["Horizontal", "RM {0}mm".format(pck.rm)]
    footprint_name = pck.name
    for t in pck.more_packnames:
        footprint_name = footprint_name + "_" + t
    footprint_name = footprint_name + "_Horizontal" + "_TabUp"
    for t in pck.fpnametags:
        footprint_name = footprint_name + "_" + t
    if pck.largepads:
        tag_items.append("large Pads")
        footprint_name = footprint_name + lptext
    print(footprint_name)
    description = pck.name
    tags = pck.name
    for t in tag_items:
        description = description + ", " + t
        tags = tags + " " + t
    for t in pck.tags:
        description = description + ", " + t
        tags = tags + " " + t
    if len(pck.webpage)>0:
        description = description + ", see " + pck.webpage
    # init kicad footprint
    kicad_mod = Footprint(footprint_name)
    kicad_mod.setDescription(description)
    kicad_mod.setTags(tags)
    # set general values
    kicad_mod.append(Text(type='reference', text='REF**', at=[txt_x, txt_t], layer='F.SilkS'))
    kicad_mod.append(Text(type='user', text='%R', at=[txt_x, txt_t], layer='F.Fab'))
    kicad_mod.append(Text(type='value', text=footprint_name, at=[txt_x, txt_b], layer='F.Fab'))
    # create FAB-layer (metal tab drawn beyond the plastic body when present)
    if (h_fabm > 0):
        if len(pck.metal_angled) > 0:
            addRectAngledBottom(kicad_mod, [l_fabp + pck.metal_offset_x, t_fabp + h_fabp],
                                [l_fabp + pck.metal_offset_x + w_fabm, t_fabp + h_fabm], pck.metal_angled, 'F.Fab', lw_fab)
        else:
            kicad_mod.append(RectLine(start=[l_fabp + pck.metal_offset_x, t_fabp + h_fabp],
                                      end=[l_fabp + pck.metal_offset_x + w_fabm, t_fabp + h_fabm], layer='F.Fab',
                                      width=lw_fab))
    if len(pck.plastic_angled) > 0:
        addRectAngledBottom(kicad_mod, [l_fabp, t_fabp],
                            [l_fabp + w_fabp, t_fabp + h_fabp], pck.plastic_angled, 'F.Fab', lw_fab)
    else:
        kicad_mod.append(
            RectLine(start=[l_fabp, t_fabp], end=[l_fabp + w_fabp, t_fabp + h_fabp], layer='F.Fab', width=lw_fab))
    if pck.mounting_hole_diameter > 0:
        kicad_mod.append(Circle(center=[l_mounth, t_mounth], radius=pck.mounting_hole_diameter / 2, layer='F.Fab', width=lw_fab))
    # pin stubs from the body edge to the pad row (y = 0)
    x = 0
    for p in range(1, pck.pins + 1):
        kicad_mod.append(Line(start=[x, t_fabp], end=[x, 0], layer='F.Fab', width=lw_fab))
        if len(pck.rm_list)>0 and p<=len(pck.rm_list):
            x = x + pck.rm_list[p-1]
        else:
            x = x + pck.rm
    # create SILKSCREEN-layer
    # keepouts: areas around each pad where silkscreen must not be drawn
    keepouts = []
    x = 0
    for p in range(1, pck.pins + 1):
        if p==1:
            keepouts=keepouts+addKeepoutRect(x,0,pck.pad[0]+2*slk_dist,pck.pad[1]+2*slk_dist)
        else:
            keepouts=keepouts+addKeepoutRound(x,0,pck.pad[0]+2*slk_dist,pck.pad[1]+2*slk_dist)
        # BUG FIX: advance by the per-gap pitch from rm_list when given, as
        # every other pin loop in this file does; previously this loop always
        # advanced by pck.rm, misplacing keepouts for variable-pitch packages.
        if len(pck.rm_list)>0 and p<=len(pck.rm_list):
            x = x + pck.rm_list[p-1]
        else:
            x = x + pck.rm
    addHLineWithKeepout(kicad_mod, l_slkp, l_slkp + w_slkp, t_slkp, 'F.SilkS', lw_slk, keepouts)
    addHLineWithKeepout(kicad_mod, l_slkp, l_slkp + w_slkp, t_slkp + h_slkp, 'F.SilkS', lw_slk, keepouts)
    addVLineWithKeepout(kicad_mod, l_slkp, t_slkp, t_slkp + h_slkp, 'F.SilkS', lw_slk, keepouts)
    addVLineWithKeepout(kicad_mod, l_slkp + w_slkp, t_slkp, t_slkp + h_slkp, 'F.SilkS', lw_slk, keepouts)
    # metal tab beyond the body is drawn dashed (10*lw_slk dash pitch)
    if (h_fabm > 0):
        addHDLineWithKeepout(kicad_mod, l_slkp + pck.metal_offset_x, 10*lw_slk, l_slkp + pck.metal_offset_x + w_slkm, t_slkp + h_slkm, 'F.SilkS', lw_slk, keepouts)
        addVDLineWithKeepout(kicad_mod, l_slkp + pck.metal_offset_x, t_slkp + h_slkp+lw_slk*2, 10 * lw_slk, t_slkp + h_slkm, 'F.SilkS', lw_slk, keepouts)
        addVDLineWithKeepout(kicad_mod, l_slkp + pck.metal_offset_x + w_slkm, t_slkp + h_slkp+lw_slk*2, 10 * lw_slk, t_slkp + h_slkm, 'F.SilkS', lw_slk, keepouts)
    # pin stubs on the silkscreen, stopped short of the pads
    x = 0
    for p in range(1, pck.pins + 1):
        addVLineWithKeepout(kicad_mod, x, t_slkp, pck.pad[1]/2+slk_dist, 'F.SilkS', lw_slk, keepouts)
        if len(pck.rm_list)>0 and p<=len(pck.rm_list):
            x = x + pck.rm_list[p-1]
        else:
            x = x + pck.rm
    # create courtyard
    kicad_mod.append(
        RectLine(start=[roundCrt(l_crt), roundCrt(t_crt)], end=[roundCrt(l_crt + w_crt), roundCrt(t_crt + h_crt)],
                 layer='F.CrtYd', width=lw_crt))
    # create mounting hole
    if pck.mounting_hole_drill > 0:
        kicad_mod.append(Pad(type=Pad.TYPE_NPTH, shape=Pad.SHAPE_OVAL, at=[l_mounth, t_mounth],
                             size=[pck.mounting_hole_drill, pck.mounting_hole_drill], drill=pck.mounting_hole_drill,
                             layers=['*.Cu', '*.Mask']))
    # create pads (pin 1 rectangular, all others oval)
    x = 0
    for p in range(1, pck.pins + 1):
        if (p==1):
            kicad_mod.append(Pad(number=p, type=Pad.TYPE_THT, shape=Pad.SHAPE_RECT, at=[x, 0], size=pck.pad, drill=pck.drill, layers=['*.Cu', '*.Mask']))
        else:
            kicad_mod.append(Pad(number=p, type=Pad.TYPE_THT, shape=Pad.SHAPE_OVAL, at=[x, 0], size=pck.pad, drill=pck.drill, layers=['*.Cu', '*.Mask']))
        if len(pck.rm_list)>0 and p<=len(pck.rm_list):
            x = x + pck.rm_list[p-1]
        else:
            x = x + pck.rm
    # add model
    if (has3d):
        kicad_mod.append(
            Model(filename=lib_name + ".3dshapes/" + footprint_name + ".wrl", at=x_3d, scale=s_3d, rotate=r_3d))
    # write file
    file_handler = KicadFileHandler(kicad_mod)
    file_handler.writeFile(footprint_name + '.kicad_mod')
# round metal-can TO-xx packages (pins placed on a circle)
def makeTORound(lib_name, pck, has3d=False, x_3d=[0, 0, 0], s_3d=[1,1,1], lptext="_LargePads", r_3d=[0, 0, 0]):
    """Generate a round (metal-can, TO-5/TO-18 style) THT footprint.

    Writes "<footprint_name>.kicad_mod" into the current working directory.

    lib_name -- library prefix used to build the 3D model path
    pck      -- round-package descriptor (pins, pin_circle_diameter,
                pin1_angle, pin_dangle, diameters, mark geometry, ...)
    has3d    -- when True, attach a reference to "<footprint_name>.wrl"
    x_3d, s_3d -- 3D model offset / scale
    lptext   -- name suffix appended when pck.largepads is set
    r_3d     -- 3D model rotation; new optional parameter (default [0, 0, 0]
                preserves the previous hard-coded rotation) for consistency
                with the other make* generators.
    """
    padsize = pck.pad
    d_fab=pck.diameter_outer
    d_slk=pck.diameter_outer+2*slk_offset
    # calculate pad positions: pins sit on a circle starting at pin1_angle,
    # advancing by pin_dangle; the whole footprint is shifted so pin 1 (the
    # first used pin) ends up at the origin
    pads = []
    yshift = 0
    xshift = 0
    a=pck.pin1_angle
    firstPin=True
    for p in range(1, pck.pins + 1):
        x=pck.pin_circle_diameter/2*math.cos(a/180*math.pi)
        y=pck.pin_circle_diameter/2*math.sin(a/180*math.pi)
        a = a + pck.pin_dangle
        # an empty used_pins list means "all pins present"
        if (len(pck.used_pins)<=0) or ((p-1) in pck.used_pins):
            pads.append([x, y])
            if firstPin:
                xshift=-x
                yshift=-y
                firstPin=False
    txt_t = -d_slk/2 - txt_offset
    txt_b = d_slk/2 + txt_offset
    # assemble footprint name, description and tag strings
    tag_items = []
    footprint_name = pck.name
    for t in pck.more_packnames:
        footprint_name = footprint_name + "_" + t
    for t in pck.fpnametags:
        footprint_name = footprint_name + "_" + t
    if pck.largepads:
        tag_items.append("large Pads")
        footprint_name = footprint_name + lptext
    print(footprint_name)
    description = pck.name
    tags = pck.name
    for t in tag_items:
        description = description + ", " + t
        tags = tags + " " + t
    for t in pck.tags:
        description = description + ", " + t
        tags = tags + " " + t
    if len(pck.webpage)>0:
        description = description + ", see " + pck.webpage
    # init kicad footprint
    kicad_mod = Footprint(footprint_name)
    kicad_mod.setDescription(description)
    kicad_mod.setTags(tags)
    # translation node: moves pin 1 to the footprint origin
    kicad_modt = Translation(xshift, yshift)
    kicad_mod.append(kicad_modt)
    # set general values
    kicad_modt.append(Text(type='reference', text='REF**', at=[0, txt_t], layer='F.SilkS'))
    kicad_modt.append(Text(type='user', text='%R', at=[0, txt_t], layer='F.Fab'))
    kicad_modt.append(Text(type='value', text=footprint_name, at=[0, txt_b], layer='F.Fab'))
    # create FAB-layer
    kicad_modt.append(Circle(center=[0, 0], radius=pck.diameter_inner / 2, layer='F.Fab', width=lw_fab))
    # the pin-1 mark is a tab sticking out of the can: an arc spanning all of
    # the circle except the mark opening, closed by three straight lines
    if pck.mark_width > 0 and pck.mark_len > 0:
        a=pck.mark_angle
        da=math.asin(pck.mark_width/d_fab)/math.pi*180
        a1=a+da
        a2=a-da
        x1 = [(pck.diameter_outer / 2) * math.cos(a1 / 180 * math.pi), (pck.diameter_outer / 2) * math.sin(a1 / 180 * math.pi)]
        x3 = [(pck.diameter_outer / 2) * math.cos(a2 / 180 * math.pi), (pck.diameter_outer / 2) * math.sin(a2 / 180 * math.pi)]
        dx1= (pck.mark_len) * math.cos(a / 180 * math.pi)
        dx2 = (pck.mark_len) * math.sin(a / 180 * math.pi)
        x2 = [x1[0] + dx1, x1[1] + dx2]
        x4 = [x3[0] + dx1, x3[1] + dx2]
        # extreme mark coordinates, needed for the courtyard below
        minx=min(x2[0],x4[0])
        miny=min(x2[1],x4[1])
        kicad_modt.append(Arc(center=[0, 0], start=x1, angle=(360-2*da), layer='F.Fab', width=lw_fab))
        kicad_modt.append(Line(start=x1, end=x2, angle=0, layer='F.Fab', width=lw_fab))
        kicad_modt.append(Line(start=x2, end=x4, angle=0, layer='F.Fab', width=lw_fab))
        kicad_modt.append(Line(start=x4, end=x3, angle=0, layer='F.Fab', width=lw_fab))
    else:
        kicad_modt.append(Circle(center=[0, 0], radius=pck.diameter_outer / 2, layer='F.Fab', width=lw_fab))
    if pck.window_diameter>0:
        addCircleLF(kicad_modt, [0,0], pck.window_diameter/2, 'F.Fab', lw_fab, 4*lw_fab)
    # create SILKSCREEN-layer (same shape as FAB, grown by slk_offset)
    if pck.mark_width>0 and pck.mark_len>0:
        a=pck.mark_angle
        da=math.asin((pck.mark_width+2*slk_offset)/d_slk)/math.pi*180
        a1=a+da
        a2=a-da
        x1 = [(d_slk / 2) * math.cos(a1 / 180 * math.pi), (d_slk / 2) * math.sin(a1 / 180 * math.pi)]
        x3 = [(d_slk / 2) * math.cos(a2 / 180 * math.pi), (d_slk / 2) * math.sin(a2 / 180 * math.pi)]
        dx1= (pck.mark_len+slk_offset) * math.cos(a / 180 * math.pi)
        dx2 = (pck.mark_len+slk_offset) * math.sin(a / 180 * math.pi)
        x2 = [x1[0] + dx1, x1[1] + dx2]
        x4 = [x3[0] + dx1, x3[1] + dx2]
        kicad_modt.append(Arc(center=[0, 0], start=x1, angle=(360-2*da), layer='F.SilkS', width=lw_slk))
        kicad_modt.append(Line(start=x1, end=x2, angle=0, layer='F.SilkS', width=lw_slk))
        kicad_modt.append(Line(start=x2, end=x4, angle=0, layer='F.SilkS', width=lw_slk))
        kicad_modt.append(Line(start=x4, end=x3, angle=0, layer='F.SilkS', width=lw_slk))
    else:
        kicad_modt.append(Circle(center=[0, 0], radius=d_slk/2, layer='F.SilkS', width=lw_slk))
    # create courtyard: rectangle enclosing can + mark, or a plain circle
    if pck.mark_width > 0 and pck.mark_len > 0:
        kicad_mod.append(
            RectLine(start=[roundCrt(xshift+min(minx-crt_offset,-d_fab/2-crt_offset)), roundCrt(yshift+min(miny-crt_offset,-d_fab/2-crt_offset))], end=[roundCrt(xshift+d_fab/2+crt_offset), roundCrt(yshift+d_fab/2+crt_offset)],
                     layer='F.CrtYd', width=lw_crt))
    else:
        kicad_mod.append(Circle(center=[roundCrt(xshift), roundCrt(yshift)], radius=roundCrt(d_fab / 2+crt_offset), layer='F.CrtYd', width=lw_crt))
    # create pads (pin 1 widened by 30% in x to mark it)
    for p in range(0, len(pads)):
        if p == 0:
            kicad_modt.append(
                Pad(number=p + 1, type=Pad.TYPE_THT, shape=Pad.SHAPE_OVAL, at=pads[p], size=[roundG(padsize[0]*1.3,0.1),padsize[1]], drill=pck.drill,
                    layers=['*.Cu', '*.Mask']))
        else:
            kicad_modt.append(
                Pad(number=p + 1, type=Pad.TYPE_THT, shape=Pad.SHAPE_OVAL, at=pads[p], size=padsize, drill=pck.drill,
                    layers=['*.Cu', '*.Mask']))
    # add model (rotation now taken from r_3d instead of hard-coded [0, 0, 0])
    if (has3d):
        kicad_modt.append(
            Model(filename=lib_name + ".3dshapes/" + footprint_name + ".wrl", at=x_3d, scale=s_3d, rotate=r_3d))
    # write file
    file_handler = KicadFileHandler(kicad_mod)
    file_handler.writeFile(footprint_name + '.kicad_mod')
if __name__ == '__main__':
    # make standard packages
    # parallel lists: packs[i] is generated once per pin count in pins[i],
    # with the matching pitch rms[i][j] (0 = use the package default pitch)
    packs = ["TO-264", "TO-247", "TO-218", "TO-251", "TO-126", "TO-220", "TO-3P", "TO-262", "SIPAK","TO-3PB", "TO-220F"]
    pins = [[2, 3,5], [2, 3,4,5], [2, 3], [2, 3], [2, 3], [2, 3, 4,5], [3], [3,5], [3], [3], [2, 3, 4, 5, ]]
    rms = [ [0, 0,3.81], [0, 0,2.54,2.54], [0, 0], [0, 0], [0, 0], [0, 0, 2.54,1.7], [0], [0,1.7], [0], [0], [0, 0, 2.54,1.7]]
    #makeVERTLS("${KISYS3DMOD}/Package_TO_SOT_THT", pack("SOT93", 2, 0, 0, False),False, [0, 0, 0], [0, 0, 0])
    #exit()
    for p in range(0, len(packs)):
        for pidx in range(0, len(pins[p])):
            # 3D model offsets/scale/rotation per variant (all defaults here)
            o3d = [0, 0, 0]
            o3dh = [0, 0, 0]
            o3dls = [0, 0, 0]
            o3dvls = [0, 0, 0]
            s3d = [1,1,1]
            r3d=[0,0,0]
            r3dr=r3d
            pack_norm = pack(packs[p], pins[p][pidx], rms[p][pidx], 0, False)
            libn = "${KISYS3DMOD}/Package_TO_SOT_THT"
            makeVERT(libn, pack_norm, True, o3d, s3d, "_LargePads", r3d)
            #makeVERTLS(libn, pack_norm, True, o3dvls, s3d, "_LargePads", r3d)
            makeHOR(libn, pack_norm, True, o3dh, s3d, "_LargePads", r3d)
            # tab-up variant only makes sense without an exposed tab pad
            if (len(pack_norm.additional_pin_pad) <= 0):
                #makeHORLS(libn, pack_norm, True, o3dls, s3d, "_LargePads", r3d)
                makeHORREV(libn, pack_norm, True, o3d, s3d, "_LargePads", r3dr)
    # make staggered packages (two-row pin arrangements; the 4th pack()
    # argument 1/2 presumably selects the staggering variant -- TODO confirm)
    packs = [ "TO-220", "TO-220F", ]
    pins = [ [4, 5, 7, 9, 11, 15, ], [ 4, 4, 5, 5, 7, 9, 11, 15 ], ]
    rms = [ [2.54, 1.7, 1.27, 0.97, 1.7, 1.27, ], [ 2.54, 2.54, 1.7, 1.7, 1.27, 0.9, 1.7, 1.27 ], ]
    pitchys = [ [], [ 0, 2.05, 0, 2.06, 0, 0, 0, 0 ], ]
    ypinoffsets = [ [], [ 0, 0, 0, 4.5, 0, 0, 0, 0 ], ]
    for p in range(0, len(packs)):
        for pidx in range(0, len(pins[p])):
            o3d = [0, 0, 0]
            s3d = [1,1,1]
            # row pitch / pin y-offset default to 0 when the tables are short
            pitchy=0
            if p<len(pitchys):
                if pidx<len(pitchys[p]):
                    pitchy=pitchys[p][pidx]
            ypinoffset=0
            if p<len(ypinoffsets):
                if pidx<len(ypinoffsets[p]):
                    ypinoffset=ypinoffsets[p][pidx]
            pack_norm1 = pack(packs[p], pins[p][pidx], rms[p][pidx], 1, False, pitchy,ypinoffset)
            pack_norm2 = pack(packs[p], pins[p][pidx], rms[p][pidx], 2, False, pitchy,ypinoffset)
            libn = "${KISYS3DMOD}/Package_TO_SOT_THT"
            makeVERT(libn, pack_norm1, True, o3d, s3d)
            makeVERT(libn, pack_norm2, True, o3d, s3d)
            makeHOR(libn, pack_norm1, True, o3d, s3d)
            makeHOR(libn, pack_norm2, True, o3d, s3d)
            #pack_largepins=pack(packs[p], pins[p][pidx], rms[p][pidx], True)
            #makeVERT("${KISYS3DMOD}/Package_TO_SOT_THT", pack_largepins, True, o3d, s3d)
            #makeHOR("${KISYS3DMOD}/Package_TO_SOT_THT", pack_largepins, True, o3d, s3d)
            #if (len(pack_largepins.additional_pin_pad) <= 0):
            #    makeHORREV("${KISYS3DMOD}/Package_TO_SOT_THT", pack_largepins, True, o3d, s3d)
    # make round packages
    # parallel lists again: per package a list of modifiers ("" = plain,
    # "Window"/"Lens" variants) and the pin counts to generate
    packs=[]
    modifiers=[]
    pins=[]
    has3d=[]
    off3d=[]
    scale3d=[]
    packs.append("TO-5")
    modifiers.append(["", "Window"])
    pins.append([2, 3, 4, 6, 8, 10])
    has3d.append([True, True, True, True, True, True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-5_PD5.08")
    modifiers.append(["", "Window"])
    pins.append([8])
    has3d.append([True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-8")
    modifiers.append(["", "Window"])
    pins.append([2, 3])
    has3d.append([True, True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-11")
    modifiers.append(["", "Window"])
    pins.append([2, 3])
    has3d.append([True, True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-12")
    modifiers.append(["", "Window"])
    pins.append([4])
    has3d.append([True])
    off3d.append([[]])
    scale3d.append([[]])
    packs.append("TO-17")
    modifiers.append(["", "Window"])
    pins.append([4])
    has3d.append([True])
    off3d.append([[]])
    scale3d.append([[]])
    packs.append("TO-18")
    modifiers.append(["", "Window", "Lens"])
    pins.append([2, 3, 4])
    has3d.append([True, True, True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-33")
    modifiers.append(["", "Window"])
    pins.append([4])
    has3d.append([True])
    off3d.append([[]])
    scale3d.append([[]])
    packs.append("TO-38")
    modifiers.append(["", "Window"])
    pins.append([2, 3])
    has3d.append([True, True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-39")
    modifiers.append(["", "Window"])
    pins.append([2, 3, 4, 6, 8, 10])
    has3d.append([True, True, True, True, True, True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-46")
    modifiers.append(["", "Window"])
    pins.append([2, 3, 4])
    has3d.append([True, True, True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-52")
    modifiers.append(["", "Window"])
    pins.append([2, 3])
    has3d.append([True, True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-72")
    modifiers.append(["", "Window"])
    pins.append([4])
    has3d.append([True])
    off3d.append([[]])
    scale3d.append([[]])
    packs.append("TO-78")
    modifiers.append(["", "Window"])
    pins.append([6, 8, 10])
    has3d.append([True, True, True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-99")
    modifiers.append(["", "Window"])
    pins.append([6,8])
    has3d.append([True,True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-100")
    modifiers.append(["", "Window"])
    # NOTE(review): has3d has two entries here but pins only one; only
    # index 0 is ever read, so this is harmless -- verify intent
    pins.append([10])
    has3d.append([True,True])
    off3d.append([])
    scale3d.append([])
    packs.append("TO-75")
    modifiers.append(["", "Window"])
    pins.append([6])
    has3d.append([True])
    off3d.append([[]])
    scale3d.append([[]])
    for p in range(0, len(packs)):
        # NOTE(review): mi is incremented but never used; off3d/scale3d are
        # filled above but never read in this loop -- candidates for removal
        mi=0
        for m in modifiers[p]:
            for pidx in range(0, len(pins[p])):
                o3d = [0, 0, 0]
                s3d = [1,1,1]
                # NOTE(review): this rebinds the global name `pack`, shadowing
                # the pack() factory used in the sections above; it only works
                # because the round packages are processed last
                pack = pack_round(packs[p], pins[p][pidx], m, False)
                libn = "${KISYS3DMOD}/Package_TO_SOT_THT"
                makeTORound(libn, pack, has3d[p][pidx], o3d, s3d)
            mi=mi+1
| gpl-3.0 |
jfoote/vulture | vlib/analyzers/reproducibility.py | 1 | 2072 | import json, logging, os, re, subprocess, shlex
from tools import get_category_by_status
log = logging.getLogger()
# Attachment names (substrings, matched case-insensitively in get()) that are
# standard Apport/Launchpad metadata rather than user-supplied repro files.
# BUG FIX: a missing comma between "XorgLog" and "log" previously made Python
# concatenate the adjacent string literals into a single bogus entry
# "XorgLoglog", so neither "XorgLog" nor "log" was actually filtered.
meta_files = ['Disassembly', 'Stacktrace', 'Registers',
              'SegvAnalysis', 'ProcMaps', "BootLog", "CoreDump",
              "BootDmesg", "syslog", "UbiquityDebug.gz", "Casper.gz",
              "UbiquityPartman.gz", "UbiquityDm.gz", "GdmLog", "XorgLog",
              "log", "Log"]
def get(metadata, bugdir):
    """Derive reproducibility indicators for a crash-bug report.

    Args:
        metadata: dict with at least a 'description' string (Apport-style
            report text, possibly containing a 'ProcCmdline:' line).
        bugdir: path to the directory holding the bug's attachment files.

    Returns:
        dict with keys:
            'cmdline'     -- command line recovered from the report, or None
            'cmdline_uri' -- last cmdline token when it looks like a URI or
                             file argument (contains '//' or '.'), else None
            'files'       -- attachment names that look interesting: not a
                             known metadata file (see meta_files), not empty,
                             and not plain text or a core file per file(1)
    """
    indicators = {}
    # look for file arg; this needs work TODO
    cmdline = None
    uri = None
    for line in metadata['description'].splitlines():
        if "proccmdline" in line.lower():
            # everything after the first ':' is the recorded command line
            cmdline = ":".join(line.split(":")[1:]).strip()
            try:
                toks = shlex.split(cmdline)
            except ValueError as e:
                log.error("error while parsing cmdline: %s" % cmdline)
                log.exception(e)
                continue
            if len(toks) > 1:
                # heuristic: a last token containing '//' or '.' is probably
                # a URL or file path handed to the crashing program
                if ("//" in toks[-1]) or ("." in toks[-1]):
                    uri = toks[-1].strip()
    indicators['cmdline'] = cmdline
    indicators['cmdline_uri'] = uri
    # look for interesting attachments; ugly
    interesting_files = []
    for f in os.listdir(bugdir):
        fpath = os.path.join(bugdir, f)
        if not os.path.isfile(fpath):
            continue
        for fn in meta_files:
            if fn.lower() in f.lower():
                break
        else:
            # no break in loop above, i.e. still interested
            # BUG FIX: decode so this also works on Python 3, where
            # check_output returns bytes and bytes.split(":") would raise
            out = subprocess.check_output(["file", fpath]).decode("utf-8", "replace")
            ftype = out.split(":")[-1]
            if ftype.strip() == "empty":
                continue
            for tstr in ["ASCII", "text", "core file"]:
                if tstr in ftype:
                    break
            else:
                # only runs if we didn't break, i.e., this might be interesting
                interesting_files.append(f)
    indicators['files'] = interesting_files
    # TODO: look for recv, etc. in stacks (should this be in exploitability maybe (remote?))
    return indicators
| mit |
potassco/clingo | doc/py/_gen/gen.py | 1 | 2705 | #!/usr/bin/env python3
import os
import re
import importlib.machinery
import pdoc
import clingo
# (key into clingo.__pdoc__, interface class name, clingo callable whose
# docstring embeds the interface class declaration to extract)
SUB = [
    ("clingo_main", "Application", clingo.clingo_main),
    ("Control.register_observer", "Observer", clingo.Control.register_observer),
    ("Control.register_propagator", "Propagator", clingo.Control.register_propagator)]
def parse_class(aux, name, doc):
    """Write the ``class <name>...:`` declaration embedded in *doc* to *aux*.

    Locates the class declaration for *name* inside the docstring *doc*
    (optionally with a base-class list) and writes everything from the
    declaration up to, but excluding, the next "```" fence to the
    file-like object *aux*.
    """
    pattern = r"class ({}(\([^)]*\))?):".format(name)
    begin = re.search(pattern, doc).start()
    fence = doc.find("```", begin)
    aux.write(doc[begin:fence])
def parse_aux():
    """
    Generate aux.py with the abstract interface classes scraped from the
    docstrings listed in SUB, then import and return the resulting module.
    """
    with open("aux.py", "w") as aux:
        aux.write("from clingo import *\nfrom abc import *\nfrom typing import *\n")
        for _, name, obj in SUB:
            parse_class(aux, name, obj.__doc__)
    # imported only after the file is fully written and closed; requires the
    # working directory on sys.path (true when this script is run directly)
    import aux
    return aux
# give clingo.ast a module spec so pdoc accepts it as a documentable module
clingo.ast.__spec__ = importlib.machinery.ModuleSpec("clingo.ast", None)
# strip the embedded ```python class ...``` blocks from the docstrings listed
# in SUB; the classes are documented separately via the generated aux module
clingo.__pdoc__ = {}
for key, name, obj in SUB:
    clingo.__pdoc__[key] = re.sub(r"```python.*?class ({}(\([^)]*\))?):.*?```".format(name), "", obj.__doc__, flags=re.MULTILINE|re.DOTALL)
pdoc.tpl_lookup.directories.insert(0, './templates')
ctx = pdoc.Context()
cmod = pdoc.Module(clingo, context=ctx)
amod = pdoc.Module(clingo.ast, supermodule=cmod, context=ctx)
# inject the scraped interface classes and synthetic variables into the docs
xmod = parse_aux()
cmod.doc["Application"] = pdoc.Class("Application", cmod, xmod.Application)
cmod.doc["Observer"] = pdoc.Class("Observer", cmod, xmod.Observer)
cmod.doc["Propagator"] = pdoc.Class("Propagator", cmod, xmod.Propagator)
cmod.doc["ast"] = amod
cmod.doc["__version__"] = pdoc.Variable("__version__", cmod, "__version__: str\n\nVersion of the clingo module (`'{}'`).".format(clingo.__version__))
cmod.doc["Infimum"] = pdoc.Variable("Infimum", cmod, '''Infimum: Symbol\n\nRepresents a symbol of type `clingo.SymbolType.Infimum`.''')
cmod.doc["Supremum"] = pdoc.Variable("Supremum", cmod, '''Supremum: Symbol\n\nRepresents a symbol of type `clingo.SymbolType.Supremum`.''')
pdoc.link_inheritance(ctx)
# render HTML into a versioned directory (major.minor) and a "current" copy
prefix = "../clingo/python-api/{}".format(".".join(clingo.__version__.split(".")[:2]))
cprefix = "../clingo/python-api/current"
os.makedirs("{}/ast".format(prefix), exist_ok=True)
os.makedirs("{}/ast".format(cprefix), exist_ok=True)
cmod_html = cmod.html(external_links=True)
amod_html = amod.html(external_links=True)
open("{}/index.html".format(prefix), "w").write(cmod_html)
open("{}/ast/index.html".format(prefix), "w").write(amod_html)
# NOTE(review): the "5.4" below is a hardcoded version string that must match
# the current major.minor; consider deriving it from `prefix` instead
open("{}/index.html".format(cprefix), "w").write(cmod_html.replace("clingo/python-api/5.4", "clingo/python-api/current"))
open("{}/ast/index.html".format(cprefix), "w").write(amod_html.replace("clingo/python-api/5.4", "clingo/python-api/current"))
| mit |
kernel64/AutobahnPython | examples/wamp/dirwatch/server.py | 9 | 1684 | ###############################################################################
##
## Copyright 2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from twisted.python import log
from twisted.internet import reactor
from twisted.web.server import Site
from twisted.web.static import File
from autobahn.websocket import listenWS
from autobahn.wamp import WampServerFactory, \
WampServerProtocol
class DirWatchServerProtocol(WampServerProtocol):
    """
    WAMP server protocol for the directory-watch demo: on session open it
    registers a PubSub topic prefix so connected clients can publish and
    subscribe under that URI.
    """
    def onSessionOpen(self):
        ## register a URI and all URIs having the string as prefix as PubSub topic
        ## (prefix matching is enabled by the second argument)
        self.registerForPubSub("http://dirwatch.autobahn.ws", True)
if __name__ == '__main__':
    # Log to stdout; pass "debug" as the first CLI argument for verbose
    # WAMP-level traces.
    log.startLogging(sys.stdout)
    debug = len(sys.argv) > 1 and sys.argv[1] == 'debug'

    # WAMP-over-WebSocket endpoint that broadcasts dirwatch PubSub events.
    factory = WampServerFactory("ws://localhost:9000", debugWamp = debug)
    factory.protocol = DirWatchServerProtocol
    factory.setProtocolOptions(allowHixie76 = True)
    listenWS(factory)

    # Static web server so the example HTML client can be loaded from ".".
    webdir = File(".")
    web = Site(webdir)
    reactor.listenTCP(8080, web)

    reactor.run()
| apache-2.0 |
PourroyJean/performance_modelisation | script/data visualisation/venv/lib/python3.6/site-packages/pip/_vendor/packaging/markers.py | 324 | 8230 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import operator
import os
import platform
import sys
from pip._vendor.pyparsing import (
ParseException, ParseResults, stringStart, stringEnd,
)
from pip._vendor.pyparsing import ZeroOrMore, Group, Forward, QuotedString
from pip._vendor.pyparsing import Literal as L # noqa
from ._compat import string_types
from .specifiers import Specifier, InvalidSpecifier
# Public API of this module.
__all__ = [
    "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
    "Marker", "default_environment",
]
# Raised by Marker() when the marker string cannot be parsed.
class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.
    """


# Raised by _eval_op() when the operator is not in the _operators table.
class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.
    """


# Raised by _get_env() when a marker variable is missing from the
# evaluation environment.
class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.
    """
class Node(object):
    """Base class for parsed marker tokens; wraps a single raw value.

    Subclasses override :meth:`serialize` to render the token back into
    marker syntax.
    """

    def __init__(self, value):
        # The raw token this node represents.
        self.value = value

    def __str__(self):
        value_text = str(self.value)
        return value_text

    def __repr__(self):
        return "<{0}({1!r})>".format(type(self).__name__, str(self))

    def serialize(self):
        # Rendering is subclass-specific.
        raise NotImplementedError
class Variable(Node):
    # An environment-variable reference, e.g. ``python_version``.
    def serialize(self):
        return str(self)


class Value(Node):
    # A literal operand; serialized back with surrounding double quotes.
    def serialize(self):
        return '"{0}"'.format(self)


class Op(Node):
    # A comparison operator token such as ``==`` or ``in``.
    def serialize(self):
        return str(self)
# pyparsing grammar for PEP 508 environment markers.

# All recognised marker variable names. Longer literals are listed before
# their prefixes so the first-match alternation picks the full token.
VARIABLE = (
    L("implementation_version") |
    L("platform_python_implementation") |
    L("implementation_name") |
    L("python_full_version") |
    L("platform_release") |
    L("platform_version") |
    L("platform_machine") |
    L("platform_system") |
    L("python_version") |
    L("sys_platform") |
    L("os_name") |
    L("os.name") |  # PEP-345
    L("sys.platform") |  # PEP-345
    L("platform.version") |  # PEP-345
    L("platform.machine") |  # PEP-345
    L("platform.python_implementation") |  # PEP-345
    L("python_implementation") |  # undocumented setuptools legacy
    L("extra")
)

# Legacy PEP-345 spellings normalised to their PEP 508 equivalents.
ALIASES = {
    'os.name': 'os_name',
    'sys.platform': 'sys_platform',
    'platform.version': 'platform_version',
    'platform.machine': 'platform_machine',
    'platform.python_implementation': 'platform_python_implementation',
    'python_implementation': 'platform_python_implementation'
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))

# Version comparison operators; "===" must precede "==" so the longest
# token wins the alternation.
VERSION_CMP = (
    L("===") |
    L("==") |
    L(">=") |
    L("<=") |
    L("!=") |
    L("~=") |
    L(">") |
    L("<")
)

MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))

# String literals may use either quote style.
MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))

BOOLOP = L("and") | L("or")

MARKER_VAR = VARIABLE | MARKER_VALUE

# One comparison: <var-or-value> <op> <var-or-value>, collapsed to a tuple.
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))

LPAREN = L("(").suppress()
RPAREN = L(")").suppress()

# Recursive rule for parenthesised boolean combinations of comparisons.
MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)

# Entry point: a marker must consume the entire input string.
MARKER = stringStart + MARKER_EXPR + stringEnd
def _coerce_parse_result(results):
    """Recursively convert pyparsing ``ParseResults`` into plain lists.

    Anything that is not a ``ParseResults`` (tuples, strings, marker
    nodes) is returned unchanged.
    """
    if not isinstance(results, ParseResults):
        return results
    return [_coerce_parse_result(item) for item in results]
def _format_marker(marker, first=True):
    """Render a coerced marker structure back into marker syntax.

    ``first`` is True only for the outermost call; nested boolean groups
    are wrapped in parentheses.
    """
    assert isinstance(marker, (list, tuple, string_types))

    # A [[...]] wrapper is a single-item list whose sole item is itself a
    # list/tuple; unwrap it so we don't emit extraneous parentheses.
    if (isinstance(marker, list) and len(marker) == 1 and
            isinstance(marker[0], (list, tuple))):
        return _format_marker(marker[0])

    if isinstance(marker, tuple):
        # A single comparison: serialize each node, space-separated.
        return " ".join(part.serialize() for part in marker)
    if isinstance(marker, list):
        pieces = [_format_marker(part, first=False) for part in marker]
        joined = " ".join(pieces)
        return joined if first else "(" + joined + ")"
    # Bare string ("and" / "or").
    return marker
# Maps a serialized operator token to its implementation, used by
# _eval_op() on the fallback (non-version-specifier) comparison path.
_operators = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
def _eval_op(lhs, op, rhs):
    """Evaluate one comparison, preferring PEP 440 version semantics.

    If ``op`` + ``rhs`` parse as a version specifier, membership is tested
    with ``Specifier.contains``; otherwise the plain operator table is
    used. Raises :class:`UndefinedComparison` for an unknown operator.
    """
    op_text = op.serialize()
    try:
        spec = Specifier(op_text + rhs)
    except InvalidSpecifier:
        # Not a valid version specifier; fall through to plain comparison.
        pass
    else:
        return spec.contains(lhs)

    comparator = _operators.get(op_text)
    if comparator is None:
        raise UndefinedComparison(
            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
        )
    return comparator(lhs, rhs)
_undefined = object()
def _get_env(environment, name):
value = environment.get(name, _undefined)
if value is _undefined:
raise UndefinedEnvironmentName(
"{0!r} does not exist in evaluation environment.".format(name)
)
return value
def _evaluate_markers(markers, environment):
    """Evaluate a coerced marker structure against *environment*.

    ``and`` binds tighter than ``or``: consecutive terms accumulate into
    the current group and every ``"or"`` token starts a new group. The
    result is the OR over groups of the AND within each group.
    """
    groups = [[]]

    for marker in markers:
        assert isinstance(marker, (list, tuple, string_types))

        if isinstance(marker, list):
            # Parenthesised sub-expression: evaluate recursively.
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker
            # Exactly one side is a Variable; resolve it from the
            # environment and compare against the literal side.
            if isinstance(lhs, Variable):
                outcome = _eval_op(_get_env(environment, lhs.value), op, rhs.value)
            else:
                outcome = _eval_op(lhs.value, op, _get_env(environment, rhs.value))
            groups[-1].append(outcome)
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])

    return any(all(item) for item in groups)
def format_full_version(info):
    """Render a ``sys.implementation.version``-style struct as a string.

    Produces ``major.minor.micro`` plus, for pre-releases, the first
    letter of the release level and the serial (e.g. ``3.6.0b2``).
    """
    version = '{0.major}.{0.minor}.{0.micro}'.format(info)
    kind = info.releaselevel
    if kind != 'final':
        version += kind[0] + str(info.serial)
    return version


def default_environment():
    """Return the PEP 508 marker environment for the running interpreter.

    Maps every environment-marker variable name to its value for the
    current Python process.
    """
    if hasattr(sys, 'implementation'):
        # CPython >= 3.3 exposes implementation details directly; older
        # interpreters fall back to empty values.
        iver = format_full_version(sys.implementation.version)
        implementation_name = sys.implementation.name
    else:
        iver = '0'
        implementation_name = ''

    return {
        "implementation_name": implementation_name,
        "implementation_version": iver,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        # BUG FIX: this was python_version()[:3], a 3-character string
        # slice that yields "3.1" for Python 3.10+. Join the first two
        # release components instead.
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }
class Marker(object):
    """A parsed PEP 508 environment marker expression."""

    def __init__(self, marker):
        """Parse *marker*; raises :class:`InvalidMarker` on bad syntax."""
        try:
            parsed = MARKER.parseString(marker)
        except ParseException as e:
            snippet = marker[e.loc:e.loc + 8]
            raise InvalidMarker(
                "Invalid marker: {0!r}, parse error at {1!r}".format(
                    marker, snippet))
        self._markers = _coerce_parse_result(parsed)

    def __str__(self):
        return _format_marker(self._markers)

    def __repr__(self):
        return "<Marker({0!r})>".format(str(self))

    def evaluate(self, environment=None):
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        if environment is not None:
            current_environment.update(environment)
        return _evaluate_markers(self._markers, current_environment)
| gpl-3.0 |
TuSimple/simpledet | config/FPG/faster_r50v1b_fpg6@256_syncbn_1x.py | 1 | 8833 | from symbol.builder import FasterRcnn as Detector
from symbol.builder import ResNetV1bFPN as Backbone
from symbol.builder import add_anchor_to_arg
from models.FPG.builder import FPGNeckP2P6 as Neck
from models.FPN.builder import FPNRpnHead as RpnHead
from models.FPN.builder import FPNRoiAlign as RoiExtractor
from models.FPN.builder import FPNBbox2fcHead as BboxHead
from mxnext.complicate import normalizer_factory
def get_config(is_train):
    """Build the training/testing configuration for a Faster R-CNN detector
    with a ResNet50-v1b backbone and an FPG (Feature Pyramid Grids) neck,
    trained on COCO with synchronized BN in the neck.

    Returns the tuple of config namespaces, input transforms, data/label
    names and metrics consumed by the simpledet train/test loops.
    """
    class General:
        log_frequency = 10
        name = __name__.rsplit("/")[-1].rsplit(".")[-1]
        batch_image = 2 if is_train else 1
        fp16 = False
        loader_worker = 8

    class KvstoreParam:
        kvstore = "nccl"
        batch_image = General.batch_image
        gpus = [0, 1, 2, 3, 4, 5, 6, 7]
        fp16 = General.fp16

    class NormalizeParam:
        normalizer = normalizer_factory(type="fixbn")

    class BackboneParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        depth = 50

    class NeckParam:
        fp16 = General.fp16
        # The FPG neck uses BN synchronized across all devices.
        normalizer = normalizer_factory(type="syncbn", ndev=len(KvstoreParam.gpus))
        dim_reduced = 256
        num_stage = 6
        S0_kernel = 1

    class RpnParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        batch_image = General.batch_image
        nnvm_proposal = True
        nnvm_rpn_target = False

        class anchor_generate:
            scale = (8,)
            ratio = (0.5, 1.0, 2.0)
            stride = (4, 8, 16, 32, 64)
            max_side = 1400

        class anchor_assign:
            allowed_border = 0
            pos_thr = 0.7
            neg_thr = 0.3
            min_pos_thr = 0.0
            image_anchor = 256
            pos_fraction = 0.5

        class head:
            conv_channel = 256
            mean = (0, 0, 0, 0)
            std = (1, 1, 1, 1)

        class proposal:
            pre_nms_top_n = 2000 if is_train else 1000
            post_nms_top_n = 2000 if is_train else 1000
            nms_thr = 0.7
            min_bbox_side = 0

        class subsample_proposal:
            proposal_wo_gt = False
            image_roi = 512
            fg_fraction = 0.25
            fg_thr = 0.5
            bg_thr_hi = 0.5
            bg_thr_lo = 0.0

        class bbox_target:
            num_reg_class = 81
            class_agnostic = False
            weight = (1.0, 1.0, 1.0, 1.0)
            mean = (0.0, 0.0, 0.0, 0.0)
            std = (0.1, 0.1, 0.2, 0.2)

    class BboxParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        num_class = 1 + 80
        image_roi = 512
        batch_image = General.batch_image

        class regress_target:
            class_agnostic = False
            mean = (0.0, 0.0, 0.0, 0.0)
            std = (0.1, 0.1, 0.2, 0.2)

    class RoiParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        out_size = 7
        stride = (4, 8, 16, 32)
        roi_canonical_scale = 224
        roi_canonical_level = 4

    class DatasetParam:
        if is_train:
            image_set = ("coco_train2017", )
        else:
            image_set = ("coco_val2017", )

    backbone = Backbone(BackboneParam)
    neck = Neck(NeckParam)
    rpn_head = RpnHead(RpnParam)
    roi_extractor = RoiExtractor(RoiParam)
    bbox_head = BboxHead(BboxParam)
    detector = Detector()

    # Only the symbols required for the requested mode are constructed.
    if is_train:
        train_sym = detector.get_train_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)
        rpn_test_sym = None
        test_sym = None
    else:
        train_sym = None
        rpn_test_sym = detector.get_rpn_test_symbol(backbone, neck, rpn_head)
        test_sym = detector.get_test_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)

    class ModelParam:
        train_symbol = train_sym
        test_symbol = test_sym
        rpn_test_symbol = rpn_test_sym

        from_scratch = False
        random = True
        memonger = False
        memonger_until = "stage2_unit3_plus"

        class pretrain:
            prefix = "pretrain_model/resnet%s_v1b" % BackboneParam.depth
            epoch = 0
            fixed_param = ["conv0", "stage1", "bn_gamma", "bn_beta", "bn0",
                           "bn1", "bn2", "bn3", "bn4"]

        def process_weight(sym, arg, aux):
            # Pre-generate anchors for every RPN stride and bake them into
            # the arg/aux dicts of the checkpoint.
            for stride in RpnParam.anchor_generate.stride:
                add_anchor_to_arg(
                    sym, arg, aux, RpnParam.anchor_generate.max_side,
                    stride, RpnParam.anchor_generate.scale,
                    RpnParam.anchor_generate.ratio)

    class OptimizeParam:
        class optimizer:
            type = "sgd"
            # Base LR 0.01 per 16 images, scaled linearly with total batch.
            lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image
            momentum = 0.9
            wd = 0.0001
            clip_gradient = None

        class schedule:
            begin_epoch = 0
            end_epoch = 6
            # LR-drop iterations, scaled the same way as the LR.
            lr_iter = [60000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image),
                       80000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)]

        class warmup:
            type = "gradual"
            lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image / 3.0
            iter = 500

    class TestParam:
        min_det_score = 0.05
        max_det_per_image = 100

        process_roidb = lambda x: x
        process_output = lambda x, y: x

        class model:
            prefix = "experiments/{}/checkpoint".format(General.name)
            epoch = OptimizeParam.schedule.end_epoch

        class nms:
            type = "nms"
            thr = 0.5

        class coco:
            annotation = "data/coco/annotations/instances_val2017.json"

    # data processing
    class NormParam:
        mean = tuple(i * 255 for i in (0.485, 0.456, 0.406))  # RGB order
        std = tuple(i * 255 for i in (0.229, 0.224, 0.225))

    # data processing
    class ResizeParam:
        short = 800
        long = 1333

    class PadParam:
        short = 800
        long = 1333
        max_num_gt = 100

    class AnchorTarget2DParam:
        def __init__(self):
            self.generate = self._generate()

        class _generate:
            def __init__(self):
                self.stride = (4, 8, 16, 32, 64)
                self.short = (200, 100, 50, 25, 13)
                self.long = (334, 167, 84, 42, 21)

            scales = (8)
            aspects = (0.5, 1.0, 2.0)

        class assign:
            allowed_border = 0
            pos_thr = 0.7
            neg_thr = 0.3
            min_pos_thr = 0.0

        class sample:
            image_anchor = 256
            pos_fraction = 0.5

    class RenameParam:
        mapping = dict(image="data")

    from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \
        ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \
        RenameRecord, Norm2DImage
    from models.FPN.input import PyramidAnchorTarget2D

    if is_train:
        transform = [
            ReadRoiRecord(None),
            Norm2DImage(NormParam),
            Resize2DImageBbox(ResizeParam),
            Flip2DImageBbox(),
            Pad2DImageBbox(PadParam),
            ConvertImageFromHwcToChw(),
            RenameRecord(RenameParam.mapping)
        ]
        data_name = ["data"]
        label_name = ["gt_bbox", "im_info"]
        if not RpnParam.nnvm_rpn_target:
            # Anchor targets are produced in the data pipeline only when
            # they are not generated inside the network graph.
            transform.append(PyramidAnchorTarget2D(AnchorTarget2DParam()))
            label_name += ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"]
    else:
        transform = [
            ReadRoiRecord(None),
            Norm2DImage(NormParam),
            Resize2DImageBbox(ResizeParam),
            ConvertImageFromHwcToChw(),
            RenameRecord(RenameParam.mapping)
        ]
        data_name = ["data", "im_info", "im_id", "rec_id"]
        label_name = []

    import core.detection_metric as metric

    rpn_acc_metric = metric.AccWithIgnore(
        "RpnAcc",
        ["rpn_cls_loss_output", "rpn_cls_label_blockgrad_output"],
        []
    )
    rpn_l1_metric = metric.L1(
        "RpnL1",
        ["rpn_reg_loss_output", "rpn_cls_label_blockgrad_output"],
        []
    )
    # for bbox, the label is generated in network so it is an output
    box_acc_metric = metric.AccWithIgnore(
        "RcnnAcc",
        ["bbox_cls_loss_output", "bbox_label_blockgrad_output"],
        []
    )
    box_l1_metric = metric.L1(
        "RcnnL1",
        ["bbox_reg_loss_output", "bbox_label_blockgrad_output"],
        []
    )

    metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric]

    return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \
        ModelParam, OptimizeParam, TestParam, \
        transform, data_name, label_name, metric_list
| apache-2.0 |
h3biomed/ansible | lib/ansible/modules/cloud/alicloud/ali_instance_facts.py | 10 | 14095 | #!/usr/bin/python
# Copyright (c) 2017 Alibaba Group Holding Limited. He Guimin <heguimin36@163.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see http://www.gnu.org/licenses/.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible metadata block describing module maturity/support.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ali_instance_facts
version_added: "2.8"
short_description: Gather facts on instances of Alibaba Cloud ECS.
description:
- This module fetches data from the Open API in Alicloud.
The module must be called from within the ECS instance itself.
options:
availability_zone:
description:
- Aliyun availability zone ID in which to launch the instance
aliases: ['alicloud_zone']
instance_names:
description:
- A list of ECS instance names.
aliases: [ "names"]
instance_ids:
description:
- A list of ECS instance ids.
aliases: ["ids"]
instance_tags:
description:
- A hash/dictionaries of instance tags. C({"key":"value"})
aliases: ["tags"]
author:
- "He Guimin (@xiaozhu36)"
requirements:
- "python >= 2.6"
- "footmark >= 1.1.16"
extends_documentation_fragment:
- alicloud
'''
EXAMPLES = '''
# Fetch instances details according to setting different filters
- name: fetch instances details example
hosts: localhost
vars:
alicloud_access_key: <your-alicloud-access-key>
alicloud_secret_key: <your-alicloud-secret-key>
alicloud_region: cn-beijing
availability_zone: cn-beijing-a
tasks:
- name: Find all instances in the specified region
ali_instance_facts:
alicloud_access_key: '{{ alicloud_access_key }}'
alicloud_secret_key: '{{ alicloud_secret_key }}'
alicloud_region: '{{ alicloud_region }}'
register: all_instances
- name: Find all instances based on the specified ids
ali_instance_facts:
alicloud_access_key: '{{ alicloud_access_key }}'
alicloud_secret_key: '{{ alicloud_secret_key }}'
alicloud_region: '{{ alicloud_region }}'
instance_ids:
- "i-35b333d9"
- "i-ddav43kd"
register: instances_by_ids
- name: Find all instances based on the specified names/name-prefixes
ali_instance_facts:
alicloud_access_key: '{{ alicloud_access_key }}'
alicloud_secret_key: '{{ alicloud_secret_key }}'
alicloud_region: '{{ alicloud_region }}'
instance_names:
- "ecs_instance-1"
- "ecs_instance_2"
register: instances_by_names
'''
RETURN = '''
instances:
description: List of ECS instances
returned: always
type: complex
contains:
availability_zone:
description: The availability zone of the instance is in.
returned: always
type: str
sample: cn-beijing-a
block_device_mappings:
description: Any block device mapping entries for the instance.
returned: always
type: complex
contains:
device_name:
description: The device name exposed to the instance (for example, /dev/xvda).
returned: always
type: str
sample: /dev/xvda
attach_time:
description: The time stamp when the attachment initiated.
returned: always
type: str
sample: "2018-06-25T04:08:26Z"
delete_on_termination:
description: Indicates whether the volume is deleted on instance termination.
returned: always
type: bool
sample: true
status:
description: The attachment state.
returned: always
type: str
sample: in_use
volume_id:
description: The ID of the cloud disk.
returned: always
type: str
sample: d-2zei53pjsi117y6gf9t6
cpu:
description: The CPU core count of the instance.
returned: always
type: int
sample: 4
creation_time:
description: The time the instance was created.
returned: always
type: str
sample: "2018-06-25T04:08Z"
description:
description: The instance description.
returned: always
type: str
sample: "my ansible instance"
eip:
description: The attribution of EIP associated with the instance.
returned: always
type: complex
contains:
allocation_id:
description: The ID of the EIP.
returned: always
type: str
sample: eip-12345
internet_charge_type:
description: The internet charge type of the EIP.
returned: always
type: str
sample: "paybybandwidth"
ip_address:
description: EIP address.
returned: always
type: str
sample: 42.10.2.2
expired_time:
description: The time the instance will expire.
returned: always
type: str
sample: "2099-12-31T15:59Z"
gpu:
description: The attribution of instance GPU.
returned: always
type: complex
contains:
amount:
description: The count of the GPU.
returned: always
type: int
sample: 0
spec:
description: The specification of the GPU.
returned: always
type: str
sample: ""
host_name:
description: The host name of the instance.
returned: always
type: str
sample: iZ2zewaoZ
id:
description: Alias of instance_id.
returned: always
type: str
sample: i-abc12345
instance_id:
description: ECS instance resource ID.
returned: always
type: str
sample: i-abc12345
image_id:
description: The ID of the image used to launch the instance.
returned: always
type: str
sample: m-0011223344
inner_ip_address:
description: The inner IPv4 address of the classic instance.
returned: always
type: str
sample: 10.0.0.2
instance_charge_type:
description: The instance charge type.
returned: always
type: str
sample: PostPaid
instance_name:
description: The name of the instance.
returned: always
type: str
sample: my-ecs
instance_type:
description: The instance type of the running instance.
returned: always
type: str
sample: ecs.sn1ne.xlarge
internet_charge_type:
description: The billing method of the network bandwidth.
returned: always
type: str
sample: PayByBandwidth
internet_max_bandwidth_in:
description: Maximum incoming bandwidth from the internet network.
returned: always
type: int
sample: 200
internet_max_bandwidth_out:
description: Maximum outgoing bandwidth to the internet network.
returned: always
type: int
sample: 20
io_optimized:
description: Indicates whether the instance is optimized for EBS I/O.
returned: always
type: bool
sample: false
memory:
description: Memory size of the instance.
returned: always
type: int
sample: 8192
network_interfaces:
description: One or more network interfaces for the instance.
returned: always
type: complex
contains:
mac_address:
description: The MAC address.
returned: always
type: str
sample: "00:11:22:33:44:55"
network_interface_id:
description: The ID of the network interface.
returned: always
type: str
sample: eni-01234567
primary_ip_address:
description: The primary IPv4 address of the network interface within the vswitch.
returned: always
type: str
sample: 10.0.0.1
osname:
description: The operation system name of the instance owned.
returned: always
type: str
sample: CentOS
ostype:
description: The operation system type of the instance owned.
returned: always
type: str
sample: linux
private_ip_address:
description: The IPv4 address of the network interface within the subnet.
returned: always
type: str
sample: 10.0.0.1
public_ip_address:
description: The public IPv4 address assigned to the instance
returned: always
type: str
sample: 43.0.0.1
resource_group_id:
description: The id of the resource group to which the instance belongs.
returned: always
type: str
sample: my-ecs-group
security_groups:
description: One or more security groups for the instance.
returned: always
type: complex
contains:
- group_id:
description: The ID of the security group.
returned: always
type: str
sample: sg-0123456
- group_name:
description: The name of the security group.
returned: always
type: str
sample: my-security-group
status:
description: The current status of the instance.
returned: always
type: str
sample: running
tags:
description: Any tags assigned to the instance.
returned: always
type: dict
sample:
vswitch_id:
description: The ID of the vswitch in which the instance is running.
returned: always
type: str
sample: vsw-dew00abcdef
vpc_id:
description: The ID of the VPC the instance is in.
returned: always
type: dict
sample: vpc-0011223344
ids:
description: List of ECS instance IDs
returned: always
type: list
sample: [i-12345er, i-3245fs]
'''
# import time
# import sys
import traceback
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.alicloud_ecs import get_acs_connection_info, ecs_argument_spec, ecs_connect
# Optional-dependency guard: footmark is required at runtime, but the
# module must still import cleanly so main() can report a friendly error.
HAS_FOOTMARK = False
FOOTMARK_IMP_ERR = None
try:
    from footmark.exception import ECSResponseError
    HAS_FOOTMARK = True
except ImportError:
    # Preserve the traceback so main() can include it in fail_json().
    FOOTMARK_IMP_ERR = traceback.format_exc()
    HAS_FOOTMARK = False
def main():
    """Entry point: gather Alicloud ECS instance facts per the module args.

    Exits via module.exit_json with the matched instances and their IDs,
    or module.fail_json on invalid arguments / missing dependency.
    """
    argument_spec = ecs_argument_spec()
    argument_spec.update(dict(
        availability_zone=dict(aliases=['alicloud_zone']),
        instance_ids=dict(type='list', aliases=['ids']),
        instance_names=dict(type='list', aliases=['names']),
        instance_tags=dict(type='list', aliases=['tags']),
    )
    )
    module = AnsibleModule(argument_spec=argument_spec)

    if HAS_FOOTMARK is False:
        module.fail_json(msg=missing_required_lib('footmark'), exception=FOOTMARK_IMP_ERR)

    ecs = ecs_connect(module)

    instances = []
    instance_ids = []
    ids = module.params['instance_ids']
    names = module.params['instance_names']
    zone_id = module.params['availability_zone']
    if ids and (not isinstance(ids, list) or len(ids) < 1):
        module.fail_json(msg='instance_ids should be a list of instances, aborting')

    # BUG FIX: this branch validates instance_names, but the original
    # message said "instance_ids" (copy-paste error).
    if names and (not isinstance(names, list) or len(names) < 1):
        module.fail_json(msg='instance_names should be a list of instances, aborting')

    if names:
        # Each name (or name prefix) is queried separately; ids, when also
        # given, further restrict every query.
        for name in names:
            for inst in ecs.get_all_instances(zone_id=zone_id, instance_ids=ids, instance_name=name):
                instances.append(inst.read())
                instance_ids.append(inst.id)
    else:
        for inst in ecs.get_all_instances(zone_id=zone_id, instance_ids=ids):
            instances.append(inst.read())
            instance_ids.append(inst.id)

    module.exit_json(changed=False, ids=instance_ids, instances=instances)


if __name__ == '__main__':
    main()
| gpl-3.0 |
eleonrk/SickRage | lib/sqlalchemy/testing/assertions.py | 75 | 15418 | # testing/assertions.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import
from . import util as testutil
from sqlalchemy import pool, orm, util
from sqlalchemy.engine import default, create_engine, url
from sqlalchemy import exc as sa_exc
from sqlalchemy.util import decorator
from sqlalchemy import types as sqltypes, schema
import warnings
import re
from .warnings import resetwarnings
from .exclusions import db_spec, _is_excluded
from . import assertsql
from . import config
import itertools
from .util import fail
import contextlib
def emits_warning(*messages):
    """Mark a test as emitting a warning.

    With no arguments, squelches all SAWarning failures. Or pass one or more
    strings; these will be matched to the root of the warning description by
    warnings.filterwarnings().
    """
    # TODO: it would be nice to assert that a named warning was
    # emitted. should work with some monkeypatching of warnings,
    # and may work on non-CPython if they keep to the spirit of
    # warnings.showwarning's docstring.
    # - update: jython looks ok, it uses cpython's module

    @decorator
    def decorate(fn, *args, **kw):
        # todo: should probably be strict about this, too
        filters = [dict(action='ignore',
                        category=sa_exc.SAPendingDeprecationWarning)]
        if not messages:
            # No specific messages: ignore every SAWarning.
            filters.append(dict(action='ignore',
                                category=sa_exc.SAWarning))
        else:
            filters.extend(dict(action='ignore',
                                message=message,
                                category=sa_exc.SAWarning)
                           for message in messages)
        for f in filters:
            warnings.filterwarnings(**f)
        try:
            return fn(*args, **kw)
        finally:
            # Restore the global warning filters after the test runs.
            resetwarnings()
    return decorate


def emits_warning_on(db, *warnings):
    """Mark a test as emitting a warning on a specific dialect.

    With no arguments, squelches all SAWarning failures. Or pass one or more
    strings; these will be matched to the root of the warning description by
    warnings.filterwarnings().
    """
    spec = db_spec(db)

    @decorator
    def decorate(fn, *args, **kw):
        # Only wrap with the warning filter when the current database
        # matches the given spec; otherwise run the test unmodified.
        if isinstance(db, util.string_types):
            if not spec(config._current):
                return fn(*args, **kw)
            else:
                wrapped = emits_warning(*warnings)(fn)
                return wrapped(*args, **kw)
        else:
            if not _is_excluded(*db):
                return fn(*args, **kw)
            else:
                wrapped = emits_warning(*warnings)(fn)
                return wrapped(*args, **kw)
    return decorate


def uses_deprecated(*messages):
    """Mark a test as immune from fatal deprecation warnings.

    With no arguments, squelches all SADeprecationWarning failures.
    Or pass one or more strings; these will be matched to the root
    of the warning description by warnings.filterwarnings().

    As a special case, you may pass a function name prefixed with //
    and it will be re-written as needed to match the standard warning
    verbiage emitted by the sqlalchemy.util.deprecated decorator.
    """

    @decorator
    def decorate(fn, *args, **kw):
        with expect_deprecated(*messages):
            return fn(*args, **kw)
    return decorate


@contextlib.contextmanager
def expect_deprecated(*messages):
    # Context-manager form of uses_deprecated(): installs ignore filters
    # for the given deprecation messages and resets them on exit.
    # ``//name`` entries are rewritten to the standard "Call to deprecated
    # function name" verbiage.

    # todo: should probably be strict about this, too
    filters = [dict(action='ignore',
                    category=sa_exc.SAPendingDeprecationWarning)]
    if not messages:
        filters.append(dict(action='ignore',
                            category=sa_exc.SADeprecationWarning))
    else:
        filters.extend(
            [dict(action='ignore',
                  message=message,
                  category=sa_exc.SADeprecationWarning)
             for message in
             [(m.startswith('//') and
               ('Call to deprecated function ' + m[2:]) or m)
              for m in messages]])
    for f in filters:
        warnings.filterwarnings(**f)
    try:
        yield
    finally:
        resetwarnings()
def global_cleanup_assertions():
    """Check things that have to be finalized at the end of a test suite.

    Hardcoded at the moment, a modular system can be built here
    to support things like PG prepared transactions, tables all
    dropped, etc.
    """
    _assert_no_stray_pool_connections()


# Count of stray-connection sightings; up to two are tolerated before
# _assert_no_stray_pool_connections() fails hard.
_STRAY_CONNECTION_FAILURES = 0


def _assert_no_stray_pool_connections():
    global _STRAY_CONNECTION_FAILURES

    # lazy gc on cPython means "do nothing." pool connections
    # shouldn't be in cycles, should go away.
    testutil.lazy_gc()

    # however, once in awhile, on an EC2 machine usually,
    # there's a ref in there. usually just one.
    if pool._refs:

        # OK, let's be somewhat forgiving. Increment a counter,
        # we'll allow a couple of these at most.
        _STRAY_CONNECTION_FAILURES += 1

        print("Encountered a stray connection in test cleanup: %s"
              % str(pool._refs))
        # then do a real GC sweep. We shouldn't even be here
        # so a single sweep should really be doing it, otherwise
        # there's probably a real unreachable cycle somewhere.
        testutil.gc_collect()

    # if we've already had two of these occurrences, or
    # after a hard gc sweep we still have pool._refs?!
    # now we have to raise.
    if _STRAY_CONNECTION_FAILURES >= 2 or pool._refs:
        err = str(pool._refs)

        # but clean out the pool refs collection directly,
        # reset the counter,
        # so the error doesn't at least keep happening.
        pool._refs.clear()
        _STRAY_CONNECTION_FAILURES = 0
        assert False, "Stray conections in cleanup: %s" % err
def eq_(a, b, msg=None):
    """Assert a == b, with repr messaging on failure."""
    assert a == b, (msg if msg else "{0!r} != {1!r}".format(a, b))


def ne_(a, b, msg=None):
    """Assert a != b, with repr messaging on failure."""
    assert a != b, (msg if msg else "{0!r} == {1!r}".format(a, b))


def is_(a, b, msg=None):
    """Assert a is b, with repr messaging on failure."""
    assert a is b, (msg if msg else "{0!r} is not {1!r}".format(a, b))


def is_not_(a, b, msg=None):
    """Assert a is not b, with repr messaging on failure."""
    assert a is not b, (msg if msg else "{0!r} is {1!r}".format(a, b))


def startswith_(a, fragment, msg=None):
    """Assert a.startswith(fragment), with repr messaging on failure."""
    assert a.startswith(fragment), (
        msg if msg else "{0!r} does not start with {1!r}".format(a, fragment))
def assert_raises(except_cls, callable_, *args, **kw):
    """Assert that invoking ``callable_`` raises ``except_cls``.

    Any other exception type propagates unchanged.
    """
    raised = False
    try:
        callable_(*args, **kw)
    except except_cls:
        raised = True
    # assert outside the block so it works for AssertionError too !
    assert raised, "Callable did not raise an exception"
def assert_raises_message(except_cls, msg, callable_, *args, **kwargs):
    # ``msg`` is a regular expression matched with re.search against the
    # stringified exception; a missing or non-matching exception fails.
    try:
        callable_(*args, **kwargs)
        assert False, "Callable did not raise an exception"
    except except_cls as e:
        assert re.search(msg, util.text_type(e), re.UNICODE), "%r !~ %s" % (msg, e)
        print(util.text_type(e).encode('utf-8'))
class AssertsCompiledSQL(object):
    # Mixin giving test cases an assert_compile() helper that renders a
    # clause to SQL under a chosen dialect and compares it to an expected
    # string, with optional bind-parameter checks.

    def assert_compile(self, clause, result, params=None,
                       checkparams=None, dialect=None,
                       checkpositional=None,
                       use_default_dialect=False,
                       allow_dialect_select=False,
                       literal_binds=False):
        # Resolve the dialect: explicit flags first, then an explicit
        # ``dialect`` argument, then the test class's __dialect__, then
        # the currently configured database.
        if use_default_dialect:
            dialect = default.DefaultDialect()
        elif allow_dialect_select:
            dialect = None
        else:
            if dialect is None:
                dialect = getattr(self, '__dialect__', None)

            if dialect is None:
                dialect = config.db.dialect
            elif dialect == 'default':
                dialect = default.DefaultDialect()
            elif isinstance(dialect, util.string_types):
                # A dialect name: instantiate it via a URL lookup.
                dialect = url.URL(dialect).get_dialect()()

        kw = {}
        compile_kwargs = {}

        if params is not None:
            kw['column_keys'] = list(params)

        if literal_binds:
            compile_kwargs['literal_binds'] = True

        if isinstance(clause, orm.Query):
            # ORM queries are compiled via their SELECT statement, with
            # labeled columns as the ORM would emit them.
            context = clause._compile_context()
            context.statement.use_labels = True
            clause = context.statement

        if compile_kwargs:
            kw['compile_kwargs'] = compile_kwargs

        c = clause.compile(dialect=dialect, **kw)

        param_str = repr(getattr(c, 'params', {}))

        if util.py3k:
            param_str = param_str.encode('utf-8').decode('ascii', 'ignore')
            print(("\nSQL String:\n" + util.text_type(c) + param_str).encode('utf-8'))
        else:
            print("\nSQL String:\n" + util.text_type(c).encode('utf-8') + param_str)

        # Compare the rendered SQL ignoring newlines/tabs.
        cc = re.sub(r'[\n\t]', '', util.text_type(c))

        eq_(cc, result, "%r != %r on dialect %r" % (cc, result, dialect))

        if checkparams is not None:
            eq_(c.construct_params(params), checkparams)
        if checkpositional is not None:
            p = c.construct_params(params)
            eq_(tuple([p[x] for x in c.positiontup]), checkpositional)
class ComparesTables(object):
    # Mixin comparing an explicit Table definition with its reflected counterpart.
    def assert_tables_equal(self, table, reflected_table, strict_types=False):
        """Assert ``reflected_table`` matches ``table`` column by column.

        With ``strict_types`` the column types must be the exact same class;
        otherwise only type affinity is compared (see ``assert_types_base``).
        """
        assert len(table.c) == len(reflected_table.c)
        for c, reflected_c in zip(table.c, reflected_table.c):
            eq_(c.name, reflected_c.name)
            assert reflected_c is reflected_table.c[c.name]
            eq_(c.primary_key, reflected_c.primary_key)
            eq_(c.nullable, reflected_c.nullable)
            if strict_types:
                msg = "Type '%s' doesn't correspond to type '%s'"
                assert type(reflected_c.type) is type(c.type), \
                    msg % (reflected_c.type, c.type)
            else:
                self.assert_types_base(reflected_c, c)
            if isinstance(c.type, sqltypes.String):
                eq_(c.type.length, reflected_c.type.length)
            # Foreign keys must point at the same set of target column names.
            eq_(
                set([f.column.name for f in c.foreign_keys]),
                set([f.column.name for f in reflected_c.foreign_keys])
            )
            if c.server_default:
                # Reflected server defaults come back as FetchedValue.
                assert isinstance(reflected_c.server_default,
                                  schema.FetchedValue)
        assert len(table.primary_key) == len(reflected_table.primary_key)
        for c in table.primary_key:
            assert reflected_table.primary_key.columns[c.name] is not None
    def assert_types_base(self, c1, c2):
        # Affinity check: e.g. two different string types still compare equal.
        assert c1.type._compare_type_affinity(c2.type),\
            "On column %r, type '%s' doesn't correspond to type '%s'" % \
            (c1.name, c1.type, c2.type)
class AssertsExecutionResults(object):
    # Mixin with helpers asserting the content of result rows and the SQL
    # statements emitted while producing them.
    def assert_result(self, result, class_, *objects):
        """Assert ``result`` rows match ``objects`` in order."""
        result = list(result)
        print(repr(result))
        self.assert_list(result, class_, objects)
    def assert_list(self, result, class_, list):
        """Assert ``result`` and ``list`` have equal length and matching rows."""
        self.assert_(len(result) == len(list),
                     "result list is not the same size as test list, " +
                     "for class " + class_.__name__)
        for i in range(0, len(list)):
            self.assert_row(class_, result[i], list[i])
    def assert_row(self, class_, rowobj, desc):
        """Assert ``rowobj`` is a ``class_`` whose attributes match ``desc``.

        Tuple values in ``desc`` describe related objects: ``(cls, list)``
        recurses into a collection, anything else into a scalar relation.
        """
        self.assert_(rowobj.__class__ is class_,
                     "item class is not " + repr(class_))
        for key, value in desc.items():
            if isinstance(value, tuple):
                if isinstance(value[1], list):
                    self.assert_list(getattr(rowobj, key), value[0], value[1])
                else:
                    self.assert_row(value[0], getattr(rowobj, key), value[1])
            else:
                self.assert_(getattr(rowobj, key) == value,
                             "attribute %s value %s does not match %s" % (
                                 key, getattr(rowobj, key), value))
    def assert_unordered_result(self, result, cls, *expected):
        """As assert_result, but the order of objects is not considered.
        The algorithm is very expensive but not a big deal for the small
        numbers of rows that the test suite manipulates.
        """
        class immutabledict(dict):
            # Hash by identity so plain (unhashable) dicts can live in a set.
            def __hash__(self):
                return id(self)
        found = util.IdentitySet(result)
        expected = set([immutabledict(e) for e in expected])
        # Every result row must be of the expected class.
        for wrong in util.itertools_filterfalse(lambda o: type(o) == cls, found):
            fail('Unexpected type "%s", expected "%s"' % (
                type(wrong).__name__, cls.__name__))
        if len(found) != len(expected):
            fail('Unexpected object count "%s", expected "%s"' % (
                len(found), len(expected)))
        NOVALUE = object()
        def _compare_item(obj, spec):
            # True when every attribute named in spec matches obj;
            # tuple values recurse via assert_unordered_result.
            for key, value in spec.items():
                if isinstance(value, tuple):
                    try:
                        self.assert_unordered_result(
                            getattr(obj, key), value[0], *value[1])
                    except AssertionError:
                        return False
                else:
                    if getattr(obj, key, NOVALUE) != value:
                        return False
            return True
        for expected_item in expected:
            for found_item in found:
                if _compare_item(found_item, expected_item):
                    # Consume the row so it can satisfy only one expectation.
                    found.remove(found_item)
                    break
            else:
                fail(
                    "Expected %s instance with attributes %s not found." % (
                        cls.__name__, repr(expected_item)))
        return True
    def assert_sql_execution(self, db, callable_, *rules):
        """Run ``callable_`` asserting the emitted SQL satisfies ``rules``."""
        assertsql.asserter.add_rules(rules)
        try:
            callable_()
            assertsql.asserter.statement_complete()
        finally:
            assertsql.asserter.clear_rules()
    def assert_sql(self, db, callable_, list_, with_sequences=None):
        """Assert exact SQL strings; a dict rule means 'all of these'."""
        # Sequence-enabled backends may emit a different statement list.
        if with_sequences is not None and config.db.dialect.supports_sequences:
            rules = with_sequences
        else:
            rules = list_
        newrules = []
        for rule in rules:
            if isinstance(rule, dict):
                newrule = assertsql.AllOf(*[
                    assertsql.ExactSQL(k, v) for k, v in rule.items()
                ])
            else:
                newrule = assertsql.ExactSQL(*rule)
            newrules.append(newrule)
        self.assert_sql_execution(db, callable_, *newrules)
    def assert_sql_count(self, db, callable_, count):
        """Assert ``callable_`` emits exactly ``count`` statements."""
        self.assert_sql_execution(
            db, callable_, assertsql.CountStatements(count))
    @contextlib.contextmanager
    def assert_execution(self, *rules):
        """Context-manager form of assert_sql_execution."""
        assertsql.asserter.add_rules(rules)
        try:
            yield
            assertsql.asserter.statement_complete()
        finally:
            assertsql.asserter.clear_rules()
    def assert_statement_count(self, count):
        """Context manager asserting ``count`` statements are executed."""
        return self.assert_execution(assertsql.CountStatements(count))
| gpl-3.0 |
InsightSoftwareConsortium/ITKExamples | src/Core/Common/CreateAnImage/Code.py | 1 | 1123 | #!/usr/bin/env python
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itk

# Create an empty 3-D unsigned-char image of 200 voxels per axis.
Dimension = 3
PixelType = itk.ctype("unsigned char")
ImageType = itk.Image[PixelType, Dimension]
image = ImageType.New()
# Region origin: index (0, 0, 0) on every axis.
region_index = itk.Index[Dimension]()
for axis in range(Dimension):
    region_index[axis] = 0
# Region extent: 200 samples along every axis.
region_size = itk.Size[Dimension]()
for axis in range(Dimension):
    region_size[axis] = 200
region = itk.ImageRegion[Dimension]()
region.SetSize(region_size)
region.SetIndex(region_index)
image.SetRegions(region)
image.Allocate()
print(image)
| apache-2.0 |
MediaSapiens/autonormix | django/contrib/localflavor/ro/ro_counties.py | 428 | 1231 | # -*- coding: utf-8 -*-
"""
A list of Romanian counties as `choices` in a formfield.
This exists as a standalone file so that it's only imported into memory when
explicitly needed.
"""
# (code, localized name) pairs: the standard Romanian county abbreviations
# (plus 'B' for Bucureşti) mapped to the diacritic-spelled county names.
COUNTIES_CHOICES = (
    ('AB', u'Alba'),
    ('AR', u'Arad'),
    ('AG', u'Argeş'),
    ('BC', u'Bacău'),
    ('BH', u'Bihor'),
    ('BN', u'Bistriţa-Năsăud'),
    ('BT', u'Botoşani'),
    ('BV', u'Braşov'),
    ('BR', u'Brăila'),
    ('B', u'Bucureşti'),
    ('BZ', u'Buzău'),
    ('CS', u'Caraş-Severin'),
    ('CL', u'Călăraşi'),
    ('CJ', u'Cluj'),
    ('CT', u'Constanţa'),
    ('CV', u'Covasna'),
    ('DB', u'Dâmboviţa'),
    ('DJ', u'Dolj'),
    ('GL', u'Galaţi'),
    ('GR', u'Giurgiu'),
    ('GJ', u'Gorj'),
    ('HR', u'Harghita'),
    ('HD', u'Hunedoara'),
    ('IL', u'Ialomiţa'),
    ('IS', u'Iaşi'),
    ('IF', u'Ilfov'),
    ('MM', u'Maramureş'),
    ('MH', u'Mehedinţi'),
    ('MS', u'Mureş'),
    ('NT', u'Neamţ'),
    ('OT', u'Olt'),
    ('PH', u'Prahova'),
    ('SM', u'Satu Mare'),
    ('SJ', u'Sălaj'),
    ('SB', u'Sibiu'),
    ('SV', u'Suceava'),
    ('TR', u'Teleorman'),
    ('TM', u'Timiş'),
    ('TL', u'Tulcea'),
    ('VS', u'Vaslui'),
    ('VL', u'Vâlcea'),
    ('VN', u'Vrancea'),
)
| bsd-3-clause |
hanya/BookmarksMenu | pythonpath/bookmarks/migrate.py | 1 | 9378 | # Copyright 2012 Tsutomu Uchino
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from bookmarks.cmdparse import bk_urlencode
OLD_EXT_ID = "mytools.BookmarksMenu"
OLD_COMMAND = "mytools:BookmarskMenu"
OLD_BOOKMARKS_LIB = "BookmarksMenu"
OLD_SHELLCOMMANDS_MODULE = "ShellCommands"
OLD_MACRO_MODULE = OLD_BOOKMARKS_LIB + "." + OLD_SHELLCOMMANDS_MODULE
OLD_MACRO_SHELL = "ShellOpenFile"
OLD_MACRO_EXECUTE = "ExecuteCommand"
OLD_MACRO_OPEN = "OpenWithSpecifiedDirectory"
OLD_MACRO_SAVEAS = "SaveAsWithSpecifiedDirectory"
OLD_ADDTHIS = "mytools_BookmarksMenu.Module4.AddCurrentDocument"
OLD_EDIT = "mytools_BookmarksMenu.Module1.Main"
from bookmarks.bookmark import Separator, Item, Container
from bookmarks.command import BookmarksCommands
class Migration(object):
    """Migrate bookmarks created by the legacy Bookmarks Menu extension.

    The old extension stored its entries as a sub-menu of the Writer menubar
    and generated Basic macros for shell/program entries.  This class locates
    that sub-menu, loads the generated macro module and converts every menu
    entry into the new bookmark item model.
    """
    # Matches one "Sub <name> ... End Sub" block of the generated Basic macro.
    SUB_EXP = re.compile(r"Sub ([^\n]*)$(.*?)^End Sub", re.I | re.M | re.S)
    # Extract two / one double-quoted argument(s) from a macro call.
    TWO_ARGS_EXP = re.compile(r"\"(.*?)\", \"(.*?)\"")
    ONE_ARG_EXP = re.compile(r"\"(.*?)\"")
    def __init__(self, ctx):
        # ctx is the UNO component context used to instantiate services.
        self.ctx = ctx
    def create_service(self, name, args=None):
        """Instantiate a UNO service, optionally with constructor arguments."""
        if args:
            return self.ctx.getServiceManager().\
                createInstanceWithArgumentsAndContext(name, args, self.ctx)
        else:
            return self.ctx.getServiceManager().\
                createInstanceWithContext(name, self.ctx)
    def check(self):
        """Return True when an old bookmarks menu exists in the menubar."""
        self.config_supplier = self.create_service(
            "com.sun.star.ui.ModuleUIConfigurationManagerSupplier")
        manager = self.config_supplier.getUIConfigurationManager(
            "com.sun.star.text.TextDocument")
        self.menu_settings = manager.getSettings(
            "private:resource/menubar/menubar", False)
        self.container = self.find_bookmarks(self.menu_settings)
        return self.container is not None
    def migrate(self):
        """Convert the found menu into new bookmark items and return them."""
        self.macro = None
        self.macros = None
        self.commands = BookmarksCommands()
        self.load_macro()
        self.parse_macro()
        items = self.convert_item(self.container)
        return items
    def find_bookmarks(self, settings):
        """Return the menu entry holding the old bookmarks, or None.

        Fixed to iterate the ``settings`` argument; the original ignored it
        and read ``self.menu_settings`` directly (same object in practice).
        """
        for i in range(settings.getCount()):
            item = settings.getByIndex(i)
            command, label, type, container = self.read_item(item)
            if command == OLD_COMMAND:
                return item
    def read_item(self, item):
        """Return (command, label, type, container) read from a menu item."""
        label = ""
        command = ""
        container = None
        type = None
        for value in item:
            if value.Name == "CommandURL":
                command = value.Value
            elif value.Name == "ItemDescriptorContainer":
                container = value.Value
            elif value.Name == "Label":
                label = value.Value
            elif value.Name == "Type":
                type = value.Value
        return command, label, type, container
    def load_macro(self):
        """Load the macro module generated by the old extension, if present."""
        lib = self.get_macro_lib()
        if lib:
            if lib.hasByName(OLD_SHELLCOMMANDS_MODULE):
                self.macro = lib.getByName(OLD_SHELLCOMMANDS_MODULE)
    def get_macro_lib(self):
        """Return the old Basic library from the application container, or None."""
        libs = self.create_service(
            "com.sun.star.script.ApplicationScriptLibraryContainer")
        if libs.hasByName(OLD_BOOKMARKS_LIB):
            libs.loadLibrary(OLD_BOOKMARKS_LIB)
            return libs.getByName(OLD_BOOKMARKS_LIB)
        return None
    def parse_macro(self):
        """Split the macro source into a dict mapping sub name -> body."""
        if self.macro:
            macros = {}
            for m in self.SUB_EXP.finditer(self.macro):
                macros[m.group(1)] = m.group(2)
            self.macros = macros
    def convert_item(self, item):
        """Recursively convert a menu entry into a bookmarks item.

        Returns a Container for sub-menus, an Item for plain entries
        (type 0), a Separator otherwise, and None for entries that fail
        to convert.
        """
        command, label, type, container = self.read_item(item)
        if container:
            c = Container(label, "")
            children = c.children
            for i in range(container.getCount()):
                _child = self.convert_item(container.getByIndex(i))
                if _child:
                    children.append(_child)
            return c
        elif type == 0:
            try:
                return Item(label, "", self.convert_command(command))
            except Exception:
                # Entries that cannot be converted are silently dropped.
                return None
        else:
            return Separator()
    def convert_command(self, command):
        """Convert an old command URL into the new command format."""
        if command.startswith(BookmarksCommands.PROTOCOL_SCRIPT):
            # Basic macro call generated by the old extension.
            return self.convert_macro(command)
        elif command.startswith(BookmarksCommands.PROTOCOL_MACRO):
            return command
        elif command.startswith(".uno"):
            if command.startswith(".uno:Open"):
                parts = command.split("?", 1)
                if len(parts) == 2:
                    queries = [i.split("=") for i in parts[1].split("&")]
                    d = {}
                    d["type"] = "document"
                    for key, value in queries:
                        if key == self.commands.QUERY_NAME_URL:
                            d["path"] = value
                    return self.commands.generate_command(d)
            elif command.find("?") >= 0:
                # Re-encode the query part of other .uno commands.
                parts = command.split("?", 1)
                if len(parts) == 2:
                    queries = [i.split("=") for i in parts[1].split("&")]
                    q = bk_urlencode(dict(queries))
                    return parts[0] + q
        return command
    def convert_macro(self, command):
        """Convert a macro entry using the parsed macro bodies."""
        parts = command.split(":", 1)
        if len(parts) == 2:
            parts = parts[1].split("?", 1)
            if len(parts) == 2:
                path = parts[0]
                if path in (OLD_ADDTHIS, OLD_EDIT):
                    # Internal commands of the old extension are not migrated.
                    raise Exception("This item is ignored: %s" % command)
                elif path.startswith(OLD_MACRO_MODULE):
                    _path = path[len(OLD_MACRO_MODULE)+1:]
                    try:
                        return self.generate_command_from_macro(
                            _path, self.macros[_path])
                    except Exception:
                        # Unknown/garbled macro: fall through and keep the
                        # original command untouched.
                        pass
        return command
    def generate_command_from_macro(self, path, macro):
        """Generate a new command from the macro type and its Basic body."""
        if path.startswith(OLD_MACRO_SHELL):
            path, args = self.get_arguments_pair(OLD_MACRO_SHELL, macro)
            d = {"type": "something", "flag": "file"}
            d["path"] = path
            return self.commands.generate_command(d)
        elif path.startswith(OLD_MACRO_EXECUTE):
            prog, args = self.get_arguments_pair(OLD_MACRO_EXECUTE, macro)
            if prog in ("xdg-open", "open", "kfmclient openURL", "explorer.exe"):
                # Platform "open this folder" helpers become folder entries.
                d = {"type": "something", "flag": "folder"}
                d["path"] = args
            else:
                d = {"type": "program"}
                d["path"] = prog
                d["arguments"] = args
            return self.commands.generate_command(d)
        elif path.startswith(OLD_MACRO_OPEN):
            path = self.get_argument(OLD_MACRO_OPEN, macro)
            d = {"type": "special", "flag": "open_from_folder"}
            d["path"] = path
            return self.commands.generate_command(d)
        elif path.startswith(OLD_MACRO_SAVEAS):
            path = self.get_argument(OLD_MACRO_SAVEAS, macro)
            d = {"type": "special", "flag": "saveas_into_folder"}
            d["path"] = path
            return self.commands.generate_command(d)
    def get_arguments_pair(self, name, macro):
        """Return the two quoted arguments following ``name`` in ``macro``."""
        index = macro.find(name)
        if index >= 0:
            found = macro[index+len(name):]
            m = self.TWO_ARGS_EXP.search(found)
            if m:
                return m.group(1), m.group(2)
        return "", ""
    def get_argument(self, name, macro):
        """Return the single quoted argument following ``name`` in ``macro``."""
        index = macro.find(name)
        if index >= 0:
            found = macro[index+len(name):]
            m = self.ONE_ARG_EXP.search(found)
            if m:
                return m.group(1)
        return ""
| apache-2.0 |
delta2323/chainer | chainer/functions/math/trigonometric.py | 6 | 5416 | import numpy
from chainer import cuda
from chainer import function
from chainer import utils
from chainer.utils import type_check
class Sin(function.Function):
    """Elementwise sine, with analytic gradient ``cos(x) * gy``."""
    @property
    def label(self):
        return 'sin'
    def check_type_forward(self, in_types):
        # Exactly one floating-point input is expected.
        type_check.expect(in_types.size() == 1)
        type_check.expect(in_types[0].dtype.kind == 'f')
    def forward(self, x):
        xp = cuda.get_array_module(*x)
        return utils.force_array(xp.sin(x[0])),
    def backward_cpu(self, x, gy):
        # d/dx sin(x) = cos(x); multiply in place to avoid a temporary.
        gx = utils.force_array(numpy.cos(x[0]))
        gx *= gy[0]
        return gx,
    def backward_gpu(self, x, gy):
        gx = cuda.elementwise(
            'T x, T gy', 'T gx', 'gx = cos(x) * gy', 'sin_bwd'
        )(x[0], gy[0])
        return gx,
def sin(x):
    """Elementwise sin function.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    func = Sin()
    return func(x)
class Cos(function.Function):
    """Elementwise cosine, with analytic gradient ``-sin(x) * gy``."""
    @property
    def label(self):
        return 'cos'
    def check_type_forward(self, in_types):
        # Exactly one floating-point input is expected.
        type_check.expect(in_types.size() == 1)
        type_check.expect(in_types[0].dtype.kind == 'f')
    def forward(self, x):
        xp = cuda.get_array_module(*x)
        return utils.force_array(xp.cos(x[0])),
    def backward_cpu(self, x, gy):
        # d/dx cos(x) = -sin(x); built up in place to avoid temporaries.
        gx = utils.force_array(numpy.sin(x[0]))
        numpy.negative(gx, out=gx)
        gx *= gy[0]
        return gx,
    def backward_gpu(self, x, gy):
        gx = cuda.elementwise(
            'T x, T gy', 'T gx', 'gx = -sin(x) * gy', 'cos_bwd'
        )(x[0], gy[0])
        return gx,
def cos(x):
    """Elementwise cos function.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    func = Cos()
    return func(x)
class Tan(function.Function):
    """Elementwise tangent, with gradient ``gy / cos(x)**2``."""
    @property
    def label(self):
        return 'tan'
    def check_type_forward(self, in_types):
        # Exactly one floating-point input is expected.
        type_check.expect(in_types.size() == 1)
        type_check.expect(in_types[0].dtype.kind == 'f')
    def forward(self, x):
        xp = cuda.get_array_module(*x)
        return utils.force_array(xp.tan(x[0])),
    def backward(self, x, gy):
        # d/dx tan(x) = 1 / cos(x)^2; the same ufunc chain serves both
        # CPU (numpy) and GPU (cupy) via get_array_module.
        xp = cuda.get_array_module(*x)
        gx = utils.force_array(xp.cos(x[0]))
        xp.square(gx, out=gx)
        xp.reciprocal(gx, out=gx)
        gx *= gy[0]
        return gx,
def tan(x):
    """Elementwise tan function.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    func = Tan()
    return func(x)
class Arcsin(function.Function):
    """Elementwise arcsine, with gradient ``gy / sqrt(1 - x**2)``."""
    @property
    def label(self):
        return 'arcsin'
    def check_type_forward(self, in_types):
        # Exactly one floating-point input is expected.
        type_check.expect(in_types.size() == 1)
        type_check.expect(in_types[0].dtype.kind == 'f')
    def forward(self, x):
        xp = cuda.get_array_module(*x)
        return utils.force_array(xp.arcsin(x[0])),
    def backward_cpu(self, x, gy):
        # d/dx arcsin(x) = 1 / sqrt(1 - x^2), computed in place:
        # gx = x^2 -> -x^2 -> 1 - x^2 -> sqrt -> reciprocal -> * gy.
        gx = utils.force_array(numpy.square(x[0]))
        numpy.negative(gx, out=gx)
        gx += 1
        numpy.sqrt(gx, out=gx)
        numpy.reciprocal(gx, out=gx)
        gx *= gy[0]
        return gx,
    def backward_gpu(self, x, gy):
        gx = cuda.elementwise(
            'T x, T gy', 'T gx',
            'gx = rsqrt((T)1.0 - x * x) * gy',
            'arcsin_bwd'
        )(x[0], gy[0])
        return gx,
def arcsin(x):
    """Elementwise arcsine function.

    .. math::
        y_i = \\arcsin x_i.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    func = Arcsin()
    return func(x)
class Arccos(function.Function):
    """Elementwise arccosine, with gradient ``-gy / sqrt(1 - x**2)``."""
    @property
    def label(self):
        return 'arccos'
    def check_type_forward(self, in_types):
        # Exactly one floating-point input is expected.
        type_check.expect(in_types.size() == 1)
        type_check.expect(in_types[0].dtype.kind == 'f')
    def forward(self, x):
        xp = cuda.get_array_module(*x)
        return utils.force_array(xp.arccos(x[0])),
    def backward_cpu(self, x, gy):
        # d/dx arccos(x) = -1 / sqrt(1 - x^2), computed in place
        # (same chain as Arcsin, plus a final negation).
        gx = utils.force_array(numpy.square(x[0]))
        numpy.negative(gx, out=gx)
        gx += 1
        numpy.sqrt(gx, out=gx)
        numpy.reciprocal(gx, out=gx)
        numpy.negative(gx, out=gx)
        gx *= gy[0]
        return gx,
    def backward_gpu(self, x, gy):
        gx = cuda.elementwise(
            'T x, T gy', 'T gx',
            'gx = -rsqrt((T)1.0 - x * x) * gy',
            'arccos_bwd'
        )(x[0], gy[0])
        return gx,
def arccos(x):
    """Elementwise arccosine function.

    .. math::
        y_i = \\arccos x_i.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    func = Arccos()
    return func(x)
class Arctan(function.Function):
    """Elementwise arctangent, with gradient ``gy / (1 + x**2)``."""
    @property
    def label(self):
        return 'arctan'
    def check_type_forward(self, in_types):
        # Exactly one floating-point input is expected.
        type_check.expect(in_types.size() == 1)
        type_check.expect(in_types[0].dtype.kind == 'f')
    def forward(self, x):
        xp = cuda.get_array_module(*x)
        return utils.force_array(xp.arctan(x[0])),
    def backward_cpu(self, x, gy):
        # d/dx arctan(x) = 1 / (1 + x^2), computed in place.
        gx = utils.force_array(numpy.square(x[0]))
        gx += 1
        numpy.reciprocal(gx, out=gx)
        gx *= gy[0]
        return gx,
    def backward_gpu(self, x, gy):
        gx = cuda.elementwise(
            'T x, T gy', 'T gx',
            'gx = (T)1.0 / ((T)1.0 + x * x) * gy',
            'arctan_bwd'
        )(x[0], gy[0])
        return gx,
def arctan(x):
    """Elementwise arctangent function.

    .. math::
        y_i = \\arctan x_i.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    func = Arctan()
    return func(x)
| mit |
def-/commandergenius | project/jni/python/src/Lib/collections.py | 49 | 6086 | __all__ = ['deque', 'defaultdict', 'namedtuple']
# For bootstrapping reasons, the collection ABCs are defined in _abcoll.py.
# They should however be considered an integral part of collections.py.
from _abcoll import *
import _abcoll
__all__ += _abcoll.__all__
from _collections import deque, defaultdict
from operator import itemgetter as _itemgetter
from keyword import iskeyword as _iskeyword
import sys as _sys
def namedtuple(typename, field_names, verbose=False):
    """Returns a new subclass of tuple with named fields.
    >>> Point = namedtuple('Point', 'x y')
    >>> Point.__doc__ # docstring for the new class
    'Point(x, y)'
    >>> p = Point(11, y=22) # instantiate with positional args or keywords
    >>> p[0] + p[1] # indexable like a plain tuple
    33
    >>> x, y = p # unpack like a regular tuple
    >>> x, y
    (11, 22)
    >>> p.x + p.y # fields also accessable by name
    33
    >>> d = p._asdict() # convert to a dictionary
    >>> d['x']
    11
    >>> Point(**d) # convert from a dictionary
    Point(x=11, y=22)
    >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
    Point(x=100, y=22)
    """
    # Parse and validate the field names. Validation serves two purposes,
    # generating informative error messages and preventing template injection attacks.
    if isinstance(field_names, basestring):
        field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
    field_names = tuple(map(str, field_names))
    # Each name (including the type name) must be a valid, non-keyword,
    # non-numeric-leading Python identifier.
    for name in (typename,) + field_names:
        if not all(c.isalnum() or c=='_' for c in name):
            raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
        if _iskeyword(name):
            raise ValueError('Type names and field names cannot be a keyword: %r' % name)
        if name[0].isdigit():
            raise ValueError('Type names and field names cannot start with a number: %r' % name)
    # Leading underscores are reserved for the namedtuple machinery itself
    # (_make, _asdict, _replace, _fields); duplicates would shadow each other.
    seen_names = set()
    for name in field_names:
        if name.startswith('_'):
            raise ValueError('Field names cannot start with an underscore: %r' % name)
        if name in seen_names:
            raise ValueError('Encountered duplicate field name: %r' % name)
        seen_names.add(name)
    # Create and fill-in the class template
    numfields = len(field_names)
    argtxt = repr(field_names).replace("'", "")[1:-1] # tuple repr without parens or quotes
    reprtxt = ', '.join('%s=%%r' % name for name in field_names)
    dicttxt = ', '.join('%r: t[%d]' % (name, pos) for pos, name in enumerate(field_names))
    template = '''class %(typename)s(tuple):
        '%(typename)s(%(argtxt)s)' \n
        __slots__ = () \n
        _fields = %(field_names)r \n
        def __new__(cls, %(argtxt)s):
            return tuple.__new__(cls, (%(argtxt)s)) \n
        @classmethod
        def _make(cls, iterable, new=tuple.__new__, len=len):
            'Make a new %(typename)s object from a sequence or iterable'
            result = new(cls, iterable)
            if len(result) != %(numfields)d:
                raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
            return result \n
        def __repr__(self):
            return '%(typename)s(%(reprtxt)s)' %% self \n
        def _asdict(t):
            'Return a new dict which maps field names to their values'
            return {%(dicttxt)s} \n
        def _replace(self, **kwds):
            'Return a new %(typename)s object replacing specified fields with new values'
            result = self._make(map(kwds.pop, %(field_names)r, self))
            if kwds:
                raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
            return result \n
        def __getnewargs__(self):
            return tuple(self) \n\n''' % locals()
    # One property per field, each reading its tuple slot via itemgetter.
    for i, name in enumerate(field_names):
        template += ' %s = property(itemgetter(%d))\n' % (name, i)
    if verbose:
        print template
    # Execute the template string in a temporary namespace and
    # support tracing utilities by setting a value for frame.f_globals['__name__']
    namespace = dict(itemgetter=_itemgetter, __name__='namedtuple_%s' % typename)
    try:
        exec template in namespace
    except SyntaxError, e:
        raise SyntaxError(e.message + ':\n' + template)
    result = namespace[typename]
    # For pickling to work, the __module__ variable needs to be set to the frame
    # where the named tuple is created. Bypass this step in enviroments where
    # sys._getframe is not defined (Jython for example).
    if hasattr(_sys, '_getframe'):
        result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
    return result
if __name__ == '__main__':
    # Self-test / demo section (Python 2 syntax).
    # verify that instances can be pickled
    from cPickle import loads, dumps
    Point = namedtuple('Point', 'x, y', True)
    p = Point(x=10, y=20)
    assert p == loads(dumps(p))
    # test and demonstrate ability to override methods
    class Point(namedtuple('Point', 'x y')):
        __slots__ = ()
        @property
        def hypot(self):
            return (self.x ** 2 + self.y ** 2) ** 0.5
        def __str__(self):
            return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot)
    for p in Point(3, 4), Point(14, 5/7.):
        print p
    class Point(namedtuple('Point', 'x y')):
        'Point class with optimized _make() and _replace() without error-checking'
        __slots__ = ()
        _make = classmethod(tuple.__new__)
        def _replace(self, _map=map, **kwds):
            return self._make(_map(kwds.get, ('x', 'y'), self))
    print Point(11, 22)._replace(x=100)
    # subclassing by extending the _fields tuple
    Point3D = namedtuple('Point3D', Point._fields + ('z',))
    print Point3D.__doc__
    # finally, run the module doctests
    import doctest
    TestResults = namedtuple('TestResults', 'failed attempted')
    print TestResults(*doctest.testmod())
| lgpl-2.1 |
DougBurke/astropy | astropy/nddata/nddata_withmixins.py | 14 | 2222 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module implements a class based on NDData with all Mixins.
"""
from .nddata import NDData
from .mixins.ndslicing import NDSlicingMixin
from .mixins.ndarithmetic import NDArithmeticMixin
from .mixins.ndio import NDIOMixin
__all__ = ['NDDataRef']
class NDDataRef(NDArithmeticMixin, NDIOMixin, NDSlicingMixin, NDData):
    """Implements `NDData` with all Mixins.
    This class implements a `NDData`-like container that supports reading and
    writing as implemented in the ``astropy.io.registry`` and also slicing
    (indexing) and simple arithmetics (add, subtract, divide and multiply).
    Notes
    -----
    A key distinction from `NDDataArray` is that this class does not attempt
    to provide anything that was not defined in any of the parent classes.
    See also
    --------
    NDData
    NDArithmeticMixin
    NDSlicingMixin
    NDIOMixin
    Examples
    --------
    The mixins allow operation that are not possible with `NDData` or
    `NDDataBase`, i.e. simple arithmetics::
        >>> from astropy.nddata import NDDataRef, StdDevUncertainty
        >>> import numpy as np
        >>> data = np.ones((3,3), dtype=float)
        >>> ndd1 = NDDataRef(data, uncertainty=StdDevUncertainty(data))
        >>> ndd2 = NDDataRef(data, uncertainty=StdDevUncertainty(data))
        >>> ndd3 = ndd1.add(ndd2)
        >>> ndd3.data # doctest: +FLOAT_CMP
        array([[2., 2., 2.],
        [2., 2., 2.],
        [2., 2., 2.]])
        >>> ndd3.uncertainty.array # doctest: +FLOAT_CMP
        array([[1.41421356, 1.41421356, 1.41421356],
        [1.41421356, 1.41421356, 1.41421356],
        [1.41421356, 1.41421356, 1.41421356]])
    see `NDArithmeticMixin` for a complete list of all supported arithmetic
    operations.
    But also slicing (indexing) is possible::
        >>> ndd4 = ndd3[1,:]
        >>> ndd4.data # doctest: +FLOAT_CMP
        array([2., 2., 2.])
        >>> ndd4.uncertainty.array # doctest: +FLOAT_CMP
        array([1.41421356, 1.41421356, 1.41421356])
    See `NDSlicingMixin` for a description how slicing works (which attributes)
    are sliced.
    """
    # Everything is inherited from NDData and the mixins above; the class
    # deliberately defines no members of its own.
    pass
| bsd-3-clause |
oVirt/ovirt-engine-sdk-tests | src/resource/hostresourcemanager.py | 1 | 3156 | #
# Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ovirtsdk.xml import params
from src.resource.abstractresourcemanager import AbstractResourceManager
from src.infrastructure.annotations import requires
from src.utils.statusutils import StatusUtils
class HostResourceManager(AbstractResourceManager):
    '''
    Host ResourceManager provides host construction,
    location and manipulation services.
    '''
    def __init__(self):
        # Bind this manager to the Host parameter type of the SDK.
        super(HostResourceManager, self).__init__(params.Host)
    # abstract impl
    def get(self, get_only=False, **kwargs):
        """
        Fetches default Host (creates it if not exist)
        @param get_only: do not create on absence
        @param kwargs: keyword args
        @return: Host
        """
        return self._doGet(
            self.getResourceManager().getSdk().hosts,
            get_only=get_only,
            **kwargs
        )
    # abstract impl
    def list(self, **kwargs):
        """
        Lists all available Hosts according
        to keyword args
        @param kwargs: keyword args
        @return: Hosts
        """
        return self.getResourceManager() \
            .getSdk() \
            .hosts \
            .list(**kwargs)
    # abstract impl
    @requires.resources([params.Cluster])
    def add(self, **kwargs):
        """
        Adds default Host/s according to the default configuration/s
        (default configuration can be overridden with custom config
        via keyword args)
        @param kwargs: keyword args
        @return: Host
        """
        # A Cluster resource must already exist (see @requires.resources).
        return self._doAdd(
            self.getResourceManager().getSdk().hosts,
            **kwargs
        )
    # abstract impl
    def update(self, **kwargs):
        """
        Updates default Host according to keyword args
        @param kwargs: keyword args
        @return: Host
        """
        host = self.get()
        if not host:
            self.raiseNotFoundError()
        return host.update(**kwargs)
    # abstract impl
    def remove(self, **kwargs):
        """
        Removes default host according to keyword args
        @param kwargs: keyword args
        @return: Response
        """
        host = self.get()
        if not host:
            self.raiseNotFoundError()
        # A host can only be deleted from maintenance mode, so deactivate
        # it first and wait for the state transition to complete.
        if host.status.state != 'maintenance':
            host.deactivate()
            StatusUtils.wait(self.get, 'maintenance')
        # delete
        response = host.delete()
        # wait till gone
        StatusUtils.waitRemoved(self.get)
        return response
| apache-2.0 |
alexzatsepin/omim | 3party/jansson/doc/ext/refcounting.py | 95 | 1715 | """
refcounting
~~~~~~~~~~~
Reference count annotations for C API functions. Has the same
result as the sphinx.ext.refcounting extension but works for all
functions regardless of the signature, and the reference counting
information is written inline with the documentation instead of a
separate file.
Adds a new directive "refcounting". The directive has no content
and one required positional parameter:: "new" or "borrow".
Example:
.. cfunction:: json_t *json_object(void)
.. refcounting:: new
<description of the json_object function>
:copyright: Copyright (c) 2009-2012 Petri Lehtinen <petri@digip.org>
:license: MIT, see LICENSE for details.
"""
from docutils import nodes
# Custom inline node carrying the reference-count annotation; rendered
# like emphasis (see the visitor functions below).
class refcounting(nodes.emphasis): pass
def visit(self, node):
    # Generic writers (latex/text): render the node exactly like emphasis.
    self.visit_emphasis(node)
def depart(self, node):
    # Generic writers (latex/text): close the node like emphasis.
    self.depart_emphasis(node)
def html_visit(self, node):
    # HTML: emit <em class="refcount"> so the annotation can be styled via CSS.
    self.body.append(self.starttag(node, 'em', '', CLASS='refcount'))
def html_depart(self, node):
    # HTML: close the tag opened in html_visit.
    self.body.append('</em>')
def refcounting_directive(name, arguments, options, content, lineno,
                          content_offset, block_text, state, state_machine):
    """Expand ``.. refcounting:: new|borrow`` into an inline annotation node.

    Returns a one-element node list; raises ValueError for any other
    argument.  (The original raised the undefined name ``Error``, which
    produced a NameError instead of the intended validation error.)
    """
    if arguments[0] == 'borrow':
        text = 'Return value: Borrowed reference.'
    elif arguments[0] == 'new':
        text = 'Return value: New reference.'
    else:
        raise ValueError('Valid arguments: new, borrow')
    return [refcounting(text, text)]
def setup(app):
    # Register the node with a renderer per output format, then the directive
    # itself (0 content lines; 1 required argument, 0 optional, no whitespace).
    app.add_node(refcounting,
                 html=(html_visit, html_depart),
                 latex=(visit, depart),
                 text=(visit, depart))
    app.add_directive('refcounting', refcounting_directive, 0, (1, 0, 0))
| apache-2.0 |
zjuwangg/scrapy | extras/qpsclient.py | 135 | 1531 | """
A spider that generate light requests to meassure QPS troughput
usage:
scrapy runspider qpsclient.py --loglevel=INFO --set RANDOMIZE_DOWNLOAD_DELAY=0 --set CONCURRENT_REQUESTS=50 -a qps=10 -a latency=0.3
"""
from scrapy.spiders import Spider
from scrapy.http import Request
class QPSSpider(Spider):
    name = 'qps'
    benchurl = 'http://localhost:8880/'
    # Max concurrency is limited by global CONCURRENT_REQUESTS setting
    max_concurrent_requests = 8
    # Requests per second goal
    qps = None # same as: 1 / download_delay
    download_delay = None
    # time in seconds to delay server responses
    latency = None
    # number of slots to create
    slots = 1
    def __init__(self, *a, **kw):
        """Normalize string spider arguments; qps takes precedence and is
        converted into an equivalent download_delay."""
        super(QPSSpider, self).__init__(*a, **kw)
        if self.qps is not None:
            self.qps = float(self.qps)
            self.download_delay = 1 / self.qps
        elif self.download_delay is not None:
            self.download_delay = float(self.download_delay)
    def start_requests(self):
        """Yield an endless round-robin stream of benchmark requests."""
        url = self.benchurl
        if self.latency is not None:
            url += '?latency={0}'.format(self.latency)
        slots = int(self.slots)
        if slots > 1:
            # Distinct loopback addresses per slot — presumably so each one
            # maps to its own downloader slot; verify against the benchmark
            # server setup.
            urls = [url.replace('localhost', '127.0.0.%d' % (x + 1)) for x in xrange(slots)]
        else:
            urls = [url]
        idx = 0
        while True:
            url = urls[idx % len(urls)]
            yield Request(url, dont_filter=True)
            idx += 1
    def parse(self, response):
        # Responses are discarded; only request throughput is measured.
        pass
| bsd-3-clause |
ininex/geofire-python | resource/lib/python2.7/site-packages/click/utils.py | 201 | 14916 | import os
import sys
from .globals import resolve_color_default
from ._compat import text_type, open_stream, get_filesystem_encoding, \
get_streerror, string_types, PY2, binary_streams, text_streams, \
filename_to_ui, auto_wrap_for_ansi, strip_ansi, should_strip_ansi, \
_default_text_stdout, _default_text_stderr, is_bytes, WIN
if not PY2:
from ._compat import _find_binary_writer
elif WIN:
from ._winconsole import _get_windows_argv, \
_hash_py_argv, _initial_argv_hash
echo_native_types = string_types + (bytes, bytearray)
def _posixify(name):
return '-'.join(name.split()).lower()
def safecall(func):
    """Wraps a function so that it swallows exceptions.

    The wrapper returns the function's result, or None if any Exception
    was raised.  functools.wraps preserves the wrapped function's
    metadata (__name__, __doc__, ...), which the original lost.
    """
    import functools

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            pass
    return wrapper
def make_str(value):
    """Converts a value into a valid string.

    Bytes are decoded with the filesystem encoding, falling back to
    UTF-8 with replacement characters; anything else goes through
    text_type().
    """
    if not isinstance(value, bytes):
        return text_type(value)
    try:
        return value.decode(get_filesystem_encoding())
    except UnicodeError:
        return value.decode('utf-8', 'replace')
def make_default_short_help(help, max_length=45):
    """Condense a help string to a short form: stop at the first sentence
    (word ending in ``.``) and truncate with ``...`` once *max_length*
    would be exceeded."""
    pieces = []
    used = 0
    stop = False
    for token in help.split():
        if token[-1:] == '.':
            # End of the first sentence -- emit this word, then stop.
            stop = True
        # A following word also costs one separating space.
        cost = 1 + len(token) if pieces else len(token)
        if used + cost > max_length:
            pieces.append('...')
            stop = True
        else:
            if pieces:
                pieces.append(' ')
            pieces.append(token)
        if stop:
            break
        used += cost
    return ''.join(pieces)
class LazyFile(object):
    """A lazy file works like a regular file but it does not fully open
    the file until first access.  It does perform some basic checks
    early to see if the filename parameter does make sense, which makes
    this useful for safely opening files for writing.
    """

    def __init__(self, filename, mode='r', encoding=None, errors='strict',
                 atomic=False):
        self.name = filename
        self.mode = mode
        self.encoding = encoding
        self.errors = errors
        self.atomic = atomic

        if filename == '-':
            # stdin/stdout: nothing to defer, open the stream right away.
            self._f, self.should_close = open_stream(filename, mode,
                                                     encoding, errors)
        else:
            if 'r' in mode:
                # Open and close the file in case we're opening it for
                # reading so that we can catch at least some errors in
                # some cases early.
                open(filename, mode).close()
            # Real open is deferred until open() is called.
            self._f = None
            self.should_close = True

    def __getattr__(self, name):
        # Any unknown attribute forces the real open and delegates to it.
        return getattr(self.open(), name)

    def __repr__(self):
        if self._f is not None:
            return repr(self._f)
        return '<unopened file %r %s>' % (self.name, self.mode)

    def open(self):
        """Opens the file if it's not yet open.  This call might fail with
        a :exc:`FileError`.  Not handling this error will produce an error
        that Click shows.
        """
        if self._f is not None:
            return self._f
        try:
            rv, self.should_close = open_stream(self.name, self.mode,
                                                self.encoding,
                                                self.errors,
                                                atomic=self.atomic)
        except (IOError, OSError) as e:
            from .exceptions import FileError
            raise FileError(self.name, hint=get_streerror(e))
        self._f = rv
        return rv

    def close(self):
        """Closes the underlying file, no matter what."""
        if self._f is not None:
            self._f.close()

    def close_intelligently(self):
        """This function only closes the file if it was opened by the lazy
        file wrapper.  For instance this will never close stdin.
        """
        if self.should_close:
            self.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close_intelligently()

    def __iter__(self):
        # Iteration requires the real file; open it, then defer.
        self.open()
        return iter(self._f)
class KeepOpenFile(object):
    """Proxy around a file object that forwards everything to it but
    never closes it when used as a context manager."""

    def __init__(self, file):
        self._file = file

    def __getattr__(self, name):
        # Delegate every unknown attribute to the wrapped file.
        return getattr(self._file, name)

    def __iter__(self):
        return iter(self._file)

    def __repr__(self):
        return repr(self._file)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # Intentionally a no-op: the wrapped stream stays open.
        pass
def echo(message=None, file=None, nl=True, err=False, color=None):
    """Prints a message plus a newline to the given file or stdout.  On
    first sight, this looks like the print function, but it has improved
    support for handling Unicode and binary data that does not fail no
    matter how badly configured the system is.

    Primarily it means that you can print binary data as well as Unicode
    data on both 2.x and 3.x to the given file in the most appropriate way
    possible.  This is a very carefree function as in that it will try its
    best to not fail.  As of Click 6.0 this includes support for unicode
    output on the Windows console.

    In addition to that, if `colorama`_ is installed, the echo function will
    also support clever handling of ANSI codes.  Essentially it will then
    do the following:

    -   add transparent handling of ANSI color codes on Windows.
    -   hide ANSI codes automatically if the destination file is not a
        terminal.

    .. _colorama: http://pypi.python.org/pypi/colorama

    .. versionchanged:: 6.0
       As of Click 6.0 the echo function will properly support unicode
       output on the windows console.  Not that click does not modify
       the interpreter in any way which means that `sys.stdout` or the
       print statement or function will still not provide unicode support.

    .. versionchanged:: 2.0
       Starting with version 2.0 of Click, the echo function will work
       with colorama if it's installed.

    .. versionadded:: 3.0
       The `err` parameter was added.

    .. versionchanged:: 4.0
       Added the `color` flag.

    :param message: the message to print
    :param file: the file to write to (defaults to ``stdout``)
    :param err: if set to true the file defaults to ``stderr`` instead of
                ``stdout``.  This is faster and easier than calling
                :func:`get_text_stderr` yourself.
    :param nl: if set to `True` (the default) a newline is printed afterwards.
    :param color: controls if the terminal supports ANSI colors or not.  The
                  default is autodetection.
    """
    if file is None:
        if err:
            file = _default_text_stderr()
        else:
            file = _default_text_stdout()

    # Convert non bytes/text into the native string type.
    if message is not None and not isinstance(message, echo_native_types):
        message = text_type(message)

    if nl:
        # Append the newline in the message's own type (text vs bytes).
        message = message or u''
        if isinstance(message, text_type):
            message += u'\n'
        else:
            message += b'\n'

    # If there is a message, and we're in Python 3, and the value looks
    # like bytes, we manually need to find the binary stream and write the
    # message in there.  This is done separately so that most stream
    # types will work as you would expect.  Eg: you can write to StringIO
    # for other cases.
    if message and not PY2 and is_bytes(message):
        binary_file = _find_binary_writer(file)
        if binary_file is not None:
            # Flush the text layer first so writes stay ordered.
            file.flush()
            binary_file.write(message)
            binary_file.flush()
            return

    # ANSI-style support.  If there is no message or we are dealing with
    # bytes nothing is happening.  If we are connected to a file we want
    # to strip colors.  If we are on windows we either wrap the stream
    # to strip the color or we use the colorama support to translate the
    # ansi codes to API calls.
    if message and not is_bytes(message):
        color = resolve_color_default(color)
        if should_strip_ansi(file, color):
            message = strip_ansi(message)
        elif WIN:
            if auto_wrap_for_ansi is not None:
                file = auto_wrap_for_ansi(file)
            elif not color:
                message = strip_ansi(message)

    if message:
        file.write(message)
    file.flush()
def get_binary_stream(name):
    """Return a byte-oriented system stream.

    Essentially returns the stream from the :mod:`sys` module with the
    given name, while smoothing over compatibility issues between Python
    versions (primarily getting binary streams on Python 3).

    :param name: the name of the stream to open.  Valid names are
                 ``'stdin'``, ``'stdout'`` and ``'stderr'``
    """
    opener = binary_streams.get(name)
    if opener is not None:
        return opener()
    raise TypeError('Unknown standard stream %r' % name)
def get_text_stream(name, encoding=None, errors='strict'):
    """Return a text-oriented system stream.

    Usually a wrapped stream around the binary stream from
    :func:`get_binary_stream`, though on Python 3 shortcuts are taken
    for already correctly configured streams.

    :param name: the name of the stream to open.  Valid names are
                 ``'stdin'``, ``'stdout'`` and ``'stderr'``
    :param encoding: overrides the detected default encoding.
    :param errors: overrides the default error mode.
    """
    opener = text_streams.get(name)
    if opener is not None:
        return opener(encoding, errors)
    raise TypeError('Unknown standard stream %r' % name)
def open_file(filename, mode='r', encoding=None, errors='strict',
              lazy=False, atomic=False):
    """Open a file like :class:`File` does, but for manual usage.

    Files are opened non-lazily by default.  Regular files as well as
    stdin/stdout (via ``'-'``) are supported; standard streams come back
    wrapped so a ``with`` block will not accidentally close them::

        with open_file(filename) as f:
            ...

    .. versionadded:: 3.0

    :param filename: the name of the file to open (or ``'-'`` for
                     stdin/stdout).
    :param mode: the mode in which to open the file.
    :param encoding: the encoding to use.
    :param errors: the error handling for this file.
    :param lazy: can be flipped to true to open the file lazily.
    :param atomic: in atomic mode writes go into a temporary file and it's
                   moved on close.
    """
    if lazy:
        return LazyFile(filename, mode, encoding, errors, atomic=atomic)
    stream, should_close = open_stream(filename, mode, encoding, errors,
                                       atomic=atomic)
    if should_close:
        return stream
    # A standard stream: guard it from being closed by context managers.
    return KeepOpenFile(stream)
def get_os_args():
    """Return the argument part of ``sys.argv`` in the most appropriate
    form for processing.

    On most environments this is simply ``sys.argv[1:]``.  On Windows
    under Python 2, however, a list of unicode strings is recovered
    instead, because the narrow argv cannot carry all possible values.

    .. versionadded:: 6.0
    """
    # The unicode argv is only trustworthy while sys.argv is unchanged
    # since interpreter startup.
    argv_unchanged = PY2 and WIN and _initial_argv_hash == _hash_py_argv()
    if argv_unchanged:
        return _get_windows_argv()
    return sys.argv[1:]
def format_filename(filename, shorten=False):
    """Format *filename* for user display.

    Ensures the name can be displayed at all by decoding it to unicode
    in a way that does not fail; optionally strips the leading path.

    :param filename: the filename to prepare for UI display.  This will
                     also convert the filename into unicode without
                     failing.
    :param shorten: when true, reduce the filename to its basename.
    """
    shown = os.path.basename(filename) if shorten else filename
    return filename_to_ui(shown)
def get_app_dir(app_name, roaming=True, force_posix=False):
    r"""Return the per-user config folder for the application, following
    each operating system's convention.

    For an app called ``"Foo Bar"`` typical results are
    ``~/Library/Application Support/Foo Bar`` on Mac OS X,
    ``~/.config/foo-bar`` on Unix (``~/.foo-bar`` with *force_posix*),
    ``C:\Users\<user>\AppData\Roaming\Foo Bar`` on Windows 7 roaming and
    ``C:\Users\<user>\AppData\Local\Foo Bar`` non-roaming.

    .. versionadded:: 2.0

    :param app_name: the application name.  This should be properly
                     capitalized and can contain whitespace.
    :param roaming: controls if the folder should be roaming or not on
                    Windows.  Has no affect otherwise.
    :param force_posix: if this is set to `True` then on any POSIX system
                        the folder will be stored in the home folder with
                        a leading dot instead of the XDG config home or
                        darwin's application support folder.
    """
    if WIN:
        env_key = 'APPDATA' if roaming else 'LOCALAPPDATA'
        folder = os.environ.get(env_key)
        if folder is None:
            folder = os.path.expanduser('~')
        return os.path.join(folder, app_name)
    if force_posix:
        return os.path.join(os.path.expanduser('~/.' + _posixify(app_name)))
    if sys.platform == 'darwin':
        return os.path.join(os.path.expanduser(
            '~/Library/Application Support'), app_name)
    return os.path.join(
        os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')),
        _posixify(app_name))
| mit |
alrusdi/lettuce | tests/integration/lib/Django-1.3/django/db/backends/dummy/base.py | 151 | 1998 | """
Dummy database backend for Django.
Django uses this if the database ENGINE setting is empty (None or empty string).
Each of these API functions, except connection.close(), raises
ImproperlyConfigured.
"""
from django.core.exceptions import ImproperlyConfigured
from django.db.backends import *
from django.db.backends.creation import BaseDatabaseCreation
def complain(*args, **kwargs):
    """Raise ImproperlyConfigured for any attempted database operation."""
    raise ImproperlyConfigured("You haven't set the database ENGINE setting yet.")
def ignore(*args, **kwargs):
    """Silently accept and discard any call (used for rollback-style ops)."""
    return None
class DatabaseError(Exception):
    """Stand-in for a real backend's DatabaseError; never raised here."""
    pass
class IntegrityError(DatabaseError):
    """Stand-in for a real backend's IntegrityError; never raised here."""
    pass
class DatabaseOperations(BaseDatabaseOperations):
    # Quoting a name means real SQL is being built, so it complains.
    quote_name = complain
class DatabaseClient(BaseDatabaseClient):
    # There is no shell client for the dummy backend.
    runshell = complain
class DatabaseIntrospection(BaseDatabaseIntrospection):
    # No schema exists to introspect; every probe complains.
    get_table_list = complain
    get_table_description = complain
    get_relations = complain
    get_indexes = complain
class DatabaseWrapper(BaseDatabaseWrapper):
    # No SQL dialect exists, so no lookup operators are defined.
    operators = {}

    # Override the base class implementations with null
    # implementations.  Anything that tries to actually
    # do something calls complain (raises ImproperlyConfigured);
    # anything that tries to rollback or undo something is
    # silently ignored via ignore.
    _commit = complain
    _rollback = ignore
    enter_transaction_management = complain
    leave_transaction_management = ignore
    set_dirty = complain
    set_clean = complain
    commit_unless_managed = complain
    rollback_unless_managed = ignore
    savepoint = ignore
    savepoint_commit = complain
    savepoint_rollback = ignore
    close = ignore
    cursor = complain

    def __init__(self, *args, **kwargs):
        """Wire up the null implementations of the backend components."""
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        self.features = BaseDatabaseFeatures(self)
        self.ops = DatabaseOperations()
        self.client = DatabaseClient(self)
        self.creation = BaseDatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = BaseDatabaseValidation(self)
| gpl-3.0 |
singh-pratyush96/tournament-udacity | tournament/tournament.py | 1 | 10312 | #!/usr/bin/env python
#
# tournament.py -- implementation of a Swiss-system tournament
#
import psycopg2
from random import shuffle
def connect():
    """Connect to the PostgreSQL database ``tournament``.

    Returns:
        Tuple of (connection, cursor).
    """
    connection = psycopg2.connect("dbname=tournament")
    return connection, connection.cursor()
def existsPlayer(pid):
    """Return True if a player with id *pid* exists."""
    conn, cur = connect()
    cur.execute('select count(*) from players where pid = %s;', (pid,))
    found = cur.fetchall()[0][0] == 1
    conn.close()
    return found
def existsTournament(tournamentid):
    """Return True if a tournament with id *tournamentid* exists."""
    conn, cur = connect()
    cur.execute('select count(*) from tournaments where tid = %s;',
                (tournamentid,))
    found = cur.fetchall()[0][0] == 1
    conn.close()
    return found
def existsTournamentPlayer(tournamentid, pid):
    """Return True if player *pid* is registered in tournament *tournamentid*."""
    conn, cur = connect()
    cur.execute(
        'select count(*) from tournamentplayers where tid = %s and pid = %s;',
        (tournamentid, pid))
    registered = cur.fetchall()[0][0] == 1
    conn.close()
    return registered
def deleteMatches(tournamentid=-1):
    """Reset all match records for a tournament (wins/matches/lastoppid
    back to their column defaults; registrations are kept).

    Args:
        tournamentid (int): ID of tournament whose matches are cleared.
            With the default (-1) matches of all tournaments are reset.

    Returns: True on success, False if the tournament does not exist.
    """
    conn, cur = connect()
    if tournamentid == -1:  # If no argument passed
        sql = 'update tournamentplayers set wins = DEFAULT,' \
              ' matches = DEFAULT, lastoppid = default;'
        cur.execute(sql)
    else:
        if not existsTournament(tournamentid):
            conn.close()
            return False
        sql = 'update tournamentplayers set wins = DEFAULT,' \
              ' matches = DEFAULT, lastoppid = default where tid = %s;'
        cur.execute(sql, (tournamentid,))
    conn.commit()
    conn.close()
    return True
def deleteTournamentPlayers(tournamentid=-1):
    """Remove player registrations.

    Args:
        tournamentid (int): Tournament whose registrations are deleted.
            With the default (-1) registrations of all tournaments are
            deleted.

    Returns: True on success, False if the tournament does not exist.
    """
    conn, cur = connect()
    if tournamentid == -1:  # If no argument passed
        # Bug fix: the single execute() previously passed a parameter
        # tuple that this placeholder-free statement cannot accept (or,
        # depending on indentation, never executed the -1 branch at all).
        # Execute each branch with its matching parameters.
        cur.execute('delete from tournamentplayers;')
    else:
        if not existsTournament(tournamentid):
            conn.close()
            return False
        cur.execute('delete from tournamentplayers where tid = %s;',
                    (tournamentid,))
    conn.commit()
    conn.close()
    return True
def countTournamentPlayers(tournamentid=-1):
    """Return the number of distinct players registered.

    Args:
        tournamentid (int): Tournament ID to count players for.  With the
            default (-1), count players registered in any tournament.

    Returns: (status, count); count is -1 when the tournament is unknown.
    """
    conn, cur = connect()
    # Get count of rows in player relation
    if tournamentid == -1:
        sql = 'select count(distinct pid) from tournamentplayers;'
        cur.execute(sql)
    else:
        if not existsTournament(tournamentid):
            conn.close()
            return False, -1
        sql = 'select count(distinct pid) from tournamentplayers ' \
              'where tid = %s;'
        cur.execute(sql, (tournamentid,))
    player_count = cur.fetchall()[0][0]
    conn.close()
    return True, player_count
def playerCount():
    """Return the total number of players, registered for a tournament or not."""
    conn, cur = connect()
    cur.execute('select count(*) from players;')
    total = cur.fetchall()[0][0]
    conn.close()
    return total
def registerPlayer(name):
    """Adds a player to the tournament database.

    The database assigns a unique serial id number for the player (via
    the ``pid`` serial column, not in Python code).

    Args:
        name: the player's full name (need not be unique).

    Returns: ID of the registered player.
    """
    conn, cur = connect()
    # RETURNING hands back the generated serial id in the same round trip.
    sql = 'insert into players (pname) values (%s) returning pid;'
    cur.execute(sql, (name,))
    pid = cur.fetchall()[0][0]
    conn.commit()
    conn.close()
    return pid
def playerStandings(tournamentid=-1):
    """Return players and their win records, sorted by wins.

    The first entry is the player in first place, or a player tied for
    first place if there is currently a tie.  Sorting is performed by
    the underlying views (``all_tournament_player_stats`` /
    ``tournament_players_stats``).

    Args:
        tournamentid (int): Tournament to report on; with the default
            (-1) standings across all tournaments are returned.

    Returns: (status, list of (id, name, wins, matches) tuples) where
        id: the player's unique id (assigned by the database)
        name: the player's full name (as registered)
        wins: the number of matches the player has won
        matches: the number of matches the player has played
    """
    conn, cur = connect()
    if tournamentid == -1:
        sql = 'select * from all_tournament_player_stats;'
        cur.execute(sql)
        list1 = cur.fetchall()
    else:
        if not existsTournament(tournamentid):
            conn.close()
            return False, []
        sql = 'select pid, pname, wins, matches from tournament_players_stats' \
              ' where tid = %s;'
        cur.execute(sql, (tournamentid,))
        list1 = cur.fetchall()
    conn.close()
    return True, list1
def clearPlayers():
    """Delete every player together with every tournament registration."""
    conn, cur = connect()
    cur.execute('delete from players; delete from tournamentplayers;')
    conn.commit()
    conn.close()
def reportMatch(tournamentid, winner, loser):
    """Report the result of a match.  *winner* and *loser* are the same
    player in case of a 'bye' (the player gets a free win).

    Args:
        tournamentid: Tournament ID
        winner: Winner ID
        loser: Loser ID

    Returns: True on success, False when either player is not registered
    for the tournament.
    """
    conn, cur = connect()
    if not existsTournamentPlayer(tournamentid, winner) or \
            not existsTournamentPlayer(tournamentid, loser):
        conn.close()
        return False
    # Credit the winner with a played match and a win.
    sql = 'update tournamentplayers set matches = matches + 1,' \
          ' wins = wins + 1, lastoppid = %s where tid = %s and pid = %s;'
    cur.execute(sql, (loser, tournamentid, winner))
    if winner != loser:  # If not a bye
        sql = 'update tournamentplayers set matches = matches + 1,' \
              ' lastoppid = %s where tid = %s and pid = %s;'
        cur.execute(sql, (winner, tournamentid, loser))
    conn.commit()
    conn.close()
    # Bug fix: the success path previously fell off the end returning
    # None while the failure path returned False; report status
    # consistently as the docstring promises.
    return True
def swissPairings(tournamentid):
    """Return a list of player pairs for the next round of a match.

    Assuming an even number of registered players, each player appears
    exactly once in the pairings, matched with a player adjacent in the
    standings.  With an odd number of players one player is given a
    'bye' (recorded via reportMatch with winner == loser).

    Returns:
        A list of tuples, each of which contains (id1, name1, id2, name2)
          id1: the first player's unique id
          name1: the first player's name
          id2: the second player's unique id
          name2: the second player's name
    """
    conn, cur = connect()
    sql = 'select pid, pname, lastoppid from tournament_players_stats' \
          ' where tid = %s;'
    cur.execute(sql, (tournamentid,))
    players = cur.fetchall()

    # Odd players, bye one who wasn't byed last time
    # NOTE(review): the condition below selects a player whose lastoppid
    # equals their own pid, i.e. one who apparently WAS byed last round
    # (reportMatch sets lastoppid = pid for a bye).  That looks inverted
    # relative to the comment above -- confirm the intended semantics.
    if len(players) % 2 == 1:
        tempList = list(players)
        shuffle(tempList)
        byed = False
        randomFirst = tempList[0]
        while not byed and len(tempList) > 0:
            if tempList[0][0] == tempList[0][2]:
                players.remove(tempList[0])
                reportMatch(tournamentid, tempList[0][0], tempList[0][0])
                byed = True
            tempList.remove(tempList[0])
        if not byed:
            # Nobody matched the condition: bye a random player instead.
            reportMatch(tournamentid, randomFirst[0], randomFirst[0])
            players.remove(randomFirst)

    # Arrange players, no rematch
    pairs = []
    while len(players) > 2:  # No. of players will always be odd
        player1 = players[0]
        player2 = players[1]
        if player1[2] == player2[0]:
            # Would be a rematch: skip to the next candidate.
            player2 = players[2]
        players.remove(player1)
        players.remove(player2)
        pairs.append((player1[0], player1[1], player2[0], player2[1]))

    # Add remaining two players
    # NOTE(review): this final pair is not checked for a rematch,
    # unlike the loop above -- confirm whether that is acceptable.
    pairs.append((players[0][0], players[0][1], players[1][0], players[1][1]))
    conn.close()
    return pairs
def addTournament(name):
    """Register a new tournament.

    Args:
        name: Name of tournament.

    Returns:
        ID of the tournament added (database-assigned serial).
    """
    conn, cur = connect()
    # RETURNING hands back the generated serial id in the same round trip.
    sql = 'insert into tournaments (tname) values(%s) returning tid;'
    cur.execute(sql, (name,))
    tid = cur.fetchall()[0][0]
    conn.commit()
    conn.close()
    return tid
def addPlayerTournament(tid, pid):
    """Register an already-created player into a tournament.

    Args:
        tid: Tournament ID
        pid: Player ID

    Returns: True on success, False when the tournament or the player
    does not exist.
    """
    if not existsTournament(tid) or not existsPlayer(pid):
        return False
    conn, cur = connect()
    cur.execute('insert into tournamentplayers (tid, pid) values (%s, %s);',
                (tid, pid))
    conn.commit()
    conn.close()
    return True
def countTournaments():
    """Return the number of tournaments."""
    conn, cur = connect()
    cur.execute('select count(*) from tournaments;')
    total = cur.fetchall()[0][0]
    conn.close()
    return total
def clearTournaments():
    """Delete every tournament together with its player registrations."""
    conn, cur = connect()
    cur.execute('delete from tournamentplayers; delete from tournaments;')
    conn.commit()
    conn.close()
Rosebotics/pymata-aio | examples/sparkfun_redbot/pixy/pixy_set_brightness.py | 1 | 2302 | #!/usr/bin/env python3
"""
Just playing with changing the Pixy brightness setting.
We strongly recommend you figure out an appropriate brightness value in PixyMon then use this command once
to set the brightness to the value you found works well.
"""
from pymata_aio.pymata3 import PyMata3
from pymata_aio.pymata3 import Constants
WIFLY_IP_ADDRESS = None # Leave set as None if not using WiFly
WIFLY_IP_ADDRESS = "10.0.1.19" # If using a WiFly on the RedBot, set the ip address here.
#WIFLY_IP_ADDRESS = "r01.wlan.rose-hulman.edu" # If your WiFi network allows it, you can use the device hostname instead.
if WIFLY_IP_ADDRESS:
# arduino_wait is a timer parameter to allow for the arduino to reboot when the connection is made which is NA for WiFly.
board = PyMata3(arduino_wait=0, ip_address=WIFLY_IP_ADDRESS)
else:
# Use a USB cable to RedBot or an XBee connection instead of WiFly.
COM_PORT = None # Use None for automatic com port detection, or set if needed i.e. "COM7"
board = PyMata3(com_port=COM_PORT)
def print_pixy_blocks(blocks):
    """Print a summary line followed by one detail line per Pixy block."""
    print("Detected " + str(len(blocks)) + " Pixy blocks:")
    if blocks and "signature" not in blocks[0]:
        # Malformed data: report it and stop talking to the board.
        print("Something went wrong. This does not appear to be a printable block.")
        board.shutdown()
        return
    for index, block in enumerate(blocks):
        print("  block {}: sig: {} x: {} y: {} width: {} height: {}".format(
            index, block["signature"], block["x"], block["y"], block["width"], block["height"]))
def main():
    """Cycle forever through three Pixy brightness settings, printing the
    blocks detected at each level."""
    board.pixy_init()
    board.keep_alive(period=2)
    print("Set brightness example")
    while True:
        # Label / brightness pairs tried in order each cycle.
        for label, brightness in (("Normal room", 80),
                                  ("Darker room", 100),
                                  ("Very dark room", 150)):
            print(label)
            board.pixy_set_brightness(brightness)
            board.sleep(0.25)
            print_pixy_blocks(board.pixy_get_blocks())
            board.sleep(2.0)


# Bug fix: guard the entry point so importing this module does not start
# the (endless) demo loop.
if __name__ == "__main__":
    main()
| gpl-3.0 |
dcroc16/skunk_works | google_appengine/lib/django-1.4/django/contrib/gis/geos/prototypes/__init__.py | 314 | 1305 | """
This module contains all of the GEOS ctypes function prototypes. Each
prototype handles the interaction between the GEOS library and Python
via ctypes.
"""
# Coordinate sequence routines.
from django.contrib.gis.geos.prototypes.coordseq import (create_cs, get_cs,
cs_clone, cs_getordinate, cs_setordinate, cs_getx, cs_gety, cs_getz,
cs_setx, cs_sety, cs_setz, cs_getsize, cs_getdims)
# Geometry routines.
from django.contrib.gis.geos.prototypes.geom import (from_hex, from_wkb, from_wkt,
create_point, create_linestring, create_linearring, create_polygon, create_collection,
destroy_geom, get_extring, get_intring, get_nrings, get_geomn, geom_clone,
geos_normalize, geos_type, geos_typeid, geos_get_srid, geos_set_srid,
get_dims, get_num_coords, get_num_geoms,
to_hex, to_wkb, to_wkt)
# Miscellaneous routines.
from django.contrib.gis.geos.prototypes.misc import *
# Predicates
from django.contrib.gis.geos.prototypes.predicates import (geos_hasz, geos_isempty,
geos_isring, geos_issimple, geos_isvalid, geos_contains, geos_crosses,
geos_disjoint, geos_equals, geos_equalsexact, geos_intersects,
geos_intersects, geos_overlaps, geos_relatepattern, geos_touches, geos_within)
# Topology routines
from django.contrib.gis.geos.prototypes.topology import *
| mit |
devopshq/crosspm | crosspm/cpm.py | 1 | 16409 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
{app_name}
Usage:
crosspm download [options]
crosspm lock [DEPS] [DEPSLOCK] [options]
crosspm usedby [DEPS] [options]
crosspm pack <OUT> <SOURCE> [options]
crosspm cache [size | age | clear [hard]]
crosspm -h | --help
crosspm --version
Options:
<OUT> Output file.
<SOURCE> Source directory path.
-h, --help Show this screen.
--version Show version.
-L, --list Do not load packages and its dependencies. Just show what's found.
-v LEVEL, --verbose=LEVEL Set output verbosity: ({verb_level}) [default: ].
-l LOGFILE, --log=LOGFILE File name for log output. Log level is '{log_default}' if set when verbose doesn't.
-c FILE, --config=FILE Path to configuration file.
-o OPTIONS, --options OPTIONS Extra options.
--deps-path=FILE Path to file with dependencies [./{deps_default}]
--depslock-path=FILE Path to file with locked dependencies [./{deps_lock_default}]
--dependencies-content=CONTENT Content for dependencies.txt file
--dependencies-lock-content=CONTENT Content for dependencies.txt.lock file
--lock-on-success Save file with locked dependencies next to original one if download succeeds
--out-format=TYPE Output data format. Available formats:({out_format}) [default: {out_format_default}]
--output=FILE Output file name (required if --out_format is not stdout)
--output-template=FILE Template path, e.g. nuget.packages.config.j2 (required if --out_format=jinja)
--no-fails Ignore fails config if possible.
--recursive=VALUE Process all packages recursively to find and lock all dependencies
--prefer-local Do not search package if exist in cache
--stdout Print info and debug message to STDOUT, error to STDERR. Otherwise - all messages to STDERR
""" # noqa
import logging
import os
import shlex
import sys
import time
from docopt import docopt
from crosspm import version
from crosspm.helpers.archive import Archive
from crosspm.helpers.config import (
CROSSPM_DEPENDENCY_LOCK_FILENAME,
CROSSPM_DEPENDENCY_FILENAME,
Config,
)
from crosspm.helpers.content import DependenciesContent
from crosspm.helpers.downloader import Downloader
from crosspm.helpers.exceptions import * # noqa
from crosspm.helpers.locker import Locker
from crosspm.helpers.output import Output
from crosspm.helpers.python import get_object_from_string
from crosspm.helpers.usedby import Usedby
app_name = 'CrossPM (Cross Package Manager) version: {version} The MIT License (MIT)'.format(version=version)
def do_run(func):
    """Decorator for CrossPM command methods: translate known CrossPM
    exceptions into ``exit(code, msg)`` results and wrap a successful
    result as ``(0, result)``."""
    def wrapper(self, *args, **kwargs):
        try:
            res = func(self, *args, **kwargs)
        except CrosspmExceptionWrongArgs as e:
            # Bad CLI arguments: show usage before exiting.
            print(__doc__)
            return self.exit(e.error_code, e.msg)
        except CrosspmException as e:
            return self.exit(e.error_code, e.msg)
        except Exception as e:
            # Unexpected failure: log the traceback, exit with generic code.
            self._log.exception(e)
            return self.exit(CROSSPM_ERRORCODE_UNKNOWN_ERROR, 'Unknown error occurred!')
        return 0, res
    return wrapper
class CrossPM:
_ready = False
def __init__(self, args=None, throw_exceptions=None, return_result=False):
    """Parse CLI arguments and select the command class to run.

    :param args: raw argument string or list; ``None`` means
        ``sys.argv[1:]``.
    :param throw_exceptions: when ``None`` the legacy rule applies:
        exceptions are thrown unless *return_result* is set.
    :param return_result: return results to the caller instead of
        relying on process exit codes.
    """
    self._config = None
    self._output = None
    self._return_result = return_result

    if throw_exceptions is None:
        # legacy behavior: returning a result implies not raising
        if self._return_result:
            self._throw_exceptions = False
        else:
            self._throw_exceptions = True
    else:
        self._throw_exceptions = throw_exceptions

    self._log = logging.getLogger('crosspm')
    args = self.prepare_args(args)
    # Fill the module-level docopt usage template with current defaults.
    docopt_str = __doc__.format(app_name=app_name,
                                verb_level=Config.get_verbosity_level(),
                                log_default=Config.get_verbosity_level(0, True),
                                deps_default=CROSSPM_DEPENDENCY_FILENAME,
                                deps_lock_default=CROSSPM_DEPENDENCY_LOCK_FILENAME,
                                out_format=Output.get_output_types(),
                                out_format_default='stdout',
                                )
    self._args = docopt(docopt_str,
                        argv=args,
                        version=version)

    # --recursive arrives as a string; normalize it to a real bool.
    if self._args['--recursive']:
        recursive_str = self._args['--recursive']
        if recursive_str.lower() == 'true':
            self._args['--recursive'] = True
        elif recursive_str.lower() == 'false':
            self._args['--recursive'] = False
        else:
            raise Exception("Unknown value to --recursive: {}".format(recursive_str))

    # NOTE(review): docopt() returns a dict, so this str branch looks
    # unreachable -- confirm before relying on it.
    if isinstance(self._args, str):
        if self._throw_exceptions:
            print(app_name)
            print(self._args)
            exit()

    self._ready = True
    if self._args['download']:
        self.command_ = Downloader
    elif self._args['lock']:
        self.command_ = Locker
    elif self._args['usedby']:
        self.command_ = Usedby
    else:
        self.command_ = None
@property
def stdout(self):
    """Whether to split output between STDOUT and STDERR.

    True when the ``--stdout`` flag was passed, or when the
    ``CROSSPM_STDOUT`` environment variable is set to any value at all
    (its content is not interpreted).
    """
    if self._args['--stdout']:
        return True
    return os.getenv('CROSSPM_STDOUT', None) is not None
@staticmethod
def prepare_args(args, windows=None):
"""
Prepare args - add support for old interface, e.g:
- --recursive was "flag" and for now it support True or False value
:param args:
:return:
"""
if windows is None:
windows = "win" in sys.platform
if isinstance(args, str):
args = shlex.split(args, posix=not windows)
elif isinstance(args, list):
pass
elif args is None:
args = sys.argv[1:]
else:
raise Exception("Unknown args type: {}".format(type(args)))
# --recursive => --recursive=True|False convert
for position, argument in enumerate(args):
# Normal way, skip change
if argument.lower() in ('--recursive=true', '--recursive=false'):
return args
elif argument.lower() == '--recursive':
if len(args) > position + 1 and args[position + 1].lower() in ["true", "false"]:
# --recursive true | false
return args
else:
# legacy way, convert --recursive to --recursive=true
args[position] = "--recursive=True"
return args
return args
@do_run
def read_config(self):
    """Build ``self._config`` and ``self._output`` from parsed arguments.

    Dependency descriptions may come from --deps-path / --depslock-path
    files or, alternatively, inline via --dependencies-content /
    --dependencies-lock-content.  For the ``lock`` command the
    positional DEPS / DEPSLOCK arguments override both.
    """
    _deps_path = self._args['--deps-path']
    # Content passed directly on the command line instead of a file path.
    if _deps_path is None and self._args['--dependencies-content'] is not None:
        _deps_path = DependenciesContent(self._args['--dependencies-content'])
    _depslock_path = self._args['--depslock-path']
    if _depslock_path is None and self._args['--dependencies-lock-content'] is not None:
        _depslock_path = DependenciesContent(self._args['--dependencies-lock-content'])
    if self._args['lock']:
        if self._args['DEPS']:
            _deps_path = self._args['DEPS']
        if self._args['DEPSLOCK']:
            _depslock_path = self._args['DEPSLOCK']
    self._config = Config(self._args['--config'], self._args['--options'], self._args['--no-fails'], _depslock_path,
                          _deps_path, self._args['--lock-on-success'],
                          self._args['--prefer-local'])
    self._output = Output(self._config.output('result', None), self._config.name_column, self._config)
def exit(self, code, msg):
self._log.critical(msg)
if self._throw_exceptions:
sys.exit(code)
else:
return code, msg
@property
def recursive(self):
if self.command_ is Downloader:
if self._args['--recursive'] is None:
recursive = True
else:
recursive = self._args['--recursive']
else:
if self._args['--recursive'] is None:
recursive = False
else:
recursive = self._args['--recursive']
return recursive
@do_run
def check_common_args(self):
if self._args['--output']:
output = self._args['--output'].strip().strip("'").strip('"')
output_abs = os.path.abspath(output)
if os.path.isdir(output_abs):
raise CrosspmExceptionWrongArgs(
'"%s" is a directory - can\'t write to it'
)
self._args['--output'] = output
@do_run
def set_logging_level(self):
level_str = self._args['--verbose'].strip().lower()
log = self._args['--log']
if log:
log = log.strip().strip("'").strip('"')
log_abs = os.path.abspath(log)
if os.path.isdir(log_abs):
raise CrosspmExceptionWrongArgs(
'"%s" is a directory - can\'t write log to it'
)
else:
log_dir = os.path.dirname(log_abs)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
else:
log_abs = None
level = Config.get_verbosity_level(level_str or 'console')
self._log.handlers = []
if level or log_abs:
self._log.setLevel(level)
format_str = '%(asctime)-19s [%(levelname)-9s] %(message)s'
if level_str == 'debug':
format_str = '%(asctime)-19s [%(levelname)-9s] %(name)-12s: %(message)s'
formatter = logging.Formatter(format_str, datefmt="%Y-%m-%d %H:%M:%S")
if level:
# legacy way - Cmake catch message from stdout and parse PACKAGE_ROOT
# So, crosspm print debug and info message to stderr for debug purpose
if not self.stdout:
sh = logging.StreamHandler(stream=sys.stderr)
sh.setLevel(level)
self._log.addHandler(sh)
# If --stdout flag enabled
else:
sh = logging.StreamHandler(stream=sys.stderr)
sh.setLevel(logging.WARNING)
self._log.addHandler(sh)
sh = logging.StreamHandler(stream=sys.stdout)
sh.setLevel(level)
self._log.addHandler(sh)
if log_abs:
if not level_str:
level = Config.get_verbosity_level(0)
fh = logging.FileHandler(filename=log_abs)
fh.setLevel(level)
fh.setFormatter(formatter)
self._log.addHandler(fh)
    def run(self):
        """Execute the sub-command selected by the parsed CLI arguments.

        Sets up logging, validates common arguments, reads the config, then
        dispatches on the parsed command flags.

        :return: (errorcode, msg) tuple; errorcode is 0 on success.
        """
        # NOTE(review): if self._ready is False, errorcode/msg are never
        # assigned and the final return raises NameError - confirm _ready is
        # always True by the time run() is called.
        time_start = time.time()
        if self._ready:
            errorcode, msg = self.set_logging_level()
            self._log.info(app_name)
            errorcode, msg = self.check_common_args()
            if errorcode == 0:
                errorcode, msg = self.read_config()
                if errorcode == 0:
                    if self._args['download']:
                        errorcode, msg = self.command(self.command_)
                    elif self._args['lock']:
                        errorcode, msg = self.command(self.command_)
                    elif self._args['usedby']:
                        errorcode, msg = self.command(self.command_)
                    elif self._args['pack']:
                        errorcode, msg = self.pack()
                    elif self._args['cache']:
                        errorcode, msg = self.cache()
                    else:
                        # No known sub-command matched the parsed arguments.
                        errorcode, msg = CROSSPM_ERRORCODE_WRONG_ARGS, self._args
        time_end = time.time()
        self._log.info('Done in %2.2f sec' % (time_end - time_start))
        return errorcode, msg
    @do_run
    def command(self, command_):
        """Run a download/lock/usedby command class and emit its results.

        :param command_: command class to instantiate (e.g. the Downloader
            or Locker class stored in self.command_)
        :return: packages in the form selected by _return_result, or '' when
            results were written through the configured output instead.
        :raise CrosspmExceptionWrongArgs: on conflicting --output/--out-format.
        :raise CrosspmException: if the output template file does not exist.
        """
        if self._return_result:
            params = {}
        else:
            # Writing to a file/stdout: --output and --out-format must agree.
            if self._args['--out-format'] == 'stdout':
                if self._args['--output']:
                    raise CrosspmExceptionWrongArgs(
                        "unwanted argument '--output' while argument '--out-format={}'".format(
                            self._args['--out-format'],
                        ))
            elif not self._args['--output']:
                raise CrosspmExceptionWrongArgs(
                    "argument '--output' required when argument '--out-format={}'".format(
                        self._args['--out-format'],
                    ))

            # Map each output parameter to [CLI option name, fallback value].
            params = {
                'out_format': ['--out-format', ''],
                'output': ['--output', ''],
                'output_template': ['--output-template', ''],
                # 'out_prefix': ['--out-prefix', ''],
                # 'depslock_path': ['--depslock-path', ''],
            }

            for k, v in params.items():
                params[k] = self._args[v[0]] if v[0] in self._args else v[1]
                if isinstance(params[k], str):
                    params[k] = params[k].strip('"').strip("'")

            # try to dynamic load --output-template from python module
            output_template = params['output_template']
            if output_template:
                # Try to load from python module
                module_template = get_object_from_string(output_template)
                if module_template is not None:
                    self._log.debug(
                        "Found output template path '{}' from '{}'".format(module_template, output_template))
                    params['output_template'] = module_template
                else:
                    self._log.debug("Output template '{}' use like file path".format(output_template))

            # check template exist
            output_template = params['output_template']
            if output_template and not os.path.exists(output_template):
                raise CrosspmException(CROSSPM_ERRORCODE_CONFIG_NOT_FOUND,
                                       "Can not find template '{}'".format(output_template))

        do_load = not self._args['--list']
        # hack for Locker
        if command_ is Locker:
            do_load = self.recursive

        cpm_ = command_(self._config, do_load, self.recursive)
        cpm_.entrypoint()

        if self._return_result:
            return self._return(cpm_)
        else:
            # self._output.write(params, packages)
            self._output.write_output(params, cpm_.get_tree_packages())
        return ''
def _return(self, cpm_downloader):
if str(self._return_result).lower() == 'raw':
return cpm_downloader.get_raw_packages()
if str(self._return_result).lower() == 'tree':
return cpm_downloader.get_tree_packages()
else:
return self._output.output_type_module(cpm_downloader.get_tree_packages())
    @do_run
    def pack(self):
        """Handle the ``pack`` sub-command: archive <SOURCE> into <OUT>."""
        Archive.create(self._args['<OUT>'], self._args['<SOURCE>'])
@do_run
def cache(self):
if self._args['clear']:
self._config.cache.clear(self._args['hard'])
elif self._args['size']:
self._config.cache.size()
elif self._args['age']:
self._config.cache.age()
else:
self._config.cache.info()
if __name__ == '__main__':
    # CLI entry point: build the application and run the selected command.
    app = CrossPM()
    app.run()
| mit |
printedheart/seastar | tests/memcached/test_memcached.py | 25 | 22367 | #!/usr/bin/env python3
#
# This file is open source software, licensed to you under the terms
# of the Apache License, Version 2.0 (the "License"). See the NOTICE file
# distributed with this work for additional information regarding copyright
# ownership. You may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from contextlib import contextmanager
import socket
import struct
import sys
import random
import argparse
import time
import re
import unittest
server_addr = None
call = None
args = None
class TimeoutError(Exception):
    """Raised when the memcache server does not become reachable in time.

    NOTE(review): this shadows the builtin ``TimeoutError`` (Python 3.3+);
    callers in this file catch/raise it by this name, so renaming needs care.
    """
    pass
@contextmanager
def tcp_connection(timeout=1):
    """Context manager yielding a call(msg) function over one TCP connection.

    The yielded function sends *msg* (str) and returns up to 16 KiB of the
    server's reply as bytes.

    :param timeout: socket timeout in seconds.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(timeout)
    s.connect(server_addr)

    def call(msg):
        s.send(msg.encode())
        return s.recv(16*1024)

    try:
        yield call
    finally:
        # BUG FIX: close the socket even when the managed block raises;
        # previously an exception leaked the connection.
        s.close()
def slow(f):
    """Decorator: skip the wrapped test when --fast was requested."""
    import functools

    # BUG FIX: without functools.wraps the wrapper hid the test's real name
    # and docstring from unittest's reporting.
    @functools.wraps(f)
    def wrapper(self):
        if args.fast:
            raise unittest.SkipTest('Slow')
        return f(self)
    return wrapper
def recv_all(s):
    """Read from socket *s* until EOF and return everything as one bytes."""
    chunks = []
    while True:
        chunk = s.recv(1024)
        if not chunk:
            return b''.join(chunks)
        chunks.append(chunk)
def tcp_call(msg, timeout=1):
    """Send *msg* over a fresh TCP connection and return the full reply.

    Half-closes the write side so the server sees EOF, then reads until the
    server closes its end.

    :param timeout: socket timeout in seconds.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.settimeout(timeout)
        s.connect(server_addr)
        s.send(msg.encode())
        s.shutdown(socket.SHUT_WR)
        return recv_all(s)
    finally:
        # BUG FIX: previously the socket leaked when connect/send raised.
        s.close()
def udp_call_for_fragments(msg, timeout=1):
    """Send *msg* via the memcache UDP protocol and yield response fragments.

    Fragments are yielded in sequence order once all datagrams of the
    response have arrived.

    :raise Exception: on inconsistent fragment counts, a foreign request id,
        or a duplicated sequence number.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        sock.settimeout(timeout)
        this_req_id = random.randint(-32768, 32767)

        # UDP frame header: request id, sequence number, total datagrams, reserved.
        datagram = struct.pack(">hhhh", this_req_id, 0, 1, 0) + msg.encode()
        sock.sendto(datagram, server_addr)

        messages = {}
        n_determined = None
        while True:
            data, addr = sock.recvfrom(1500)
            req_id, seq, n, res = struct.unpack_from(">hhhh", data)
            content = data[8:]
            if n_determined and n_determined != n:
                raise Exception('Inconsitent number of total messages, %d and %d' % (n_determined, n))
            n_determined = n
            if req_id != this_req_id:
                # BUG FIX: the original concatenated str + int here, raising
                # TypeError instead of the intended diagnostic.
                raise Exception('Invalid request id: %d, expected %d' % (req_id, this_req_id))
            if seq in messages:
                # BUG FIX: same str + int concatenation problem as above.
                raise Exception('Duplicate message for seq=%d' % seq)
            messages[seq] = content
            if len(messages) == n:
                break

        for k, v in sorted(messages.items(), key=lambda e: e[0]):
            yield v
    finally:
        # BUG FIX: close the socket even if the consumer abandons the
        # generator or an error is raised mid-stream.
        sock.close()
def udp_call(msg, **kwargs):
    """Send *msg* over UDP and return the reassembled response as bytes."""
    return b''.join(udp_call_for_fragments(msg, **kwargs))
class MemcacheTest(unittest.TestCase):
    """Base class with protocol helpers shared by all memcache tests.

    All helpers talk to the server through the module-level ``call``
    function, which is bound to tcp_call or udp_call at startup.
    """

    def set(self, key, value, flags=0, expiry=0):
        """Store *value* under *key* and assert the server accepted it."""
        self.assertEqual(call('set %s %d %d %d\r\n%s\r\n' % (key, flags, expiry, len(value), value)), b'STORED\r\n')

    def delete(self, key):
        """Delete *key* and assert it existed."""
        self.assertEqual(call('delete %s\r\n' % key), b'DELETED\r\n')

    def assertHasKey(self, key):
        """Fail unless *key* is present on the server."""
        resp = call('get %s\r\n' % key)
        if not resp.startswith(('VALUE %s' % key).encode()):
            self.fail('Key \'%s\' should be present, but got: %s' % (key, resp.decode()))

    def assertNoKey(self, key):
        """Fail if *key* is present on the server."""
        resp = call('get %s\r\n' % key)
        if resp != b'END\r\n':
            self.fail('Key \'%s\' should not be present, but got: %s' % (key, resp.decode()))

    def setKey(self, key):
        """Store a dummy value under *key*."""
        self.set(key, 'some value')

    def getItemVersion(self, key):
        """Return the CAS unique id of *key*, parsed from a 'gets' reply."""
        m = re.match(r'VALUE %s \d+ \d+ (?P<version>\d+)' % key, call('gets %s\r\n' % key).decode())
        return int(m.group('version'))

    def getStat(self, name, call_fn=None):
        """Return the value of server stat *name* (as a string).

        *call_fn* defaults to the module-level ``call``; pass a connection's
        call function to query stats on a specific connection.
        """
        if not call_fn: call_fn = call
        resp = call_fn('stats\r\n').decode()
        m = re.search(r'STAT %s (?P<value>.+)' % re.escape(name), resp, re.MULTILINE)
        return m.group('value')

    def flush(self):
        """Remove every key from the server."""
        self.assertEqual(call('flush_all\r\n'), b'OK\r\n')

    def tearDown(self):
        # Leave the server empty for the next test.
        self.flush()
class TcpSpecificTests(MemcacheTest):
    """Behaviours only observable over the TCP transport (pipelining,
    'noreply', half-close semantics, connection statistics)."""

    def test_recovers_from_errors_in_the_stream(self):
        """A parse error must not poison subsequent commands on the stream."""
        with tcp_connection() as conn:
            self.assertEqual(conn('get\r\n'), b'ERROR\r\n')
            self.assertEqual(conn('get key\r\n'), b'END\r\n')

    def test_incomplete_command_results_in_error(self):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect(server_addr)
        s.send(b'get')
        # Half-close: server sees EOF mid-command.
        s.shutdown(socket.SHUT_WR)
        self.assertEqual(recv_all(s), b'ERROR\r\n')
        s.close()

    def test_stream_closed_results_in_error(self):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect(server_addr)
        s.shutdown(socket.SHUT_WR)
        self.assertEqual(recv_all(s), b'')
        s.close()

    def test_unsuccesful_parsing_does_not_leave_data_behind(self):
        with tcp_connection() as conn:
            self.assertEqual(conn('set key 0 0 5\r\nhello\r\n'), b'STORED\r\n')
            self.assertRegexpMatches(conn('delete a b c\r\n'), b'^(CLIENT_)?ERROR.*\r\n$')
            self.assertEqual(conn('get key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')
            self.assertEqual(conn('delete key\r\n'), b'DELETED\r\n')

    def test_flush_all_no_reply(self):
        self.assertEqual(call('flush_all noreply\r\n'), b'')

    def test_set_no_reply(self):
        # 'noreply' command pipelined with a 'get' on the same stream.
        self.assertEqual(call('set key 0 0 5 noreply\r\nhello\r\nget key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')
        self.delete('key')

    def test_delete_no_reply(self):
        self.setKey('key')
        self.assertEqual(call('delete key noreply\r\nget key\r\n'), b'END\r\n')

    def test_add_no_reply(self):
        self.assertEqual(call('add key 0 0 1 noreply\r\na\r\nget key\r\n'), b'VALUE key 0 1\r\na\r\nEND\r\n')
        self.delete('key')

    def test_replace_no_reply(self):
        self.assertEqual(call('set key 0 0 1\r\na\r\n'), b'STORED\r\n')
        self.assertEqual(call('replace key 0 0 1 noreply\r\nb\r\nget key\r\n'), b'VALUE key 0 1\r\nb\r\nEND\r\n')
        self.delete('key')

    def test_cas_noreply(self):
        self.assertNoKey('key')
        self.assertEqual(call('cas key 0 0 1 1 noreply\r\na\r\n'), b'')
        self.assertNoKey('key')

        self.assertEqual(call('add key 0 0 5\r\nhello\r\n'), b'STORED\r\n')
        version = self.getItemVersion('key')

        # Wrong version: value must stay untouched.
        self.assertEqual(call('cas key 1 0 5 %d noreply\r\naloha\r\n' % (version + 1)), b'')
        self.assertEqual(call('get key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')

        # Matching version: value is replaced.
        self.assertEqual(call('cas key 1 0 5 %d noreply\r\naloha\r\n' % (version)), b'')
        self.assertEqual(call('get key\r\n'), b'VALUE key 1 5\r\naloha\r\nEND\r\n')

        self.delete('key')

    @slow
    def test_connection_statistics(self):
        with tcp_connection() as conn:
            curr_connections = int(self.getStat('curr_connections', call_fn=conn))
            total_connections = int(self.getStat('total_connections', call_fn=conn))
            with tcp_connection() as conn2:
                self.assertEquals(curr_connections + 1, int(self.getStat('curr_connections', call_fn=conn)))
                self.assertEquals(total_connections + 1, int(self.getStat('total_connections', call_fn=conn)))
            self.assertEquals(total_connections + 1, int(self.getStat('total_connections', call_fn=conn)))
            # Give the server a moment to account the closed connection.
            time.sleep(0.1)
            self.assertEquals(curr_connections, int(self.getStat('curr_connections', call_fn=conn)))
class UdpSpecificTests(MemcacheTest):
    """Behaviours only observable over the UDP transport."""

    def test_large_response_is_split_into_mtu_chunks(self):
        """A response larger than one datagram arrives as multiple fragments,
        each no bigger than the assumed MTU payload."""
        max_datagram_size = 1400
        data = '1' * (max_datagram_size*3)
        self.set('key', data)

        chunks = list(udp_call_for_fragments('get key\r\n'))

        for chunk in chunks:
            self.assertLessEqual(len(chunk), max_datagram_size)

        self.assertEqual(b''.join(chunks).decode(),
            'VALUE key 0 %d\r\n%s\r\n' \
            'END\r\n' % (len(data), data))

        self.delete('key')
class TestCommands(MemcacheTest):
    """Transport-independent protocol tests, run over whichever transport
    the module-level ``call`` was bound to at startup."""

    def test_basic_commands(self):
        self.assertEqual(call('get key\r\n'), b'END\r\n')
        self.assertEqual(call('set key 0 0 5\r\nhello\r\n'), b'STORED\r\n')
        self.assertEqual(call('get key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')
        self.assertEqual(call('delete key\r\n'), b'DELETED\r\n')
        self.assertEqual(call('delete key\r\n'), b'NOT_FOUND\r\n')
        self.assertEqual(call('get key\r\n'), b'END\r\n')

    def test_error_handling(self):
        self.assertEqual(call('get\r\n'), b'ERROR\r\n')

    @slow
    def test_expiry(self):
        """A relative expiry (seconds) removes the key after it elapses."""
        self.assertEqual(call('set key 0 1 5\r\nhello\r\n'), b'STORED\r\n')
        self.assertEqual(call('get key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')
        time.sleep(1)
        self.assertEqual(call('get key\r\n'), b'END\r\n')

    @slow
    def test_expiry_at_epoch_time(self):
        """An absolute (unix-time) expiry is honoured as well."""
        expiry = int(time.time()) + 1
        self.assertEqual(call('set key 0 %d 5\r\nhello\r\n' % expiry), b'STORED\r\n')
        self.assertEqual(call('get key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')
        time.sleep(2)
        self.assertEqual(call('get key\r\n'), b'END\r\n')

    def test_multiple_keys_in_get(self):
        self.assertEqual(call('set key1 0 0 2\r\nv1\r\n'), b'STORED\r\n')
        self.assertEqual(call('set key 0 0 2\r\nv2\r\n'), b'STORED\r\n')
        resp = call('get key1 key\r\n')
        # The server may return the two values in either order.
        self.assertRegexpMatches(resp, b'^(VALUE key1 0 2\r\nv1\r\nVALUE key 0 2\r\nv2\r\nEND\r\n)|(VALUE key 0 2\r\nv2\r\nVALUE key1 0 2\r\nv1\r\nEND\r\n)$')
        self.delete("key")
        self.delete("key1")

    def test_flush_all(self):
        self.set('key', 'value')
        self.assertEqual(call('flush_all\r\n'), b'OK\r\n')
        self.assertNoKey('key')

    def test_keys_set_after_flush_remain(self):
        self.assertEqual(call('flush_all\r\n'), b'OK\r\n')
        self.setKey('key')
        self.assertHasKey('key')
        self.delete('key')

    @slow
    def test_flush_all_with_timeout_flushes_all_keys_even_those_set_after_flush(self):
        self.setKey('key')
        self.assertEqual(call('flush_all 2\r\n'), b'OK\r\n')
        self.assertHasKey('key')
        self.setKey('key2')
        time.sleep(2)
        self.assertNoKey('key')
        self.assertNoKey('key2')

    @slow
    def test_subsequent_flush_is_merged(self):
        self.setKey('key')
        self.assertEqual(call('flush_all 2\r\n'), b'OK\r\n') # Can flush in anything between 1-2
        self.assertEqual(call('flush_all 4\r\n'), b'OK\r\n') # Can flush in anything between 3-4
        time.sleep(2)
        self.assertHasKey('key')
        self.setKey('key2')
        time.sleep(4)
        self.assertNoKey('key')
        self.assertNoKey('key2')

    @slow
    def test_immediate_flush_cancels_delayed_flush(self):
        self.assertEqual(call('flush_all 2\r\n'), b'OK\r\n')
        self.assertEqual(call('flush_all\r\n'), b'OK\r\n')
        self.setKey('key')
        time.sleep(1)
        self.assertHasKey('key')
        self.delete('key')

    @slow
    def test_flushing_in_the_past(self):
        self.setKey('key1')
        time.sleep(1)
        self.setKey('key2')
        key2_time = int(time.time())
        self.assertEqual(call('flush_all %d\r\n' % (key2_time - 1)), b'OK\r\n')
        self.assertNoKey("key1")
        self.assertNoKey("key2")

    @slow
    def test_memcache_does_not_crash_when_flushing_with_already_expred_items(self):
        self.assertEqual(call('set key1 0 2 5\r\nhello\r\n'), b'STORED\r\n')
        time.sleep(1)
        self.assertEqual(call('flush_all\r\n'), b'OK\r\n')

    def test_response_spanning_many_datagrams(self):
        key1_data = '1' * 1000
        key2_data = '2' * 1000
        key3_data = '3' * 1000
        self.set('key1', key1_data)
        self.set('key2', key2_data)
        self.set('key3', key3_data)

        resp = call('get key1 key2 key3\r\n').decode()

        # Order of the three VALUE records is unspecified; compare as a set.
        pattern = '^VALUE (?P<v1>.*?\r\n.*?)\r\nVALUE (?P<v2>.*?\r\n.*?)\r\nVALUE (?P<v3>.*?\r\n.*?)\r\nEND\r\n$'
        self.assertRegexpMatches(resp, pattern)
        m = re.match(pattern, resp)
        self.assertEqual(set([m.group('v1'), m.group('v2'), m.group('v3')]),
            set(['key1 0 %d\r\n%s' % (len(key1_data), key1_data),
                 'key2 0 %d\r\n%s' % (len(key2_data), key2_data),
                 'key3 0 %d\r\n%s' % (len(key3_data), key3_data)]))

        self.delete('key1')
        self.delete('key2')
        self.delete('key3')

    def test_version(self):
        self.assertRegexpMatches(call('version\r\n'), b'^VERSION .*\r\n$')

    def test_add(self):
        self.assertEqual(call('add key 0 0 1\r\na\r\n'), b'STORED\r\n')
        self.assertEqual(call('add key 0 0 1\r\na\r\n'), b'NOT_STORED\r\n')
        self.delete('key')

    def test_replace(self):
        self.assertEqual(call('add key 0 0 1\r\na\r\n'), b'STORED\r\n')
        self.assertEqual(call('replace key 0 0 1\r\na\r\n'), b'STORED\r\n')
        self.delete('key')
        self.assertEqual(call('replace key 0 0 1\r\na\r\n'), b'NOT_STORED\r\n')

    def test_cas_and_gets(self):
        self.assertEqual(call('cas key 0 0 1 1\r\na\r\n'), b'NOT_FOUND\r\n')
        self.assertEqual(call('add key 0 0 5\r\nhello\r\n'), b'STORED\r\n')
        version = self.getItemVersion('key')

        # Every mutation bumps the CAS version by one.
        self.assertEqual(call('set key 1 0 5\r\nhello\r\n'), b'STORED\r\n')
        self.assertEqual(call('gets key\r\n').decode(), 'VALUE key 1 5 %d\r\nhello\r\nEND\r\n' % (version + 1))

        self.assertEqual(call('cas key 0 0 5 %d\r\nhello\r\n' % (version)), b'EXISTS\r\n')
        self.assertEqual(call('cas key 0 0 5 %d\r\naloha\r\n' % (version + 1)), b'STORED\r\n')
        self.assertEqual(call('gets key\r\n').decode(), 'VALUE key 0 5 %d\r\naloha\r\nEND\r\n' % (version + 2))

        self.delete('key')

    def test_curr_items_stat(self):
        self.assertEquals(0, int(self.getStat('curr_items')))
        self.setKey('key')
        self.assertEquals(1, int(self.getStat('curr_items')))
        self.delete('key')
        self.assertEquals(0, int(self.getStat('curr_items')))

    def test_how_stats_change_with_different_commands(self):
        """Track every stat counter through one scripted command sequence."""
        get_count = int(self.getStat('cmd_get'))
        set_count = int(self.getStat('cmd_set'))
        flush_count = int(self.getStat('cmd_flush'))
        total_items = int(self.getStat('total_items'))
        get_misses = int(self.getStat('get_misses'))
        get_hits = int(self.getStat('get_hits'))
        cas_hits = int(self.getStat('cas_hits'))
        cas_badval = int(self.getStat('cas_badval'))
        cas_misses = int(self.getStat('cas_misses'))
        delete_misses = int(self.getStat('delete_misses'))
        delete_hits = int(self.getStat('delete_hits'))
        curr_connections = int(self.getStat('curr_connections'))
        incr_hits = int(self.getStat('incr_hits'))
        incr_misses = int(self.getStat('incr_misses'))
        decr_hits = int(self.getStat('decr_hits'))
        decr_misses = int(self.getStat('decr_misses'))

        call('get key\r\n')
        get_count += 1
        get_misses += 1

        call('gets key\r\n')
        get_count += 1
        get_misses += 1

        call('set key1 0 0 1\r\na\r\n')
        set_count += 1
        total_items += 1

        call('get key1\r\n')
        get_count += 1
        get_hits += 1

        call('add key1 0 0 1\r\na\r\n')
        set_count += 1

        call('add key2 0 0 1\r\na\r\n')
        set_count += 1
        total_items += 1

        call('replace key1 0 0 1\r\na\r\n')
        set_count += 1
        total_items += 1

        call('replace key3 0 0 1\r\na\r\n')
        set_count += 1

        call('cas key4 0 0 1 1\r\na\r\n')
        set_count += 1
        cas_misses += 1

        # Note: a 'cas' with a fetched version also counts a get hit.
        call('cas key1 0 0 1 %d\r\na\r\n' % self.getItemVersion('key1'))
        set_count += 1
        get_count += 1
        get_hits += 1
        cas_hits += 1
        total_items += 1

        call('cas key1 0 0 1 %d\r\na\r\n' % (self.getItemVersion('key1') + 1))
        set_count += 1
        get_count += 1
        get_hits += 1
        cas_badval += 1

        call('delete key1\r\n')
        delete_hits += 1

        call('delete key1\r\n')
        delete_misses += 1

        call('incr num 1\r\n')
        incr_misses += 1
        call('decr num 1\r\n')
        decr_misses += 1

        call('set num 0 0 1\r\n0\r\n')
        set_count += 1
        total_items += 1

        call('incr num 1\r\n')
        incr_hits += 1
        call('decr num 1\r\n')
        decr_hits += 1

        self.flush()
        flush_count += 1

        self.assertEquals(get_count, int(self.getStat('cmd_get')))
        self.assertEquals(set_count, int(self.getStat('cmd_set')))
        self.assertEquals(flush_count, int(self.getStat('cmd_flush')))
        self.assertEquals(total_items, int(self.getStat('total_items')))
        self.assertEquals(get_hits, int(self.getStat('get_hits')))
        self.assertEquals(get_misses, int(self.getStat('get_misses')))
        self.assertEquals(cas_misses, int(self.getStat('cas_misses')))
        self.assertEquals(cas_hits, int(self.getStat('cas_hits')))
        self.assertEquals(cas_badval, int(self.getStat('cas_badval')))
        self.assertEquals(delete_misses, int(self.getStat('delete_misses')))
        self.assertEquals(delete_hits, int(self.getStat('delete_hits')))
        self.assertEquals(0, int(self.getStat('curr_items')))
        self.assertEquals(curr_connections, int(self.getStat('curr_connections')))
        self.assertEquals(incr_misses, int(self.getStat('incr_misses')))
        self.assertEquals(incr_hits, int(self.getStat('incr_hits')))
        self.assertEquals(decr_misses, int(self.getStat('decr_misses')))
        self.assertEquals(decr_hits, int(self.getStat('decr_hits')))

    def test_incr(self):
        self.assertEqual(call('incr key 0\r\n'), b'NOT_FOUND\r\n')

        self.assertEqual(call('set key 0 0 1\r\n0\r\n'), b'STORED\r\n')
        self.assertEqual(call('incr key 0\r\n'), b'0\r\n')
        self.assertEqual(call('get key\r\n'), b'VALUE key 0 1\r\n0\r\nEND\r\n')

        self.assertEqual(call('incr key 1\r\n'), b'1\r\n')
        self.assertEqual(call('incr key 2\r\n'), b'3\r\n')
        # 64-bit counter wraps around on overflow.
        self.assertEqual(call('incr key %d\r\n' % (pow(2, 64) - 1)), b'2\r\n')
        self.assertEqual(call('incr key %d\r\n' % (pow(2, 64) - 3)), b'18446744073709551615\r\n')
        self.assertRegexpMatches(call('incr key 1\r\n').decode(), r'0(\w+)?\r\n')

        self.assertEqual(call('set key 0 0 2\r\n1 \r\n'), b'STORED\r\n')
        self.assertEqual(call('incr key 1\r\n'), b'2\r\n')

        self.assertEqual(call('set key 0 0 2\r\n09\r\n'), b'STORED\r\n')
        self.assertEqual(call('incr key 1\r\n'), b'10\r\n')

    def test_decr(self):
        self.assertEqual(call('decr key 0\r\n'), b'NOT_FOUND\r\n')

        self.assertEqual(call('set key 0 0 1\r\n7\r\n'), b'STORED\r\n')
        self.assertEqual(call('decr key 1\r\n'), b'6\r\n')
        self.assertEqual(call('get key\r\n'), b'VALUE key 0 1\r\n6\r\nEND\r\n')

        # Decrement clamps at zero instead of wrapping.
        self.assertEqual(call('decr key 6\r\n'), b'0\r\n')
        self.assertEqual(call('decr key 2\r\n'), b'0\r\n')

        self.assertEqual(call('set key 0 0 2\r\n20\r\n'), b'STORED\r\n')
        self.assertRegexpMatches(call('decr key 11\r\n').decode(), r'^9( )?\r\n$')

        self.assertEqual(call('set key 0 0 3\r\n100\r\n'), b'STORED\r\n')
        self.assertRegexpMatches(call('decr key 91\r\n').decode(), r'^9( )?\r\n$')

        self.assertEqual(call('set key 0 0 2\r\n1 \r\n'), b'STORED\r\n')
        self.assertEqual(call('decr key 1\r\n'), b'0\r\n')

        self.assertEqual(call('set key 0 0 2\r\n09\r\n'), b'STORED\r\n')
        self.assertEqual(call('decr key 1\r\n'), b'8\r\n')

    def test_incr_and_decr_on_invalid_input(self):
        error_msg = b'CLIENT_ERROR cannot increment or decrement non-numeric value\r\n'
        for cmd in ['incr', 'decr']:
            for value in ['', '-1', 'a', '0x1', '18446744073709551616']:
                self.assertEqual(call('set key 0 0 %d\r\n%s\r\n' % (len(value), value)), b'STORED\r\n')
                prev = call('get key\r\n')
                self.assertEqual(call(cmd + ' key 1\r\n'), error_msg, "cmd=%s, value=%s" % (cmd, value))
                # Value must be left untouched after the failed operation.
                self.assertEqual(call('get key\r\n'), prev)
                self.delete('key')
def wait_for_memcache_tcp(timeout=4):
    """Poll the TCP port until the server accepts a connection.

    :param timeout: total seconds to keep retrying.
    :raise TimeoutError: if the server is not reachable within *timeout*.
    """
    timeout_at = time.time() + timeout
    while True:
        if time.time() >= timeout_at:
            raise TimeoutError()
        # BUG FIX: use a fresh socket per attempt - retrying connect() on a
        # socket whose previous connect() failed is undefined on most
        # platforms - and always close it, also on refusal.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.connect(server_addr)
            break
        except ConnectionRefusedError:
            time.sleep(0.1)
        finally:
            s.close()
def wait_for_memcache_udp(timeout=4):
    """Poll via a UDP 'version' request until the server answers.

    :param timeout: total seconds to keep retrying.
    :raise TimeoutError: if no answer arrives within *timeout*.
    """
    deadline = time.time() + timeout
    while True:
        if time.time() >= deadline:
            raise TimeoutError()
        try:
            udp_call('version\r\n', timeout=0.2)
        except socket.timeout:
            continue
        break
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="memcache protocol tests")
    parser.add_argument('--server', '-s', action="store", help="server adddress in <host>:<port> format", default="localhost:11211")
    parser.add_argument('--udp', '-U', action="store_true", help="Use UDP protocol")
    parser.add_argument('--fast', action="store_true", help="Run only fast tests")
    args = parser.parse_args()

    host, port = args.server.split(':')
    server_addr = (host, int(port))

    # Bind the transport used by every test and wait for the server to be up.
    if args.udp:
        call = udp_call
        wait_for_memcache_udp()
    else:
        call = tcp_call
        wait_for_memcache_tcp()

    # Run the common suite plus the transport-specific one.
    runner = unittest.TextTestRunner()
    loader = unittest.TestLoader()
    suite = unittest.TestSuite()
    suite.addTest(loader.loadTestsFromTestCase(TestCommands))
    if args.udp:
        suite.addTest(loader.loadTestsFromTestCase(UdpSpecificTests))
    else:
        suite.addTest(loader.loadTestsFromTestCase(TcpSpecificTests))
    result = runner.run(suite)
    if not result.wasSuccessful():
        sys.exit(1)
| apache-2.0 |
amilaperera/google-diff-match-patch | python2/diff_match_patch_test.py | 319 | 41744 | #!/usr/bin/python2.4
"""Test harness for diff_match_patch.py
Copyright 2006 Google Inc.
http://code.google.com/p/google-diff-match-patch/
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import time
import unittest
import diff_match_patch as dmp_module
# Force a module reload. Allows one to edit the DMP module and rerun the tests
# without leaving the Python interpreter.
reload(dmp_module)
class DiffMatchPatchTest(unittest.TestCase):

  def setUp(self):
    "Test harness for dmp_module."
    self.dmp = dmp_module.diff_match_patch()

  def diff_rebuildtexts(self, diffs):
    # Reconstruct the (text1, text2) pair the diff was computed from:
    # text1 omits insertions, text2 omits deletions.
    text1 = "".join(op[1] for op in diffs
                    if op[0] != dmp_module.diff_match_patch.DIFF_INSERT)
    text2 = "".join(op[1] for op in diffs
                    if op[0] != dmp_module.diff_match_patch.DIFF_DELETE)
    return (text1, text2)
class DiffTest(DiffMatchPatchTest):
"""DIFF TEST FUNCTIONS"""
  def testDiffCommonPrefix(self):
    """diff_commonPrefix: length of the common prefix of two strings."""
    # Detect any common prefix.
    # Null case.
    self.assertEquals(0, self.dmp.diff_commonPrefix("abc", "xyz"))

    # Non-null case.
    self.assertEquals(4, self.dmp.diff_commonPrefix("1234abcdef", "1234xyz"))

    # Whole case.
    self.assertEquals(4, self.dmp.diff_commonPrefix("1234", "1234xyz"))
  def testDiffCommonSuffix(self):
    """diff_commonSuffix: length of the common suffix of two strings."""
    # Detect any common suffix.
    # Null case.
    self.assertEquals(0, self.dmp.diff_commonSuffix("abc", "xyz"))

    # Non-null case.
    self.assertEquals(4, self.dmp.diff_commonSuffix("abcdef1234", "xyz1234"))

    # Whole case.
    self.assertEquals(4, self.dmp.diff_commonSuffix("1234", "xyz1234"))
  def testDiffCommonOverlap(self):
    """diff_commonOverlap: length of text1's suffix matching text2's prefix."""
    # Null case.
    self.assertEquals(0, self.dmp.diff_commonOverlap("", "abcd"))

    # Whole case.
    self.assertEquals(3, self.dmp.diff_commonOverlap("abc", "abcd"))

    # No overlap.
    self.assertEquals(0, self.dmp.diff_commonOverlap("123456", "abcd"))

    # Overlap.
    self.assertEquals(3, self.dmp.diff_commonOverlap("123456xxx", "xxxabcd"))

    # Unicode.
    # Some overly clever languages (C#) may treat ligatures as equal to their
    # component letters.  E.g. U+FB01 == 'fi'
    self.assertEquals(0, self.dmp.diff_commonOverlap("fi", u"\ufb01i"))
  def testDiffHalfMatch(self):
    """diff_halfMatch: find a substring shared by both texts that is at
    least half as long as the longer text, or None."""
    # Detect a halfmatch.
    self.dmp.Diff_Timeout = 1
    # No match.
    self.assertEquals(None, self.dmp.diff_halfMatch("1234567890", "abcdef"))

    self.assertEquals(None, self.dmp.diff_halfMatch("12345", "23"))

    # Single Match.
    self.assertEquals(("12", "90", "a", "z", "345678"), self.dmp.diff_halfMatch("1234567890", "a345678z"))

    self.assertEquals(("a", "z", "12", "90", "345678"), self.dmp.diff_halfMatch("a345678z", "1234567890"))

    self.assertEquals(("abc", "z", "1234", "0", "56789"), self.dmp.diff_halfMatch("abc56789z", "1234567890"))

    self.assertEquals(("a", "xyz", "1", "7890", "23456"), self.dmp.diff_halfMatch("a23456xyz", "1234567890"))

    # Multiple Matches.
    self.assertEquals(("12123", "123121", "a", "z", "1234123451234"), self.dmp.diff_halfMatch("121231234123451234123121", "a1234123451234z"))

    self.assertEquals(("", "-=-=-=-=-=", "x", "", "x-=-=-=-=-=-=-="), self.dmp.diff_halfMatch("x-=-=-=-=-=-=-=-=-=-=-=-=", "xx-=-=-=-=-=-=-="))

    self.assertEquals(("-=-=-=-=-=", "", "", "y", "-=-=-=-=-=-=-=y"), self.dmp.diff_halfMatch("-=-=-=-=-=-=-=-=-=-=-=-=y", "-=-=-=-=-=-=-=yy"))

    # Non-optimal halfmatch.
    # Optimal diff would be -q+x=H-i+e=lloHe+Hu=llo-Hew+y not -qHillo+x=HelloHe-w+Hulloy
    self.assertEquals(("qHillo", "w", "x", "Hulloy", "HelloHe"), self.dmp.diff_halfMatch("qHilloHelloHew", "xHelloHeHulloy"))

    # Optimal no halfmatch.
    self.dmp.Diff_Timeout = 0
    self.assertEquals(None, self.dmp.diff_halfMatch("qHilloHelloHew", "xHelloHeHulloy"))
  def testDiffLinesToChars(self):
    """diff_linesToChars: encode each unique line as a single character."""
    # Convert lines down to characters.
    self.assertEquals(("\x01\x02\x01", "\x02\x01\x02", ["", "alpha\n", "beta\n"]), self.dmp.diff_linesToChars("alpha\nbeta\nalpha\n", "beta\nalpha\nbeta\n"))

    self.assertEquals(("", "\x01\x02\x03\x03", ["", "alpha\r\n", "beta\r\n", "\r\n"]), self.dmp.diff_linesToChars("", "alpha\r\nbeta\r\n\r\n\r\n"))

    self.assertEquals(("\x01", "\x02", ["", "a", "b"]), self.dmp.diff_linesToChars("a", "b"))

    # More than 256 to reveal any 8-bit limitations.
    n = 300
    lineList = []
    charList = []
    for x in range(1, n + 1):
      lineList.append(str(x) + "\n")
      charList.append(unichr(x))
    self.assertEquals(n, len(lineList))
    lines = "".join(lineList)
    chars = "".join(charList)
    self.assertEquals(n, len(chars))
    # Index 0 is reserved; line numbering starts at 1.
    lineList.insert(0, "")
    self.assertEquals((chars, "", lineList), self.dmp.diff_linesToChars(lines, ""))
  def testDiffCharsToLines(self):
    """diff_charsToLines: inverse of diff_linesToChars on a diff list."""
    # Convert chars up to lines.
    diffs = [(self.dmp.DIFF_EQUAL, "\x01\x02\x01"), (self.dmp.DIFF_INSERT, "\x02\x01\x02")]
    self.dmp.diff_charsToLines(diffs, ["", "alpha\n", "beta\n"])
    self.assertEquals([(self.dmp.DIFF_EQUAL, "alpha\nbeta\nalpha\n"), (self.dmp.DIFF_INSERT, "beta\nalpha\nbeta\n")], diffs)

    # More than 256 to reveal any 8-bit limitations.
    n = 300
    lineList = []
    charList = []
    for x in range(1, n + 1):
      lineList.append(str(x) + "\n")
      charList.append(unichr(x))
    self.assertEquals(n, len(lineList))
    lines = "".join(lineList)
    chars = "".join(charList)
    self.assertEquals(n, len(chars))
    # Index 0 is reserved; line numbering starts at 1.
    lineList.insert(0, "")
    diffs = [(self.dmp.DIFF_DELETE, chars)]
    self.dmp.diff_charsToLines(diffs, lineList)
    self.assertEquals([(self.dmp.DIFF_DELETE, lines)], diffs)
  def testDiffCleanupMerge(self):
    """diff_cleanupMerge: merge adjacent same-type edits and factor out
    common prefixes/suffixes (in place)."""
    # Cleanup a messy diff.
    # Null case.
    diffs = []
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([], diffs)

    # No change case.
    diffs = [(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "b"), (self.dmp.DIFF_INSERT, "c")]
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "b"), (self.dmp.DIFF_INSERT, "c")], diffs)

    # Merge equalities.
    diffs = [(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_EQUAL, "b"), (self.dmp.DIFF_EQUAL, "c")]
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([(self.dmp.DIFF_EQUAL, "abc")], diffs)

    # Merge deletions.
    diffs = [(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_DELETE, "b"), (self.dmp.DIFF_DELETE, "c")]
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([(self.dmp.DIFF_DELETE, "abc")], diffs)

    # Merge insertions.
    diffs = [(self.dmp.DIFF_INSERT, "a"), (self.dmp.DIFF_INSERT, "b"), (self.dmp.DIFF_INSERT, "c")]
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([(self.dmp.DIFF_INSERT, "abc")], diffs)

    # Merge interweave.
    diffs = [(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, "b"), (self.dmp.DIFF_DELETE, "c"), (self.dmp.DIFF_INSERT, "d"), (self.dmp.DIFF_EQUAL, "e"), (self.dmp.DIFF_EQUAL, "f")]
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([(self.dmp.DIFF_DELETE, "ac"), (self.dmp.DIFF_INSERT, "bd"), (self.dmp.DIFF_EQUAL, "ef")], diffs)

    # Prefix and suffix detection.
    diffs = [(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, "abc"), (self.dmp.DIFF_DELETE, "dc")]
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "d"), (self.dmp.DIFF_INSERT, "b"), (self.dmp.DIFF_EQUAL, "c")], diffs)

    # Prefix and suffix detection with equalities.
    diffs = [(self.dmp.DIFF_EQUAL, "x"), (self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, "abc"), (self.dmp.DIFF_DELETE, "dc"), (self.dmp.DIFF_EQUAL, "y")]
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([(self.dmp.DIFF_EQUAL, "xa"), (self.dmp.DIFF_DELETE, "d"), (self.dmp.DIFF_INSERT, "b"), (self.dmp.DIFF_EQUAL, "cy")], diffs)

    # Slide edit left.
    diffs = [(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_INSERT, "ba"), (self.dmp.DIFF_EQUAL, "c")]
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([(self.dmp.DIFF_INSERT, "ab"), (self.dmp.DIFF_EQUAL, "ac")], diffs)

    # Slide edit right.
    diffs = [(self.dmp.DIFF_EQUAL, "c"), (self.dmp.DIFF_INSERT, "ab"), (self.dmp.DIFF_EQUAL, "a")]
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([(self.dmp.DIFF_EQUAL, "ca"), (self.dmp.DIFF_INSERT, "ba")], diffs)

    # Slide edit left recursive.
    diffs = [(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "b"), (self.dmp.DIFF_EQUAL, "c"), (self.dmp.DIFF_DELETE, "ac"), (self.dmp.DIFF_EQUAL, "x")]
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_EQUAL, "acx")], diffs)

    # Slide edit right recursive.
    diffs = [(self.dmp.DIFF_EQUAL, "x"), (self.dmp.DIFF_DELETE, "ca"), (self.dmp.DIFF_EQUAL, "c"), (self.dmp.DIFF_DELETE, "b"), (self.dmp.DIFF_EQUAL, "a")]
    self.dmp.diff_cleanupMerge(diffs)
    self.assertEquals([(self.dmp.DIFF_EQUAL, "xca"), (self.dmp.DIFF_DELETE, "cba")], diffs)
    def testDiffCleanupSemanticLossless(self):
        """diff_cleanupSemanticLossless: slide edit boundaries sideways so they
        land on logical boundaries (blank lines, line ends, word ends,
        sentence ends) without changing the texts the diff reconstructs.

        Each case mutates ``diffs`` in place and checks the normalized form.
        """
        # Slide diffs to match logical boundaries.
        # Null case.
        diffs = []
        self.dmp.diff_cleanupSemanticLossless(diffs)
        self.assertEquals([], diffs)
        # Blank lines.
        diffs = [(self.dmp.DIFF_EQUAL, "AAA\r\n\r\nBBB"), (self.dmp.DIFF_INSERT, "\r\nDDD\r\n\r\nBBB"), (self.dmp.DIFF_EQUAL, "\r\nEEE")]
        self.dmp.diff_cleanupSemanticLossless(diffs)
        self.assertEquals([(self.dmp.DIFF_EQUAL, "AAA\r\n\r\n"), (self.dmp.DIFF_INSERT, "BBB\r\nDDD\r\n\r\n"), (self.dmp.DIFF_EQUAL, "BBB\r\nEEE")], diffs)
        # Line boundaries.
        diffs = [(self.dmp.DIFF_EQUAL, "AAA\r\nBBB"), (self.dmp.DIFF_INSERT, " DDD\r\nBBB"), (self.dmp.DIFF_EQUAL, " EEE")]
        self.dmp.diff_cleanupSemanticLossless(diffs)
        self.assertEquals([(self.dmp.DIFF_EQUAL, "AAA\r\n"), (self.dmp.DIFF_INSERT, "BBB DDD\r\n"), (self.dmp.DIFF_EQUAL, "BBB EEE")], diffs)
        # Word boundaries.
        diffs = [(self.dmp.DIFF_EQUAL, "The c"), (self.dmp.DIFF_INSERT, "ow and the c"), (self.dmp.DIFF_EQUAL, "at.")]
        self.dmp.diff_cleanupSemanticLossless(diffs)
        self.assertEquals([(self.dmp.DIFF_EQUAL, "The "), (self.dmp.DIFF_INSERT, "cow and the "), (self.dmp.DIFF_EQUAL, "cat.")], diffs)
        # Alphanumeric boundaries.
        diffs = [(self.dmp.DIFF_EQUAL, "The-c"), (self.dmp.DIFF_INSERT, "ow-and-the-c"), (self.dmp.DIFF_EQUAL, "at.")]
        self.dmp.diff_cleanupSemanticLossless(diffs)
        self.assertEquals([(self.dmp.DIFF_EQUAL, "The-"), (self.dmp.DIFF_INSERT, "cow-and-the-"), (self.dmp.DIFF_EQUAL, "cat.")], diffs)
        # Hitting the start.
        diffs = [(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_EQUAL, "ax")]
        self.dmp.diff_cleanupSemanticLossless(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_EQUAL, "aax")], diffs)
        # Hitting the end.
        diffs = [(self.dmp.DIFF_EQUAL, "xa"), (self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_EQUAL, "a")]
        self.dmp.diff_cleanupSemanticLossless(diffs)
        self.assertEquals([(self.dmp.DIFF_EQUAL, "xaa"), (self.dmp.DIFF_DELETE, "a")], diffs)
        # Sentence boundaries.
        diffs = [(self.dmp.DIFF_EQUAL, "The xxx. The "), (self.dmp.DIFF_INSERT, "zzz. The "), (self.dmp.DIFF_EQUAL, "yyy.")]
        self.dmp.diff_cleanupSemanticLossless(diffs)
        self.assertEquals([(self.dmp.DIFF_EQUAL, "The xxx."), (self.dmp.DIFF_INSERT, " The zzz."), (self.dmp.DIFF_EQUAL, " The yyy.")], diffs)
    def testDiffCleanupSemantic(self):
        """diff_cleanupSemantic: merge edits separated by trivially short
        equalities, and factor common substrings out of overlapping
        delete/insert pairs.  Each case mutates ``diffs`` in place.
        """
        # Cleanup semantically trivial equalities.
        # Null case.
        diffs = []
        self.dmp.diff_cleanupSemantic(diffs)
        self.assertEquals([], diffs)
        # No elimination #1.
        diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "cd"), (self.dmp.DIFF_EQUAL, "12"), (self.dmp.DIFF_DELETE, "e")]
        self.dmp.diff_cleanupSemantic(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "cd"), (self.dmp.DIFF_EQUAL, "12"), (self.dmp.DIFF_DELETE, "e")], diffs)
        # No elimination #2.
        diffs = [(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_INSERT, "ABC"), (self.dmp.DIFF_EQUAL, "1234"), (self.dmp.DIFF_DELETE, "wxyz")]
        self.dmp.diff_cleanupSemantic(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_INSERT, "ABC"), (self.dmp.DIFF_EQUAL, "1234"), (self.dmp.DIFF_DELETE, "wxyz")], diffs)
        # Simple elimination.
        diffs = [(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_EQUAL, "b"), (self.dmp.DIFF_DELETE, "c")]
        self.dmp.diff_cleanupSemantic(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_INSERT, "b")], diffs)
        # Backpass elimination.
        diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_EQUAL, "cd"), (self.dmp.DIFF_DELETE, "e"), (self.dmp.DIFF_EQUAL, "f"), (self.dmp.DIFF_INSERT, "g")]
        self.dmp.diff_cleanupSemantic(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "abcdef"), (self.dmp.DIFF_INSERT, "cdfg")], diffs)
        # Multiple eliminations.
        diffs = [(self.dmp.DIFF_INSERT, "1"), (self.dmp.DIFF_EQUAL, "A"), (self.dmp.DIFF_DELETE, "B"), (self.dmp.DIFF_INSERT, "2"), (self.dmp.DIFF_EQUAL, "_"), (self.dmp.DIFF_INSERT, "1"), (self.dmp.DIFF_EQUAL, "A"), (self.dmp.DIFF_DELETE, "B"), (self.dmp.DIFF_INSERT, "2")]
        self.dmp.diff_cleanupSemantic(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "AB_AB"), (self.dmp.DIFF_INSERT, "1A2_1A2")], diffs)
        # Word boundaries.
        diffs = [(self.dmp.DIFF_EQUAL, "The c"), (self.dmp.DIFF_DELETE, "ow and the c"), (self.dmp.DIFF_EQUAL, "at.")]
        self.dmp.diff_cleanupSemantic(diffs)
        self.assertEquals([(self.dmp.DIFF_EQUAL, "The "), (self.dmp.DIFF_DELETE, "cow and the "), (self.dmp.DIFF_EQUAL, "cat.")], diffs)
        # No overlap elimination.
        diffs = [(self.dmp.DIFF_DELETE, "abcxx"), (self.dmp.DIFF_INSERT, "xxdef")]
        self.dmp.diff_cleanupSemantic(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "abcxx"), (self.dmp.DIFF_INSERT, "xxdef")], diffs)
        # Overlap elimination.
        diffs = [(self.dmp.DIFF_DELETE, "abcxxx"), (self.dmp.DIFF_INSERT, "xxxdef")]
        self.dmp.diff_cleanupSemantic(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_EQUAL, "xxx"), (self.dmp.DIFF_INSERT, "def")], diffs)
        # Reverse overlap elimination.
        diffs = [(self.dmp.DIFF_DELETE, "xxxabc"), (self.dmp.DIFF_INSERT, "defxxx")]
        self.dmp.diff_cleanupSemantic(diffs)
        self.assertEquals([(self.dmp.DIFF_INSERT, "def"), (self.dmp.DIFF_EQUAL, "xxx"), (self.dmp.DIFF_DELETE, "abc")], diffs)
        # Two overlap eliminations.
        diffs = [(self.dmp.DIFF_DELETE, "abcd1212"), (self.dmp.DIFF_INSERT, "1212efghi"), (self.dmp.DIFF_EQUAL, "----"), (self.dmp.DIFF_DELETE, "A3"), (self.dmp.DIFF_INSERT, "3BC")]
        self.dmp.diff_cleanupSemantic(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "abcd"), (self.dmp.DIFF_EQUAL, "1212"), (self.dmp.DIFF_INSERT, "efghi"), (self.dmp.DIFF_EQUAL, "----"), (self.dmp.DIFF_DELETE, "A"), (self.dmp.DIFF_EQUAL, "3"), (self.dmp.DIFF_INSERT, "BC")], diffs)
    def testDiffCleanupEfficiency(self):
        """diff_cleanupEfficiency: merge edits separated by equalities whose
        length is below Diff_EditCost, trading diff quality for fewer ops.

        Diff_EditCost is mutated for the high-cost case and restored to 4
        at the end so later tests see the default.
        """
        # Cleanup operationally trivial equalities.
        self.dmp.Diff_EditCost = 4
        # Null case.
        diffs = []
        self.dmp.diff_cleanupEfficiency(diffs)
        self.assertEquals([], diffs)
        # No elimination.
        diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "wxyz"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "34")]
        self.dmp.diff_cleanupEfficiency(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "wxyz"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "34")], diffs)
        # Four-edit elimination.
        diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "xyz"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "34")]
        self.dmp.diff_cleanupEfficiency(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "abxyzcd"), (self.dmp.DIFF_INSERT, "12xyz34")], diffs)
        # Three-edit elimination.
        diffs = [(self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "x"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "34")]
        self.dmp.diff_cleanupEfficiency(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "xcd"), (self.dmp.DIFF_INSERT, "12x34")], diffs)
        # Backpass elimination.
        diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "xy"), (self.dmp.DIFF_INSERT, "34"), (self.dmp.DIFF_EQUAL, "z"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "56")]
        self.dmp.diff_cleanupEfficiency(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "abxyzcd"), (self.dmp.DIFF_INSERT, "12xy34z56")], diffs)
        # High cost elimination.
        self.dmp.Diff_EditCost = 5
        diffs = [(self.dmp.DIFF_DELETE, "ab"), (self.dmp.DIFF_INSERT, "12"), (self.dmp.DIFF_EQUAL, "wxyz"), (self.dmp.DIFF_DELETE, "cd"), (self.dmp.DIFF_INSERT, "34")]
        self.dmp.diff_cleanupEfficiency(diffs)
        self.assertEquals([(self.dmp.DIFF_DELETE, "abwxyzcd"), (self.dmp.DIFF_INSERT, "12wxyz34")], diffs)
        # Restore the default edit cost for subsequent tests.
        self.dmp.Diff_EditCost = 4
    def testDiffPrettyHtml(self):
        """diff_prettyHtml renders a diff as styled HTML (equal -> <span>,
        delete -> <del>, insert -> <ins>).

        NOTE(review): the expected string below contains a raw pilcrow and
        unescaped '<B>'/'&' where upstream diff-match-patch emits HTML
        entities -- this literal looks entity-decoded by whatever produced
        this file; confirm against the library's actual escaping output.
        """
        # Pretty print.
        diffs = [(self.dmp.DIFF_EQUAL, "a\n"), (self.dmp.DIFF_DELETE, "<B>b</B>"), (self.dmp.DIFF_INSERT, "c&d")]
        self.assertEquals("<span>a¶<br></span><del style=\"background:#ffe6e6;\"><B>b</B></del><ins style=\"background:#e6ffe6;\">c&d</ins>", self.dmp.diff_prettyHtml(diffs))
def testDiffText(self):
# Compute the source and destination texts.
diffs = [(self.dmp.DIFF_EQUAL, "jump"), (self.dmp.DIFF_DELETE, "s"), (self.dmp.DIFF_INSERT, "ed"), (self.dmp.DIFF_EQUAL, " over "), (self.dmp.DIFF_DELETE, "the"), (self.dmp.DIFF_INSERT, "a"), (self.dmp.DIFF_EQUAL, " lazy")]
self.assertEquals("jumps over the lazy", self.dmp.diff_text1(diffs))
self.assertEquals("jumped over a lazy", self.dmp.diff_text2(diffs))
def testDiffDelta(self):
# Convert a diff into delta string.
diffs = [(self.dmp.DIFF_EQUAL, "jump"), (self.dmp.DIFF_DELETE, "s"), (self.dmp.DIFF_INSERT, "ed"), (self.dmp.DIFF_EQUAL, " over "), (self.dmp.DIFF_DELETE, "the"), (self.dmp.DIFF_INSERT, "a"), (self.dmp.DIFF_EQUAL, " lazy"), (self.dmp.DIFF_INSERT, "old dog")]
text1 = self.dmp.diff_text1(diffs)
self.assertEquals("jumps over the lazy", text1)
delta = self.dmp.diff_toDelta(diffs)
self.assertEquals("=4\t-1\t+ed\t=6\t-3\t+a\t=5\t+old dog", delta)
# Convert delta string into a diff.
self.assertEquals(diffs, self.dmp.diff_fromDelta(text1, delta))
# Generates error (19 != 20).
try:
self.dmp.diff_fromDelta(text1 + "x", delta)
self.assertFalse(True)
except ValueError:
# Exception expected.
pass
# Generates error (19 != 18).
try:
self.dmp.diff_fromDelta(text1[1:], delta)
self.assertFalse(True)
except ValueError:
# Exception expected.
pass
# Generates error (%c3%xy invalid Unicode).
try:
self.dmp.diff_fromDelta("", "+%c3xy")
self.assertFalse(True)
except ValueError:
# Exception expected.
pass
# Test deltas with special characters.
diffs = [(self.dmp.DIFF_EQUAL, u"\u0680 \x00 \t %"), (self.dmp.DIFF_DELETE, u"\u0681 \x01 \n ^"), (self.dmp.DIFF_INSERT, u"\u0682 \x02 \\ |")]
text1 = self.dmp.diff_text1(diffs)
self.assertEquals(u"\u0680 \x00 \t %\u0681 \x01 \n ^", text1)
delta = self.dmp.diff_toDelta(diffs)
self.assertEquals("=7\t-7\t+%DA%82 %02 %5C %7C", delta)
# Convert delta string into a diff.
self.assertEquals(diffs, self.dmp.diff_fromDelta(text1, delta))
# Verify pool of unchanged characters.
diffs = [(self.dmp.DIFF_INSERT, "A-Z a-z 0-9 - _ . ! ~ * ' ( ) ; / ? : @ & = + $ , # ")]
text2 = self.dmp.diff_text2(diffs)
self.assertEquals("A-Z a-z 0-9 - _ . ! ~ * \' ( ) ; / ? : @ & = + $ , # ", text2)
delta = self.dmp.diff_toDelta(diffs)
self.assertEquals("+A-Z a-z 0-9 - _ . ! ~ * \' ( ) ; / ? : @ & = + $ , # ", delta)
# Convert delta string into a diff.
self.assertEquals(diffs, self.dmp.diff_fromDelta("", delta))
def testDiffXIndex(self):
# Translate a location in text1 to text2.
self.assertEquals(5, self.dmp.diff_xIndex([(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, "1234"), (self.dmp.DIFF_EQUAL, "xyz")], 2))
# Translation on deletion.
self.assertEquals(1, self.dmp.diff_xIndex([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "1234"), (self.dmp.DIFF_EQUAL, "xyz")], 3))
def testDiffLevenshtein(self):
# Levenshtein with trailing equality.
self.assertEquals(4, self.dmp.diff_levenshtein([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_INSERT, "1234"), (self.dmp.DIFF_EQUAL, "xyz")]))
# Levenshtein with leading equality.
self.assertEquals(4, self.dmp.diff_levenshtein([(self.dmp.DIFF_EQUAL, "xyz"), (self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_INSERT, "1234")]))
# Levenshtein with middle equality.
self.assertEquals(7, self.dmp.diff_levenshtein([(self.dmp.DIFF_DELETE, "abc"), (self.dmp.DIFF_EQUAL, "xyz"), (self.dmp.DIFF_INSERT, "1234")]))
    def testDiffBisect(self):
        """diff_bisect: the O(nd) divide-and-conquer core of diff_main.

        Passing a deadline of 0 forces an immediate timeout, which degrades
        to a trivial delete-all/insert-all diff.
        NOTE: sys.maxint is Python 2 only (removed in Python 3).
        """
        # Normal.
        a = "cat"
        b = "map"
        # Since the resulting diff hasn't been normalized, it would be ok if
        # the insertion and deletion pairs are swapped.
        # If the order changes, tweak this test as required.
        self.assertEquals([(self.dmp.DIFF_DELETE, "c"), (self.dmp.DIFF_INSERT, "m"), (self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "t"), (self.dmp.DIFF_INSERT, "p")], self.dmp.diff_bisect(a, b, sys.maxint))
        # Timeout.
        self.assertEquals([(self.dmp.DIFF_DELETE, "cat"), (self.dmp.DIFF_INSERT, "map")], self.dmp.diff_bisect(a, b, 0))
    def testDiffMain(self):
        """End-to-end diff_main: trivial diffs, real diffs, the Diff_Timeout
        wall-clock limit, the line-mode speedup for large texts, and
        rejection of None inputs.

        Mutates Diff_Timeout; leaves it at 0 (disabled) on exit.
        The timeout assertions are wall-clock based and can be flaky on a
        heavily loaded machine (acknowledged in the comments below).
        """
        # Perform a trivial diff.
        # Null case.
        self.assertEquals([], self.dmp.diff_main("", "", False))
        # Equality.
        self.assertEquals([(self.dmp.DIFF_EQUAL, "abc")], self.dmp.diff_main("abc", "abc", False))
        # Simple insertion.
        self.assertEquals([(self.dmp.DIFF_EQUAL, "ab"), (self.dmp.DIFF_INSERT, "123"), (self.dmp.DIFF_EQUAL, "c")], self.dmp.diff_main("abc", "ab123c", False))
        # Simple deletion.
        self.assertEquals([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "123"), (self.dmp.DIFF_EQUAL, "bc")], self.dmp.diff_main("a123bc", "abc", False))
        # Two insertions.
        self.assertEquals([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_INSERT, "123"), (self.dmp.DIFF_EQUAL, "b"), (self.dmp.DIFF_INSERT, "456"), (self.dmp.DIFF_EQUAL, "c")], self.dmp.diff_main("abc", "a123b456c", False))
        # Two deletions.
        self.assertEquals([(self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "123"), (self.dmp.DIFF_EQUAL, "b"), (self.dmp.DIFF_DELETE, "456"), (self.dmp.DIFF_EQUAL, "c")], self.dmp.diff_main("a123b456c", "abc", False))
        # Perform a real diff.
        # Switch off the timeout.
        self.dmp.Diff_Timeout = 0
        # Simple cases.
        self.assertEquals([(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, "b")], self.dmp.diff_main("a", "b", False))
        self.assertEquals([(self.dmp.DIFF_DELETE, "Apple"), (self.dmp.DIFF_INSERT, "Banana"), (self.dmp.DIFF_EQUAL, "s are a"), (self.dmp.DIFF_INSERT, "lso"), (self.dmp.DIFF_EQUAL, " fruit.")], self.dmp.diff_main("Apples are a fruit.", "Bananas are also fruit.", False))
        self.assertEquals([(self.dmp.DIFF_DELETE, "a"), (self.dmp.DIFF_INSERT, u"\u0680"), (self.dmp.DIFF_EQUAL, "x"), (self.dmp.DIFF_DELETE, "\t"), (self.dmp.DIFF_INSERT, "\x00")], self.dmp.diff_main("ax\t", u"\u0680x\x00", False))
        # Overlaps.
        self.assertEquals([(self.dmp.DIFF_DELETE, "1"), (self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "y"), (self.dmp.DIFF_EQUAL, "b"), (self.dmp.DIFF_DELETE, "2"), (self.dmp.DIFF_INSERT, "xab")], self.dmp.diff_main("1ayb2", "abxab", False))
        self.assertEquals([(self.dmp.DIFF_INSERT, "xaxcx"), (self.dmp.DIFF_EQUAL, "abc"), (self.dmp.DIFF_DELETE, "y")], self.dmp.diff_main("abcy", "xaxcxabc", False))
        self.assertEquals([(self.dmp.DIFF_DELETE, "ABCD"), (self.dmp.DIFF_EQUAL, "a"), (self.dmp.DIFF_DELETE, "="), (self.dmp.DIFF_INSERT, "-"), (self.dmp.DIFF_EQUAL, "bcd"), (self.dmp.DIFF_DELETE, "="), (self.dmp.DIFF_INSERT, "-"), (self.dmp.DIFF_EQUAL, "efghijklmnopqrs"), (self.dmp.DIFF_DELETE, "EFGHIJKLMNOefg")], self.dmp.diff_main("ABCDa=bcd=efghijklmnopqrsEFGHIJKLMNOefg", "a-bcd-efghijklmnopqrs", False))
        # Large equality.
        self.assertEquals([(self.dmp.DIFF_INSERT, " "), (self.dmp.DIFF_EQUAL,"a"), (self.dmp.DIFF_INSERT,"nd"), (self.dmp.DIFF_EQUAL," [[Pennsylvania]]"), (self.dmp.DIFF_DELETE," and [[New")], self.dmp.diff_main("a [[Pennsylvania]] and [[New", " and [[Pennsylvania]]", False))
        # Timeout.
        self.dmp.Diff_Timeout = 0.1  # 100ms
        a = "`Twas brillig, and the slithy toves\nDid gyre and gimble in the wabe:\nAll mimsy were the borogoves,\nAnd the mome raths outgrabe.\n"
        b = "I am the very model of a modern major general,\nI've information vegetable, animal, and mineral,\nI know the kings of England, and I quote the fights historical,\nFrom Marathon to Waterloo, in order categorical.\n"
        # Increase the text lengths by 1024 times to ensure a timeout.
        for x in range(10):
            a = a + a
            b = b + b
        startTime = time.time()
        self.dmp.diff_main(a, b)
        endTime = time.time()
        # Test that we took at least the timeout period.
        self.assertTrue(self.dmp.Diff_Timeout <= endTime - startTime)
        # Test that we didn't take forever (be forgiving).
        # Theoretically this test could fail very occasionally if the
        # OS task swaps or locks up for a second at the wrong moment.
        self.assertTrue(self.dmp.Diff_Timeout * 2 > endTime - startTime)
        self.dmp.Diff_Timeout = 0
        # Test the linemode speedup.
        # Must be long to pass the 100 char cutoff.
        # Simple line-mode.
        a = "1234567890\n" * 13
        b = "abcdefghij\n" * 13
        self.assertEquals(self.dmp.diff_main(a, b, False), self.dmp.diff_main(a, b, True))
        # Single line-mode.
        a = "1234567890" * 13
        b = "abcdefghij" * 13
        self.assertEquals(self.dmp.diff_main(a, b, False), self.dmp.diff_main(a, b, True))
        # Overlap line-mode.
        a = "1234567890\n" * 13
        b = "abcdefghij\n1234567890\n1234567890\n1234567890\nabcdefghij\n1234567890\n1234567890\n1234567890\nabcdefghij\n1234567890\n1234567890\n1234567890\nabcdefghij\n"
        texts_linemode = self.diff_rebuildtexts(self.dmp.diff_main(a, b, True))
        texts_textmode = self.diff_rebuildtexts(self.dmp.diff_main(a, b, False))
        self.assertEquals(texts_textmode, texts_linemode)
        # Test null inputs.
        try:
            self.dmp.diff_main(None, None)
            self.assertFalse(True)
        except ValueError:
            # Exception expected.
            pass
class MatchTest(DiffMatchPatchTest):
    """MATCH TEST FUNCTIONS.

    Exercises the Bitap fuzzy-matching engine: match_alphabet (per-character
    bitmasks), match_bitap (core search with Match_Threshold and
    Match_Distance tuning), and the match_main front end.
    """

    def testMatchAlphabet(self):
        """match_alphabet builds the bitmask table consumed by Bitap."""
        # Initialise the bitmasks for Bitap.
        self.assertEquals({"a":4, "b":2, "c":1}, self.dmp.match_alphabet("abc"))
        self.assertEquals({"a":37, "b":18, "c":8}, self.dmp.match_alphabet("abcaba"))

    def testMatchBitap(self):
        """match_bitap: exact, fuzzy, overflow, threshold and distance cases.

        Mutates Match_Threshold/Match_Distance per case and restores the
        defaults (0.5 / 100-then-1000) before returning.
        """
        self.dmp.Match_Distance = 100
        self.dmp.Match_Threshold = 0.5
        # Exact matches.
        self.assertEquals(5, self.dmp.match_bitap("abcdefghijk", "fgh", 5))
        self.assertEquals(5, self.dmp.match_bitap("abcdefghijk", "fgh", 0))
        # Fuzzy matches.
        self.assertEquals(4, self.dmp.match_bitap("abcdefghijk", "efxhi", 0))
        self.assertEquals(2, self.dmp.match_bitap("abcdefghijk", "cdefxyhijk", 5))
        self.assertEquals(-1, self.dmp.match_bitap("abcdefghijk", "bxy", 1))
        # Overflow.
        self.assertEquals(2, self.dmp.match_bitap("123456789xx0", "3456789x0", 2))
        self.assertEquals(0, self.dmp.match_bitap("abcdef", "xxabc", 4))
        self.assertEquals(3, self.dmp.match_bitap("abcdef", "defyy", 4))
        self.assertEquals(0, self.dmp.match_bitap("abcdef", "xabcdefy", 0))
        # Threshold test.
        self.dmp.Match_Threshold = 0.4
        self.assertEquals(4, self.dmp.match_bitap("abcdefghijk", "efxyhi", 1))
        self.dmp.Match_Threshold = 0.3
        self.assertEquals(-1, self.dmp.match_bitap("abcdefghijk", "efxyhi", 1))
        self.dmp.Match_Threshold = 0.0
        self.assertEquals(1, self.dmp.match_bitap("abcdefghijk", "bcdef", 1))
        self.dmp.Match_Threshold = 0.5
        # Multiple select.
        self.assertEquals(0, self.dmp.match_bitap("abcdexyzabcde", "abccde", 3))
        self.assertEquals(8, self.dmp.match_bitap("abcdexyzabcde", "abccde", 5))
        # Distance test.
        self.dmp.Match_Distance = 10  # Strict location.
        self.assertEquals(-1, self.dmp.match_bitap("abcdefghijklmnopqrstuvwxyz", "abcdefg", 24))
        self.assertEquals(0, self.dmp.match_bitap("abcdefghijklmnopqrstuvwxyz", "abcdxxefg", 1))
        self.dmp.Match_Distance = 1000  # Loose location.
        self.assertEquals(0, self.dmp.match_bitap("abcdefghijklmnopqrstuvwxyz", "abcdefg", 24))

    def testMatchMain(self):
        """match_main: shortcut paths, a complex fuzzy match, and rejection
        of None inputs with ValueError."""
        # Full match.
        # Shortcut matches.
        self.assertEquals(0, self.dmp.match_main("abcdef", "abcdef", 1000))
        self.assertEquals(-1, self.dmp.match_main("", "abcdef", 1))
        self.assertEquals(3, self.dmp.match_main("abcdef", "", 3))
        self.assertEquals(3, self.dmp.match_main("abcdef", "de", 3))
        self.assertEquals(3, self.dmp.match_main("abcdef", "defy", 4))
        self.assertEquals(0, self.dmp.match_main("abcdef", "abcdefy", 0))
        # Complex match.
        self.dmp.Match_Threshold = 0.7
        self.assertEquals(4, self.dmp.match_main("I am the very model of a modern major general.", " that berry ", 5))
        self.dmp.Match_Threshold = 0.5
        # Test null inputs.
        try:
            self.dmp.match_main(None, None, 0)
            self.assertFalse(True)
        except ValueError:
            # Exception expected.
            pass
class PatchTest(DiffMatchPatchTest):
    """PATCH TEST FUNCTIONS.

    Covers patch_obj text rendering, patch text (de)serialization, patch
    construction from texts/diffs, splitting oversized patches, padding,
    and patch application with fuzzy matching.
    """

    def testPatchObj(self):
        """A patch_obj renders itself in the unidiff-like text format
        (coordinates are printed 1-based, hence 21/22 below)."""
        # Patch Object.
        p = dmp_module.patch_obj()
        p.start1 = 20
        p.start2 = 21
        p.length1 = 18
        p.length2 = 17
        p.diffs = [(self.dmp.DIFF_EQUAL, "jump"), (self.dmp.DIFF_DELETE, "s"), (self.dmp.DIFF_INSERT, "ed"), (self.dmp.DIFF_EQUAL, " over "), (self.dmp.DIFF_DELETE, "the"), (self.dmp.DIFF_INSERT, "a"), (self.dmp.DIFF_EQUAL, "\nlaz")]
        strp = str(p)
        self.assertEquals("@@ -21,18 +22,17 @@\n jump\n-s\n+ed\n over \n-the\n+a\n %0Alaz\n", strp)

    def testPatchFromText(self):
        """patch_fromText parses patch text into patch objects and raises
        ValueError on malformed input."""
        self.assertEquals([], self.dmp.patch_fromText(""))
        strp = "@@ -21,18 +22,17 @@\n jump\n-s\n+ed\n over \n-the\n+a\n %0Alaz\n"
        self.assertEquals(strp, str(self.dmp.patch_fromText(strp)[0]))
        self.assertEquals("@@ -1 +1 @@\n-a\n+b\n", str(self.dmp.patch_fromText("@@ -1 +1 @@\n-a\n+b\n")[0]))
        self.assertEquals("@@ -1,3 +0,0 @@\n-abc\n", str(self.dmp.patch_fromText("@@ -1,3 +0,0 @@\n-abc\n")[0]))
        self.assertEquals("@@ -0,0 +1,3 @@\n+abc\n", str(self.dmp.patch_fromText("@@ -0,0 +1,3 @@\n+abc\n")[0]))
        # Generates error.  (assertRaises replaces the original
        # try/assertFalse(True)/except pattern for a clearer failure.)
        self.assertRaises(ValueError, self.dmp.patch_fromText, "Bad\nPatch\n")

    def testPatchToText(self):
        """patch_toText round-trips single- and multi-hunk patch text."""
        strp = "@@ -21,18 +22,17 @@\n jump\n-s\n+ed\n over \n-the\n+a\n laz\n"
        p = self.dmp.patch_fromText(strp)
        self.assertEquals(strp, self.dmp.patch_toText(p))
        strp = "@@ -1,9 +1,9 @@\n-f\n+F\n oo+fooba\n@@ -7,9 +7,9 @@\n obar\n-,\n+.\n tes\n"
        p = self.dmp.patch_fromText(strp)
        self.assertEquals(strp, self.dmp.patch_toText(p))

    def testPatchAddContext(self):
        """patch_addContext grows a patch's context up to Patch_Margin,
        expanding further when the context is ambiguous in the text."""
        self.dmp.Patch_Margin = 4
        p = self.dmp.patch_fromText("@@ -21,4 +21,10 @@\n-jump\n+somersault\n")[0]
        self.dmp.patch_addContext(p, "The quick brown fox jumps over the lazy dog.")
        self.assertEquals("@@ -17,12 +17,18 @@\n fox \n-jump\n+somersault\n s ov\n", str(p))
        # Same, but not enough trailing context.
        p = self.dmp.patch_fromText("@@ -21,4 +21,10 @@\n-jump\n+somersault\n")[0]
        self.dmp.patch_addContext(p, "The quick brown fox jumps.")
        self.assertEquals("@@ -17,10 +17,16 @@\n fox \n-jump\n+somersault\n s.\n", str(p))
        # Same, but not enough leading context.
        p = self.dmp.patch_fromText("@@ -3 +3,2 @@\n-e\n+at\n")[0]
        self.dmp.patch_addContext(p, "The quick brown fox jumps.")
        self.assertEquals("@@ -1,7 +1,8 @@\n Th\n-e\n+at\n qui\n", str(p))
        # Same, but with ambiguity.
        p = self.dmp.patch_fromText("@@ -3 +3,2 @@\n-e\n+at\n")[0]
        self.dmp.patch_addContext(p, "The quick brown fox jumps. The quick brown fox crashes.")
        self.assertEquals("@@ -1,27 +1,28 @@\n Th\n-e\n+at\n quick brown fox jumps. \n", str(p))

    def testPatchMake(self):
        """patch_make from every supported input combination: two texts,
        a diff, text+diff, and the deprecated text+text+diff form."""
        # Null case.
        patches = self.dmp.patch_make("", "")
        self.assertEquals("", self.dmp.patch_toText(patches))
        text1 = "The quick brown fox jumps over the lazy dog."
        text2 = "That quick brown fox jumped over a lazy dog."
        # Text2+Text1 inputs.
        expectedPatch = "@@ -1,8 +1,7 @@\n Th\n-at\n+e\n qui\n@@ -21,17 +21,18 @@\n jump\n-ed\n+s\n over \n-a\n+the\n laz\n"
        # The second patch must be "-21,17 +21,18", not "-22,17 +21,18" due to rolling context.
        patches = self.dmp.patch_make(text2, text1)
        self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
        # Text1+Text2 inputs.
        expectedPatch = "@@ -1,11 +1,12 @@\n Th\n-e\n+at\n quick b\n@@ -22,18 +22,17 @@\n jump\n-s\n+ed\n over \n-the\n+a\n laz\n"
        patches = self.dmp.patch_make(text1, text2)
        self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
        # Diff input.
        diffs = self.dmp.diff_main(text1, text2, False)
        patches = self.dmp.patch_make(diffs)
        self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
        # Text1+Diff inputs.
        patches = self.dmp.patch_make(text1, diffs)
        self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
        # Text1+Text2+Diff inputs (deprecated).
        patches = self.dmp.patch_make(text1, text2, diffs)
        self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
        # Character encoding.
        patches = self.dmp.patch_make("`1234567890-=[]\\;',./", "~!@#$%^&*()_+{}|:\"<>?")
        self.assertEquals("@@ -1,21 +1,21 @@\n-%601234567890-=%5B%5D%5C;',./\n+~!@#$%25%5E&*()_+%7B%7D%7C:%22%3C%3E?\n", self.dmp.patch_toText(patches))
        # Character decoding.
        diffs = [(self.dmp.DIFF_DELETE, "`1234567890-=[]\\;',./"), (self.dmp.DIFF_INSERT, "~!@#$%^&*()_+{}|:\"<>?")]
        self.assertEquals(diffs, self.dmp.patch_fromText("@@ -1,21 +1,21 @@\n-%601234567890-=%5B%5D%5C;',./\n+~!@#$%25%5E&*()_+%7B%7D%7C:%22%3C%3E?\n")[0].diffs)
        # Long string with repeats.
        text1 = ""
        for x in range(100):
            text1 += "abcdef"
        text2 = text1 + "123"
        expectedPatch = "@@ -573,28 +573,31 @@\n cdefabcdefabcdefabcdefabcdef\n+123\n"
        patches = self.dmp.patch_make(text1, text2)
        self.assertEquals(expectedPatch, self.dmp.patch_toText(patches))
        # Test null inputs.  (assertRaises replaces the original
        # try/assertFalse(True)/except pattern.)
        self.assertRaises(ValueError, self.dmp.patch_make, None, None)

    def testPatchSplitMax(self):
        """patch_splitMax splits patches wider than Match_MaxBits (assumed
        32) so Bitap can apply them."""
        # Assumes that Match_MaxBits is 32.
        patches = self.dmp.patch_make("abcdefghijklmnopqrstuvwxyz01234567890", "XabXcdXefXghXijXklXmnXopXqrXstXuvXwxXyzX01X23X45X67X89X0")
        self.dmp.patch_splitMax(patches)
        self.assertEquals("@@ -1,32 +1,46 @@\n+X\n ab\n+X\n cd\n+X\n ef\n+X\n gh\n+X\n ij\n+X\n kl\n+X\n mn\n+X\n op\n+X\n qr\n+X\n st\n+X\n uv\n+X\n wx\n+X\n yz\n+X\n 012345\n@@ -25,13 +39,18 @@\n zX01\n+X\n 23\n+X\n 45\n+X\n 67\n+X\n 89\n+X\n 0\n", self.dmp.patch_toText(patches))
        patches = self.dmp.patch_make("abcdef1234567890123456789012345678901234567890123456789012345678901234567890uvwxyz", "abcdefuvwxyz")
        oldToText = self.dmp.patch_toText(patches)
        self.dmp.patch_splitMax(patches)
        # A patch that is already narrow enough must be left untouched.
        self.assertEquals(oldToText, self.dmp.patch_toText(patches))
        patches = self.dmp.patch_make("1234567890123456789012345678901234567890123456789012345678901234567890", "abc")
        self.dmp.patch_splitMax(patches)
        self.assertEquals("@@ -1,32 +1,4 @@\n-1234567890123456789012345678\n 9012\n@@ -29,32 +1,4 @@\n-9012345678901234567890123456\n 7890\n@@ -57,14 +1,3 @@\n-78901234567890\n+abc\n", self.dmp.patch_toText(patches))
        patches = self.dmp.patch_make("abcdefghij , h : 0 , t : 1 abcdefghij , h : 0 , t : 1 abcdefghij , h : 0 , t : 1", "abcdefghij , h : 1 , t : 1 abcdefghij , h : 1 , t : 1 abcdefghij , h : 0 , t : 1")
        self.dmp.patch_splitMax(patches)
        self.assertEquals("@@ -2,32 +2,32 @@\n bcdefghij , h : \n-0\n+1\n , t : 1 abcdef\n@@ -29,32 +29,32 @@\n bcdefghij , h : \n-0\n+1\n , t : 1 abcdef\n", self.dmp.patch_toText(patches))

    def testPatchAddPadding(self):
        """patch_addPadding pads patch edges with %01%02%03%04 so edits that
        touch the start/end of the text still have context to match on."""
        # Both edges full.
        patches = self.dmp.patch_make("", "test")
        self.assertEquals("@@ -0,0 +1,4 @@\n+test\n", self.dmp.patch_toText(patches))
        self.dmp.patch_addPadding(patches)
        self.assertEquals("@@ -1,8 +1,12 @@\n %01%02%03%04\n+test\n %01%02%03%04\n", self.dmp.patch_toText(patches))
        # Both edges partial.
        patches = self.dmp.patch_make("XY", "XtestY")
        self.assertEquals("@@ -1,2 +1,6 @@\n X\n+test\n Y\n", self.dmp.patch_toText(patches))
        self.dmp.patch_addPadding(patches)
        self.assertEquals("@@ -2,8 +2,12 @@\n %02%03%04X\n+test\n Y%01%02%03\n", self.dmp.patch_toText(patches))
        # Both edges none.
        patches = self.dmp.patch_make("XXXXYYYY", "XXXXtestYYYY")
        self.assertEquals("@@ -1,8 +1,12 @@\n XXXX\n+test\n YYYY\n", self.dmp.patch_toText(patches))
        self.dmp.patch_addPadding(patches)
        self.assertEquals("@@ -5,8 +5,12 @@\n XXXX\n+test\n YYYY\n", self.dmp.patch_toText(patches))

    def testPatchApply(self):
        """patch_apply: exact, fuzzy, failed and delete-heavy applications,
        plus absence of side effects on the patch list itself.

        Returns (new_text, [per-patch success flags]).  Mutates the
        Match_*/Patch_DeleteThreshold knobs per case and restores them.
        """
        self.dmp.Match_Distance = 1000
        self.dmp.Match_Threshold = 0.5
        self.dmp.Patch_DeleteThreshold = 0.5
        # Null case.
        patches = self.dmp.patch_make("", "")
        results = self.dmp.patch_apply(patches, "Hello world.")
        self.assertEquals(("Hello world.", []), results)
        # Exact match.
        patches = self.dmp.patch_make("The quick brown fox jumps over the lazy dog.", "That quick brown fox jumped over a lazy dog.")
        results = self.dmp.patch_apply(patches, "The quick brown fox jumps over the lazy dog.")
        self.assertEquals(("That quick brown fox jumped over a lazy dog.", [True, True]), results)
        # Partial match.
        results = self.dmp.patch_apply(patches, "The quick red rabbit jumps over the tired tiger.")
        self.assertEquals(("That quick red rabbit jumped over a tired tiger.", [True, True]), results)
        # Failed match.
        results = self.dmp.patch_apply(patches, "I am the very model of a modern major general.")
        self.assertEquals(("I am the very model of a modern major general.", [False, False]), results)
        # Big delete, small change.
        patches = self.dmp.patch_make("x1234567890123456789012345678901234567890123456789012345678901234567890y", "xabcy")
        results = self.dmp.patch_apply(patches, "x123456789012345678901234567890-----++++++++++-----123456789012345678901234567890y")
        self.assertEquals(("xabcy", [True, True]), results)
        # Big delete, big change 1.
        patches = self.dmp.patch_make("x1234567890123456789012345678901234567890123456789012345678901234567890y", "xabcy")
        results = self.dmp.patch_apply(patches, "x12345678901234567890---------------++++++++++---------------12345678901234567890y")
        self.assertEquals(("xabc12345678901234567890---------------++++++++++---------------12345678901234567890y", [False, True]), results)
        # Big delete, big change 2.
        self.dmp.Patch_DeleteThreshold = 0.6
        patches = self.dmp.patch_make("x1234567890123456789012345678901234567890123456789012345678901234567890y", "xabcy")
        results = self.dmp.patch_apply(patches, "x12345678901234567890---------------++++++++++---------------12345678901234567890y")
        self.assertEquals(("xabcy", [True, True]), results)
        self.dmp.Patch_DeleteThreshold = 0.5
        # Compensate for failed patch.
        self.dmp.Match_Threshold = 0.0
        self.dmp.Match_Distance = 0
        patches = self.dmp.patch_make("abcdefghijklmnopqrstuvwxyz--------------------1234567890", "abcXXXXXXXXXXdefghijklmnopqrstuvwxyz--------------------1234567YYYYYYYYYY890")
        results = self.dmp.patch_apply(patches, "ABCDEFGHIJKLMNOPQRSTUVWXYZ--------------------1234567890")
        self.assertEquals(("ABCDEFGHIJKLMNOPQRSTUVWXYZ--------------------1234567YYYYYYYYYY890", [False, True]), results)
        self.dmp.Match_Threshold = 0.5
        self.dmp.Match_Distance = 1000
        # No side effects.
        patches = self.dmp.patch_make("", "test")
        patchstr = self.dmp.patch_toText(patches)
        results = self.dmp.patch_apply(patches, "")
        self.assertEquals(patchstr, self.dmp.patch_toText(patches))
        # No side effects with major delete.
        patches = self.dmp.patch_make("The quick brown fox jumps over the lazy dog.", "Woof")
        patchstr = self.dmp.patch_toText(patches)
        self.dmp.patch_apply(patches, "The quick brown fox jumps over the lazy dog.")
        self.assertEquals(patchstr, self.dmp.patch_toText(patches))
        # Edge exact match.
        patches = self.dmp.patch_make("", "test")
        # BUG FIX: the original dropped the "results =" assignment here, so
        # the assert below re-checked the stale value from the "No side
        # effects" case instead of this application.
        results = self.dmp.patch_apply(patches, "")
        self.assertEquals(("test", [True]), results)
        # Near edge exact match.
        patches = self.dmp.patch_make("XY", "XtestY")
        results = self.dmp.patch_apply(patches, "XY")
        self.assertEquals(("XtestY", [True]), results)
        # Edge partial match.
        patches = self.dmp.patch_make("y", "y123")
        results = self.dmp.patch_apply(patches, "x")
        self.assertEquals(("x123", [True]), results)
# Allow running this test suite directly from the command line.
if __name__ == "__main__":
    unittest.main()
| apache-2.0 |
nicobustillos/odoo | addons/payment_adyen/tests/test_adyen.py | 378 | 4314 | # -*- coding: utf-8 -*-
from lxml import objectify
import urlparse
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment.tests.common import PaymentAcquirerCommon
from openerp.addons.payment_adyen.controllers.main import AdyenController
from openerp.osv.orm import except_orm
from openerp.tools import mute_logger
class AdyenCommon(PaymentAcquirerCommon):
    """Shared fixtures for the Adyen payment-acquirer tests.

    Resolves the configured Adyen acquirer record id and defines Adyen's
    published test credit-card numbers.
    """

    def setUp(self):
        super(AdyenCommon, self).setUp()
        cr, uid = self.cr, self.uid
        # Instance base URL; used to build the resURL return address in
        # the form-rendering tests.
        self.base_url = self.registry('ir.config_parameter').get_param(cr, uid, 'web.base.url')
        # get the adyen account
        model, self.adyen_id = self.registry('ir.model.data').get_object_reference(cr, uid, 'payment_adyen', 'payment_acquirer_adyen')
        # some CC (always use expiration date 06 / 2016, cvc 737, cid 7373 (amex))
        # NOTE(review): the doubled parens below are redundant, so fields
        # like self.amex are a single (number, code) tuple while discover/
        # mastercard/visa are tuples OF such pairs -- confirm consumers
        # expect this mixed shape before normalizing it.
        self.amex = (('370000000000002', '7373'))
        self.dinersclub = (('36006666333344', '737'))
        self.discover = (('6011601160116611', '737'), ('644564456445644', '737'))
        self.jcb = (('3530111333300000', '737'))
        self.mastercard = (('5555444433331111', '737'), ('5555555555554444', '737'))
        self.visa = (('4111 1111 1111 1111', '737'), ('4444333322221111', '737'))
        self.mcdebit = (('5500000000000004', '737'))
        self.visadebit = (('4400000000000008', '737'))
        self.maestro = (('6731012345678906', '737'))
        self.laser = (('630495060000000000', '737'))
        self.hipercard = (('6062828888666688', '737'))
        # Cards with 3-D Secure credentials (number, cvc, user, password).
        self.dsmastercard = (('521234567890 1234', '737', 'user', 'password'))
        self.dsvisa = (('4212345678901237', '737', 'user', 'password'))
        self.mistercash = (('6703444444444449', None, 'user', 'password'))
class AdyenServer2Server(AdyenCommon):
    """Server-to-server transaction tests against the Adyen sandbox."""

    def test_00_tx_management(self):
        # Placeholder: only sets up cursor/uid/context; no server-to-server
        # calls are exercised yet.
        cr, uid, context = self.cr, self.uid, {}
class AdyenForm(AdyenCommon):
    """Tests rendering of the Adyen hosted-payment-page redirect form."""

    def test_10_adyen_form_render(self):
        """Render the acquirer button and check the generated <form> posts
        the expected field values to Adyen's test HPP endpoint."""
        cr, uid, context = self.cr, self.uid, {}
        # be sure not to do stupid things
        adyen = self.payment_acquirer.browse(self.cr, self.uid, self.adyen_id, None)
        self.assertEqual(adyen.environment, 'test', 'test without test environment')

        # ----------------------------------------
        # Test: button direct rendering
        # ----------------------------------------

        # Expected values of the hidden inputs in the rendered form.
        form_values = {
            'merchantAccount': 'OpenERPCOM',
            'merchantReference': 'test_ref0',
            'skinCode': 'cbqYWvVL',
            'paymentAmount': '1',
            'currencyCode': 'EUR',
            'resURL': '%s' % urlparse.urljoin(self.base_url, AdyenController._return_url),
        }
        # render the button
        res = self.payment_acquirer.render(
            cr, uid, self.adyen_id,
            'test_ref0', 0.01, self.currency_euro_id,
            partner_id=None,
            partner_values=self.buyer_values,
            context=context)
        # check form result
        tree = objectify.fromstring(res)
        self.assertEqual(tree.get('action'), 'https://test.adyen.com/hpp/pay.shtml', 'adyen: wrong form POST url')
        for form_input in tree.input:
            # Skip fields whose values are time- or signature-dependent.
            if form_input.get('name') in ['submit', 'shipBeforeDate', 'sessionValidity', 'shopperLocale', 'merchantSig']:
                continue
            self.assertEqual(
                form_input.get('value'),
                form_values[form_input.get('name')],
                'adyen: wrong value for input %s: received %s instead of %s' % (form_input.get('name'), form_input.get('value'), form_values[form_input.get('name')])
            )

    # Disabled return-flow test kept as reference data for the feedback
    # payload Adyen posts back to the return URL.
    # @mute_logger('openerp.addons.payment_adyen.models.adyen', 'ValidationError')
    # def test_20_paypal_form_management(self):
    #     cr, uid, context = self.cr, self.uid, {}
    #     # be sure not to do stupid things
    #     adyen = self.payment_acquirer.browse(self.cr, self.uid, self.adyen_id, None)
    #     self.assertEqual(adyen.env, 'test', 'test without test env')
    #     {'authResult': u'AUTHORISED',
    #      'merchantReference': u'SO014',
    #      'merchantReturnData': u'return_url=/shop/payment/validate',
    #      'merchantSig': u'GaLRO8aMHFaQX3gQ5BVP/YETzeA=',
    #      'paymentMethod': u'visa',
    #      'pspReference': u'8813859935907337',
    #      'shopperLocale': u'en_US',
# 'skinCode': u'cbqYWvVL'} | agpl-3.0 |
beiko-lab/gengis | bin/Lib/site-packages/matplotlib/tri/trifinder.py | 4 | 3221 | from __future__ import print_function
from matplotlib.tri import Triangulation
import matplotlib._tri as _tri
class TriFinder(object):
    """
    Abstract base class for objects that locate the triangles of a
    Triangulation containing given (x, y) points.

    Prefer :func:`matplotlib.tri.Triangulation.get_trifinder` over
    instantiating a derived class directly.

    Derived classes implement ``__call__(x, y)`` where *x* and *y* are
    array_like point coordinates of identical shape.
    """
    def __init__(self, triangulation):
        # Accept only genuine Triangulation instances.
        if isinstance(triangulation, Triangulation):
            self._triangulation = triangulation
        else:
            raise ValueError('Expected a Triangulation object')
class TrapezoidMapTriFinder(TriFinder):
    """
    :class:`~matplotlib.tri.TriFinder` backed by the trapezoid map
    algorithm of "Computational Geometry, Algorithms and Applications",
    second edition, by M. de Berg, M. van Kreveld, M. Overmars and
    O. Schwarzkopf.

    The triangulation must be valid: no duplicate points, no triangles
    built from colinear points, no overlapping triangles.  There is some
    tolerance to colinear-point triangles, but it should not be relied on.
    """
    def __init__(self, triangulation):
        TriFinder.__init__(self, triangulation)
        # Build the C++ finder from the C++ view of the triangulation.
        cpp_triangulation = triangulation.get_cpp_triangulation()
        self._cpp_trifinder = _tri.TrapezoidMapTriFinder(cpp_triangulation)
        self._initialize()

    def __call__(self, x, y):
        """
        Return an integer array, shaped like *x* and *y*, giving the index
        of the triangle containing each (x, y) point, or -1 for points
        lying outside the triangulation.

        *x* and *y* are array_like coordinates of the same shape and any
        number of dimensions.
        """
        # Argument validation is performed on the C++ side.
        return self._cpp_trifinder.find_many(x, y)

    def _initialize(self):
        """
        (Re)build the underlying C++ search structure.  May be called
        again if, for example, the triangulation is modified.
        """
        self._cpp_trifinder.initialize()

    def _get_tree_stats(self):
        """
        Return a python list of node-tree statistics:
          0: number of nodes (tree size)
          1: number of unique nodes
          2: number of trapezoids (tree leaf nodes)
          3: number of unique trapezoids
          4: maximum parent count (max number of times a node is repeated
             in tree)
          5: maximum depth of tree (one more than the maximum number of
             comparisons needed to search through the tree)
          6: mean of all trapezoid depths (one more than the average
             number of comparisons needed to search through the tree)
        """
        return self._cpp_trifinder.get_tree_stats()

    def _print_tree(self):
        """Print a textual dump of the node tree, for debugging."""
        self._cpp_trifinder.print_tree()
| gpl-3.0 |
MaTriXy/physical-web | web-service/tests.py | 37 | 11736 | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
import nose
import os
import signal
import subprocess
import sys
import unittest
import urllib
import urllib2
# Port used when the test runner launches its own local dev_appserver.
LOCAL_TEST_PORT = 9002
# Live URLs that the resolve-scan endpoint is expected to resolve with
# full metadata (title, description, icon, ...).
REGRESSION_TEST_URLS = [
    'http://www.blackanddecker.fr',
    'http://www.google.com',
    'http://dota2.gamepedia.com/',
    'http://www.orange.fr',
    'http://librarian.codes',
    'http://fredrikthalberg.com',
    'http://harleykwyn.com',
]
# URLs that must NOT resolve (unreachable path / malformed host).
REGRESSION_TEST_BAD_URLS = [
    'http://google.com/asdfasdfasdfasdfa',
    'http://www.',
]
class PwsTest(unittest.TestCase):
    """Base class for Physical Web Service endpoint tests.

    Subclasses set PATH (endpoint path) and optionally JSON (decode the
    response body as JSON).  The target host is configured by main().
    """

    _HOST = None  # Set in main()
    _ENABLE_EXPERIMENTAL = False

    @property
    def HOST(self):
        """Endpoint base URL configured by main().

        BUG FIX: the original body evaluated ``PwsTest._HOST`` without
        returning it, so the property always yielded None (masked only
        because main() later rebinds ``PwsTest.HOST`` with a plain string).
        """
        return PwsTest._HOST

    @property
    def ENABLE_EXPERIMENTAL(self):
        """Whether experimental endpoints should be exercised.

        BUG FIX: same missing ``return`` as HOST above.
        """
        return PwsTest._ENABLE_EXPERIMENTAL

    def request(self, params=None, payload=None):
        """
        Makes an http request to our endpoint

        If payload is None, this performs a GET request.
        Otherwise, the payload is json-serialized and a POST request is sent.

        Returns a (status_code, data) tuple; data is JSON-decoded when the
        subclass sets JSON = True.
        """
        JSON = getattr(self, 'JSON', False)
        url = '{}/{}'.format(self.HOST, self.PATH)
        if params:
            url += '?{}'.format(urllib.urlencode(params))
        args = [url]
        if payload is not None:
            # Presence of a body switches urllib2 to a POST request.
            args.append(json.dumps(payload))
        req = urllib2.Request(*args)
        req.add_header("Content-Type", "application/json")
        response = urllib2.urlopen(req)
        data = response.read()
        if JSON:
            data = json.loads(data)
            # Print so we have something nice to look at when we fail
            # (parenthesized print is valid in both Python 2 and 3).
            print(json.dumps(data, indent=2))
        else:
            print(data)
        return response.code, data
class TestResolveScan(PwsTest):
    """Tests for the /resolve-scan endpoint, which turns scanned beacon
    URLs into ranked metadata records."""
    PATH = 'resolve-scan'
    JSON = True
    def call(self, values):
        # POST the payload and return only the decoded JSON body.
        return self.request(payload=values)[1]
    def test_demo_data(self):
        """All four demo URLs resolve and each record carries the full
        metadata field set."""
        result = self.call({
            'objects': [
                { 'url': 'http://www.caltrain.com/schedules/realtime/stations/mountainviewstation-mobile.html' },
                { 'url': 'http://benfry.com/distellamap/' },
                { 'url': 'http://en.wikipedia.org/wiki/Le_D%C3%A9jeuner_sur_l%E2%80%99herbe' },
                { 'url': 'http://sfmoma.org' }
            ]
        })
        self.assertIn('metadata', result)
        self.assertEqual(len(result['metadata']), 4)
        self.assertIn('description', result['metadata'][0])
        self.assertIn('title', result['metadata'][0])
        self.assertIn('url', result['metadata'][0])
        self.assertIn('displayUrl', result['metadata'][0])
        self.assertIn('rank', result['metadata'][0])
        self.assertIn('id', result['metadata'][0])
        self.assertIn('icon', result['metadata'][0])
    def test_invalid_data(self):
        """Unresolvable URLs and malformed keys produce no metadata."""
        result = self.call({
            'objects': [
                { 'url': 'http://totallybadurlthatwontwork.com/' },
                { 'usdf': 'http://badkeys' },
            ]
        })
        self.assertIn('metadata', result)
        self.assertEqual(len(result['metadata']), 0)
        #self.assertEqual(len(result['unresolved']), 1)
    def test_rssi_ranking(self):
        """Results are ordered by signal strength (rssi relative to
        txpower), strongest first."""
        result = self.call({
            'objects': [
                {
                    'url': 'http://www.caltrain.com/schedules/realtime/stations/mountainviewstation-mobile.html',
                    'rssi': -75,
                    'txpower': -22,
                },
                {
                    'url': 'http://benfry.com/distellamap/',
                    'rssi': -95,
                    'txpower': -63,
                },
                {
                    'url': 'http://en.wikipedia.org/wiki/Le_D%C3%A9jeuner_sur_l%E2%80%99herbe',
                    'rssi': -61,
                    'txpower': -22,
                },
                {
                    'url': 'http://sfmoma.org',
                    'rssi': -74,
                    'txpower': -22,
                },
            ]
        })
        self.assertIn('metadata', result)
        self.assertEqual(len(result['metadata']), 4)
        self.assertEqual(result['metadata'][0]['id'],
                         'http://benfry.com/distellamap/')
        self.assertEqual(result['metadata'][1]['id'],
                         'http://en.wikipedia.org/wiki/'
                         'Le_D%C3%A9jeuner_sur_l%E2%80%99herbe')
        self.assertEqual(result['metadata'][2]['id'],
                         'http://sfmoma.org')
        self.assertEqual(result['metadata'][3]['id'],
                         'http://www.caltrain.com/schedules/realtime/'
                         'stations/mountainviewstation-mobile.html')
    def test_url_which_redirects(self):
        """A shortened URL keeps its original id but reports the final
        redirect target as url/displayUrl."""
        result = self.call({
            'objects': [
                {
                    'url': 'http://goo.gl/KYvLwO',
                },
            ]
        })
        self.assertIn('metadata', result)
        self.assertEqual(len(result['metadata']), 1)
        beaconResult = result['metadata'][0]
        self.assertEqual(beaconResult['id'],
                         'http://goo.gl/KYvLwO')
        self.assertEqual(beaconResult['url'],
                         'https://github.com/Google/physical-web')
        self.assertEqual(beaconResult['displayUrl'],
                         'https://github.com/Google/physical-web')
    def test_redirect_with_rssi_tx_power(self):
        """Two experimental redirects to the same target collapse into a
        single result (only run with -x/--experimental)."""
        if not self.ENABLE_EXPERIMENTAL:
            return
        result = self.call({
            'objects': [
                {
                    'url': '{}/experimental/googl/KYvLwO'.format(self.HOST),
                    'rssi': -41,
                    'txpower': -22
                },
                {
                    'url': '{}/experimental/googl/r8iJqW'.format(self.HOST),
                    'rssi': -91,
                    'txpower': -22
                },
            ]
        })
        self.assertIn('metadata', result)
        self.assertEqual(len(result['metadata']), 1)
        self.assertEqual(result['metadata'][0]['url'],
                         'https://github.com/Google/physical-web')
    def test_regression_urls(self):
        """Every known-good regression URL resolves with full metadata."""
        result = self.call({
            'objects': [ {'url': url} for url in REGRESSION_TEST_URLS ]
        })
        self.assertIn('metadata', result)
        self.assertEqual(len(result['metadata']), len(REGRESSION_TEST_URLS))
        for beaconResult in result['metadata']:
            self.assertIn('description', beaconResult)
            self.assertIn('title', beaconResult)
            self.assertIn('url', beaconResult)
            self.assertIn('rank', beaconResult)
            self.assertIn('id', beaconResult)
            self.assertIn('icon', beaconResult)
    def test_regression_bad_urls(self):
        """Known-bad regression URLs resolve to nothing."""
        result = self.call({
            'objects': [ {'url': url} for url in REGRESSION_TEST_BAD_URLS ]
        })
        self.assertIn('metadata', result)
        self.assertEqual(len(result['metadata']), 0)
    def test_invalid_rssi(self):
        """An out-of-range rssi (127) still resolves but is pushed to the
        fallback rank of 1000."""
        result = self.call({
            'objects': [{
                'url': 'http://github.com/google/physical-web/',
                'rssi': 127,
                'txpower': -41
            }]
        })
        self.assertIn('metadata', result)
        self.assertEqual(len(result['metadata']), 1)
        beaconResult = result['metadata'][0]
        self.assertIn('description', beaconResult)
        self.assertIn('title', beaconResult)
        self.assertIn('url', beaconResult)
        self.assertIn('rank', beaconResult)
        self.assertIn('id', beaconResult)
        self.assertIn('icon', beaconResult)
        self.assertEqual(1000, beaconResult['rank'])
class TestShortenUrl(PwsTest):
    """Tests for the /shorten-url endpoint (JSON responses)."""
    PATH = 'shorten-url'
    JSON = True

    def call(self, values):
        """POST *values* to the endpoint and return the decoded body."""
        status, body = self.request(payload=values)
        return body

    def test_github_url(self):
        """Shortening a GitHub URL yields a goo.gl short link."""
        payload = {'longUrl': 'http://www.github.com/Google/physical-web'}
        result = self.call(payload)
        for key in ('kind', 'id', 'longUrl'):
            self.assertIn(key, result)
        self.assertTrue(result['id'].startswith('http://goo.gl/'))
class RefreshUrl(PwsTest):
    """Tests for the /refresh-url endpoint (raw, non-JSON responses)."""
    PATH = 'refresh-url'

    def call(self, url):
        """POST an empty payload with *url* as a query parameter; return
        the raw response body."""
        return self.request(params={'url': url}, payload='')[1]

    def test_github_url(self):
        """Refreshing a known URL returns an empty body."""
        body = self.call('https://github.com/google/physical-web')
        self.assertEqual(body, '')
class TestGo(PwsTest):
    """Tests for the /go redirector endpoint."""
    PATH = 'go'

    def call(self, url):
        """GET the endpoint with *url* and return only the HTTP status."""
        code, _body = self.request(params={'url': url})
        return code

    def test_github_url(self):
        """A resolvable URL is served successfully (HTTP 200)."""
        status = self.call('https://github.com/google/physical-web')
        self.assertEqual(status, 200)
def main():
    """The main routine.

    Parses -e/--endpoint (auto|local|prod|dev|literal URL) and
    -x/--experimental, optionally boots a local dev_appserver, runs the
    nose test suite against the chosen endpoint, then tears the server
    down.  Returns a process exit status.
    """
    # Parse arguments
    local_url = 'http://localhost:{}'.format(LOCAL_TEST_PORT)
    parser = argparse.ArgumentParser(description='Run web-service tests')
    parser.add_argument(
        '-e', '--endpoint', dest='endpoint', default='auto',
        help='Which server to test against.\n'
             'auto: {} (server starts automatically)\n'
             'local: http://localhost:8080\n'
             'prod: https://url-caster.appspot.com\n'
             'dev: https://url-caster-dev.appspot.com\n'
             '*: Other values interpreted literally'
             .format(local_url))
    parser.add_argument('-x', '--experimental', dest='experimental', action='store_true', default=False)
    args = parser.parse_args()
    # Setup the endpoint
    endpoint = args.endpoint
    server = None
    if endpoint.lower() == 'auto':
        endpoint = local_url
        print 'Starting local server...',
        # Own process group so the whole app-server tree can be killed below.
        server = subprocess.Popen([
            'dev_appserver.py', os.path.dirname(__file__),
            '--port', str(LOCAL_TEST_PORT),
            '--admin_port', str(LOCAL_TEST_PORT + 1),
        ], bufsize=1, stderr=subprocess.PIPE, preexec_fn=os.setsid)
        # Wait for the server to start up
        while True:
            line = server.stderr.readline()
            if 'Unable to bind' in line:
                print 'Rogue server already running.'
                return 1
            if 'running at: {}'.format(local_url) in line:
                break
        print 'done'
    elif endpoint.lower() == 'local':
        endpoint = 'http://localhost:8080'
    elif endpoint.lower() == 'prod':
        endpoint = 'https://url-caster.appspot.com'
    elif endpoint.lower() == 'dev':
        endpoint = 'https://url-caster-dev.appspot.com'
    # NOTE(review): this rebinds the HOST/ENABLE_EXPERIMENTAL *properties*
    # on PwsTest with plain values; PwsTest._HOST would match the class's
    # apparent intent — confirm before changing.
    PwsTest.HOST = endpoint
    PwsTest.ENABLE_EXPERIMENTAL = args.experimental
    # Run the tests
    try:
        nose.runmodule()
    finally:
        # Teardown the endpoint
        if server:
            os.killpg(os.getpgid(server.pid), signal.SIGINT)
            server.wait()
    # We should never get here since nose.runmodule will call exit
    return 0
if __name__ == '__main__':
    # Script entry point; Ctrl-C exits cleanly instead of with a traceback.
    try:
        exit(main())
    except KeyboardInterrupt:
        sys.stderr.write('Exiting due to KeyboardInterrupt!\n')
| apache-2.0 |
LyzardKing/ubuntu-make | tests/medium/test_baseinstaller.py | 1 | 6208 | # -*- coding: utf-8 -*-
# Copyright (C) 2015 Canonical
#
# Authors:
# Didier Roche
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Tests for base installer framework in container"""
from . import ContainerTests
import os
import subprocess
from ..large import test_baseinstaller
from ..tools import UMAKE, spawn_process, get_data_dir, swap_file_and_restore
class BaseInstallerInContainer(ContainerTests, test_baseinstaller.BaseInstallerTests):
    """This will install the Base Framework inside a container"""
    TIMEOUT_START = 10
    TIMEOUT_STOP = 10
    def setUp(self):
        # Hosts/ports the container is allowed to reach (fake servers).
        self.hosts = {8765: ["localhost"], 443: ["github.com"]}
        self.apt_repo_override_path = os.path.join(self.APT_FAKE_REPO_PATH, 'android')
        self.additional_local_frameworks = [os.path.join("tests", "data", "testframeworks", "baseinstallerfake.py")]
        # Fake GitHub releases page used for umake self-update checks.
        self.umake_download_page = os.path.join(get_data_dir(), "server-content", "github.com",
                                                "ubuntu", "ubuntu-make", "releases")
        super().setUp()
        # override with container path
        self.installed_path = os.path.join(self.install_base_path, "base", "base-framework")
    def test_install_wrong_download_link_update(self):
        """Install wrong download link, update available"""
        # Strip the anchor umake parses so the download link can't be found.
        with swap_file_and_restore(self.download_page_file_path) as content:
            with open(self.download_page_file_path, "w") as newfile:
                newfile.write(content.replace('id="linux-bundle', ""))
            # umake download page can't match any version (LATESTRELEASE)
            self.child = spawn_process(self.command('{} base base-framework'.format(UMAKE)))
            self.expect_and_no_warn("Choose installation path: {}".format(self.installed_path))
            self.child.sendline("")
            self.expect_and_no_warn("To get the latest version",
                                    timeout=self.TIMEOUT_INSTALL_PROGRESS, expect_warn=True)
            self.wait_and_close(exit_status=1)
            # we have nothing installed
            self.assertFalse(self.launcher_exists_and_is_pinned(self.desktop_filename))
    def test_install_wrong_download_link_no_update(self):
        """Install wrong download link, no update available"""
        with swap_file_and_restore(self.download_page_file_path) as content:
            with open(self.download_page_file_path, "w") as newfile:
                newfile.write(content.replace('id="linux-bundle', ""))
            # Make the fake releases page advertise our own version, so no
            # newer umake appears to be available.
            with swap_file_and_restore(self.umake_download_page) as content:
                with open(self.umake_download_page, "w") as newfile:
                    # Note: our version will have +unknown, testing the git/snap case
                    version = subprocess.check_output(self.command_as_list([UMAKE, '--version']),
                                                      stderr=subprocess.STDOUT).decode("utf-8")
                    newfile.write(content.replace('LATESTRELEASE', version.strip().split("+")[0]))
                self.child = spawn_process(self.command('{} base base-framework'.format(UMAKE)))
                self.expect_and_no_warn("Choose installation path: {}".format(self.installed_path))
                self.child.sendline("")
                self.wait_and_close(exit_status=1, expect_warn=True)
                self.assertIn("Download page changed its syntax or is not parsable (url missing)",
                              self.child.before)
                self.assertNotIn("To get the latest version", self.child.before)
                # we have nothing installed
                self.assertFalse(self.launcher_exists_and_is_pinned(self.desktop_filename))
    def test_install_wrong_download_link_404_update(self):
        """Install wrong download link, github giving 404"""
        with swap_file_and_restore(self.download_page_file_path) as content:
            with open(self.download_page_file_path, "w") as newfile:
                newfile.write(content.replace('id="linux-bundle', ""))
            # Delete the fake releases page so the update check hits a 404.
            with swap_file_and_restore(self.umake_download_page):
                os.remove(self.umake_download_page)
                self.child = spawn_process(self.command('{} base base-framework'.format(UMAKE)))
                self.expect_and_no_warn("Choose installation path: {}".format(self.installed_path))
                self.child.sendline("")
                self.wait_and_close(exit_status=1, expect_warn=True)
                self.assertIn("\r\nERROR: 404 Client Error:", self.child.before)
                # we have nothing installed
                self.assertFalse(self.launcher_exists_and_is_pinned(self.desktop_filename))
    def test_install_wrong_download_link_github_missing(self):
        """Disabled: would verify behavior when github is unreachable."""
        # TODO: cut all network connection on the container to enable that test
        # NOTE(review): the early return below makes everything after it
        # dead code; a skip decorator would report this more honestly.
        return
        with swap_file_and_restore(self.download_page_file_path) as content:
            with open(self.download_page_file_path, "w") as newfile:
                newfile.write(content.replace('id="linux-bundle', ""))
            self.child = spawn_process(self.command('{} base base-framework'.format(UMAKE)))
            self.expect_and_no_warn("Choose installation path: {}".format(self.installed_path))
            self.child.sendline("")
            self.expect_and_no_warn("\r\nERROR: Connection Error\r\n",
                                    timeout=self.TIMEOUT_INSTALL_PROGRESS, expect_warn=True)
            self.wait_and_close(exit_status=1)
            # we have nothing installed
            self.assertFalse(self.launcher_exists_and_is_pinned(self.desktop_filename))
| gpl-3.0 |
Lektorium-LLC/edx-platform | lms/djangoapps/teams/tests/factories.py | 23 | 1230 | """Factories for testing the Teams API."""
from datetime import datetime
from uuid import uuid4
import factory
import pytz
from factory.django import DjangoModelFactory
from lms.djangoapps.teams.models import CourseTeam, CourseTeamMembership
# Fixed, timezone-aware timestamp shared by both factories so tests get
# deterministic last_activity_at values.
LAST_ACTIVITY_AT = datetime(2015, 8, 15, 0, 0, 0, tzinfo=pytz.utc)
class CourseTeamFactory(DjangoModelFactory):
    """Factory producing CourseTeam rows for tests.

    Note that team_id is not auto-generated from name when using the factory;
    each comes from its own sequence.
    """
    class Meta(object):
        model = CourseTeam
        django_get_or_create = ('team_id',)

    team_id = factory.Sequence(lambda n: "team-{0}".format(n))
    discussion_topic_id = factory.LazyAttribute(lambda team: uuid4().hex)
    name = factory.Sequence(lambda n: "Awesome Team {0}".format(n))
    description = "A simple description"
    last_activity_at = LAST_ACTIVITY_AT
class CourseTeamMembershipFactory(DjangoModelFactory):
    """Factory producing CourseTeamMembership rows for tests."""
    class Meta(object):
        model = CourseTeamMembership

    last_activity_at = LAST_ACTIVITY_AT

    @classmethod
    def _create(cls, model_class, *args, **kwargs):
        """Create the team membership. """
        # Instantiate and save directly, bypassing the default
        # manager-based creation.
        membership = model_class(*args, **kwargs)
        membership.save()
        return membership
| agpl-3.0 |
caLew/sugartest | src/jarabe/frame/activitiestray.py | 7 | 33146 | # Copyright (C) 2006-2007 Red Hat, Inc.
# Copyright (C) 2008 One Laptop Per Child
# Copyright (C) 2010 Collabora Ltd. <http://www.collabora.co.uk/>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from gettext import gettext as _
import tempfile
import os
from gi.repository import GObject
from gi.repository import Gio
from gi.repository import Gtk
from sugar3.graphics import style
from sugar3.graphics.tray import HTray
from sugar3.graphics.xocolor import XoColor
from sugar3.graphics.radiotoolbutton import RadioToolButton
from sugar3.graphics.toolbutton import ToolButton
from sugar3.graphics.icon import Icon, get_icon_file_name
from sugar3.graphics.palette import Palette
from sugar3.graphics.menuitem import MenuItem
from sugar3.graphics.palettemenu import PaletteMenuBox
from sugar3.graphics.palettemenu import PaletteMenuItem
from sugar3.graphics.palettemenu import PaletteMenuItemSeparator
from sugar3.datastore import datastore
from sugar3 import mime
from sugar3 import env
from sugar3 import profile
from jarabe.model import shell
from jarabe.model import invites
from jarabe.model import bundleregistry
from jarabe.model import filetransfer
from jarabe.model import notifications
from jarabe.view.palettes import JournalPalette, CurrentActivityPalette
from jarabe.frame.frameinvoker import FrameWidgetInvoker
from jarabe.frame.notification import NotificationIcon
from jarabe.frame.notification import NotificationButton
from jarabe.frame.notification import NotificationPulsingIcon
import jarabe.frame
class ActivityButton(RadioToolButton):
    """Frame-tray radio button representing one running activity.

    Shows the activity's icon (pulsing while the activity launches) and an
    optional notification badge.
    """
    def __init__(self, home_activity, group):
        RadioToolButton.__init__(self, group=group)
        self.set_palette_invoker(FrameWidgetInvoker(self))
        # Palette content can change between pop-ups, so don't cache it.
        self.palette_invoker.cache_palette = False
        self._home_activity = home_activity
        self._notify_launch_hid = None
        self._icon = NotificationPulsingIcon()
        self._icon.props.base_color = home_activity.get_icon_color()
        self._icon.props.pulse_color = \
            XoColor('%s,%s' % (style.COLOR_BUTTON_GREY.get_svg(),
                               style.COLOR_TOOLBAR_GREY.get_svg()))
        if home_activity.get_icon_path():
            self._icon.props.file = home_activity.get_icon_path()
        else:
            # Let's see if the X11 window can give us an icon.
            window = home_activity.get_window()
            if not window.get_icon_is_fallback():
                pixbuf = window.get_icon()
                self._icon.pixbuf = pixbuf
            else:
                self._icon.props.icon_name = 'image-missing'
        self.set_icon_widget(self._icon)
        self._icon.show()
        if home_activity.props.launch_status == shell.Activity.LAUNCHING:
            # Pulse until the launch resolves one way or the other.
            self._icon.props.pulsing = True
            self._notify_launch_hid = home_activity.connect(
                'notify::launch-status', self.__notify_launch_status_cb)
        elif home_activity.props.launch_status == shell.Activity.LAUNCH_FAILED:
            self._on_failed_launch()
    def create_palette(self):
        """Build the per-activity palette (Journal gets its own flavor)."""
        if self._home_activity.is_journal():
            palette = JournalPalette(self._home_activity)
        else:
            palette = CurrentActivityPalette(self._home_activity)
        palette.connect('done', self.__palette_item_selected_cb)
        palette.set_group_id('frame')
        self.set_palette(palette)
    def __palette_item_selected_cb(self, widget):
        # Hide the frame once the user acted on a palette item.
        frame = jarabe.frame.get_view()
        frame.hide()
    def _on_failed_launch(self):
        # TODO http://bugs.sugarlabs.org/ticket/2007
        pass
    def __notify_launch_status_cb(self, home_activity, pspec):
        """One-shot handler: stop pulsing (or show failure) once the launch
        status changes, then disconnect."""
        home_activity.disconnect(self._notify_launch_hid)
        self._notify_launch_hid = None
        if home_activity.props.launch_status == shell.Activity.LAUNCH_FAILED:
            self._on_failed_launch()
        else:
            self._icon.props.pulsing = False
    def show_badge(self):
        # Delegate the notification badge to the icon widget.
        self._icon.show_badge()
    def hide_badge(self):
        self._icon.hide_badge()
class InviteButton(ToolButton):
    """Invite to shared activity"""
    __gsignals__ = {
        'remove-invite': (GObject.SignalFlags.RUN_FIRST, None, ([])),
    }
    def __init__(self, invite):
        ToolButton.__init__(self)
        self._invite = invite
        self.connect('clicked', self.__clicked_cb)
        self.connect('destroy', self.__destroy_cb)
        bundle_registry = bundleregistry.get_registry()
        bundle = bundle_registry.get_bundle(invite.get_bundle_id())
        self._icon = Icon()
        self._icon.props.xo_color = invite.get_color()
        # Fall back to a generic icon when the bundle is not installed.
        if bundle is not None:
            self._icon.props.file = bundle.get_icon()
        else:
            self._icon.props.icon_name = 'image-missing'
        self.set_icon_widget(self._icon)
        self._icon.show()
        palette = InvitePalette(invite)
        palette.props.invoker = FrameWidgetInvoker(self)
        palette.set_group_id('frame')
        palette.connect('remove-invite', self.__remove_invite_cb)
        self.set_palette(palette)
        # A transient notification icon mirrors this invite in the frame
        # corner; it is removed on click or when the button is destroyed.
        self._notif_icon = NotificationIcon()
        self._notif_icon.connect('button-release-event',
                                 self.__button_release_event_cb)
        self._notif_icon.props.xo_color = invite.get_color()
        if bundle is not None:
            self._notif_icon.props.icon_filename = bundle.get_icon()
        else:
            self._notif_icon.props.icon_name = 'image-missing'
        frame = jarabe.frame.get_view()
        frame.add_notification(self._notif_icon, Gtk.CornerType.TOP_LEFT)
    def __button_release_event_cb(self, icon, event):
        """Clicking the notification icon joins the activity directly."""
        if self._notif_icon is not None:
            frame = jarabe.frame.get_view()
            frame.remove_notification(self._notif_icon)
            self._notif_icon = None
        self._invite.join()
        self.emit('remove-invite')
    def __clicked_cb(self, button):
        # Clicking the tray button opens the Join/Decline palette instead.
        self.palette.popup(immediate=True, state=Palette.SECONDARY)
    def __remove_invite_cb(self, palette):
        self.emit('remove-invite')
    def __destroy_cb(self, button):
        # Ensure the corner notification does not outlive the button.
        if self._notif_icon is not None:
            frame = jarabe.frame.get_view()
            frame.remove_notification(self._notif_icon)
            self._notif_icon = None
class InvitePalette(Palette):
    """Palette for frame or notification icon for invites, offering Join
    and Decline actions."""

    __gsignals__ = {
        'remove-invite': (GObject.SignalFlags.RUN_FIRST, None, ([])),
    }

    def __init__(self, invite):
        Palette.__init__(self, '')
        self._invite = invite

        # Build the two action entries from a small table.
        for label, icon_name, callback in (
                (_('Join'), 'dialog-ok', self.__join_activate_cb),
                (_('Decline'), 'dialog-cancel', self.__decline_activate_cb)):
            entry = MenuItem(label, icon_name=icon_name)
            entry.connect('activate', callback)
            self.menu.append(entry)
            entry.show()

        # Title: bundle name when installed, otherwise the raw bundle id.
        bundle_id = invite.get_bundle_id()
        registry = bundleregistry.get_registry()
        self._bundle = registry.get_bundle(bundle_id)
        title = self._bundle.get_name() if self._bundle else bundle_id
        self.set_primary_text(title)

    def __join_activate_cb(self, menu_item):
        """Join the shared activity, then ask the owner to drop the invite."""
        self._invite.join()
        self.emit('remove-invite')

    def __decline_activate_cb(self, menu_item):
        """Dismiss the invite without joining."""
        self.emit('remove-invite')
class ActivitiesTray(HTray):
    """Horizontal frame tray listing running activities, pending invites,
    notification buttons and file transfers.

    Wires itself to the shell model, the invites model, the file-transfer
    signal and the notification service in __init__.
    """
    def __init__(self):
        HTray.__init__(self)
        # home activity -> ActivityButton
        self._buttons = {}
        # activity id / notification app name -> button
        self._buttons_by_name = {}
        # invite -> InviteButton
        self._invite_to_item = {}
        # Guards against re-entrant clicks while we toggle buttons ourselves.
        self._freeze_button_clicks = False
        self._home_model = shell.get_model()
        self._home_model.connect('activity-added', self.__activity_added_cb)
        self._home_model.connect('activity-removed',
                                 self.__activity_removed_cb)
        self._home_model.connect('active-activity-changed',
                                 self.__activity_changed_cb)
        self._home_model.connect('tabbing-activity-changed',
                                 self.__tabbing_activity_changed_cb)
        self._invites = invites.get_instance()
        for invite in self._invites:
            self._add_invite(invite)
        self._invites.connect('invite-added', self.__invite_added_cb)
        self._invites.connect('invite-removed', self.__invite_removed_cb)
        filetransfer.new_file_transfer.connect(self.__new_file_transfer_cb)
        service = notifications.get_service()
        service.notification_received.connect(self.__notification_received_cb)
        service.buffer_cleared.connect(self.__buffer_cleared_cb)
    def __notification_received_cb(self, **kwargs):
        """Show a badge on the matching button, creating a dedicated
        NotificationButton when no activity button matches app_name."""
        logging.debug('ActivitiesTray.__notification_received_cb')
        name = kwargs.get('app_name')
        button = self._buttons_by_name.get(name, None)
        if button is None:
            hints = kwargs.get('hints')
            icon = NotificationPulsingIcon(
                hints.get('x-sugar-icon-file-name', ''),
                hints.get('x-sugar-icon-name', ''),
                hints.get('x-sugar-icon-colors', ''))
            button = NotificationButton(name)
            button.set_icon(icon)
            button.show()
            self.add_item(button)
            self._buttons_by_name[name] = button
        if hasattr(button, 'show_badge'):
            button.show_badge()
    def __buffer_cleared_cb(self, **kwargs):
        """Drop a standalone notification button, or clear the badge on an
        activity button, when its notifications are cleared."""
        logging.debug('ActivitiesTray.__buffer_cleared_cb')
        name = kwargs.get('app_name', None)
        button = self._buttons_by_name.get(name, None)
        if isinstance(button, NotificationButton):
            self.remove_item(button)
            del self._buttons_by_name[name]
            return
        if hasattr(button, 'hide_badge'):
            button.hide_badge()
    def __activity_added_cb(self, home_model, home_activity):
        logging.debug('__activity_added_cb: %r', home_activity)
        # All activity buttons share one radio group, seeded by the first.
        if self.get_children():
            group = self.get_children()[0]
        else:
            group = None
        button = ActivityButton(home_activity, group)
        self.add_item(button)
        self._buttons[home_activity] = button
        self._buttons_by_name[home_activity.get_activity_id()] = button
        button.connect('clicked', self.__activity_clicked_cb, home_activity)
        button.show()
    def __activity_removed_cb(self, home_model, home_activity):
        logging.debug('__activity_removed_cb: %r', home_activity)
        button = self._buttons[home_activity]
        self.remove_item(button)
        del self._buttons[home_activity]
        del self._buttons_by_name[home_activity.get_activity_id()]
    def _activate_activity(self, home_activity):
        """Select the button for *home_activity* without triggering the
        clicked handler, and scroll it into view."""
        button = self._buttons[home_activity]
        self._freeze_button_clicks = True
        button.props.active = True
        self._freeze_button_clicks = False
        self.scroll_to_item(button)
        # Redraw immediately.
        # The widget may not be realized yet, and then there is no window.
        x11_window = self.get_window()
        if x11_window:
            x11_window.process_updates(True)
    def __activity_changed_cb(self, home_model, home_activity):
        logging.debug('__activity_changed_cb: %r', home_activity)
        if home_activity is None:
            return
        # Only select the new activity, if there is no tabbing activity.
        if home_model.get_tabbing_activity() is None:
            self._activate_activity(home_activity)
    def __tabbing_activity_changed_cb(self, home_model, home_activity):
        logging.debug('__tabbing_activity_changed_cb: %r', home_activity)
        # If the tabbing_activity is set to None just do nothing.
        # The active activity will be updated a bit later (and it will
        # be set to the activity that is currently selected).
        if home_activity is None:
            return
        self._activate_activity(home_activity)
    def __activity_clicked_cb(self, button, home_activity):
        """User clicked an activity button: raise its window and hide the
        frame (ignored while we are programmatically toggling)."""
        if not self._freeze_button_clicks and button.props.active:
            logging.debug('ActivitiesTray.__activity_clicked_cb')
            window = home_activity.get_window()
            if window:
                window.activate(Gtk.get_current_event_time())
            frame = jarabe.frame.get_view()
            frame.hide()
    def __remove_invite_cb(self, icon, invite):
        self._invites.remove_invite(invite)
    def __invite_added_cb(self, invites_model, invite):
        self._add_invite(invite)
    def __invite_removed_cb(self, invites_model, invite):
        self._remove_invite(invite)
    def _add_invite(self, invite):
        """Add an invite"""
        item = InviteButton(invite)
        item.connect('remove-invite', self.__remove_invite_cb, invite)
        self.add_item(item)
        item.show()
        self._invite_to_item[invite] = item
    def _remove_invite(self, invite):
        self.remove_item(self._invite_to_item[invite])
        self._invite_to_item[invite].destroy()
        del self._invite_to_item[invite]
    def __new_file_transfer_cb(self, **kwargs):
        """Add a transfer button for a newly announced file transfer."""
        file_transfer = kwargs['file_transfer']
        logging.debug('__new_file_transfer_cb %r', file_transfer)
        if isinstance(file_transfer, filetransfer.IncomingFileTransfer):
            button = IncomingTransferButton(file_transfer)
        elif isinstance(file_transfer, filetransfer.OutgoingFileTransfer):
            button = OutgoingTransferButton(file_transfer)
        self.add_item(button)
        button.show()
class BaseTransferButton(ToolButton):
    """Tray button for an ongoing file transfer, with a notification
    icon attached in a corner of the screen.
    """
    def __init__(self, file_transfer):
        ToolButton.__init__(self)
        self.file_transfer = file_transfer
        file_transfer.connect('notify::state', self.__notify_state_cb)
        icon = Icon()
        self.props.icon_widget = icon
        icon.show()
        # Corner-of-screen icon shown until the user acknowledges it.
        self.notif_icon = NotificationIcon()
        self.notif_icon.connect('button-release-event',
                                self.__button_release_event_cb)
        self.connect('clicked', self.__button_clicked_cb)
    def __button_release_event_cb(self, icon, event):
        # A click on the notification icon dismisses it (once).
        if self.notif_icon is not None:
            frame = jarabe.frame.get_view()
            frame.remove_notification(self.notif_icon)
            self.notif_icon = None
    def __button_clicked_cb(self, button):
        # Clicking the tray button pops up the transfer's palette.
        self.palette.popup(immediate=True, state=Palette.SECONDARY)
    def remove(self):
        """Remove this button and its notification icon from the frame."""
        frame = jarabe.frame.get_view()
        # NOTE(review): notif_icon may already be None if the user
        # dismissed the notification above; this then passes None to
        # remove_notification -- presumably a no-op, but confirm.
        frame.remove_notification(self.notif_icon)
        self.props.parent.remove(self)
    def __notify_state_cb(self, file_transfer, pspec):
        logging.debug('_update state: %r %r', file_transfer.props.state,
                      file_transfer.reason_last_change)
        if file_transfer.props.state == filetransfer.FT_STATE_CANCELLED:
            # Only auto-remove when *we* stopped the transfer; a remote
            # cancel keeps the button so the palette can explain why
            # (see IncomingTransferPalette._update, REMOTE_STOPPED branch).
            if file_transfer.reason_last_change == \
               filetransfer.FT_REASON_LOCAL_STOPPED:
                self.remove()
class IncomingTransferButton(BaseTransferButton):
    """UI element representing an ongoing incoming file transfer.

    Mirrors the transfer's progress and result into a Journal
    (datastore) entry, which is written on completion and destroyed on
    cancellation.
    """
    def __init__(self, file_transfer):
        BaseTransferButton.__init__(self, file_transfer)
        # Journal entry tracking the transfer's progress/result.
        self._ds_object = datastore.create()
        file_transfer.connect('notify::state', self.__notify_state_cb)
        file_transfer.connect('notify::transferred-bytes',
                              self.__notify_transferred_bytes_cb)
        # Pick the most specific themed icon available for the mime
        # type, falling back to the generic octet-stream icon.
        icons = Gio.content_type_get_icon(file_transfer.mime_type).props.names
        icons.append('application-octet-stream')
        for icon_name in icons:
            icon_name = 'transfer-from-%s' % icon_name
            file_name = get_icon_file_name(icon_name)
            if file_name is not None:
                self.props.icon_widget.props.icon_name = icon_name
                self.notif_icon.props.icon_name = icon_name
                break
        # Color the icons with the sending buddy's XO colors.
        icon_color = file_transfer.buddy.props.color
        self.props.icon_widget.props.xo_color = icon_color
        self.notif_icon.props.xo_color = icon_color
        frame = jarabe.frame.get_view()
        frame.add_notification(self.notif_icon,
                               Gtk.CornerType.TOP_LEFT)
    def create_palette(self):
        """Return the palette offering accept/decline/cancel actions."""
        palette = IncomingTransferPalette(self.file_transfer)
        palette.connect('dismiss-clicked', self.__dismiss_clicked_cb)
        palette.props.invoker = FrameWidgetInvoker(self)
        palette.set_group_id('frame')
        return palette
    def __notify_state_cb(self, file_transfer, pspec):
        if file_transfer.props.state == filetransfer.FT_STATE_OPEN:
            # Transfer started: seed the Journal entry's metadata.
            logging.debug('__notify_state_cb OPEN')
            self._ds_object.metadata['title'] = file_transfer.title
            self._ds_object.metadata['description'] = file_transfer.description
            self._ds_object.metadata['progress'] = '0'
            self._ds_object.metadata['keep'] = '0'
            self._ds_object.metadata['buddies'] = ''
            self._ds_object.metadata['preview'] = ''
            self._ds_object.metadata['icon-color'] = \
                file_transfer.buddy.props.color.to_string()
            self._ds_object.metadata['mime_type'] = file_transfer.mime_type
        elif file_transfer.props.state == filetransfer.FT_STATE_COMPLETED:
            # Transfer finished: attach the received file and persist.
            logging.debug('__notify_state_cb COMPLETED')
            self._ds_object.metadata['progress'] = '100'
            self._ds_object.file_path = file_transfer.destination_path
            datastore.write(self._ds_object, transfer_ownership=True,
                            reply_handler=self.__reply_handler_cb,
                            error_handler=self.__error_handler_cb)
        elif file_transfer.props.state == filetransfer.FT_STATE_CANCELLED:
            # Transfer cancelled: drop the (partial) Journal entry.
            logging.debug('__notify_state_cb CANCELLED')
            object_id = self._ds_object.object_id
            if object_id is not None:
                self._ds_object.destroy()
                datastore.delete(object_id)
                self._ds_object = None
    def __notify_transferred_bytes_cb(self, file_transfer, pspec):
        # Use float division: with truncating integer division (Python 2
        # semantics) the fraction was 0 until the transfer completed, so
        # the Journal progress stayed stuck at 0.  This matches the
        # explicit float() used by BaseTransferPalette.update_progress().
        progress = file_transfer.props.transferred_bytes / \
            float(file_transfer.file_size)
        # Store an integer percentage string, consistent with the
        # '0'/'100' values written by __notify_state_cb.
        self._ds_object.metadata['progress'] = str(int(progress * 100))
        datastore.write(self._ds_object, update_mtime=False)
    def __reply_handler_cb(self):
        logging.debug('__reply_handler_cb %r', self._ds_object.object_id)
    def __error_handler_cb(self, error):
        logging.debug('__error_handler_cb %r %s', self._ds_object.object_id,
                      error)
    def __dismiss_clicked_cb(self, palette):
        self.remove()
class OutgoingTransferButton(BaseTransferButton):
    """UI element representing an ongoing outgoing file transfer
    """
    def __init__(self, file_transfer):
        BaseTransferButton.__init__(self, file_transfer)
        # Pick the most specific themed icon available for the mime
        # type, falling back to the generic octet-stream icon.
        icons = Gio.content_type_get_icon(file_transfer.mime_type).props.names
        icons.append('application-octet-stream')
        for icon_name in icons:
            icon_name = 'transfer-to-%s' % icon_name
            file_name = get_icon_file_name(icon_name)
            if file_name is not None:
                self.props.icon_widget.props.icon_name = icon_name
                self.notif_icon.props.icon_name = icon_name
                break
        # Outgoing transfers are colored with the local user's XO colors
        # (incoming ones use the sending buddy's; see
        # IncomingTransferButton).
        icon_color = profile.get_color()
        self.props.icon_widget.props.xo_color = icon_color
        self.notif_icon.props.xo_color = icon_color
        frame = jarabe.frame.get_view()
        frame.add_notification(self.notif_icon,
                               Gtk.CornerType.TOP_LEFT)
    def create_palette(self):
        """Return the palette offering cancel/dismiss actions."""
        palette = OutgoingTransferPalette(self.file_transfer)
        palette.connect('dismiss-clicked', self.__dismiss_clicked_cb)
        palette.props.invoker = FrameWidgetInvoker(self)
        palette.set_group_id('frame')
        return palette
    def __dismiss_clicked_cb(self, palette):
        self.remove()
class BaseTransferPalette(Palette):
    """Base palette class for frame or notification icon for file transfers
    """
    __gtype_name__ = 'SugarBaseTransferPalette'
    __gsignals__ = {
        # Emitted when the user chooses to dismiss the transfer UI.
        'dismiss-clicked': (GObject.SignalFlags.RUN_FIRST, None, ([])),
    }
    def __init__(self, file_transfer):
        Palette.__init__(self, file_transfer.title)
        self.file_transfer = file_transfer
        # Set by subclasses when they build progress UI into the palette.
        self.progress_bar = None
        self.progress_label = None
        self._notify_transferred_bytes_handler = None
        self.connect('popup', self.__popup_cb)
        self.connect('popdown', self.__popdown_cb)
    def __popup_cb(self, palette):
        # Track byte-count updates only while the palette is visible;
        # the handler id is kept so __popdown_cb can disconnect it.
        self.update_progress()
        self._notify_transferred_bytes_handler = \
            self.file_transfer.connect('notify::transferred_bytes',
                                       self.__notify_transferred_bytes_cb)
    def __popdown_cb(self, palette):
        if self._notify_transferred_bytes_handler is not None:
            self.file_transfer.disconnect(
                self._notify_transferred_bytes_handler)
            self._notify_transferred_bytes_handler = None
    def __notify_transferred_bytes_cb(self, file_transfer, pspec):
        self.update_progress()
    def _format_size(self, size):
        """Return *size* in bytes as a short human-readable string."""
        if size < 1024:
            return _('%dB') % size
        elif size < 1048576:
            return _('%dKB') % (size / 1024)
        else:
            return _('%dMB') % (size / 1048576)
    def update_progress(self):
        """Refresh the progress bar and label, if the subclass built them."""
        logging.debug('update_progress: %r',
                      self.file_transfer.props.transferred_bytes)
        if self.progress_bar is None:
            return
        self.progress_bar.props.fraction = \
            self.file_transfer.props.transferred_bytes / \
            float(self.file_transfer.file_size)
        logging.debug('update_progress: %r', self.progress_bar.props.fraction)
        transferred = self._format_size(
            self.file_transfer.props.transferred_bytes)
        total = self._format_size(self.file_transfer.file_size)
        # TRANS: file transfer, bytes transferred, e.g. 128 of 1024
        self.progress_label.props.label = _('%s of %s') % (transferred, total)
class IncomingTransferPalette(BaseTransferPalette):
    """Palette for frame or notification icon for incoming file transfers
    """
    __gtype_name__ = 'SugarIncomingTransferPalette'
    def __init__(self, file_transfer):
        BaseTransferPalette.__init__(self, file_transfer)
        self.file_transfer.connect('notify::state', self.__notify_state_cb)
        nick = str(self.file_transfer.buddy.props.nick)
        self.props.secondary_text = _('Transfer from %s') % (nick,)
        self._update()
    def __notify_state_cb(self, file_transfer, pspec):
        # Rebuild the palette content whenever the transfer changes state.
        self._update()
    def _update(self):
        """Rebuild the palette menu to match the current transfer state."""
        box = PaletteMenuBox()
        self.set_content(box)
        box.show()
        logging.debug('_update state: %r', self.file_transfer.props.state)
        if self.file_transfer.props.state == filetransfer.FT_STATE_PENDING:
            # Pending: offer Accept/Decline plus a summary of the offer
            # (description, size and mime type).
            menu_item = PaletteMenuItem(_('Accept'))
            icon = Icon(icon_name='dialog-ok',
                        pixel_size=style.SMALL_ICON_SIZE)
            menu_item.set_image(icon)
            icon.show()
            menu_item.connect('activate', self.__accept_activate_cb)
            box.append_item(menu_item)
            menu_item.show()
            menu_item = PaletteMenuItem(_('Decline'))
            icon = Icon(icon_name='dialog-cancel',
                        pixel_size=style.SMALL_ICON_SIZE)
            menu_item.set_image(icon)
            icon.show()
            menu_item.connect('activate', self.__decline_activate_cb)
            box.append_item(menu_item)
            menu_item.show()
            separator = PaletteMenuItemSeparator()
            box.append_item(separator)
            separator.show()
            inner_box = Gtk.VBox()
            inner_box.set_spacing(style.DEFAULT_PADDING)
            box.append_item(inner_box, vertical_padding=0)
            inner_box.show()
            if self.file_transfer.description:
                # Collapse newlines so the description fits one line.
                text = self.file_transfer.description.replace('\n', ' ')
                label = Gtk.Label(label=text)
                label.set_max_width_chars(style.MENU_WIDTH_CHARS)
                label.set_ellipsize(style.ELLIPSIZE_MODE_DEFAULT)
                inner_box.add(label)
                label.show()
            mime_type = self.file_transfer.mime_type
            type_description = mime.get_mime_description(mime_type)
            size = self._format_size(self.file_transfer.file_size)
            label = Gtk.Label(label='%s (%s)' % (size, type_description))
            inner_box.add(label)
            label.show()
        elif self.file_transfer.props.state in \
                [filetransfer.FT_STATE_ACCEPTED, filetransfer.FT_STATE_OPEN]:
            # Accepted/open: offer Cancel and show live progress.
            menu_item = PaletteMenuItem(_('Cancel'))
            icon = Icon(icon_name='dialog-cancel',
                        pixel_size=style.SMALL_ICON_SIZE)
            menu_item.set_image(icon)
            icon.show()
            menu_item.connect('activate', self.__cancel_activate_cb)
            box.append_item(menu_item)
            menu_item.show()
            separator = PaletteMenuItemSeparator()
            box.append_item(separator)
            separator.show()
            inner_box = Gtk.VBox()
            inner_box.set_spacing(style.DEFAULT_PADDING)
            box.append_item(inner_box, vertical_padding=0)
            inner_box.show()
            # progress_bar/progress_label are read by the base class's
            # update_progress().
            self.progress_bar = Gtk.ProgressBar()
            inner_box.add(self.progress_bar)
            self.progress_bar.show()
            self.progress_label = Gtk.Label(label='')
            inner_box.add(self.progress_label)
            self.progress_label.show()
            self.update_progress()
        elif self.file_transfer.props.state == filetransfer.FT_STATE_COMPLETED:
            # Completed: only a Dismiss action remains.
            menu_item = PaletteMenuItem(_('Dismiss'))
            icon = Icon(icon_name='dialog-cancel',
                        pixel_size=style.SMALL_ICON_SIZE)
            menu_item.set_image(icon)
            icon.show()
            menu_item.connect('activate', self.__dismiss_activate_cb)
            box.append_item(menu_item)
            menu_item.show()
            self.update_progress()
        elif self.file_transfer.props.state == filetransfer.FT_STATE_CANCELLED:
            # Cancelled by the remote side: explain it and offer Dismiss.
            # (A local cancel removes the button entirely; see
            # BaseTransferButton.__notify_state_cb.)
            if self.file_transfer.reason_last_change == \
                    filetransfer.FT_REASON_REMOTE_STOPPED:
                menu_item = PaletteMenuItem(_('Dismiss'))
                icon = Icon(icon_name='dialog-cancel',
                            pixel_size=style.SMALL_ICON_SIZE)
                menu_item.set_image(icon)
                icon.show()
                menu_item.connect('activate', self.__dismiss_activate_cb)
                box.append_item(menu_item)
                menu_item.show()
                inner_box = Gtk.VBox()
                inner_box.set_spacing(style.DEFAULT_PADDING)
                box.append_item(inner_box, vertical_padding=0)
                inner_box.show()
                text = _('The other participant canceled the file transfer')
                label = Gtk.Label(label=text)
                inner_box.add(label)
                label.show()
    def __accept_activate_cb(self, menu_item):
        # TODO: figure out the best place to get rid of that temp file
        extension = mime.get_primary_extension(self.file_transfer.mime_type)
        if extension is None:
            extension = '.bin'
        # mkstemp is used only to reserve a unique path under the
        # profile's data dir; the file itself is removed right away and
        # will be (re)created by the transfer code at file_path.
        fd, file_path = tempfile.mkstemp(suffix=extension,
                                         prefix=self._sanitize(
                                             self.file_transfer.title),
                                         dir=os.path.join(
                                             env.get_profile_path(), 'data'))
        os.close(fd)
        os.unlink(file_path)
        self.file_transfer.accept(file_path)
    def _sanitize(self, file_name):
        """Replace characters unsafe for a file-name prefix with '_'."""
        file_name = file_name.replace('/', '_')
        file_name = file_name.replace('.', '_')
        file_name = file_name.replace('?', '_')
        return file_name
    def __decline_activate_cb(self, menu_item):
        self.file_transfer.cancel()
    def __cancel_activate_cb(self, menu_item):
        self.file_transfer.cancel()
    def __dismiss_activate_cb(self, menu_item):
        self.emit('dismiss-clicked')
class OutgoingTransferPalette(BaseTransferPalette):
    """Palette for frame or notification icon for outgoing file transfers
    """
    __gtype_name__ = 'SugarOutgoingTransferPalette'
    def __init__(self, file_transfer):
        BaseTransferPalette.__init__(self, file_transfer)
        self.progress_bar = None
        self.progress_label = None
        self.file_transfer.connect('notify::state', self.__notify_state_cb)
        nick = str(file_transfer.buddy.props.nick)
        self.props.secondary_text = _('Transfer to %s') % (nick,)
        self._update()
    def __notify_state_cb(self, file_transfer, pspec):
        # Rebuild the palette content whenever the transfer changes state.
        self._update()
    def _update(self):
        """Rebuild the palette menu to match the current transfer state."""
        new_state = self.file_transfer.props.state
        logging.debug('_update state: %r', new_state)
        box = PaletteMenuBox()
        self.set_content(box)
        box.show()
        if new_state == filetransfer.FT_STATE_PENDING:
            # Waiting for the peer to accept: offer Cancel plus a
            # summary of the offer (description, size and mime type).
            menu_item = PaletteMenuItem(_('Cancel'))
            icon = Icon(icon_name='dialog-cancel',
                        pixel_size=style.SMALL_ICON_SIZE)
            menu_item.set_image(icon)
            icon.show()
            menu_item.connect('activate', self.__cancel_activate_cb)
            box.append_item(menu_item)
            menu_item.show()
            separator = PaletteMenuItemSeparator()
            box.append_item(separator)
            separator.show()
            inner_box = Gtk.VBox()
            inner_box.set_spacing(style.DEFAULT_PADDING)
            box.append_item(inner_box, vertical_padding=0)
            inner_box.show()
            if self.file_transfer.description:
                label = Gtk.Label(label=self.file_transfer.description)
                inner_box.add(label)
                label.show()
            mime_type = self.file_transfer.mime_type
            type_description = mime.get_mime_description(mime_type)
            size = self._format_size(self.file_transfer.file_size)
            label = Gtk.Label(label='%s (%s)' % (size, type_description))
            inner_box.add(label)
            label.show()
        elif new_state in [filetransfer.FT_STATE_ACCEPTED,
                           filetransfer.FT_STATE_OPEN]:
            # Accepted/open: offer Cancel and show live progress.
            menu_item = PaletteMenuItem(_('Cancel'))
            icon = Icon(icon_name='dialog-cancel',
                        pixel_size=style.SMALL_ICON_SIZE)
            menu_item.set_image(icon)
            icon.show()
            menu_item.connect('activate', self.__cancel_activate_cb)
            box.append_item(menu_item)
            menu_item.show()
            separator = PaletteMenuItemSeparator()
            box.append_item(separator)
            separator.show()
            inner_box = Gtk.VBox()
            inner_box.set_spacing(style.DEFAULT_PADDING)
            box.append_item(inner_box, vertical_padding=0)
            inner_box.show()
            # progress_bar/progress_label are read by the base class's
            # update_progress().
            self.progress_bar = Gtk.ProgressBar()
            inner_box.add(self.progress_bar)
            self.progress_bar.show()
            self.progress_label = Gtk.Label(label='')
            inner_box.add(self.progress_label)
            self.progress_label.show()
            self.update_progress()
        elif new_state in [filetransfer.FT_STATE_COMPLETED,
                           filetransfer.FT_STATE_CANCELLED]:
            # Finished (either way): only a Dismiss action remains.
            menu_item = PaletteMenuItem(_('Dismiss'))
            icon = Icon(icon_name='dialog-cancel',
                        pixel_size=style.SMALL_ICON_SIZE)
            menu_item.set_image(icon)
            icon.show()
            menu_item.connect('activate', self.__dismiss_activate_cb)
            box.append_item(menu_item)
            menu_item.show()
            self.update_progress()
    def __cancel_activate_cb(self, menu_item):
        self.file_transfer.cancel()
    def __dismiss_activate_cb(self, menu_item):
        self.emit('dismiss-clicked')
| gpl-2.0 |
zx8/youtube-dl | youtube_dl/extractor/washingtonpost.py | 79 | 5626 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
strip_jsonp,
)
class WashingtonPostIE(InfoExtractor):
    """Extractor for washingtonpost.com article pages.

    An article may embed several PostTV videos; the result is a playlist
    of all videos found on the page.
    """
    _VALID_URL = r'https?://(?:www\.)?washingtonpost\.com/.*?/(?P<id>[^/]+)/(?:$|[?#])'
    _TESTS = [{
        'url': 'http://www.washingtonpost.com/sf/national/2014/03/22/sinkhole-of-bureaucracy/',
        'info_dict': {
            'id': 'sinkhole-of-bureaucracy',
            'title': 'Sinkhole of bureaucracy',
        },
        'playlist': [{
            'md5': '79132cc09ec5309fa590ae46e4cc31bc',
            'info_dict': {
                'id': 'fc433c38-b146-11e3-b8b3-44b1d1cd4c1f',
                'ext': 'mp4',
                'title': 'Breaking Points: The Paper Mine',
                'duration': 1287,
                'description': 'Overly complicated paper pushing is nothing new to government bureaucracy. But the way federal retirement applications are filed may be the most outdated. David Fahrenthold explains.',
                'uploader': 'The Washington Post',
                'timestamp': 1395527908,
                'upload_date': '20140322',
            },
        }, {
            'md5': 'e1d5734c06865cc504ad99dc2de0d443',
            'info_dict': {
                'id': '41255e28-b14a-11e3-b8b3-44b1d1cd4c1f',
                'ext': 'mp4',
                'title': 'The town bureaucracy sustains',
                'description': 'Underneath the friendly town of Boyers is a sea of government paperwork. In a disused limestone mine, hundreds of locals now track, file and process retirement applications for the federal government. We set out to find out what it\'s like to do paperwork 230 feet underground.',
                'duration': 2217,
                'timestamp': 1395528005,
                'upload_date': '20140322',
                'uploader': 'The Washington Post',
            },
        }],
    }, {
        'url': 'http://www.washingtonpost.com/blogs/wonkblog/wp/2014/12/31/one-airline-figured-out-how-to-make-sure-its-airplanes-never-disappear/',
        'info_dict': {
            'id': 'one-airline-figured-out-how-to-make-sure-its-airplanes-never-disappear',
            'title': 'One airline figured out how to make sure its airplanes never disappear',
        },
        'playlist': [{
            'md5': 'a7c1b5634ba5e57a6a82cdffa5b1e0d0',
            'info_dict': {
                'id': '0e4bb54c-9065-11e4-a66f-0ca5037a597d',
                'ext': 'mp4',
                'description': 'Washington Post transportation reporter Ashley Halsey III explains why a plane\'s black box needs to be recovered from a crash site instead of having its information streamed in real time throughout the flight.',
                'upload_date': '20141230',
                'uploader': 'The Washington Post',
                'timestamp': 1419974765,
                'title': 'Why black boxes don’t transmit data in real time',
            }
        }]
    }]
    def _real_extract(self, url):
        """Collect every embedded video UUID on the page and return a
        playlist of the corresponding PostTV entries."""
        page_id = self._match_id(url)
        webpage = self._download_webpage(url, page_id)
        title = self._og_search_title(webpage)
        # Video UUIDs appear either as a posttv-video-embed div attribute
        # or as a bare data-video-uuid attribute.
        uuids = re.findall(r'''(?x)
            (?:
                <div\s+class="posttv-video-embed[^>]*?data-uuid=|
                data-video-uuid=
            )"([^"]+)"''', webpage)
        entries = []
        for i, uuid in enumerate(uuids, start=1):
            vinfo_all = self._download_json(
                'http://www.washingtonpost.com/posttv/c/videojson/%s?resType=jsonp' % uuid,
                page_id,
                transform_source=strip_jsonp,
                note='Downloading information of video %d/%d' % (i, len(uuids))
            )
            vinfo = vinfo_all[0]['contentConfig']
            uploader = vinfo.get('credits', {}).get('source')
            # dateFirstPublished is in milliseconds; scale to seconds.
            timestamp = int_or_none(
                vinfo.get('dateConfig', {}).get('dateFirstPublished'), 1000)
            formats = [{
                'format_id': (
                    '%s-%s-%s' % (s.get('type'), s.get('width'), s.get('bitrate'))
                    if s.get('width')
                    else s.get('type')),
                # Streams with width == 0 are treated as audio-only.
                'vbr': s.get('bitrate') if s.get('width') != 0 else None,
                'width': s.get('width'),
                'height': s.get('height'),
                'acodec': s.get('audioCodec'),
                'vcodec': s.get('videoCodec') if s.get('width') != 0 else 'none',
                'filesize': s.get('fileSize'),
                'url': s.get('url'),
                'ext': 'mp4',
                # Deprioritize SMIL playlists in favor of direct streams.
                'preference': -100 if s.get('type') == 'smil' else None,
                'protocol': {
                    'MP4': 'http',
                    'F4F': 'f4m',
                }.get(s.get('type')),
            } for s in vinfo.get('streams', [])]
            source_media_url = vinfo.get('sourceMediaURL')
            if source_media_url:
                formats.append({
                    'format_id': 'source_media',
                    'url': source_media_url,
                })
            self._sort_formats(formats)
            entries.append({
                'id': uuid,
                'title': vinfo['title'],
                'description': vinfo.get('blurb'),
                'uploader': uploader,
                'formats': formats,
                # NOTE(review): scale 100 implies videoDuration is in
                # centiseconds -- matches the test durations, but confirm
                # against the PostTV API.
                'duration': int_or_none(vinfo.get('videoDuration'), 100),
                'timestamp': timestamp,
            })
        return {
            '_type': 'playlist',
            'entries': entries,
            'id': page_id,
            'title': title,
        }
| unlicense |
thaim/ansible | lib/ansible/module_utils/network/ios/config/lag_interfaces/lag_interfaces.py | 19 | 11580 | #
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The ios_lag_interfaces class
It is in this file where the current configuration (as dict)
is compared to the provided configuration (as dict) and the command set
necessary to bring the current configuration to it's desired end-state is
created
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import re
from ansible.module_utils.network.common import utils
from ansible.module_utils.network.common.cfg.base import ConfigBase
from ansible.module_utils.network.common.utils import to_list
from ansible.module_utils.network.ios.facts.facts import Facts
from ansible.module_utils.network.ios.utils.utils import dict_to_set
from ansible.module_utils.network.ios.utils.utils import filter_dict_having_none_value, remove_duplicate_interface
class Lag_interfaces(ConfigBase):
    """
    The ios_lag_interfaces class.

    Compares the current device configuration (gathered as facts) with
    the configuration requested by the task and generates the IOS
    command set needed to reach the desired state.
    """
    gather_subset = [
        '!all',
        '!min',
    ]
    gather_network_resources = [
        'lag_interfaces',
    ]
    def __init__(self, module):
        super(Lag_interfaces, self).__init__(module)
    def get_lag_interfaces_facts(self):
        """ Get the 'facts' (the current configuration)

        :rtype: A dictionary
        :returns: The current configuration as a dictionary
        """
        facts, _warnings = Facts(self._module).get_facts(self.gather_subset, self.gather_network_resources)
        lag_interfaces_facts = facts['ansible_network_resources'].get('lag_interfaces')
        if not lag_interfaces_facts:
            return []
        return lag_interfaces_facts
    def execute_module(self):
        """ Execute the module

        :rtype: A dictionary
        :returns: The result from module execution
        """
        result = {'changed': False}
        commands = list()
        warnings = list()
        existing_lag_interfaces_facts = self.get_lag_interfaces_facts()
        commands.extend(self.set_config(existing_lag_interfaces_facts))
        if commands:
            # Only push commands to the device outside of check mode.
            if not self._module.check_mode:
                self._connection.edit_config(commands)
            result['changed'] = True
        result['commands'] = commands
        changed_lag_interfaces_facts = self.get_lag_interfaces_facts()
        result['before'] = existing_lag_interfaces_facts
        if result['changed']:
            result['after'] = changed_lag_interfaces_facts
        result['warnings'] = warnings
        return result
    def set_config(self, existing_lag_interfaces_facts):
        """ Collect the configuration from the args passed to the module,
            collect the current configuration (as a dict from facts)

        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        want = self._module.params['config']
        have = existing_lag_interfaces_facts
        resp = self.set_state(want, have)
        return to_list(resp)
    def set_state(self, want, have):
        """ Select the appropriate function based on the state provided

        :param want: the desired configuration as a dictionary
        :param have: the current configuration as a dictionary
        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        state = self._module.params['state']
        if state in ('overridden', 'merged', 'replaced') and not want:
            self._module.fail_json(msg='value of config parameter must not be empty for state {0}'.format(state))
        module = self._module
        if state == 'overridden':
            commands = self._state_overridden(want, have, module)
        elif state == 'deleted':
            commands = self._state_deleted(want, have)
        elif state == 'merged':
            commands = self._state_merged(want, have, module)
        elif state == 'replaced':
            commands = self._state_replaced(want, have, module)
        return commands
    def _state_replaced(self, want, have, module):
        """ The command generator when state is replaced

        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        commands = []
        for interface in want:
            for each_interface in interface.get('members'):
                for each in have:
                    if each.get('members'):
                        # Look for the wanted member among this have
                        # entry's members; the for/else 'continue' skips
                        # entries without a match.
                        for every in each.get('members'):
                            match = False
                            if every['member'] == each_interface['member']:
                                match = True
                                break
                        else:
                            continue
                        if match:
                            # Clear settings absent from want, then apply want.
                            have_dict = filter_dict_having_none_value(interface, each)
                            commands.extend(self._clear_config(dict(), have_dict))
                            commands.extend(self._set_config(interface, each, module))
                    elif each.get('name') == each_interface['member']:
                        have_dict = filter_dict_having_none_value(interface, each)
                        commands.extend(self._clear_config(dict(), have_dict))
                        commands.extend(self._set_config(interface, each, module))
                        break
        # Remove the duplicate interface call
        commands = remove_duplicate_interface(commands)
        return commands
    def _state_overridden(self, want, have, module):
        """ The command generator when state is overridden

        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        commands = []
        for interface in want:
            for each_interface in interface.get('members'):
                for each in have:
                    if each.get('members'):
                        for every in each.get('members'):
                            match = False
                            if every['member'] == each_interface['member']:
                                match = True
                                break
                        else:
                            # Unlike 'replaced', unmatched have entries
                            # are cleared entirely before moving on.
                            commands.extend(self._clear_config(interface, each))
                            continue
                        if match:
                            have_dict = filter_dict_having_none_value(interface, each)
                            commands.extend(self._clear_config(dict(), have_dict))
                            commands.extend(self._set_config(interface, each, module))
                    elif each.get('name') == each_interface['member']:
                        have_dict = filter_dict_having_none_value(interface, each)
                        commands.extend(self._clear_config(dict(), have_dict))
                        commands.extend(self._set_config(interface, each, module))
                        break
        # Remove the duplicate interface call
        commands = remove_duplicate_interface(commands)
        return commands
    def _state_merged(self, want, have, module):
        """ The command generator when state is merged

        :rtype: A list
        :returns: the commands necessary to merge the provided into
                  the current configuration
        """
        commands = []
        for interface in want:
            for each_interface in interface.get('members'):
                for each in have:
                    if each.get('members'):
                        for every in each.get('members'):
                            if every['member'] == each_interface['member']:
                                break
                    elif each.get('name') == each_interface['member']:
                        break
                else:
                    # No matching have entry: nothing to merge onto.
                    continue
                commands.extend(self._set_config(interface, each, module))
        return commands
    def _state_deleted(self, want, have):
        """ The command generator when state is deleted

        :rtype: A list
        :returns: the commands necessary to remove the current configuration
                  of the provided objects
        """
        commands = []
        if want:
            # Delete only the LAGs named in want that exist in have.
            for interface in want:
                for each in have:
                    if each.get('name') == interface['name']:
                        break
                else:
                    continue
                commands.extend(self._clear_config(interface, each))
        else:
            # No targets given: delete every configured LAG.
            for each in have:
                commands.extend(self._clear_config(dict(), each))
        return commands
    def remove_command_from_config_list(self, interface, cmd, commands):
        # To delete the passed config: enter the interface context once,
        # then negate the command.
        if interface not in commands:
            commands.append(interface)
        commands.append('no %s' % cmd)
        return commands
    def add_command_to_config_list(self, interface, cmd, commands):
        # To set the passed config: enter the interface context once,
        # then append the command.
        if interface not in commands:
            commands.append(interface)
        commands.append(cmd)
        return commands
    def _set_config(self, want, have, module):
        # Set the interface config based on the want and have config
        commands = []
        # To remove keys with None values from want dict
        want = utils.remove_empties(want)
        # Get the diff b/w want and have
        want_dict = dict_to_set(want)
        have_dict = dict_to_set(have)
        diff = want_dict - have_dict
        # To get the channel-id from lag port-channel name
        lag_config = dict(diff).get('members')
        channel_name = re.search(r'(\d+)', want.get('name'))
        if channel_name:
            channel_id = channel_name.group()
        else:
            module.fail_json(msg="Lag Interface Name is not correct!")
        if lag_config:
            for each in lag_config:
                each = dict(each)
                each_interface = 'interface {0}'.format(each.get('member'))
                if have.get('name') == want['members'][0]['member'] or have.get('name').lower().startswith('po'):
                    if each.get('mode'):
                        cmd = 'channel-group {0} mode {1}'.format(channel_id, each.get('mode'))
                        self.add_command_to_config_list(each_interface, cmd, commands)
                    elif each.get('link'):
                        cmd = 'channel-group {0} link {1}'.format(channel_id, each.get('link'))
                        self.add_command_to_config_list(each_interface, cmd, commands)
        return commands
    def _clear_config(self, want, have):
        # Delete the interface config based on the want and have config
        commands = []
        if have.get('members'):
            for each in have['members']:
                interface = 'interface ' + each['member']
                if want.get('members'):
                    # Keep the member that want still references; remove
                    # channel-group from all others.
                    if each.get('member') and each.get('member') != want['members'][0]['member']:
                        self.remove_command_from_config_list(interface, 'channel-group', commands)
                elif each.get('member'):
                    self.remove_command_from_config_list(interface, 'channel-group', commands)
        return commands
| mit |
goddardl/gaffer | python/GafferTest/Transform2DPlugTest.py | 2 | 4099 | ##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of Image Engine Design nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
import Gaffer
import GafferTest
class Transform2DPlugTest( GafferTest.TestCase ) :

	def testMatrix( self ) :
		"""matrix() must compose pivot/translate/rotate/scale in the
		documented order: inverse-pivot, scale, rotate, translate, pivot."""
		p = Gaffer.Transform2DPlug()
		p["pivot"].setValue( IECore.V2f( 1, 1 ) )
		p["translate"].setValue( IECore.V2f( 1, 2 ) )
		p["rotate"].setValue( 45 )
		p["scale"].setValue( IECore.V2f( 2, 3 ) )
		pivotValue = p["pivot"].getValue()
		pivot = IECore.M33f.createTranslated( pivotValue )
		translateValue = p["translate"].getValue()
		translate = IECore.M33f.createTranslated( translateValue )
		rotate = IECore.M33f.createRotated( IECore.degreesToRadians( p["rotate"].getValue() ) )
		scale = IECore.M33f.createScaled( p["scale"].getValue() )
		invPivot = IECore.M33f.createTranslated( pivotValue * IECore.V2f(-1.) )
		transforms = {
			"p" : pivot,
			"t" : translate,
			"r" : rotate,
			"s" : scale,
			"pi" : invPivot,
		}
		# Compose in the expected order and compare with the plug.
		transform = IECore.M33f()
		for m in ( "pi", "s", "r", "t", "p" ) :
			transform = transform * transforms[m]
		self.assertEqual( p.matrix(), transform )

	def testTransformOrderExplicit( self ) :
		"""Check matrix() against a hand-computed matrix."""
		plug = Gaffer.Transform2DPlug()
		displayWindow = IECore.Box2i( IECore.V2i(0), IECore.V2i(9) )
		pixelAspect = 1.
		t = IECore.V2f( 100, 0 )
		r = 90
		s = IECore.V2f( 2, 2 )
		p = IECore.V2f( 10, -10 )
		plug["translate"].setValue( t )
		plug["rotate"].setValue( r )
		plug["scale"].setValue( s )
		plug["pivot"].setValue( p )
		# Test if this is equal to a simple hardcoded matrix, down to floating point error
		# This verifies that translation is not being affected by rotation and scale,
		# which is what users will expect
		self.assertTrue( plug.matrix().equalWithAbsError(
			IECore.M33f(
				0, 2, 0,
				-2, 0, 0,
				90, -30, 1),
			2e-6
		) )

	def testCreateCounterpart( self ) :
		"""createCounterpart() must preserve type and honour name/direction."""
		t = Gaffer.Transform2DPlug()
		t2 = t.createCounterpart( "a", Gaffer.Plug.Direction.Out )
		self.assertEqual( t2.getName(), "a" )
		self.assertEqual( t2.direction(), Gaffer.Plug.Direction.Out )
		self.assertTrue( isinstance( t2, Gaffer.Transform2DPlug ) )

	def testRunTimeTyped( self ) :
		"""The plug must register its own RunTimeTyped id, derived from
		CompoundPlug."""
		p = Gaffer.Transform2DPlug()
		self.failIf( p.typeId() == Gaffer.CompoundPlug.staticTypeId() )
		self.failUnless( p.isInstanceOf( Gaffer.CompoundPlug.staticTypeId() ) )
if __name__ == "__main__":
	# Allow running the tests in this module directly.
	unittest.main()
| bsd-3-clause |
rplauche/plugin.video.netflixbmc | resources/lib/pyasn1/codec/cer/encoder.py | 226 | 3161 | # CER encoder
from pyasn1.type import univ
from pyasn1.codec.ber import encoder
from pyasn1.compat.octets import int2oct, null
class BooleanEncoder(encoder.IntegerEncoder):
    """CER BOOLEAN encoder.

    CER mandates a canonical form: FALSE is the single octet 0x00 and
    TRUE is the single octet 0xFF.
    """
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        # Second return value 0 means "not constructed".
        substrate = int2oct(0) if client == 0 else int2oct(255)
        return substrate, 0
class BitStringEncoder(encoder.BitStringEncoder):
    """CER BIT STRING encoder: caps chunk size at 1000 octets."""
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        # Ignore the caller-supplied chunk size; CER fixes it at 1000.
        return encoder.BitStringEncoder.encodeValue(self, encodeFun, client, defMode, 1000)
class OctetStringEncoder(encoder.OctetStringEncoder):
    """CER OCTET STRING encoder: caps chunk size at 1000 octets."""
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        # Ignore the caller-supplied chunk size; CER fixes it at 1000.
        return encoder.OctetStringEncoder.encodeValue(self, encodeFun, client, defMode, 1000)
# specialized RealEncoder here
# specialized GeneralStringEncoder here
# specialized GeneralizedTimeEncoder here
# specialized UTCTimeEncoder here
class SetOfEncoder(encoder.SequenceOfEncoder):
    """CER SET / SET OF encoder: emits components in canonical sorted order."""
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        if isinstance(client, univ.SequenceAndSetBase):
            # Materialize defaulted components before walking the value.
            client.setDefaultComponents()
        client.verifySizeSpec()
        substrate = null; idx = len(client)
        # This is certainly a hack but how else do I distinguish SetOf
        # from Set if they have the same tags&constraints?
        if isinstance(client, univ.SequenceAndSetBase):
            # Set
            comps = []
            while idx > 0:
                idx = idx - 1
                if client[idx] is None:  # Optional component
                    continue
                # Skip components equal to their declared default.
                if client.getDefaultComponentByPosition(idx) == client[idx]:
                    continue
                comps.append(client[idx])
            # SET components sort by tag; for a Choice, the minimal tag
            # of its alternatives is used as the sort key.
            comps.sort(key=lambda x: isinstance(x, univ.Choice) and \
                       x.getMinTagSet() or x.getTagSet())
            for c in comps:
                substrate += encodeFun(c, defMode, maxChunkSize)
        else:
            # SetOf
            compSubs = []
            while idx > 0:
                idx = idx - 1
                compSubs.append(
                    encodeFun(client[idx], defMode, maxChunkSize)
                )
            # SET OF components sort by their encoded octets.
            compSubs.sort()  # perhaps padding's not needed
            substrate = null
            for compSub in compSubs:
                substrate += compSub
        return substrate, 1
# Start from the BER maps and substitute the CER-specific encoders.
tagMap = encoder.tagMap.copy()
tagMap.update({
    univ.Boolean.tagSet: BooleanEncoder(),
    univ.BitString.tagSet: BitStringEncoder(),
    univ.OctetString.tagSet: OctetStringEncoder(),
    univ.SetOf().tagSet: SetOfEncoder()  # conflicts with Set
})

typeMap = encoder.typeMap.copy()
typeMap.update({
    univ.Set.typeId: SetOfEncoder(),
    univ.SetOf.typeId: SetOfEncoder()
})
class Encoder(encoder.Encoder):
    # Entry point for CER encoding; overrides the base-class defaults so
    # callers get defMode=0, maxChunkSize=0 unless they say otherwise.
    def __call__(self, client, defMode=0, maxChunkSize=0):
        return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)

# Module-level encode callable bound to the CER tag/type maps.
encode = Encoder(tagMap, typeMap)

# EncoderFactory queries class instance and builds a map of tags -> encoders
| gpl-2.0 |
agry/NGECore2 | scripts/mobiles/corellia/ragtag_blackjack.py | 2 | 1818 | import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
    """Register the Corellian 'Ragtag Blackjack' thug with the spawn service."""
    template = MobileTemplate()

    # Basic creature stats and spawn behaviour.
    template.setCreatureName('corellia_ragtag_blackjack')
    template.setLevel(13)
    template.setDifficulty(Difficulty.NORMAL)
    template.setMinSpawnDistance(4)
    template.setMaxSpawnDistance(8)
    template.setDeathblow(False)
    template.setScale(1)
    template.setSocialGroup("ragtag gang")
    template.setAssistRange(6)
    template.setStalker(True)
    template.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)

    # Appearance variants; one is picked when the mobile spawns.
    appearances = Vector()
    for iff in (
        'object/mobile/shared_dressed_criminal_thug_human_female_01.iff',
        'object/mobile/shared_dressed_criminal_thug_human_female_02.iff',
        'object/mobile/shared_dressed_criminal_thug_human_male_01.iff',
        'object/mobile/shared_dressed_criminal_thug_human_male_02.iff',
    ):
        appearances.add(iff)
    template.setTemplates(appearances)

    # Weapon loadout: a standard CDEF carbine doing energy damage.
    weapons = Vector()
    weapons.add(WeaponTemplate('object/weapon/ranged/carbine/shared_carbine_cdef.iff', WeaponType.CARBINE, 1.0, 15, 'energy'))
    template.setWeaponTemplateVector(weapons)

    # No special attacks; default ranged shot only.
    attacks = Vector()
    template.setDefaultAttack('rangedShot')
    template.setAttacks(attacks)

    # Loot: always roll the 'Junk' pool.
    lootPoolNames_1 = ['Junk']
    lootPoolChances_1 = [100]
    lootGroupChance_1 = 100
    template.addToLootGroups(lootPoolNames_1, lootPoolChances_1, lootGroupChance_1)

    core.spawnService.addMobileTemplate('ragtag_blackjack', template)
    return
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.