| repo_name | ref | path | copies | content |
|---|---|---|---|---|
| lokirius/python-for-android | refs/heads/master | python-modules/twisted/twisted/persisted/journal/picklelog.py | 64 |
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
#
# -*- test-case-name: twisted.test.test_journal -*-

"""Logging that uses pickles.

TODO: add log that logs to a file.
"""

# twisted imports
from twisted.persisted import dirdbm
from twisted.internet import defer
from zope.interface import implements

# sibling imports
import base


class DirDBMLog:
    """Log pickles to DirDBM directory."""

    implements(base.ICommandLog)

    def __init__(self, logPath):
        self.db = dirdbm.Shelf(logPath)
        indexs = map(int, self.db.keys())
        if indexs:
            self.currentIndex = max(indexs)
        else:
            self.currentIndex = 0

    def logCommand(self, command, time):
        """Log a command."""
        self.currentIndex += 1
        self.db[str(self.currentIndex)] = (time, command)
        return defer.succeed(1)

    def getCurrentIndex(self):
        """Return index of last command logged."""
        return self.currentIndex

    def getCommandsSince(self, index):
        result = []
        for i in range(index, self.currentIndex + 1):
            result.append(self.db[str(i)])
        return result
|
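As a quick aside on the `DirDBMLog` class in the row above, the sketch below shows one plausible way to exercise it. It is an illustration only, not part of the dataset: the import path is hypothetical, plain strings stand in for real journal command objects, and it targets the same Python 2 / Twisted era as the file itself.

```python
# Hedged usage sketch for DirDBMLog (Python 2, Twisted-era code).
# Assumptions: the class is importable as shown, and plain strings stand in
# for real journal command objects implementing ICommandLog's expectations.
import time
from picklelog import DirDBMLog  # hypothetical import path

log = DirDBMLog('/tmp/journal-log')       # dirdbm.Shelf keeps one pickle per key
log.logCommand('add-user', time.time())   # returns defer.succeed(1)
log.logCommand('del-user', time.time())

print log.getCurrentIndex()               # -> 2
for timestamp, command in log.getCommandsSince(1):
    print timestamp, command
```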
| 40223231/2015cd_midterm2 | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/unittest/test/testmock/testhelpers.py | 737 |
import unittest
from unittest.mock import (
call, _Call, create_autospec, MagicMock,
Mock, ANY, _CallList, patch, PropertyMock
)
from datetime import datetime
class SomeClass(object):
def one(self, a, b):
pass
def two(self):
pass
def three(self, a=None):
pass
class AnyTest(unittest.TestCase):
def test_any(self):
self.assertEqual(ANY, object())
mock = Mock()
mock(ANY)
mock.assert_called_with(ANY)
mock = Mock()
mock(foo=ANY)
mock.assert_called_with(foo=ANY)
def test_repr(self):
self.assertEqual(repr(ANY), '<ANY>')
self.assertEqual(str(ANY), '<ANY>')
def test_any_and_datetime(self):
mock = Mock()
mock(datetime.now(), foo=datetime.now())
mock.assert_called_with(ANY, foo=ANY)
def test_any_mock_calls_comparison_order(self):
mock = Mock()
d = datetime.now()
class Foo(object):
def __eq__(self, other):
return False
def __ne__(self, other):
return True
for d in datetime.now(), Foo():
mock.reset_mock()
mock(d, foo=d, bar=d)
mock.method(d, zinga=d, alpha=d)
mock().method(a1=d, z99=d)
expected = [
call(ANY, foo=ANY, bar=ANY),
call.method(ANY, zinga=ANY, alpha=ANY),
call(), call().method(a1=ANY, z99=ANY)
]
self.assertEqual(expected, mock.mock_calls)
self.assertEqual(mock.mock_calls, expected)
class CallTest(unittest.TestCase):
def test_call_with_call(self):
kall = _Call()
self.assertEqual(kall, _Call())
self.assertEqual(kall, _Call(('',)))
self.assertEqual(kall, _Call(((),)))
self.assertEqual(kall, _Call(({},)))
self.assertEqual(kall, _Call(('', ())))
self.assertEqual(kall, _Call(('', {})))
self.assertEqual(kall, _Call(('', (), {})))
self.assertEqual(kall, _Call(('foo',)))
self.assertEqual(kall, _Call(('bar', ())))
self.assertEqual(kall, _Call(('baz', {})))
self.assertEqual(kall, _Call(('spam', (), {})))
kall = _Call(((1, 2, 3),))
self.assertEqual(kall, _Call(((1, 2, 3),)))
self.assertEqual(kall, _Call(('', (1, 2, 3))))
self.assertEqual(kall, _Call(((1, 2, 3), {})))
self.assertEqual(kall, _Call(('', (1, 2, 3), {})))
kall = _Call(((1, 2, 4),))
self.assertNotEqual(kall, _Call(('', (1, 2, 3))))
self.assertNotEqual(kall, _Call(('', (1, 2, 3), {})))
kall = _Call(('foo', (1, 2, 4),))
self.assertNotEqual(kall, _Call(('', (1, 2, 4))))
self.assertNotEqual(kall, _Call(('', (1, 2, 4), {})))
self.assertNotEqual(kall, _Call(('bar', (1, 2, 4))))
self.assertNotEqual(kall, _Call(('bar', (1, 2, 4), {})))
kall = _Call(({'a': 3},))
self.assertEqual(kall, _Call(('', (), {'a': 3})))
self.assertEqual(kall, _Call(('', {'a': 3})))
self.assertEqual(kall, _Call(((), {'a': 3})))
self.assertEqual(kall, _Call(({'a': 3},)))
def test_empty__Call(self):
args = _Call()
self.assertEqual(args, ())
self.assertEqual(args, ('foo',))
self.assertEqual(args, ((),))
self.assertEqual(args, ('foo', ()))
self.assertEqual(args, ('foo',(), {}))
self.assertEqual(args, ('foo', {}))
self.assertEqual(args, ({},))
def test_named_empty_call(self):
args = _Call(('foo', (), {}))
self.assertEqual(args, ('foo',))
self.assertEqual(args, ('foo', ()))
self.assertEqual(args, ('foo',(), {}))
self.assertEqual(args, ('foo', {}))
self.assertNotEqual(args, ((),))
self.assertNotEqual(args, ())
self.assertNotEqual(args, ({},))
self.assertNotEqual(args, ('bar',))
self.assertNotEqual(args, ('bar', ()))
self.assertNotEqual(args, ('bar', {}))
def test_call_with_args(self):
args = _Call(((1, 2, 3), {}))
self.assertEqual(args, ((1, 2, 3),))
self.assertEqual(args, ('foo', (1, 2, 3)))
self.assertEqual(args, ('foo', (1, 2, 3), {}))
self.assertEqual(args, ((1, 2, 3), {}))
def test_named_call_with_args(self):
args = _Call(('foo', (1, 2, 3), {}))
self.assertEqual(args, ('foo', (1, 2, 3)))
self.assertEqual(args, ('foo', (1, 2, 3), {}))
self.assertNotEqual(args, ((1, 2, 3),))
self.assertNotEqual(args, ((1, 2, 3), {}))
def test_call_with_kwargs(self):
args = _Call(((), dict(a=3, b=4)))
self.assertEqual(args, (dict(a=3, b=4),))
self.assertEqual(args, ('foo', dict(a=3, b=4)))
self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
self.assertEqual(args, ((), dict(a=3, b=4)))
def test_named_call_with_kwargs(self):
args = _Call(('foo', (), dict(a=3, b=4)))
self.assertEqual(args, ('foo', dict(a=3, b=4)))
self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
self.assertNotEqual(args, (dict(a=3, b=4),))
self.assertNotEqual(args, ((), dict(a=3, b=4)))
def test_call_with_args_call_empty_name(self):
args = _Call(((1, 2, 3), {}))
self.assertEqual(args, call(1, 2, 3))
self.assertEqual(call(1, 2, 3), args)
self.assertTrue(call(1, 2, 3) in [args])
def test_call_ne(self):
self.assertNotEqual(_Call(((1, 2, 3),)), call(1, 2))
self.assertFalse(_Call(((1, 2, 3),)) != call(1, 2, 3))
self.assertTrue(_Call(((1, 2), {})) != call(1, 2, 3))
def test_call_non_tuples(self):
kall = _Call(((1, 2, 3),))
for value in 1, None, self, int:
self.assertNotEqual(kall, value)
self.assertFalse(kall == value)
def test_repr(self):
self.assertEqual(repr(_Call()), 'call()')
self.assertEqual(repr(_Call(('foo',))), 'call.foo()')
self.assertEqual(repr(_Call(((1, 2, 3), {'a': 'b'}))),
"call(1, 2, 3, a='b')")
self.assertEqual(repr(_Call(('bar', (1, 2, 3), {'a': 'b'}))),
"call.bar(1, 2, 3, a='b')")
self.assertEqual(repr(call), 'call')
self.assertEqual(str(call), 'call')
self.assertEqual(repr(call()), 'call()')
self.assertEqual(repr(call(1)), 'call(1)')
self.assertEqual(repr(call(zz='thing')), "call(zz='thing')")
self.assertEqual(repr(call().foo), 'call().foo')
self.assertEqual(repr(call(1).foo.bar(a=3).bing),
'call().foo.bar().bing')
self.assertEqual(
repr(call().foo(1, 2, a=3)),
"call().foo(1, 2, a=3)"
)
self.assertEqual(repr(call()()), "call()()")
self.assertEqual(repr(call(1)(2)), "call()(2)")
self.assertEqual(
repr(call()().bar().baz.beep(1)),
"call()().bar().baz.beep(1)"
)
def test_call(self):
self.assertEqual(call(), ('', (), {}))
self.assertEqual(call('foo', 'bar', one=3, two=4),
('', ('foo', 'bar'), {'one': 3, 'two': 4}))
mock = Mock()
mock(1, 2, 3)
mock(a=3, b=6)
self.assertEqual(mock.call_args_list,
[call(1, 2, 3), call(a=3, b=6)])
def test_attribute_call(self):
self.assertEqual(call.foo(1), ('foo', (1,), {}))
self.assertEqual(call.bar.baz(fish='eggs'),
('bar.baz', (), {'fish': 'eggs'}))
mock = Mock()
mock.foo(1, 2, 3)
mock.bar.baz(a=3, b=6)
self.assertEqual(mock.method_calls,
[call.foo(1, 2, 3), call.bar.baz(a=3, b=6)])
def test_extended_call(self):
result = call(1).foo(2).bar(3, a=4)
self.assertEqual(result, ('().foo().bar', (3,), dict(a=4)))
mock = MagicMock()
mock(1, 2, a=3, b=4)
self.assertEqual(mock.call_args, call(1, 2, a=3, b=4))
self.assertNotEqual(mock.call_args, call(1, 2, 3))
self.assertEqual(mock.call_args_list, [call(1, 2, a=3, b=4)])
self.assertEqual(mock.mock_calls, [call(1, 2, a=3, b=4)])
mock = MagicMock()
mock.foo(1).bar()().baz.beep(a=6)
last_call = call.foo(1).bar()().baz.beep(a=6)
self.assertEqual(mock.mock_calls[-1], last_call)
self.assertEqual(mock.mock_calls, last_call.call_list())
def test_call_list(self):
mock = MagicMock()
mock(1)
self.assertEqual(call(1).call_list(), mock.mock_calls)
mock = MagicMock()
mock(1).method(2)
self.assertEqual(call(1).method(2).call_list(),
mock.mock_calls)
mock = MagicMock()
mock(1).method(2)(3)
self.assertEqual(call(1).method(2)(3).call_list(),
mock.mock_calls)
mock = MagicMock()
int(mock(1).method(2)(3).foo.bar.baz(4)(5))
kall = call(1).method(2)(3).foo.bar.baz(4)(5).__int__()
self.assertEqual(kall.call_list(), mock.mock_calls)
def test_call_any(self):
self.assertEqual(call, ANY)
m = MagicMock()
int(m)
self.assertEqual(m.mock_calls, [ANY])
self.assertEqual([ANY], m.mock_calls)
def test_two_args_call(self):
args = _Call(((1, 2), {'a': 3}), two=True)
self.assertEqual(len(args), 2)
self.assertEqual(args[0], (1, 2))
self.assertEqual(args[1], {'a': 3})
other_args = _Call(((1, 2), {'a': 3}))
self.assertEqual(args, other_args)
class SpecSignatureTest(unittest.TestCase):
def _check_someclass_mock(self, mock):
self.assertRaises(AttributeError, getattr, mock, 'foo')
mock.one(1, 2)
mock.one.assert_called_with(1, 2)
self.assertRaises(AssertionError,
mock.one.assert_called_with, 3, 4)
self.assertRaises(TypeError, mock.one, 1)
mock.two()
mock.two.assert_called_with()
self.assertRaises(AssertionError,
mock.two.assert_called_with, 3)
self.assertRaises(TypeError, mock.two, 1)
mock.three()
mock.three.assert_called_with()
self.assertRaises(AssertionError,
mock.three.assert_called_with, 3)
self.assertRaises(TypeError, mock.three, 3, 2)
mock.three(1)
mock.three.assert_called_with(1)
mock.three(a=1)
mock.three.assert_called_with(a=1)
def test_basic(self):
for spec in (SomeClass, SomeClass()):
mock = create_autospec(spec)
self._check_someclass_mock(mock)
def test_create_autospec_return_value(self):
def f():
pass
mock = create_autospec(f, return_value='foo')
self.assertEqual(mock(), 'foo')
class Foo(object):
pass
mock = create_autospec(Foo, return_value='foo')
self.assertEqual(mock(), 'foo')
def test_autospec_reset_mock(self):
m = create_autospec(int)
int(m)
m.reset_mock()
self.assertEqual(m.__int__.call_count, 0)
def test_mocking_unbound_methods(self):
class Foo(object):
def foo(self, foo):
pass
p = patch.object(Foo, 'foo')
mock_foo = p.start()
Foo().foo(1)
mock_foo.assert_called_with(1)
def test_create_autospec_unbound_methods(self):
# see mock issue 128
# this is expected to fail until the issue is fixed
return
class Foo(object):
def foo(self):
pass
klass = create_autospec(Foo)
instance = klass()
self.assertRaises(TypeError, instance.foo, 1)
# Note: no type checking on the "self" parameter
klass.foo(1)
klass.foo.assert_called_with(1)
self.assertRaises(TypeError, klass.foo)
def test_create_autospec_keyword_arguments(self):
class Foo(object):
a = 3
m = create_autospec(Foo, a='3')
self.assertEqual(m.a, '3')
def test_create_autospec_keyword_only_arguments(self):
def foo(a, *, b=None):
pass
m = create_autospec(foo)
m(1)
m.assert_called_with(1)
self.assertRaises(TypeError, m, 1, 2)
m(2, b=3)
m.assert_called_with(2, b=3)
def test_function_as_instance_attribute(self):
obj = SomeClass()
def f(a):
pass
obj.f = f
mock = create_autospec(obj)
mock.f('bing')
mock.f.assert_called_with('bing')
def test_spec_as_list(self):
# because spec as a list of strings in the mock constructor means
# something very different we treat a list instance as the type.
mock = create_autospec([])
mock.append('foo')
mock.append.assert_called_with('foo')
self.assertRaises(AttributeError, getattr, mock, 'foo')
class Foo(object):
foo = []
mock = create_autospec(Foo)
mock.foo.append(3)
mock.foo.append.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock.foo, 'foo')
def test_attributes(self):
class Sub(SomeClass):
attr = SomeClass()
sub_mock = create_autospec(Sub)
for mock in (sub_mock, sub_mock.attr):
self._check_someclass_mock(mock)
def test_builtin_functions_types(self):
# we could replace builtin functions / methods with a function
# with *args / **kwargs signature. Using the builtin method type
# as a spec seems to work fairly well though.
class BuiltinSubclass(list):
def bar(self, arg):
pass
sorted = sorted
attr = {}
mock = create_autospec(BuiltinSubclass)
mock.append(3)
mock.append.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock.append, 'foo')
mock.bar('foo')
mock.bar.assert_called_with('foo')
self.assertRaises(TypeError, mock.bar, 'foo', 'bar')
self.assertRaises(AttributeError, getattr, mock.bar, 'foo')
mock.sorted([1, 2])
mock.sorted.assert_called_with([1, 2])
self.assertRaises(AttributeError, getattr, mock.sorted, 'foo')
mock.attr.pop(3)
mock.attr.pop.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock.attr, 'foo')
def test_method_calls(self):
class Sub(SomeClass):
attr = SomeClass()
mock = create_autospec(Sub)
mock.one(1, 2)
mock.two()
mock.three(3)
expected = [call.one(1, 2), call.two(), call.three(3)]
self.assertEqual(mock.method_calls, expected)
mock.attr.one(1, 2)
mock.attr.two()
mock.attr.three(3)
expected.extend(
[call.attr.one(1, 2), call.attr.two(), call.attr.three(3)]
)
self.assertEqual(mock.method_calls, expected)
def test_magic_methods(self):
class BuiltinSubclass(list):
attr = {}
mock = create_autospec(BuiltinSubclass)
self.assertEqual(list(mock), [])
self.assertRaises(TypeError, int, mock)
self.assertRaises(TypeError, int, mock.attr)
self.assertEqual(list(mock), [])
self.assertIsInstance(mock['foo'], MagicMock)
self.assertIsInstance(mock.attr['foo'], MagicMock)
def test_spec_set(self):
class Sub(SomeClass):
attr = SomeClass()
for spec in (Sub, Sub()):
mock = create_autospec(spec, spec_set=True)
self._check_someclass_mock(mock)
self.assertRaises(AttributeError, setattr, mock, 'foo', 'bar')
self.assertRaises(AttributeError, setattr, mock.attr, 'foo', 'bar')
def test_descriptors(self):
class Foo(object):
@classmethod
def f(cls, a, b):
pass
@staticmethod
def g(a, b):
pass
class Bar(Foo):
pass
class Baz(SomeClass, Bar):
pass
for spec in (Foo, Foo(), Bar, Bar(), Baz, Baz()):
mock = create_autospec(spec)
mock.f(1, 2)
mock.f.assert_called_once_with(1, 2)
mock.g(3, 4)
mock.g.assert_called_once_with(3, 4)
def test_recursive(self):
class A(object):
def a(self):
pass
foo = 'foo bar baz'
bar = foo
A.B = A
mock = create_autospec(A)
mock()
self.assertFalse(mock.B.called)
mock.a()
mock.B.a()
self.assertEqual(mock.method_calls, [call.a(), call.B.a()])
self.assertIs(A.foo, A.bar)
self.assertIsNot(mock.foo, mock.bar)
mock.foo.lower()
self.assertRaises(AssertionError, mock.bar.lower.assert_called_with)
def test_spec_inheritance_for_classes(self):
class Foo(object):
def a(self):
pass
class Bar(object):
def f(self):
pass
class_mock = create_autospec(Foo)
self.assertIsNot(class_mock, class_mock())
for this_mock in class_mock, class_mock():
this_mock.a()
this_mock.a.assert_called_with()
self.assertRaises(TypeError, this_mock.a, 'foo')
self.assertRaises(AttributeError, getattr, this_mock, 'b')
instance_mock = create_autospec(Foo())
instance_mock.a()
instance_mock.a.assert_called_with()
self.assertRaises(TypeError, instance_mock.a, 'foo')
self.assertRaises(AttributeError, getattr, instance_mock, 'b')
# The return value isn't callable
self.assertRaises(TypeError, instance_mock)
instance_mock.Bar.f()
instance_mock.Bar.f.assert_called_with()
self.assertRaises(AttributeError, getattr, instance_mock.Bar, 'g')
instance_mock.Bar().f()
instance_mock.Bar().f.assert_called_with()
self.assertRaises(AttributeError, getattr, instance_mock.Bar(), 'g')
def test_inherit(self):
class Foo(object):
a = 3
Foo.Foo = Foo
# class
mock = create_autospec(Foo)
instance = mock()
self.assertRaises(AttributeError, getattr, instance, 'b')
attr_instance = mock.Foo()
self.assertRaises(AttributeError, getattr, attr_instance, 'b')
# instance
mock = create_autospec(Foo())
self.assertRaises(AttributeError, getattr, mock, 'b')
self.assertRaises(TypeError, mock)
# attribute instance
call_result = mock.Foo()
self.assertRaises(AttributeError, getattr, call_result, 'b')
def test_builtins(self):
# used to fail with infinite recursion
create_autospec(1)
create_autospec(int)
create_autospec('foo')
create_autospec(str)
create_autospec({})
create_autospec(dict)
create_autospec([])
create_autospec(list)
create_autospec(set())
create_autospec(set)
create_autospec(1.0)
create_autospec(float)
create_autospec(1j)
create_autospec(complex)
create_autospec(False)
create_autospec(True)
def test_function(self):
def f(a, b):
pass
mock = create_autospec(f)
self.assertRaises(TypeError, mock)
mock(1, 2)
mock.assert_called_with(1, 2)
f.f = f
mock = create_autospec(f)
self.assertRaises(TypeError, mock.f)
mock.f(3, 4)
mock.f.assert_called_with(3, 4)
def test_skip_attributeerrors(self):
class Raiser(object):
def __get__(self, obj, type=None):
if obj is None:
raise AttributeError('Can only be accessed via an instance')
class RaiserClass(object):
raiser = Raiser()
@staticmethod
def existing(a, b):
return a + b
s = create_autospec(RaiserClass)
self.assertRaises(TypeError, lambda x: s.existing(1, 2, 3))
s.existing(1, 2)
self.assertRaises(AttributeError, lambda: s.nonexisting)
# check we can fetch the raiser attribute and it has no spec
obj = s.raiser
obj.foo, obj.bar
def test_signature_class(self):
class Foo(object):
def __init__(self, a, b=3):
pass
mock = create_autospec(Foo)
self.assertRaises(TypeError, mock)
mock(1)
mock.assert_called_once_with(1)
mock(4, 5)
mock.assert_called_with(4, 5)
def test_class_with_no_init(self):
# this used to raise an exception
# due to trying to get a signature from object.__init__
class Foo(object):
pass
create_autospec(Foo)
def test_signature_callable(self):
class Callable(object):
def __init__(self):
pass
def __call__(self, a):
pass
mock = create_autospec(Callable)
mock()
mock.assert_called_once_with()
self.assertRaises(TypeError, mock, 'a')
instance = mock()
self.assertRaises(TypeError, instance)
instance(a='a')
instance.assert_called_once_with(a='a')
instance('a')
instance.assert_called_with('a')
mock = create_autospec(Callable())
mock(a='a')
mock.assert_called_once_with(a='a')
self.assertRaises(TypeError, mock)
mock('a')
mock.assert_called_with('a')
def test_signature_noncallable(self):
class NonCallable(object):
def __init__(self):
pass
mock = create_autospec(NonCallable)
instance = mock()
mock.assert_called_once_with()
self.assertRaises(TypeError, mock, 'a')
self.assertRaises(TypeError, instance)
self.assertRaises(TypeError, instance, 'a')
mock = create_autospec(NonCallable())
self.assertRaises(TypeError, mock)
self.assertRaises(TypeError, mock, 'a')
def test_create_autospec_none(self):
class Foo(object):
bar = None
mock = create_autospec(Foo)
none = mock.bar
self.assertNotIsInstance(none, type(None))
none.foo()
none.foo.assert_called_once_with()
def test_autospec_functions_with_self_in_odd_place(self):
class Foo(object):
def f(a, self):
pass
a = create_autospec(Foo)
a.f(self=10)
a.f.assert_called_with(self=10)
def test_autospec_property(self):
class Foo(object):
@property
def foo(self):
return 3
foo = create_autospec(Foo)
mock_property = foo.foo
# no spec on properties
self.assertTrue(isinstance(mock_property, MagicMock))
mock_property(1, 2, 3)
mock_property.abc(4, 5, 6)
mock_property.assert_called_once_with(1, 2, 3)
mock_property.abc.assert_called_once_with(4, 5, 6)
def test_autospec_slots(self):
class Foo(object):
__slots__ = ['a']
foo = create_autospec(Foo)
mock_slot = foo.a
# no spec on slots
mock_slot(1, 2, 3)
mock_slot.abc(4, 5, 6)
mock_slot.assert_called_once_with(1, 2, 3)
mock_slot.abc.assert_called_once_with(4, 5, 6)
class TestCallList(unittest.TestCase):
def test_args_list_contains_call_list(self):
mock = Mock()
self.assertIsInstance(mock.call_args_list, _CallList)
mock(1, 2)
mock(a=3)
mock(3, 4)
mock(b=6)
for kall in call(1, 2), call(a=3), call(3, 4), call(b=6):
self.assertTrue(kall in mock.call_args_list)
calls = [call(a=3), call(3, 4)]
self.assertTrue(calls in mock.call_args_list)
calls = [call(1, 2), call(a=3)]
self.assertTrue(calls in mock.call_args_list)
calls = [call(3, 4), call(b=6)]
self.assertTrue(calls in mock.call_args_list)
calls = [call(3, 4)]
self.assertTrue(calls in mock.call_args_list)
self.assertFalse(call('fish') in mock.call_args_list)
self.assertFalse([call('fish')] in mock.call_args_list)
def test_call_list_str(self):
mock = Mock()
mock(1, 2)
mock.foo(a=3)
mock.foo.bar().baz('fish', cat='dog')
expected = (
"[call(1, 2),\n"
" call.foo(a=3),\n"
" call.foo.bar(),\n"
" call.foo.bar().baz('fish', cat='dog')]"
)
self.assertEqual(str(mock.mock_calls), expected)
def test_propertymock(self):
p = patch('%s.SomeClass.one' % __name__, new_callable=PropertyMock)
mock = p.start()
try:
SomeClass.one
mock.assert_called_once_with()
s = SomeClass()
s.one
mock.assert_called_with()
self.assertEqual(mock.mock_calls, [call(), call()])
s.one = 3
self.assertEqual(mock.mock_calls, [call(), call(), call(3)])
finally:
p.stop()
def test_propertymock_returnvalue(self):
m = MagicMock()
p = PropertyMock()
type(m).foo = p
returned = m.foo
p.assert_called_once_with()
self.assertIsInstance(returned, MagicMock)
self.assertNotIsInstance(returned, PropertyMock)
if __name__ == '__main__':
unittest.main()
|
| tectronics/quirkysoft | refs/heads/master | waf_tools/pytest.py | 5 |
import sys

import Task
import Utils
import Options
from Constants import RUN_ME
from TaskGen import extension


def set_options(opt):
    opt.add_option('--all-tests',
                   action='store_true',
                   default=False,
                   help='Exec all unit tests')
    opt.add_option('--test',
                   action='store',
                   help='Exec only this test')
    opt.add_option('--filter',
                   action='store',
                   help='Add this filter to the command line')


def exec_test(self):
    filename = self.inputs[0].abspath(self.env)
    f = getattr(Options.options, 'filter', '')
    cmd = [sys.executable, filename]
    if f:
        cmd += [f]
    Utils.cmd_output(cmd)


@extension('.py')
def run_test(self, node):
    task = self.create_task('waftest')
    task.set_inputs(node)


cls = Task.task_type_from_func('waftest', func=exec_test, color='RED', ext_in='.py')
old = cls.runnable_status


def test_status(self):
    for flag in ('all_tests', 'test'):
        if getattr(Options.options, flag, False):
            return RUN_ME
    return old(self)


cls.runnable_status = test_status
cls.quiet = 1


def detect(conf):
    return True
|
| hyperspy/hyperspy | refs/heads/RELEASE_next_minor | hyperspy/utils/model.py | 2 |
# -*- coding: utf-8 -*-
# Copyright 2007-2021 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
"""Model functions.
The :mod:`~hyperspy.api.model` module contains the following submodules:
components1D
1D components for HyperSpy model.
components2D
2D components for HyperSpy model.
"""
import hyperspy.components1d as components1D
import hyperspy.components2d as components2D
|
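For orientation, the components1D/components2D submodules re-exported by this file are normally reached through `hyperspy.api`; the following is a minimal, hedged sketch of that usage (assuming a standard HyperSpy installation), not code from the dataset.

```python
# Minimal sketch (assumption: a standard HyperSpy install exposing hyperspy.api).
import numpy as np
import hyperspy.api as hs

s = hs.signals.Signal1D(np.random.random(128))  # toy 1D signal
m = s.create_model()                            # empty model attached to the signal
m.append(hs.model.components1D.Gaussian())      # 1D component from components1D
m.fit()                                         # fit the component to the data
print(m.components)                             # inspect the fitted components
```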
| IV-GII/SocialCookies | refs/heads/master | ENV1/lib/python2.7/site-packages/django/contrib/auth/tests/test_signals.py | 227 |
from django.contrib.auth import signals
from django.contrib.auth.models import User
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings


@skipIfCustomUser
@override_settings(USE_TZ=False, PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class SignalTestCase(TestCase):
    urls = 'django.contrib.auth.tests.urls'
    fixtures = ['authtestdata.json']

    def listener_login(self, user, **kwargs):
        self.logged_in.append(user)

    def listener_logout(self, user, **kwargs):
        self.logged_out.append(user)

    def listener_login_failed(self, sender, credentials, **kwargs):
        self.login_failed.append(credentials)

    def setUp(self):
        """Set up the listeners and reset the logged in/logged out counters"""
        self.logged_in = []
        self.logged_out = []
        self.login_failed = []
        signals.user_logged_in.connect(self.listener_login)
        signals.user_logged_out.connect(self.listener_logout)
        signals.user_login_failed.connect(self.listener_login_failed)

    def tearDown(self):
        """Disconnect the listeners"""
        signals.user_logged_in.disconnect(self.listener_login)
        signals.user_logged_out.disconnect(self.listener_logout)
        signals.user_login_failed.disconnect(self.listener_login_failed)

    def test_login(self):
        # Only a successful login will trigger the success signal.
        self.client.login(username='testclient', password='bad')
        self.assertEqual(len(self.logged_in), 0)
        self.assertEqual(len(self.login_failed), 1)
        self.assertEqual(self.login_failed[0]['username'], 'testclient')
        # verify the password is cleansed
        self.assertTrue('***' in self.login_failed[0]['password'])
        # Like this:
        self.client.login(username='testclient', password='password')
        self.assertEqual(len(self.logged_in), 1)
        self.assertEqual(self.logged_in[0].username, 'testclient')
        # Ensure there were no more failures.
        self.assertEqual(len(self.login_failed), 1)

    def test_logout_anonymous(self):
        # The log_out function will still trigger the signal for anonymous
        # users.
        self.client.get('/logout/next_page/')
        self.assertEqual(len(self.logged_out), 1)
        self.assertEqual(self.logged_out[0], None)

    def test_logout(self):
        self.client.login(username='testclient', password='password')
        self.client.get('/logout/next_page/')
        self.assertEqual(len(self.logged_out), 1)
        self.assertEqual(self.logged_out[0].username, 'testclient')

    def test_update_last_login(self):
        """Ensure that only `last_login` is updated in `update_last_login`"""
        user = User.objects.get(pk=3)
        old_last_login = user.last_login
        user.username = "This username shouldn't get saved"
        request = RequestFactory().get('/login')
        signals.user_logged_in.send(sender=user.__class__, request=request,
                                    user=user)
        user = User.objects.get(pk=3)
        self.assertEqual(user.username, 'staff')
        self.assertNotEqual(user.last_login, old_last_login)
|
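The test case above drives Django's `user_logged_in`, `user_logged_out` and `user_login_failed` signals by connecting bound listeners in `setUp`. As a hedged sketch of how application code typically consumes the same signals, a decorator-based receiver might look like this (the receiver name and log message are illustrative, not from the dataset):

```python
# Illustrative receiver for the user_logged_in signal exercised above.
import logging

from django.contrib.auth.signals import user_logged_in
from django.dispatch import receiver

logger = logging.getLogger(__name__)

@receiver(user_logged_in)
def log_successful_login(sender, request, user, **kwargs):
    # Fired by django.contrib.auth.login(); analogous to listener_login above.
    logger.info("user %s logged in", user.get_username())
```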
| bhmm/legacy-bhmm-force-spectroscopy-manuscript | refs/heads/master | examples/synthetic-three-state-model/synthetic-example.py | 3 |
#!/usr/bin/env python
"""
Generate plots for synthetic three-state force spectroscopy model.
"""

import argparse

import bhmm
from bhmm.util import testsystems
from bhmm.util.analysis import generate_latex_table

# dynamically import plotting tools
import os, sys, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
import plots


def run(nstates, nsamples):
    # Create model.
    true_model = testsystems.force_spectroscopy_model()
    nstates = true_model.nstates
    tau = 0.001  # time interval per observation

    # Generate synthetic data.
    print "Generating synthetic data..."
    [O, S] = true_model.generate_synthetic_observation_trajectories(ntrajectories=1, length=50000)

    # DEBUG
    print "synthetic observation trajectories:"
    print O
    print "Total state visits, min_state, max_state:"
    print testsystems.total_state_visits(nstates, S)

    # Generate MLHMM.
    print "Generating MLHMM..."
    estimator = bhmm.MLHMM(O, nstates)

    print "Initial guess:"
    print str(estimator.hmm.output_model)
    print estimator.hmm.transition_matrix
    print estimator.hmm.stationary_distribution

    # Plot initial guess.
    s_t = None
    o_t = O[0]
    plots.plot_state_assignments(estimator.hmm, s_t, o_t, time_units='s', obs_label='force / pN', tau=tau,
                                 pdf_filename='synthetic-three-state-model-guess-nstates'+str(nstates)+'.pdf')

    print "Fitting HMM..."
    mle = estimator.fit()

    # Plot.
    s_t = mle.hidden_state_trajectories[0]
    import numpy as np
    o_t = O[0]
    plots.plot_state_assignments(mle, s_t, o_t, time_units='s', obs_label='force / pN', tau=tau,
                                 pdf_filename='synthetic-three-state-model-mlhmm-nstates'+str(nstates)+'.pdf')

    # Initialize BHMM with MLHMM model.
    print "Sampling models from BHMM..."
    sampler = bhmm.BHMM(O, nstates, initial_model=mle)
    bhmm_models = sampler.sample(nsamples=nsamples, save_hidden_state_trajectory=False)

    # Generate a sample saving a hidden state trajectory.
    final_models = sampler.sample(nsamples=1, save_hidden_state_trajectory=True)

    # Plot final BHMM sample.
    model = final_models[0]
    s_t = model.hidden_state_trajectories[0]
    o_t = O[0]
    plots.plot_state_assignments(model, s_t, o_t, time_units='s', obs_label='force / pN', tau=tau,
                                 pdf_filename='synthetic-three-state-model-bhmm-nstates'+str(nstates)+'.pdf')

    # write latex table with sample statistics
    conf = 0.95
    sampled_hmm = bhmm.SampledGaussianHMM(mle, bhmm_models)
    generate_latex_table(sampled_hmm, conf=conf, dt=1, time_unit='step',
                         caption='Bayesian HMM parameter estimates for synthetic three-state model.',
                         outfile='synthetic-three-state-model-bhmm-statistics.tex')


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Maximum-likelihood and Bayesian HMM estimation for synthetic data')
    parser.add_argument('--nstates', default=3, type=int, help='number of states')
    parser.add_argument('--nsamples', default=100, type=int, help='number of samples in Bayesian estimator')
    parser.add_argument('--verbose', dest='verbose', action='store_true', default=True, help='be loud and noisy')
    args = parser.parse_args()

    # be verbose?
    bhmm.config.verbose = args.verbose

    # go
    run(args.nstates, args.nsamples)
|
| ammarkhann/FinalSeniorCode | refs/heads/master | lib/python2.7/site-packages/ipykernel/kernelapp.py | 3 |
"""An Application for launching a kernel"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import atexit
import os
import sys
import signal
import traceback
import logging
from tornado import ioloop
import zmq
from zmq.eventloop import ioloop as zmq_ioloop
from zmq.eventloop.zmqstream import ZMQStream
from IPython.core.application import (
BaseIPythonApplication, base_flags, base_aliases, catch_config_error
)
from IPython.core.profiledir import ProfileDir
from IPython.core.shellapp import (
InteractiveShellApp, shell_flags, shell_aliases
)
from IPython.utils import io
from ipython_genutils.path import filefind, ensure_dir_exists
from traitlets import (
Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type, default
)
from ipython_genutils.importstring import import_item
from jupyter_core.paths import jupyter_runtime_dir
from jupyter_client import write_connection_file
from jupyter_client.connect import ConnectionFileMixin
# local imports
from .iostream import IOPubThread
from .heartbeat import Heartbeat
from .ipkernel import IPythonKernel
from .parentpoller import ParentPollerUnix, ParentPollerWindows
from jupyter_client.session import (
Session, session_flags, session_aliases,
)
from .zmqshell import ZMQInteractiveShell
#-----------------------------------------------------------------------------
# Flags and Aliases
#-----------------------------------------------------------------------------
kernel_aliases = dict(base_aliases)
kernel_aliases.update({
'ip' : 'IPKernelApp.ip',
'hb' : 'IPKernelApp.hb_port',
'shell' : 'IPKernelApp.shell_port',
'iopub' : 'IPKernelApp.iopub_port',
'stdin' : 'IPKernelApp.stdin_port',
'control' : 'IPKernelApp.control_port',
'f' : 'IPKernelApp.connection_file',
'transport': 'IPKernelApp.transport',
})
kernel_flags = dict(base_flags)
kernel_flags.update({
'no-stdout' : (
{'IPKernelApp' : {'no_stdout' : True}},
"redirect stdout to the null device"),
'no-stderr' : (
{'IPKernelApp' : {'no_stderr' : True}},
"redirect stderr to the null device"),
'pylab' : (
{'IPKernelApp' : {'pylab' : 'auto'}},
"""Pre-load matplotlib and numpy for interactive use with
the default matplotlib backend."""),
})
# inherit flags&aliases for any IPython shell apps
kernel_aliases.update(shell_aliases)
kernel_flags.update(shell_flags)
# inherit flags&aliases for Sessions
kernel_aliases.update(session_aliases)
kernel_flags.update(session_flags)
_ctrl_c_message = """\
NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
To exit, you will have to explicitly quit this process, by either sending
"quit" from a client, or using Ctrl-\\ in UNIX-like environments.
To read more about this, see https://github.com/ipython/ipython/issues/2049
"""
#-----------------------------------------------------------------------------
# Application class for starting an IPython Kernel
#-----------------------------------------------------------------------------
class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,
ConnectionFileMixin):
name='ipython-kernel'
aliases = Dict(kernel_aliases)
flags = Dict(kernel_flags)
classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
# the kernel class, as an importstring
kernel_class = Type('ipykernel.ipkernel.IPythonKernel',
klass='ipykernel.kernelbase.Kernel',
help="""The Kernel subclass to be used.
This should allow easy re-use of the IPKernelApp entry point
to configure and launch kernels other than IPython's own.
""").tag(config=True)
kernel = Any()
poller = Any() # don't restrict this even though current pollers are all Threads
heartbeat = Instance(Heartbeat, allow_none=True)
ports = Dict()
subcommands = {
'install': (
'ipykernel.kernelspec.InstallIPythonKernelSpecApp',
'Install the IPython kernel'
),
}
# connection info:
connection_dir = Unicode()
@default('connection_dir')
def _default_connection_dir(self):
return jupyter_runtime_dir()
@property
def abs_connection_file(self):
if os.path.basename(self.connection_file) == self.connection_file:
return os.path.join(self.connection_dir, self.connection_file)
else:
return self.connection_file
# streams, etc.
no_stdout = Bool(False, help="redirect stdout to the null device").tag(config=True)
no_stderr = Bool(False, help="redirect stderr to the null device").tag(config=True)
outstream_class = DottedObjectName('ipykernel.iostream.OutStream',
help="The importstring for the OutStream factory").tag(config=True)
displayhook_class = DottedObjectName('ipykernel.displayhook.ZMQDisplayHook',
help="The importstring for the DisplayHook factory").tag(config=True)
# polling
parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0),
help="""kill this process if its parent dies. On Windows, the argument
specifies the HANDLE of the parent process, otherwise it is simply boolean.
""").tag(config=True)
interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0),
help="""ONLY USED ON WINDOWS
Interrupt this process when the parent is signaled.
""").tag(config=True)
def init_crash_handler(self):
sys.excepthook = self.excepthook
def excepthook(self, etype, evalue, tb):
# write uncaught traceback to 'real' stderr, not zmq-forwarder
traceback.print_exception(etype, evalue, tb, file=sys.__stderr__)
def init_poller(self):
if sys.platform == 'win32':
if self.interrupt or self.parent_handle:
self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
elif self.parent_handle and self.parent_handle != 1:
# PID 1 (init) is special and will never go away,
# only be reassigned.
# Parent polling doesn't work if ppid == 1 to start with.
self.poller = ParentPollerUnix()
def _bind_socket(self, s, port):
iface = '%s://%s' % (self.transport, self.ip)
if self.transport == 'tcp':
if port <= 0:
port = s.bind_to_random_port(iface)
else:
s.bind("tcp://%s:%i" % (self.ip, port))
elif self.transport == 'ipc':
if port <= 0:
port = 1
path = "%s-%i" % (self.ip, port)
while os.path.exists(path):
port = port + 1
path = "%s-%i" % (self.ip, port)
else:
path = "%s-%i" % (self.ip, port)
s.bind("ipc://%s" % path)
return port
def write_connection_file(self):
"""write connection info to JSON file"""
cf = self.abs_connection_file
self.log.debug("Writing connection file: %s", cf)
write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
iopub_port=self.iopub_port, control_port=self.control_port)
def cleanup_connection_file(self):
cf = self.abs_connection_file
self.log.debug("Cleaning up connection file: %s", cf)
try:
os.remove(cf)
except (IOError, OSError):
pass
self.cleanup_ipc_files()
def init_connection_file(self):
if not self.connection_file:
self.connection_file = "kernel-%s.json"%os.getpid()
try:
self.connection_file = filefind(self.connection_file, ['.', self.connection_dir])
except IOError:
self.log.debug("Connection file not found: %s", self.connection_file)
# This means I own it, and I'll create it in this directory:
ensure_dir_exists(os.path.dirname(self.abs_connection_file), 0o700)
# Also, I will clean it up:
atexit.register(self.cleanup_connection_file)
return
try:
self.load_connection_file()
except Exception:
self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
self.exit(1)
def init_sockets(self):
# Create a context, a session, and the kernel sockets.
self.log.info("Starting the kernel at pid: %i", os.getpid())
context = zmq.Context.instance()
# Uncomment this to try closing the context.
# atexit.register(context.term)
self.shell_socket = context.socket(zmq.ROUTER)
self.shell_socket.linger = 1000
self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
self.stdin_socket = context.socket(zmq.ROUTER)
self.stdin_socket.linger = 1000
self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
self.control_socket = context.socket(zmq.ROUTER)
self.control_socket.linger = 1000
self.control_port = self._bind_socket(self.control_socket, self.control_port)
self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
self.init_iopub(context)
def init_iopub(self, context):
self.iopub_socket = context.socket(zmq.PUB)
self.iopub_socket.linger = 1000
self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
self.configure_tornado_logger()
self.iopub_thread = IOPubThread(self.iopub_socket, pipe=True)
self.iopub_thread.start()
# backward-compat: wrap iopub socket API in background thread
self.iopub_socket = self.iopub_thread.background_socket
def init_heartbeat(self):
"""start the heart beating"""
# heartbeat doesn't share context, because it mustn't be blocked
# by the GIL, which is accessed by libzmq when freeing zero-copy messages
hb_ctx = zmq.Context()
self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
self.hb_port = self.heartbeat.port
self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
self.heartbeat.start()
def log_connection_info(self):
"""display connection info, and store ports"""
basename = os.path.basename(self.connection_file)
if basename == self.connection_file or \
os.path.dirname(self.connection_file) == self.connection_dir:
# use shortname
tail = basename
else:
tail = self.connection_file
lines = [
"To connect another client to this kernel, use:",
" --existing %s" % tail,
]
# log connection info
# info-level, so often not shown.
# frontends should use the %connect_info magic
# to see the connection info
for line in lines:
self.log.info(line)
# also raw print to the terminal if no parent_handle (`ipython kernel`)
# unless log-level is CRITICAL (--quiet)
if not self.parent_handle and self.log_level < logging.CRITICAL:
io.rprint(_ctrl_c_message)
for line in lines:
io.rprint(line)
self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
stdin=self.stdin_port, hb=self.hb_port,
control=self.control_port)
def init_blackhole(self):
"""redirects stdout/stderr to devnull if necessary"""
if self.no_stdout or self.no_stderr:
blackhole = open(os.devnull, 'w')
if self.no_stdout:
sys.stdout = sys.__stdout__ = blackhole
if self.no_stderr:
sys.stderr = sys.__stderr__ = blackhole
def init_io(self):
"""Redirect input streams and set a display hook."""
if self.outstream_class:
outstream_factory = import_item(str(self.outstream_class))
sys.stdout = outstream_factory(self.session, self.iopub_thread, u'stdout')
sys.stderr = outstream_factory(self.session, self.iopub_thread, u'stderr')
if self.displayhook_class:
displayhook_factory = import_item(str(self.displayhook_class))
self.displayhook = displayhook_factory(self.session, self.iopub_socket)
sys.displayhook = self.displayhook
self.patch_io()
def patch_io(self):
"""Patch important libraries that can't handle sys.stdout forwarding"""
try:
import faulthandler
except ImportError:
pass
else:
# Warning: this is a monkeypatch of `faulthandler.enable`, watch for possible
# updates to the upstream API and update accordingly (up-to-date as of Python 3.5):
# https://docs.python.org/3/library/faulthandler.html#faulthandler.enable
# change default file to __stderr__ from forwarded stderr
faulthandler_enable = faulthandler.enable
def enable(file=sys.__stderr__, all_threads=True, **kwargs):
return faulthandler_enable(file=file, all_threads=all_threads, **kwargs)
faulthandler.enable = enable
if hasattr(faulthandler, 'register'):
faulthandler_register = faulthandler.register
def register(signum, file=sys.__stderr__, all_threads=True, chain=False, **kwargs):
return faulthandler_register(signum, file=file, all_threads=all_threads,
chain=chain, **kwargs)
faulthandler.register = register
def init_signal(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
def init_kernel(self):
"""Create the Kernel object itself"""
shell_stream = ZMQStream(self.shell_socket)
control_stream = ZMQStream(self.control_socket)
kernel_factory = self.kernel_class.instance
kernel = kernel_factory(parent=self, session=self.session,
shell_streams=[shell_stream, control_stream],
iopub_thread=self.iopub_thread,
iopub_socket=self.iopub_socket,
stdin_socket=self.stdin_socket,
log=self.log,
profile_dir=self.profile_dir,
user_ns=self.user_ns,
)
kernel.record_ports({
name + '_port': port for name, port in self.ports.items()
})
self.kernel = kernel
# Allow the displayhook to get the execution count
self.displayhook.get_execution_count = lambda: kernel.execution_count
def init_gui_pylab(self):
"""Enable GUI event loop integration, taking pylab into account."""
# Register inline backend as default
# this is higher priority than matplotlibrc,
# but lower priority than anything else (mpl.use() for instance).
# This only affects matplotlib >= 1.5
if not os.environ.get('MPLBACKEND'):
os.environ['MPLBACKEND'] = 'module://ipykernel.pylab.backend_inline'
# Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
# to ensure that any exception is printed straight to stderr.
# Normally _showtraceback associates the reply with an execution,
# which means frontends will never draw it, as this exception
# is not associated with any execute request.
shell = self.shell
_showtraceback = shell._showtraceback
try:
# replace error-sending traceback with stderr
def print_tb(etype, evalue, stb):
print ("GUI event loop or pylab initialization failed",
file=sys.stderr)
print (shell.InteractiveTB.stb2text(stb), file=sys.stderr)
shell._showtraceback = print_tb
InteractiveShellApp.init_gui_pylab(self)
finally:
shell._showtraceback = _showtraceback
def init_shell(self):
self.shell = getattr(self.kernel, 'shell', None)
if self.shell:
self.shell.configurables.append(self)
def init_extensions(self):
super(IPKernelApp, self).init_extensions()
# BEGIN HARDCODED WIDGETS HACK
# Ensure ipywidgets extension is loaded if available
extension_man = self.shell.extension_manager
if 'ipywidgets' not in extension_man.loaded:
try:
extension_man.load_extension('ipywidgets')
except ImportError as e:
self.log.debug('ipywidgets package not installed. Widgets will not be available.')
# END HARDCODED WIDGETS HACK
def configure_tornado_logger(self):
""" Configure the tornado logging.Logger.
Must set up the tornado logger or else tornado will call
basicConfig for the root logger which makes the root logger
go to the real sys.stderr instead of the capture streams.
This function mimics the setup of logging.basicConfig.
"""
logger = logging.getLogger('tornado')
handler = logging.StreamHandler()
formatter = logging.Formatter(logging.BASIC_FORMAT)
handler.setFormatter(formatter)
logger.addHandler(handler)
@catch_config_error
def initialize(self, argv=None):
super(IPKernelApp, self).initialize(argv)
if self.subapp is not None:
return
# register zmq IOLoop with tornado
zmq_ioloop.install()
self.init_blackhole()
self.init_connection_file()
self.init_poller()
self.init_sockets()
self.init_heartbeat()
# writing/displaying connection info must be *after* init_sockets/heartbeat
self.write_connection_file()
# Log connection info after writing connection file, so that the connection
# file is definitely available at the time someone reads the log.
self.log_connection_info()
self.init_io()
self.init_signal()
self.init_kernel()
# shell init steps
self.init_path()
self.init_shell()
if self.shell:
self.init_gui_pylab()
self.init_extensions()
self.init_code()
# flush stdout/stderr, so that anything written to these streams during
# initialization do not get associated with the first execution request
sys.stdout.flush()
sys.stderr.flush()
def start(self):
if self.subapp is not None:
return self.subapp.start()
if self.poller is not None:
self.poller.start()
self.kernel.start()
try:
ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
pass
launch_new_instance = IPKernelApp.launch_instance
def main():
"""Run an IPKernel as an application"""
app = IPKernelApp.instance()
app.initialize()
app.start()
if __name__ == '__main__':
main()
|
| bowen0701/algorithms_data_structures | refs/heads/master | lc0201_bitwise_and_of_numbers_range.py | 1 |
"""Leetcode 201. Bitwise AND of Numbers Range
Medium
URL: https://leetcode.com/problems/bitwise-and-of-numbers-range/
Given a range [m, n] where 0 <= m <= n <= 2147483647,
return the bitwise AND of all numbers in this range, inclusive.
Example 1:
Input: [5,7]
Output: 4
Example 2:
Input: [0,1]
Output: 0
"""
class SolutionBruteForce(object):
def rangeBitwiseAnd(self, m, n):
"""
:type m: int
:type n: int
:rtype: int
Time limit exceeded.
Time complexity: O(n-m).
Space complexity: O(1).
"""
# Edge case when m = 0.
if m == 0:
return 0
# Apply brute force method.
result = m
for i in range(m + 1, n + 1):
result &= i
return result
class SolutionCommonPrefixBit(object):
def rangeBitwiseAnd(self, m, n):
"""
:type m: int
:type n: int
:rtype: int
Time complexity: O(logn).
Space complexity: O(1).
"""
# Edge case.
if m == 0:
return 0
# Find common prefix binary code by right-shifting m & n.
n_shifts = 0
while m != n:
m >>= 1
n >>= 1
n_shifts += 1
# Left-shift m by n_shifts digits.
return m << n_shifts
def main():
# Output: 4
m, n = 5, 7
print SolutionBruteForce().rangeBitwiseAnd(m, n)
print SolutionCommonPrefixBit().rangeBitwiseAnd(m, n)
# Output: 0
m, n = 0, 1
print SolutionBruteForce().rangeBitwiseAnd(m, n)
print SolutionCommonPrefixBit().rangeBitwiseAnd(m, n)
if __name__ == '__main__':
main()
|
| drpngx/tensorflow | refs/heads/master | tensorflow/contrib/learn/python/learn/learn_io/data_feeder_test.py | 14 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `DataFeeder`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import numpy as np
import six
from six.moves import xrange # pylint: disable=redefined-builtin
# pylint: disable=wildcard-import
from tensorflow.contrib.learn.python.learn.learn_io import *
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import test
# pylint: enable=wildcard-import
class DataFeederTest(test.TestCase):
# pylint: disable=undefined-variable
"""Tests for `DataFeeder`."""
def setUp(self):
self._base_dir = os.path.join(self.get_temp_dir(), 'base_dir')
file_io.create_dir(self._base_dir)
def tearDown(self):
file_io.delete_recursively(self._base_dir)
def _wrap_dict(self, data, prepend=''):
return {prepend + '1': data, prepend + '2': data}
def _assert_raises(self, input_data):
with self.assertRaisesRegexp(TypeError, 'annot convert'):
data_feeder.DataFeeder(input_data, None, n_classes=0, batch_size=1)
def _assert_dtype(self, expected_np_dtype, expected_tf_dtype, input_data):
feeder = data_feeder.DataFeeder(input_data, None, n_classes=0, batch_size=1)
if isinstance(input_data, dict):
for v in list(feeder.input_dtype.values()):
self.assertEqual(expected_np_dtype, v)
else:
self.assertEqual(expected_np_dtype, feeder.input_dtype)
with ops.Graph().as_default() as g, self.test_session(g):
inp, _ = feeder.input_builder()
if isinstance(inp, dict):
for v in list(inp.values()):
self.assertEqual(expected_tf_dtype, v.dtype)
else:
self.assertEqual(expected_tf_dtype, inp.dtype)
def test_input_int8(self):
data = np.matrix([[1, 2], [3, 4]], dtype=np.int8)
self._assert_dtype(np.int8, dtypes.int8, data)
self._assert_dtype(np.int8, dtypes.int8, self._wrap_dict(data))
def test_input_int16(self):
data = np.matrix([[1, 2], [3, 4]], dtype=np.int16)
self._assert_dtype(np.int16, dtypes.int16, data)
self._assert_dtype(np.int16, dtypes.int16, self._wrap_dict(data))
def test_input_int32(self):
data = np.matrix([[1, 2], [3, 4]], dtype=np.int32)
self._assert_dtype(np.int32, dtypes.int32, data)
self._assert_dtype(np.int32, dtypes.int32, self._wrap_dict(data))
def test_input_int64(self):
data = np.matrix([[1, 2], [3, 4]], dtype=np.int64)
self._assert_dtype(np.int64, dtypes.int64, data)
self._assert_dtype(np.int64, dtypes.int64, self._wrap_dict(data))
def test_input_uint32(self):
data = np.matrix([[1, 2], [3, 4]], dtype=np.uint32)
self._assert_dtype(np.uint32, dtypes.uint32, data)
self._assert_dtype(np.uint32, dtypes.uint32, self._wrap_dict(data))
def test_input_uint64(self):
data = np.matrix([[1, 2], [3, 4]], dtype=np.uint64)
self._assert_dtype(np.uint64, dtypes.uint64, data)
self._assert_dtype(np.uint64, dtypes.uint64, self._wrap_dict(data))
def test_input_uint8(self):
data = np.matrix([[1, 2], [3, 4]], dtype=np.uint8)
self._assert_dtype(np.uint8, dtypes.uint8, data)
self._assert_dtype(np.uint8, dtypes.uint8, self._wrap_dict(data))
def test_input_uint16(self):
data = np.matrix([[1, 2], [3, 4]], dtype=np.uint16)
self._assert_dtype(np.uint16, dtypes.uint16, data)
self._assert_dtype(np.uint16, dtypes.uint16, self._wrap_dict(data))
def test_input_float16(self):
data = np.matrix([[1, 2], [3, 4]], dtype=np.float16)
self._assert_dtype(np.float16, dtypes.float16, data)
self._assert_dtype(np.float16, dtypes.float16, self._wrap_dict(data))
def test_input_float32(self):
data = np.matrix([[1, 2], [3, 4]], dtype=np.float32)
self._assert_dtype(np.float32, dtypes.float32, data)
self._assert_dtype(np.float32, dtypes.float32, self._wrap_dict(data))
def test_input_float64(self):
data = np.matrix([[1, 2], [3, 4]], dtype=np.float64)
self._assert_dtype(np.float64, dtypes.float64, data)
self._assert_dtype(np.float64, dtypes.float64, self._wrap_dict(data))
def test_input_bool(self):
data = np.array([[False for _ in xrange(2)] for _ in xrange(2)])
self._assert_dtype(np.bool, dtypes.bool, data)
self._assert_dtype(np.bool, dtypes.bool, self._wrap_dict(data))
def test_input_string(self):
input_data = np.array([['str%d' % i for i in xrange(2)] for _ in xrange(2)])
self._assert_dtype(input_data.dtype, dtypes.string, input_data)
self._assert_dtype(input_data.dtype, dtypes.string,
self._wrap_dict(input_data))
def _assertAllClose(self, src, dest, src_key_of=None, src_prop=None):
def func(x):
val = getattr(x, src_prop) if src_prop else x
return val if src_key_of is None else src_key_of[val]
if isinstance(src, dict):
for k in list(src.keys()):
self.assertAllClose(func(src[k]), dest)
else:
self.assertAllClose(func(src), dest)
def test_unsupervised(self):
def func(feeder):
with self.test_session():
inp, _ = feeder.input_builder()
feed_dict_fn = feeder.get_feed_dict_fn()
feed_dict = feed_dict_fn()
self._assertAllClose(inp, [[1, 2]], feed_dict, 'name')
data = np.matrix([[1, 2], [2, 3], [3, 4]])
func(data_feeder.DataFeeder(data, None, n_classes=0, batch_size=1))
func(
data_feeder.DataFeeder(
self._wrap_dict(data), None, n_classes=0, batch_size=1))
def test_data_feeder_regression(self):
def func(df):
inp, out = df.input_builder()
feed_dict_fn = df.get_feed_dict_fn()
feed_dict = feed_dict_fn()
self._assertAllClose(inp, [[3, 4], [1, 2]], feed_dict, 'name')
self._assertAllClose(out, [2, 1], feed_dict, 'name')
x = np.matrix([[1, 2], [3, 4]])
y = np.array([1, 2])
func(data_feeder.DataFeeder(x, y, n_classes=0, batch_size=3))
func(
data_feeder.DataFeeder(
self._wrap_dict(x, 'in'),
self._wrap_dict(y, 'out'),
n_classes=self._wrap_dict(0, 'out'),
batch_size=3))
def test_epoch(self):
def func(feeder):
with self.test_session():
feeder.input_builder()
epoch = feeder.make_epoch_variable()
feed_dict_fn = feeder.get_feed_dict_fn()
# First input
feed_dict = feed_dict_fn()
self.assertAllClose(feed_dict[epoch.name], [0])
# Second input
feed_dict = feed_dict_fn()
self.assertAllClose(feed_dict[epoch.name], [0])
# Third input
feed_dict = feed_dict_fn()
self.assertAllClose(feed_dict[epoch.name], [0])
# Back to the first input again, so new epoch.
feed_dict = feed_dict_fn()
self.assertAllClose(feed_dict[epoch.name], [1])
data = np.matrix([[1, 2], [2, 3], [3, 4]])
labels = np.array([0, 0, 1])
func(data_feeder.DataFeeder(data, labels, n_classes=0, batch_size=1))
func(
data_feeder.DataFeeder(
self._wrap_dict(data, 'in'),
self._wrap_dict(labels, 'out'),
n_classes=self._wrap_dict(0, 'out'),
batch_size=1))
def test_data_feeder_multioutput_regression(self):
def func(df):
inp, out = df.input_builder()
feed_dict_fn = df.get_feed_dict_fn()
feed_dict = feed_dict_fn()
self._assertAllClose(inp, [[3, 4], [1, 2]], feed_dict, 'name')
self._assertAllClose(out, [[3, 4], [1, 2]], feed_dict, 'name')
x = np.matrix([[1, 2], [3, 4]])
y = np.array([[1, 2], [3, 4]])
func(data_feeder.DataFeeder(x, y, n_classes=0, batch_size=2))
func(
data_feeder.DataFeeder(
self._wrap_dict(x, 'in'),
self._wrap_dict(y, 'out'),
n_classes=self._wrap_dict(0, 'out'),
batch_size=2))
def test_data_feeder_multioutput_classification(self):
def func(df):
inp, out = df.input_builder()
feed_dict_fn = df.get_feed_dict_fn()
feed_dict = feed_dict_fn()
self._assertAllClose(inp, [[3, 4], [1, 2]], feed_dict, 'name')
self._assertAllClose(
out, [[[0, 0, 1, 0, 0], [0, 0, 0, 1, 0], [0, 0, 0, 0, 1]],
[[1, 0, 0, 0, 0], [0, 1, 0, 0, 0], [0, 0, 1, 0, 0]]], feed_dict,
'name')
x = np.matrix([[1, 2], [3, 4]])
y = np.array([[0, 1, 2], [2, 3, 4]])
func(data_feeder.DataFeeder(x, y, n_classes=5, batch_size=2))
func(
data_feeder.DataFeeder(
self._wrap_dict(x, 'in'),
self._wrap_dict(y, 'out'),
n_classes=self._wrap_dict(5, 'out'),
batch_size=2))
def test_streaming_data_feeder(self):
def func(df):
inp, out = df.input_builder()
feed_dict_fn = df.get_feed_dict_fn()
feed_dict = feed_dict_fn()
self._assertAllClose(inp, [[[1, 2]], [[3, 4]]], feed_dict, 'name')
self._assertAllClose(out, [[[1], [2]], [[2], [2]]], feed_dict, 'name')
def x_iter(wrap_dict=False):
yield np.array([[1, 2]]) if not wrap_dict else self._wrap_dict(
np.array([[1, 2]]), 'in')
yield np.array([[3, 4]]) if not wrap_dict else self._wrap_dict(
np.array([[3, 4]]), 'in')
def y_iter(wrap_dict=False):
yield np.array([[1], [2]]) if not wrap_dict else self._wrap_dict(
np.array([[1], [2]]), 'out')
yield np.array([[2], [2]]) if not wrap_dict else self._wrap_dict(
np.array([[2], [2]]), 'out')
func(
data_feeder.StreamingDataFeeder(
x_iter(), y_iter(), n_classes=0, batch_size=2))
func(
data_feeder.StreamingDataFeeder(
x_iter(True),
y_iter(True),
n_classes=self._wrap_dict(0, 'out'),
batch_size=2))
# Test non-full batches.
func(
data_feeder.StreamingDataFeeder(
x_iter(), y_iter(), n_classes=0, batch_size=10))
func(
data_feeder.StreamingDataFeeder(
x_iter(True),
y_iter(True),
n_classes=self._wrap_dict(0, 'out'),
batch_size=10))
def test_dask_data_feeder(self):
if HAS_PANDAS and HAS_DASK:
x = pd.DataFrame(
dict(
a=np.array([.1, .3, .4, .6, .2, .1, .6]),
b=np.array([.7, .8, .1, .2, .5, .3, .9])))
x = dd.from_pandas(x, npartitions=2)
y = pd.DataFrame(dict(labels=np.array([1, 0, 2, 1, 0, 1, 2])))
y = dd.from_pandas(y, npartitions=2)
# TODO(ipolosukhin): Remove or restore this.
# x = extract_dask_data(x)
# y = extract_dask_labels(y)
df = data_feeder.DaskDataFeeder(x, y, n_classes=2, batch_size=2)
inp, out = df.input_builder()
feed_dict_fn = df.get_feed_dict_fn()
feed_dict = feed_dict_fn()
self.assertAllClose(feed_dict[inp.name], [[0.40000001, 0.1],
[0.60000002, 0.2]])
self.assertAllClose(feed_dict[out.name], [[0., 0., 1.], [0., 1., 0.]])
# TODO(rohanj): Fix this test by fixing data_feeder. Currently, h5py doesn't
# support permutation based indexing lookups (More documentation at
# http://docs.h5py.org/en/latest/high/dataset.html#fancy-indexing)
def DISABLED_test_hdf5_data_feeder(self):
def func(df):
inp, out = df.input_builder()
feed_dict_fn = df.get_feed_dict_fn()
feed_dict = feed_dict_fn()
self._assertAllClose(inp, [[3, 4], [1, 2]], feed_dict, 'name')
      self._assertAllClose(out, [2, 1], feed_dict, 'name')
try:
import h5py # pylint: disable=g-import-not-at-top
x = np.matrix([[1, 2], [3, 4]])
y = np.array([1, 2])
file_path = os.path.join(self._base_dir, 'test_hdf5.h5')
h5f = h5py.File(file_path, 'w')
h5f.create_dataset('x', data=x)
h5f.create_dataset('y', data=y)
h5f.close()
h5f = h5py.File(file_path, 'r')
x = h5f['x']
y = h5f['y']
func(data_feeder.DataFeeder(x, y, n_classes=0, batch_size=3))
func(
data_feeder.DataFeeder(
self._wrap_dict(x, 'in'),
self._wrap_dict(y, 'out'),
n_classes=self._wrap_dict(0, 'out'),
batch_size=3))
except ImportError:
print("Skipped test for hdf5 since it's not installed.")
class SetupPredictDataFeederTest(DataFeederTest):
"""Tests for `DataFeeder.setup_predict_data_feeder`."""
def test_iterable_data(self):
# pylint: disable=undefined-variable
def func(df):
self._assertAllClose(six.next(df), [[1, 2], [3, 4]])
self._assertAllClose(six.next(df), [[5, 6]])
data = [[1, 2], [3, 4], [5, 6]]
x = iter(data)
x_dict = iter([self._wrap_dict(v) for v in iter(data)])
func(data_feeder.setup_predict_data_feeder(x, batch_size=2))
func(data_feeder.setup_predict_data_feeder(x_dict, batch_size=2))
if __name__ == '__main__':
test.main()
|
berny6969/enigma2
|
refs/heads/master
|
lib/python/Tools/Directories.py
|
20
|
# -*- coding: utf-8 -*-
import os
from os import mkdir, rmdir, system, walk, stat as os_stat, listdir, readlink, makedirs, error as os_error, symlink, access, F_OK, R_OK, W_OK, rename as os_rename
from stat import S_IMODE
from re import compile
from enigma import eEnv
try:
from os import chmod
have_chmod = True
except:
have_chmod = False
try:
from os import utime
have_utime = True
except:
have_utime = False
SCOPE_TRANSPONDERDATA = 0
SCOPE_SYSETC = 1
SCOPE_FONTS = 2
SCOPE_SKIN = 3
SCOPE_SKIN_IMAGE = 4
SCOPE_USERETC = 5
SCOPE_CONFIG = 6
SCOPE_LANGUAGE = 7
SCOPE_HDD = 8
SCOPE_PLUGINS = 9
SCOPE_MEDIA = 10
SCOPE_PLAYLIST = 11
SCOPE_CURRENT_SKIN = 12
SCOPE_METADIR = 16
SCOPE_CURRENT_PLUGIN = 17
PATH_CREATE = 0
PATH_DONTCREATE = 1
PATH_FALLBACK = 2
defaultPaths = {
SCOPE_TRANSPONDERDATA: (eEnv.resolve("${sysconfdir}/"), PATH_DONTCREATE),
SCOPE_SYSETC: (eEnv.resolve("${sysconfdir}/"), PATH_DONTCREATE),
SCOPE_FONTS: (eEnv.resolve("${datadir}/fonts/"), PATH_DONTCREATE),
SCOPE_CONFIG: (eEnv.resolve("${sysconfdir}/enigma2/"), PATH_CREATE),
SCOPE_PLUGINS: (eEnv.resolve("${libdir}/enigma2/python/Plugins/"), PATH_CREATE),
SCOPE_LANGUAGE: (eEnv.resolve("${datadir}/enigma2/po/"), PATH_DONTCREATE),
SCOPE_SKIN: (eEnv.resolve("${datadir}/enigma2/"), PATH_DONTCREATE),
SCOPE_SKIN_IMAGE: (eEnv.resolve("${datadir}/enigma2/"), PATH_DONTCREATE),
SCOPE_HDD: ("/hdd/movie/", PATH_DONTCREATE),
SCOPE_MEDIA: ("/media/", PATH_DONTCREATE),
SCOPE_PLAYLIST: (eEnv.resolve("${sysconfdir}/enigma2/playlist/"), PATH_CREATE),
SCOPE_USERETC: ("", PATH_DONTCREATE), # user home directory
SCOPE_METADIR: (eEnv.resolve("${datadir}/meta"), PATH_CREATE),
}
FILE_COPY = 0 # copy files from fallback dir to the basedir
FILE_MOVE = 1 # move files
PATH_COPY = 2 # copy the complete fallback dir to the basedir
PATH_MOVE = 3 # move the fallback dir to the basedir (can be used for changes in paths)
fallbackPaths = {
SCOPE_CONFIG: [("/home/root/", FILE_MOVE),
(eEnv.resolve("${datadir}/enigma2/defaults/"), FILE_COPY)],
SCOPE_HDD: [("/hdd/movies", PATH_MOVE)]
}
def resolveFilename(scope, base = "", path_prefix = None):
if base.startswith("~/"):
# you can only use the ~/ if we have a prefix directory
assert path_prefix is not None
base = os.path.join(path_prefix, base[2:])
# don't resolve absolute paths
if base.startswith('/'):
return base
if scope == SCOPE_CURRENT_SKIN:
from Components.config import config
# allow files in the config directory to replace skin files
tmp = defaultPaths[SCOPE_CONFIG][0]
if base and pathExists(tmp + base):
path = tmp
else:
tmp = defaultPaths[SCOPE_SKIN][0]
pos = config.skin.primary_skin.value.rfind('/')
if pos != -1:
#if basefile is not available use default skin path as fallback
tmpfile = tmp+config.skin.primary_skin.value[:pos+1] + base
if pathExists(tmpfile):
path = tmp+config.skin.primary_skin.value[:pos+1]
else:
path = tmp
else:
path = tmp
elif scope == SCOPE_CURRENT_PLUGIN:
tmp = defaultPaths[SCOPE_PLUGINS]
from Components.config import config
skintmp = defaultPaths[SCOPE_SKIN]
pos = config.skin.primary_skin.value.rfind('/')
if pos != -1:
#if basefile is not available inside current skin path, use the original provided file as fallback
skintmpfile = skintmp[0]+config.skin.primary_skin.value[:pos+1] + base
if fileExists(skintmpfile):
path = skintmp[0]+config.skin.primary_skin.value[:pos+1]
else:
path = tmp[0]
else:
path = tmp[0]
else:
tmp = defaultPaths[scope]
path = tmp[0]
flags = tmp[1]
if flags == PATH_CREATE:
if not pathExists(path):
try:
mkdir(path)
except OSError:
print "resolveFilename: Couldn't create %s" % path
return None
fallbackPath = fallbackPaths.get(scope)
if fallbackPath and not fileExists(path + base):
for x in fallbackPath:
try:
if x[1] == FILE_COPY:
if fileExists(x[0] + base):
try:
os.link(x[0] + base, path + base)
except:
system("cp " + x[0] + base + " " + path + base)
break
elif x[1] == FILE_MOVE:
if fileExists(x[0] + base):
os.rename(x[0] + base, path + base)
break
elif x[1] == PATH_COPY:
if pathExists(x[0]):
if not pathExists(defaultPaths[scope][0]):
mkdir(path)
system("cp -a " + x[0] + "* " + path)
break
elif x[1] == PATH_MOVE:
if pathExists(x[0]):
os.rename(x[0], path + base)
break
except Exception, e:
print "[D] Failed to recover %s:" % (path+base), e
# FIXME: we also have to handle DATADIR etc. here.
return path + base
# this is only the BASE - an extension must be added later.
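# Usage sketch added for clarity (illustrative only, the base file names are
# made up and not part of the original module):
#   resolveFilename(SCOPE_CONFIG, "settings")
#       -> "${sysconfdir}/enigma2/settings" (the directory is created on demand)
#   resolveFilename(SCOPE_CURRENT_SKIN, "foo.png")
#       -> the copy in the config directory if one exists, otherwise the active
#          primary skin directory, otherwise the default skin directory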
pathExists = os.path.exists
isMount = os.path.ismount
def defaultRecordingLocation(candidate=None):
if candidate and os.path.exists(candidate):
return candidate
# First, try whatever /hdd points to, or /media/hdd
try:
path = os.readlink('/hdd')
except:
path = '/media/hdd'
if not os.path.exists(path):
path = ''
# Find the largest local disk
from Components import Harddisk
mounts = [m for m in Harddisk.getProcMounts() if m[1].startswith('/media/')]
biggest = 0
havelocal = False
for candidate in mounts:
try:
islocal = candidate[1].startswith('/dev/') # Good enough
stat = os.statvfs(candidate[1])
# Free space counts double
size = (stat.f_blocks + stat.f_bavail) * stat.f_bsize
if (islocal and not havelocal) or ((islocal or not havelocal) and (size > biggest)):
path = candidate[1]
havelocal = islocal
biggest = size
except Exception, e:
print "[DRL]", e
if path:
# If there's a movie subdir, we'd probably want to use that.
movie = os.path.join(path, 'movie')
if os.path.isdir(movie):
path = movie
if not path.endswith('/'):
path += '/' # Bad habits die hard, old code relies on this
return path
def createDir(path, makeParents = False):
try:
if makeParents:
makedirs(path)
else:
mkdir(path)
except:
return 0
else:
return 1
def removeDir(path):
try:
rmdir(path)
except:
return 0
else:
return 1
def fileExists(f, mode='r'):
if mode == 'r':
acc_mode = R_OK
elif mode == 'w':
acc_mode = W_OK
else:
acc_mode = F_OK
return access(f, acc_mode)
def fileCheck(f, mode='r'):
return fileExists(f, mode) and f
def getRecordingFilename(basename, dirname = None):
# filter out non-allowed characters
non_allowed_characters = "/.\\:*?<>|\""
filename = ""
basename = basename.replace('\xc2\x86', '').replace('\xc2\x87', '')
for c in basename:
if c in non_allowed_characters or ord(c) < 32:
c = "_"
filename += c
# max filename length for ext4 is 255 (minus 8 characters for .ts.meta)
filename = filename[:247]
if dirname is not None:
if not dirname.startswith('/'):
dirname = os.path.join(defaultRecordingLocation(), dirname)
else:
dirname = defaultRecordingLocation()
filename = os.path.join(dirname, filename)
i = 0
while True:
path = filename
if i > 0:
path += "_%03d" % i
try:
open(path + ".ts")
i += 1
except IOError:
return path
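# Illustrative example (not from the original source): for a recording named
# 'News: 20/4' and dirname '/media/hdd/movie', the characters '/', ':' and '.'
# are replaced by '_' above, giving '/media/hdd/movie/News_ 20_4'; if that .ts
# file already exists, '_001', '_002', ... is appended until a free name is found.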
# this is clearly a hack:
def InitFallbackFiles():
resolveFilename(SCOPE_CONFIG, "userbouquet.favourites.tv")
resolveFilename(SCOPE_CONFIG, "bouquets.tv")
resolveFilename(SCOPE_CONFIG, "userbouquet.favourites.radio")
resolveFilename(SCOPE_CONFIG, "bouquets.radio")
# returns a list of tuples containing pathname and filename matching the given pattern
# example-pattern: match all txt-files: ".*\.txt$"
def crawlDirectory(directory, pattern):
list = []
if directory:
expression = compile(pattern)
for root, dirs, files in walk(directory):
for file in files:
if expression.match(file) is not None:
list.append((root, file))
return list
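# Usage sketch added for clarity (illustrative only, the directory name is made up):
#   for root, name in crawlDirectory("/media/hdd/movie", r".*\.txt$"):
#       print os.path.join(root, name)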
def copyfile(src, dst):
try:
f1 = open(src, "rb")
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
f2 = open(dst, "w+b")
while True:
buf = f1.read(16*1024)
if not buf:
break
f2.write(buf)
st = os_stat(src)
mode = S_IMODE(st.st_mode)
if have_chmod:
chmod(dst, mode)
if have_utime:
utime(dst, (st.st_atime, st.st_mtime))
except:
print "copy", src, "to", dst, "failed!"
return -1
return 0
def copytree(src, dst, symlinks=False):
names = listdir(src)
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
if not os.path.isdir(dst):
mkdir(dst)
else:
makedirs(dst)
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = readlink(srcname)
symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks)
else:
copyfile(srcname, dstname)
except:
print "dont copy srcname (no file or link or folder)"
try:
st = os_stat(src)
mode = S_IMODE(st.st_mode)
if have_chmod:
chmod(dst, mode)
if have_utime:
utime(dst, (st.st_atime, st.st_mtime))
except:
print "copy stats for", src, "failed!"
# Renames files, or moves them in the background when source and destination
# are on different devices.
# Input: a list of (source, destination) tuples.
def moveFiles(fileList):
movedList = []
try:
try:
for item in fileList:
os_rename(item[0], item[1])
movedList.append(item)
except OSError, e:
if e.errno == 18:
print "[Directories] cannot rename across devices, trying slow move"
import Screens.CopyFiles
Screens.CopyFiles.moveFiles(fileList, item[0])
print "[Directories] Moving in background..."
else:
raise
except Exception, e:
print "[Directories] Failed move:", e
for item in movedList:
try:
os_rename(item[1], item[0])
except:
print "[Directories] Failed to undo move:", item
raise
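# Usage sketch added for clarity (illustrative only, the paths are made up):
# rename in place first, fall back to a background move when crossing devices
# (OSError errno 18, EXDEV):
#   moveFiles([("/hdd/movie/a.ts", "/media/usb/a.ts"),
#              ("/hdd/movie/a.ts.meta", "/media/usb/a.ts.meta")])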
def getSize(path, pattern=".*"):
path_size = 0
if os.path.isdir(path):
files = crawlDirectory(path, pattern)
for file in files:
filepath = os.path.join(file[0], file[1])
path_size += os.path.getsize(filepath)
elif os.path.isfile(path):
path_size = os.path.getsize(path)
return path_size
|
throwable-one/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/tests/modeltests/custom_methods/models.py
|
92
|
"""
3. Giving models custom methods
Any method you add to a model will be available to instances.
"""
from django.db import models
import datetime
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
def __unicode__(self):
return self.headline
def was_published_today(self):
return self.pub_date == datetime.date.today()
def articles_from_same_day_1(self):
return Article.objects.filter(pub_date=self.pub_date).exclude(id=self.id)
def articles_from_same_day_2(self):
"""
        Verbose version of articles_from_same_day_1, which does a custom
database query for the sake of demonstration.
"""
from django.db import connection
cursor = connection.cursor()
cursor.execute("""
SELECT id, headline, pub_date
FROM custom_methods_article
WHERE pub_date = %s
AND id != %s""", [connection.ops.value_to_db_date(self.pub_date),
self.id])
return [self.__class__(*row) for row in cursor.fetchall()]
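# Usage sketch added for clarity (illustrative only, the data is made up):
# custom methods are called like any other instance attribute:
#   a = Article.objects.create(headline='Hello', pub_date=datetime.date.today())
#   a.was_published_today()       # True on the day it was created
#   a.articles_from_same_day_1()  # other articles sharing the same pub_date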
|
joelpinheiro/safebox-smartcard-auth
|
refs/heads/master
|
Client/veclient/lib/python2.7/locale.py
|
4
|
/usr/lib/python2.7/locale.py
|
AndreyPopovNew/asuswrt-merlin-rt-n
|
refs/heads/master
|
release/src/router/samba36/source4/torture/libnet/python/samr-test.py
|
67
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Unix SMB/CIFS implementation.
# Copyright (C) Kamen Mazdrashki <kamen.mazdrashki@postpath.com> 2009
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# Usage:
# export ACCOUNT_NAME=kamen
# export NEW_PASS=test
# export SUBUNITRUN=$samba4srcdir/scripting/bin/subunitrun
# PYTHONPATH="$samba4srcdir/torture/libnet/python" $SUBUNITRUN samr-test -Ukma-exch.devel/Administrator%333
#
import os
from samba import net
import samba.tests
if not "ACCOUNT_NAME" in os.environ.keys():
raise Exception("Please supply ACCOUNT_NAME in environment")
if not "NEW_PASS" in os.environ.keys():
raise Exception("Please supply NEW_PASS in environment")
account_name = os.environ["ACCOUNT_NAME"]
new_pass = os.environ["NEW_PASS"]
#
# Tests start here
#
class Libnet_SetPwdTest(samba.tests.TestCase):
########################################################################################
def test_SetPassword(self):
creds = self.get_credentials()
net.SetPassword(account_name=account_name,
domain_name=creds.get_domain(),
newpassword=new_pass,
credentials=creds)
########################################################################################
|
albert-chin/yowsup
|
refs/heads/master
|
yowsup/layers/protocol_presence/__init__.py
|
70
|
from .layer import YowPresenceProtocolLayer
|
paran0ids0ul/infernal-twin
|
refs/heads/master
|
build/pip/pip/__main__.py
|
834
|
from __future__ import absolute_import
import os
import sys
# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
# __file__ is pip-*.whl/pip/__main__.py
# first dirname call strips of '/__main__.py', second strips off '/pip'
# Resulting path is the name of the wheel itself
# Add that to sys.path so we can import pip
path = os.path.dirname(os.path.dirname(__file__))
sys.path.insert(0, path)
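# Worked example added for clarity (the wheel file name below is made up):
# running "python pip-8.1.1-py2.py3-none-any.whl/pip install SomePackage" makes
# __file__ ".../pip-8.1.1-py2.py3-none-any.whl/pip/__main__.py", so the two
# dirname() calls above leave ".../pip-8.1.1-py2.py3-none-any.whl" on sys.path.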
import pip # noqa
if __name__ == '__main__':
sys.exit(pip.main())
|
redhat-cip/tempest
|
refs/heads/master
|
tempest/tests/common/test_cred_provider.py
|
22
|
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from tempest_lib import auth
from tempest_lib import exceptions as lib_exc
from tempest_lib.services.identity.v2 import token_client as v2_client
from tempest_lib.services.identity.v3 import token_client as v3_client
from tempest.common import cred_provider
from tempest.common import tempest_fixtures as fixtures
from tempest import config
from tempest.tests import base
from tempest.tests import fake_config
from tempest.tests import fake_identity
class ConfiguredV2CredentialsTests(base.TestCase):
attributes = {
'username': 'fake_username',
'password': 'fake_password',
'tenant_name': 'fake_tenant_name'
}
identity_response = fake_identity._fake_v2_response
credentials_class = auth.KeystoneV2Credentials
tokenclient_class = v2_client.TokenClientJSON
identity_version = 'v2'
def setUp(self):
super(ConfiguredV2CredentialsTests, self).setUp()
self.useFixture(fake_config.ConfigFixture())
self.stubs.Set(config, 'TempestConfigPrivate', fake_config.FakePrivate)
self.stubs.Set(self.tokenclient_class, 'raw_request',
self.identity_response)
def _get_credentials(self, attributes=None):
if attributes is None:
attributes = self.attributes
return self.credentials_class(**attributes)
def _check(self, credentials, credentials_class, filled):
# Check the right version of credentials has been returned
self.assertIsInstance(credentials, credentials_class)
# Check the id attributes are filled in
attributes = [x for x in credentials.ATTRIBUTES if (
'_id' in x and x != 'domain_id')]
for attr in attributes:
if filled:
self.assertIsNotNone(getattr(credentials, attr))
else:
self.assertIsNone(getattr(credentials, attr))
def _verify_credentials(self, credentials_class, filled=True,
identity_version=None):
for ctype in cred_provider.CREDENTIAL_TYPES:
if identity_version is None:
creds = cred_provider.get_configured_credentials(
credential_type=ctype, fill_in=filled)
else:
creds = cred_provider.get_configured_credentials(
credential_type=ctype, fill_in=filled,
identity_version=identity_version)
self._check(creds, credentials_class, filled)
def test_create(self):
creds = self._get_credentials()
self.assertEqual(self.attributes, creds._initial)
def test_create_invalid_attr(self):
self.assertRaises(lib_exc.InvalidCredentials,
self._get_credentials,
attributes=dict(invalid='fake'))
def test_get_configured_credentials(self):
self.useFixture(fixtures.LockFixture('auth_version'))
self._verify_credentials(credentials_class=self.credentials_class)
def test_get_configured_credentials_unfilled(self):
self.useFixture(fixtures.LockFixture('auth_version'))
self._verify_credentials(credentials_class=self.credentials_class,
filled=False)
def test_get_configured_credentials_version(self):
# version specified and not loaded from config
self.useFixture(fixtures.LockFixture('auth_version'))
self._verify_credentials(credentials_class=self.credentials_class,
identity_version=self.identity_version)
def test_is_valid(self):
creds = self._get_credentials()
self.assertTrue(creds.is_valid())
class ConfiguredV3CredentialsTests(ConfiguredV2CredentialsTests):
attributes = {
'username': 'fake_username',
'password': 'fake_password',
'project_name': 'fake_project_name',
'user_domain_name': 'fake_domain_name'
}
credentials_class = auth.KeystoneV3Credentials
identity_response = fake_identity._fake_v3_response
tokenclient_class = v3_client.V3TokenClientJSON
identity_version = 'v3'
def setUp(self):
super(ConfiguredV3CredentialsTests, self).setUp()
# Additional config items reset by cfg fixture after each test
cfg.CONF.set_default('auth_version', 'v3', group='identity')
# Identity group items
for prefix in ['', 'alt_', 'admin_']:
cfg.CONF.set_default(prefix + 'domain_name', 'fake_domain_name',
group='identity')
|
ximenesuk/openmicroscopy
|
refs/heads/develop
|
components/tools/OmeroPy/test/gatewaytest/test_connection.py
|
4
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright 2009-2013 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
pytest fixtures used as defined in conftest.py:
- gatewaywrapper
- author_testimg
"""
import omero
import Ice
from omero.gateway.scripts import dbhelpers
import pytest
class TestConnectionMethods (object):
def testMultiProcessSession (self, gatewaywrapper):
        # Ticket #120: among other things, calling getSession() twice for the same session dies, also in separate processes.
        # We mimic this by calling setGroupForSession, which calls sessionservice.getSession, twice on cloned connections.
gatewaywrapper.loginAsAuthor()
assert gatewaywrapper.gateway.getSession() != None
c2 = gatewaywrapper.gateway.clone()
assert c2.connect(sUuid=gatewaywrapper.gateway._sessionUuid)
assert c2.getSession() != None
a = c2.getAdminService()
g = omero.gateway.ExperimenterGroupWrapper(c2, a.containedGroups(c2.getUserId())[-1])
c2.setGroupForSession(g)
c3 = gatewaywrapper.gateway.clone()
assert c3.connect(sUuid=gatewaywrapper.gateway._sessionUuid)
assert c3.getSession() != None
a = c3.getAdminService()
g = omero.gateway.ExperimenterGroupWrapper(c3, a.containedGroups(c3.getUserId())[1])
c3.setGroupForSession(g)
def testSeppuku (self, gatewaywrapper, author_testimg):
# author_testimg in args to make sure the image has been imported
gatewaywrapper.loginAsAuthor()
assert gatewaywrapper.getTestImage() != None
gatewaywrapper.gateway.seppuku()
pytest.raises(Ice.ConnectionLostException, gatewaywrapper.getTestImage)
gatewaywrapper._has_connected = False
gatewaywrapper.doDisconnect()
gatewaywrapper.loginAsAuthor()
assert gatewaywrapper.getTestImage() != None
gatewaywrapper.gateway.seppuku(softclose=False)
pytest.raises(Ice.ConnectionLostException, gatewaywrapper.getTestImage)
gatewaywrapper._has_connected = False
gatewaywrapper.doDisconnect()
# Also make sure softclose does the right thing
gatewaywrapper.loginAsAuthor()
g2 = gatewaywrapper.gateway.clone()
def g2_getTestImage():
return dbhelpers.getImage(g2, 'testimg1')
assert g2.connect(gatewaywrapper.gateway._sessionUuid)
assert gatewaywrapper.getTestImage() != None
assert g2_getTestImage() != None
g2.seppuku(softclose=True)
pytest.raises(Ice.ConnectionLostException, g2_getTestImage)
assert gatewaywrapper.getTestImage() != None
g2 = gatewaywrapper.gateway.clone()
assert g2.connect(gatewaywrapper.gateway._sessionUuid)
assert gatewaywrapper.getTestImage() != None
assert g2_getTestImage() != None
g2.seppuku(softclose=False)
pytest.raises(Ice.ConnectionLostException, g2_getTestImage)
pytest.raises(Ice.ObjectNotExistException, gatewaywrapper.getTestImage)
gatewaywrapper._has_connected = False
gatewaywrapper.doDisconnect()
def testTopLevelObjects (self, gatewaywrapper, author_testimg):
##
# Test listProjects as root (sees, does not own)
parents = author_testimg.getAncestry()
project_id = parents[-1].getId()
        # Original (4.1) test fails since 'admin' is logged into group 0, but the project
        # created above is in a new group.
# gatewaywrapper.loginAsAdmin() # test passes if we remain logged in as Author
ids = map(lambda x: x.getId(), gatewaywrapper.gateway.listProjects())
assert project_id in ids
gatewaywrapper.loginAsAdmin() # test passes if we NOW log in as Admin (different group)
ids = map(lambda x: x.getId(), gatewaywrapper.gateway.listProjects())
assert project_id not in ids
##
# Test listProjects as author (sees, owns)
gatewaywrapper.loginAsAuthor()
ids = map(lambda x: x.getId(), gatewaywrapper.gateway.listProjects())
assert project_id in ids
ids = map(lambda x: x.getId(), gatewaywrapper.gateway.listProjects())
assert project_id in ids
##
# Test listProjects as guest (does not see, does not own)
gatewaywrapper.doLogin(gatewaywrapper.USER)
ids = map(lambda x: x.getId(), gatewaywrapper.gateway.listProjects())
assert project_id not in ids
ids = map(lambda x: x.getId(), gatewaywrapper.gateway.listProjects())
assert project_id not in ids
##
# Test getProject
gatewaywrapper.loginAsAuthor()
assert gatewaywrapper.gateway.getObject("Project", project_id).getId() == project_id
##
# Test getDataset
dataset_id = parents[0].getId()
assert gatewaywrapper.gateway.getObject("Dataset", dataset_id).getId() == dataset_id
##
# Test listExperimenters
#exps = map(lambda x: x.omeName, gatewaywrapper.gateway.listExperimenters()) # removed from blitz gateway
exps = map(lambda x: x.omeName, gatewaywrapper.gateway.getObjects("Experimenter"))
for omeName in (gatewaywrapper.USER.name, gatewaywrapper.AUTHOR.name, gatewaywrapper.ADMIN.name.decode('utf-8')):
assert omeName in exps
assert len(list(gatewaywrapper.gateway.getObjects("Experimenter", attributes={'omeName':omeName}))) > 0
comboName = gatewaywrapper.USER.name+gatewaywrapper.AUTHOR.name+gatewaywrapper.ADMIN.name
assert len(list(gatewaywrapper.gateway.getObjects("Experimenter", attributes={'omeName':comboName}))) == 0
##
# Test lookupExperimenter
assert gatewaywrapper.gateway.getObject("Experimenter", attributes={'omeName':gatewaywrapper.USER.name}).omeName == gatewaywrapper.USER.name
assert gatewaywrapper.gateway.getObject("Experimenter", attributes={'omeName':comboName}) is None
##
# still logged in as Author, test listImages(ns)
def listImages(ns=None):
imageAnnLinks = gatewaywrapper.gateway.getAnnotationLinks("Image", ns=ns)
return [omero.gateway.ImageWrapper(gatewaywrapper.gateway, link.parent) for link in imageAnnLinks]
ns = 'weblitz.test_annotation'
obj = gatewaywrapper.getTestImage()
# Make sure it doesn't yet exist
obj.removeAnnotations(ns)
assert obj.getAnnotation(ns) == None
# Check without the ann
assert len(listImages(ns=ns)) == 0
annclass = omero.gateway.CommentAnnotationWrapper
# createAndLink
annclass.createAndLink(target=obj, ns=ns, val='foo')
imgs = listImages(ns=ns)
assert len(imgs) == 1
assert imgs[0] == obj
# and clean up
obj.removeAnnotations(ns)
assert obj.getAnnotation(ns) == None
def testCloseSession (self, gatewaywrapper):
#74 the failed connection for a user not in the system group does not get closed
gatewaywrapper.gateway.setIdentity(gatewaywrapper.USER.name, gatewaywrapper.USER.passwd)
setprop = gatewaywrapper.gateway.c.ic.getProperties().setProperty
map(lambda x: setprop(x[0],str(x[1])), gatewaywrapper.gateway._ic_props.items())
gatewaywrapper.gateway.c.ic.getImplicitContext().put(omero.constants.GROUP, gatewaywrapper.gateway.group)
# I'm not certain the following assertion is as intended.
# This should be reviewed, see ticket #6037
#assert gatewaywrapper.gateway._sessionUuid == None
pytest.raises(omero.ClientError, gatewaywrapper.gateway._createSession)
assert gatewaywrapper.gateway._sessionUuid != None
        # Ticket #74: bug found while fixing this, the uuid passed to closeSession was not wrapped in rtypes, so logout didn't work.
gatewaywrapper.gateway._closeSession() # was raising ValueError
gatewaywrapper.gateway = None
def testMiscellaneous (self, gatewaywrapper):
gatewaywrapper.loginAsUser()
assert gatewaywrapper.gateway.getUser().omeName == gatewaywrapper.USER.name
|
edmorley/django
|
refs/heads/master
|
tests/user_commands/urls.py
|
156
|
from django.conf.urls import url
urlpatterns = [
url(r'^some/url/$', lambda req:req, name='some_url'),
]
|
lupyuen/RaspberryPiImage
|
refs/heads/master
|
home/pi/GrovePi/Software/Python/others/temboo/Library/CloudMine/FileStorage/GetFile.py
|
4
|
# -*- coding: utf-8 -*-
###############################################################################
#
# GetFile
# Retrieves a file from the CloudMine server with a given key.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetFile(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetFile Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetFile, self).__init__(temboo_session, '/Library/CloudMine/FileStorage/GetFile')
def new_input_set(self):
return GetFileInputSet()
def _make_result_set(self, result, path):
return GetFileResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetFileChoreographyExecution(session, exec_id, path)
class GetFileInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetFile
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((required, string) The API Key provided by CloudMine after registering your app.)
"""
super(GetFileInputSet, self)._set_input('APIKey', value)
def set_ApplicationIdentifier(self, value):
"""
Set the value of the ApplicationIdentifier input for this Choreo. ((required, string) The application identifier provided by CloudMine after registering your app.)
"""
super(GetFileInputSet, self)._set_input('ApplicationIdentifier', value)
def set_EncodeFileContent(self, value):
"""
Set the value of the EncodeFileContent input for this Choreo. ((optional, boolean) Returns the file content as Base64 encoded data when set to "true". This should be set to "true" when returning binary files. Defaults to "false".)
"""
super(GetFileInputSet, self)._set_input('EncodeFileContent', value)
def set_Key(self, value):
"""
Set the value of the Key input for this Choreo. ((required, string) The key whose value you want.)
"""
super(GetFileInputSet, self)._set_input('Key', value)
def set_SessionToken(self, value):
"""
Set the value of the SessionToken input for this Choreo. ((conditional, string) The session token for an existing user (returned by the AccountLogin Choreo). This is only required if your app is performing this operation on behalf of another user.)
"""
super(GetFileInputSet, self)._set_input('SessionToken', value)
class GetFileResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetFile Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from CloudMine.)
"""
return self._output.get('Response', None)
class GetFileChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetFileResultSet(response, path)
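# Usage sketch added for clarity (illustrative only; the credential values are
# placeholders and the TembooSession import path is assumed, not taken from
# this module):
#   from temboo.core.session import TembooSession
#   session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
#   choreo = GetFile(session)
#   inputs = choreo.new_input_set()
#   inputs.set_APIKey('...')
#   inputs.set_ApplicationIdentifier('...')
#   inputs.set_Key('...')
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Response())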
|
meska/SoCo
|
refs/heads/master
|
soco/exceptions.py
|
9
|
# -*- coding: utf-8 -*-
"""Exceptions that are used by SoCo."""
from __future__ import unicode_literals
class SoCoException(Exception):
"""Base class for all SoCo exceptions."""
class UnknownSoCoException(SoCoException):
"""An unknown UPnP error.
The exception object will contain the raw response sent back from
the speaker as the first of its args.
"""
class SoCoUPnPException(SoCoException):
"""A UPnP Fault Code, raised in response to actions sent over the
network.
"""
def __init__(self, message, error_code, error_xml, error_description=""):
"""
Args:
message (str): The message from the server.
error_code (str): The UPnP Error Code as a string.
error_xml (str): The xml containing the error, as a utf-8
encoded string.
error_description (str): A description of the error. Default is ""
"""
super(SoCoUPnPException, self).__init__()
self.message = message
self.error_code = error_code
self.error_description = error_description
self.error_xml = error_xml
def __str__(self):
return self.message
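# Usage sketch added for clarity (illustrative only; the service call shown is
# just an example of something that can raise this exception):
#   try:
#       device.avTransport.Play([('InstanceID', 0), ('Speed', 1)])
#   except SoCoUPnPException as exc:
#       print(exc.error_code, exc.error_description)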
class CannotCreateDIDLMetadata(SoCoException):
"""
.. deprecated:: 0.11
Use `DIDLMetadataError` instead.
"""
class DIDLMetadataError(CannotCreateDIDLMetadata):
"""Raised if a data container class cannot create the DIDL metadata due to
missing information.
For backward compatibility, this is currently a subclass of
`CannotCreateDIDLMetadata`. In a future version, it will likely become
a direct subclass of `SoCoException`.
"""
class MusicServiceException(SoCoException):
"""An error relating to a third party music service."""
class UnknownXMLStructure(SoCoException):
"""Raised if XML with an unknown or unexpected structure is returned."""
class SoCoSlaveException(SoCoException):
"""Raised when a master command is called on a slave."""
|
imajes/Sick-Beard
|
refs/heads/master
|
lib/hachoir_parser/audio/aiff.py
|
90
|
"""
Audio Interchange File Format (AIFF) parser.
Author: Victor Stinner
Creation: 27 december 2006
"""
from lib.hachoir_parser import Parser
from lib.hachoir_core.field import (FieldSet,
UInt16, UInt32, Float80, TimestampMac32,
RawBytes, NullBytes,
String, Enum, PascalString32)
from lib.hachoir_core.endian import BIG_ENDIAN
from lib.hachoir_core.text_handler import filesizeHandler
from lib.hachoir_core.tools import alignValue
from lib.hachoir_parser.audio.id3 import ID3v2
CODEC_NAME = {
'ACE2': u"ACE 2-to-1",
'ACE8': u"ACE 8-to-3",
'MAC3': u"MAC 3-to-1",
'MAC6': u"MAC 6-to-1",
'NONE': u"None",
'sowt': u"Little-endian, no compression",
}
class Comment(FieldSet):
def createFields(self):
yield TimestampMac32(self, "timestamp")
yield PascalString32(self, "text")
def parseText(self):
yield String(self, "text", self["size"].value)
def parseID3(self):
yield ID3v2(self, "id3v2", size=self["size"].value*8)
def parseComment(self):
yield UInt16(self, "nb_comment")
for index in xrange(self["nb_comment"].value):
yield Comment(self, "comment[]")
def parseCommon(self):
yield UInt16(self, "nb_channel")
yield UInt32(self, "nb_sample")
yield UInt16(self, "sample_size")
yield Float80(self, "sample_rate")
yield Enum(String(self, "codec", 4, strip="\0", charset="ASCII"), CODEC_NAME)
def parseVersion(self):
yield TimestampMac32(self, "timestamp")
def parseSound(self):
yield UInt32(self, "offset")
yield UInt32(self, "block_size")
size = (self.size - self.current_size) // 8
if size:
yield RawBytes(self, "data", size)
class Chunk(FieldSet):
TAG_INFO = {
'COMM': ('common', "Common chunk", parseCommon),
'COMT': ('comment', "Comment", parseComment),
'NAME': ('name', "Name", parseText),
'AUTH': ('author', "Author", parseText),
'FVER': ('version', "Version", parseVersion),
'SSND': ('sound', "Sound data", parseSound),
'ID3 ': ('id3', "ID3", parseID3),
}
def __init__(self, *args):
FieldSet.__init__(self, *args)
self._size = (8 + alignValue(self["size"].value, 2)) * 8
tag = self["type"].value
if tag in self.TAG_INFO:
self._name, self._description, self._parser = self.TAG_INFO[tag]
else:
self._parser = None
def createFields(self):
yield String(self, "type", 4, "Signature (FORM)", charset="ASCII")
yield filesizeHandler(UInt32(self, "size"))
size = self["size"].value
if size:
if self._parser:
for field in self._parser(self):
yield field
if size % 2:
yield NullBytes(self, "padding", 1)
else:
yield RawBytes(self, "data", size)
class AiffFile(Parser):
PARSER_TAGS = {
"id": "aiff",
"category": "audio",
"file_ext": ("aif", "aiff", "aifc"),
"mime": (u"audio/x-aiff",),
"magic_regex": (("FORM.{4}AIF[CF]", 0),),
"min_size": 12*8,
"description": "Audio Interchange File Format (AIFF)"
}
endian = BIG_ENDIAN
def validate(self):
if self.stream.readBytes(0, 4) != "FORM":
return "Invalid signature"
if self.stream.readBytes(8*8, 4) not in ("AIFF", "AIFC"):
return "Invalid type"
return True
def createFields(self):
yield String(self, "signature", 4, "Signature (FORM)", charset="ASCII")
yield filesizeHandler(UInt32(self, "filesize"))
yield String(self, "type", 4, "Form type (AIFF or AIFC)", charset="ASCII")
while not self.eof:
yield Chunk(self, "chunk[]")
def createDescription(self):
if self["type"].value == "AIFC":
return "Audio Interchange File Format Compressed (AIFC)"
else:
return "Audio Interchange File Format (AIFF)"
def createContentSize(self):
return self["filesize"].value * 8
|
asrozar/perception
|
refs/heads/master
|
perception/database/migrations/versions/1dda1836ac55_create_host_using_sshv1_table.py
|
1
|
"""create host_using_sshv1 table
Revision ID: 1dda1836ac55
Revises: 3132f6875d83
Create Date: 2017-05-22 09:58:07.585231
"""
from sqlalchemy.dialects import postgresql
from alembic import op
import sqlalchemy as sa
import datetime
def _get_date():
return datetime.datetime.now()
# revision identifiers, used by Alembic.
revision = '1dda1836ac55'
down_revision = '3132f6875d83'
branch_labels = None
depends_on = None
def upgrade():
op.create_table('hosts_using_sshv1',
sa.Column('id', sa.Integer, primary_key=True, nullable=False),
sa.Column('perception_product_uuid', postgresql.UUID, nullable=False),
sa.Column('ip_addr', postgresql.INET, unique=True, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), default=_get_date))
def downgrade():
op.drop_table('hosts_using_sshv1')
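# Note added for clarity: with Alembic configured for this project, the
# migration is applied with "alembic upgrade head" and reverted with
# "alembic downgrade -1" (or "alembic downgrade 3132f6875d83" to target the
# previous revision explicitly).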
|
LiChenda/NeuralNetwork
|
refs/heads/master
|
IMGPC.py
|
1
|
#!/usr/bin/env python
# encoding: utf-8
from PIL import Image
from numpy import *
#from pylab import *
def getVectFromImg(filename):
img = Image.open(filename)
img = img.convert('1')
img.thumbnail((32,32))
img = img.resize((32,32))
imgbw = zeros((1, 1025))
for i in range(32):
for j in range(32):
if(img.getpixel((j, i))):
imgbw[0,32*i+j + 1] = 0
else:
imgbw[0,32*i+j + 1] = 1
return imgbw
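# Notes added for clarity (not part of the original script): the returned row
# vector has 1025 entries; index 0 is left at zero (apparently reserved as a
# bias term) and entries 1..1024 hold the 32x32 image in row-major order, with
# 1 for dark pixels and 0 for light ones after the '1'-mode conversion.
# Usage sketch (the file name is made up):
#   vec = getVectFromImg('digit.png')  # numpy array of shape (1, 1025)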
|
arnavd96/Cinemiezer
|
refs/heads/master
|
myvenv/lib/python3.4/site-packages/django/contrib/admin/sites.py
|
42
|
from functools import update_wrapper
from django.apps import apps
from django.conf import settings
from django.contrib.admin import ModelAdmin, actions
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.db.models.base import ModelBase
from django.http import Http404, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.urls import NoReverseMatch, reverse
from django.utils import six
from django.utils.text import capfirst
from django.utils.translation import ugettext as _, ugettext_lazy
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.i18n import JavaScriptCatalog
system_check_errors = []
class AlreadyRegistered(Exception):
pass
class NotRegistered(Exception):
pass
class AdminSite(object):
"""
An AdminSite object encapsulates an instance of the Django admin application, ready
to be hooked in to your URLconf. Models are registered with the AdminSite using the
register() method, and the get_urls() method can then be used to access Django view
functions that present a full admin interface for the collection of registered
models.
"""
# Text to put at the end of each page's <title>.
site_title = ugettext_lazy('Django site admin')
# Text to put in each page's <h1>.
site_header = ugettext_lazy('Django administration')
# Text to put at the top of the admin index page.
index_title = ugettext_lazy('Site administration')
# URL for the "View site" link at the top of each admin page.
site_url = '/'
_empty_value_display = '-'
login_form = None
index_template = None
app_index_template = None
login_template = None
logout_template = None
password_change_template = None
password_change_done_template = None
def __init__(self, name='admin'):
self._registry = {} # model_class class -> admin_class instance
self.name = name
self._actions = {'delete_selected': actions.delete_selected}
self._global_actions = self._actions.copy()
def register(self, model_or_iterable, admin_class=None, **options):
"""
Registers the given model(s) with the given admin class.
The model(s) should be Model classes, not instances.
If an admin class isn't given, it will use ModelAdmin (the default
admin options). If keyword arguments are given -- e.g., list_display --
they'll be applied as options to the admin class.
If a model is already registered, this will raise AlreadyRegistered.
If a model is abstract, this will raise ImproperlyConfigured.
"""
if not admin_class:
admin_class = ModelAdmin
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model._meta.abstract:
raise ImproperlyConfigured(
'The model %s is abstract, so it cannot be registered with admin.' % model.__name__
)
if model in self._registry:
raise AlreadyRegistered('The model %s is already registered' % model.__name__)
# Ignore the registration if the model has been
# swapped out.
if not model._meta.swapped:
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
# For reasons I don't quite understand, without a __module__
# the created class appears to "live" in the wrong place,
# which causes issues later on.
options['__module__'] = __name__
admin_class = type("%sAdmin" % model.__name__, (admin_class,), options)
# Instantiate the admin class to save in the registry
admin_obj = admin_class(model, self)
if admin_class is not ModelAdmin and settings.DEBUG:
system_check_errors.extend(admin_obj.check())
self._registry[model] = admin_obj
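    # Usage sketch added for clarity (illustrative only, the model and admin
    # class names are made up); typically called from a project's admin.py
    # against the default site defined at the bottom of this module:
    #   admin.site.register(Question)
    #   admin.site.register(Choice, ChoiceAdmin)
    #   admin.site.register(Author, list_display=('name', 'email'))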
def unregister(self, model_or_iterable):
"""
Unregisters the given model(s).
If a model isn't already registered, this will raise NotRegistered.
"""
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model not in self._registry:
raise NotRegistered('The model %s is not registered' % model.__name__)
del self._registry[model]
def is_registered(self, model):
"""
Check if a model class is registered with this `AdminSite`.
"""
return model in self._registry
def add_action(self, action, name=None):
"""
Register an action to be available globally.
"""
name = name or action.__name__
self._actions[name] = action
self._global_actions[name] = action
def disable_action(self, name):
"""
Disable a globally-registered action. Raises KeyError for invalid names.
"""
del self._actions[name]
def get_action(self, name):
"""
Explicitly get a registered global action whether it's enabled or
not. Raises KeyError for invalid names.
"""
return self._global_actions[name]
@property
def actions(self):
"""
Get all the enabled actions as an iterable of (name, func).
"""
return six.iteritems(self._actions)
@property
def empty_value_display(self):
return self._empty_value_display
@empty_value_display.setter
def empty_value_display(self, empty_value_display):
self._empty_value_display = empty_value_display
def has_permission(self, request):
"""
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
def admin_view(self, view, cacheable=False):
"""
Decorator to create an admin view attached to this ``AdminSite``. This
wraps the view and provides permission checking by calling
``self.has_permission``.
You'll want to use this from within ``AdminSite.get_urls()``:
class MyAdminSite(AdminSite):
def get_urls(self):
from django.conf.urls import url
urls = super(MyAdminSite, self).get_urls()
urls += [
url(r'^my_view/$', self.admin_view(some_view))
]
return urls
By default, admin_views are marked non-cacheable using the
``never_cache`` decorator. If the view can be safely cached, set
cacheable=True.
"""
def inner(request, *args, **kwargs):
if not self.has_permission(request):
if request.path == reverse('admin:logout', current_app=self.name):
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
# Inner import to prevent django.contrib.admin (app) from
# importing django.contrib.auth.models.User (unrelated model).
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(
request.get_full_path(),
reverse('admin:login', current_app=self.name)
)
return view(request, *args, **kwargs)
if not cacheable:
inner = never_cache(inner)
# We add csrf_protect here so this function can be used as a utility
# function for any view, without having to repeat 'csrf_protect'.
if not getattr(view, 'csrf_exempt', False):
inner = csrf_protect(inner)
return update_wrapper(inner, view)
def get_urls(self):
from django.conf.urls import url, include
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.contenttypes.views imports ContentType.
from django.contrib.contenttypes import views as contenttype_views
def wrap(view, cacheable=False):
def wrapper(*args, **kwargs):
return self.admin_view(view, cacheable)(*args, **kwargs)
wrapper.admin_site = self
return update_wrapper(wrapper, view)
# Admin-site-wide views.
urlpatterns = [
url(r'^$', wrap(self.index), name='index'),
url(r'^login/$', self.login, name='login'),
url(r'^logout/$', wrap(self.logout), name='logout'),
url(r'^password_change/$', wrap(self.password_change, cacheable=True), name='password_change'),
url(r'^password_change/done/$', wrap(self.password_change_done, cacheable=True),
name='password_change_done'),
url(r'^jsi18n/$', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'),
url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', wrap(contenttype_views.shortcut),
name='view_on_site'),
]
# Add in each model's views, and create a list of valid URLS for the
# app_index
valid_app_labels = []
for model, model_admin in self._registry.items():
urlpatterns += [
url(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name), include(model_admin.urls)),
]
if model._meta.app_label not in valid_app_labels:
valid_app_labels.append(model._meta.app_label)
# If there were ModelAdmins registered, we should have a list of app
# labels for which we need to allow access to the app_index view,
if valid_app_labels:
regex = r'^(?P<app_label>' + '|'.join(valid_app_labels) + ')/$'
urlpatterns += [
url(regex, wrap(self.app_index), name='app_list'),
]
return urlpatterns
@property
def urls(self):
return self.get_urls(), 'admin', self.name
def each_context(self, request):
"""
Returns a dictionary of variables to put in the template context for
*every* page in the admin site.
For sites running on a subpath, use the SCRIPT_NAME value if site_url
hasn't been customized.
"""
script_name = request.META['SCRIPT_NAME']
site_url = script_name if self.site_url == '/' and script_name else self.site_url
return {
'site_title': self.site_title,
'site_header': self.site_header,
'site_url': site_url,
'has_permission': self.has_permission(request),
'available_apps': self.get_app_list(request),
}
def password_change(self, request, extra_context=None):
"""
Handles the "change password" task -- both form display and validation.
"""
from django.contrib.admin.forms import AdminPasswordChangeForm
from django.contrib.auth.views import password_change
url = reverse('admin:password_change_done', current_app=self.name)
defaults = {
'password_change_form': AdminPasswordChangeForm,
'post_change_redirect': url,
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.password_change_template is not None:
defaults['template_name'] = self.password_change_template
request.current_app = self.name
return password_change(request, **defaults)
def password_change_done(self, request, extra_context=None):
"""
Displays the "success" page after a password change.
"""
from django.contrib.auth.views import password_change_done
defaults = {
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.password_change_done_template is not None:
defaults['template_name'] = self.password_change_done_template
request.current_app = self.name
return password_change_done(request, **defaults)
def i18n_javascript(self, request):
"""
Displays the i18n JavaScript that the Django admin requires.
"""
return JavaScriptCatalog.as_view(packages=['django.contrib.admin'])(request)
@never_cache
def logout(self, request, extra_context=None):
"""
Logs out the user for the given HttpRequest.
This should *not* assume the user is already logged in.
"""
from django.contrib.auth.views import logout
defaults = {
'extra_context': dict(
self.each_context(request),
# Since the user isn't logged out at this point, the value of
# has_permission must be overridden.
has_permission=False,
**(extra_context or {})
),
}
if self.logout_template is not None:
defaults['template_name'] = self.logout_template
request.current_app = self.name
return logout(request, **defaults)
@never_cache
def login(self, request, extra_context=None):
"""
Displays the login form for the given HttpRequest.
"""
if request.method == 'GET' and self.has_permission(request):
# Already logged-in, redirect to admin index
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
from django.contrib.auth.views import login
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.admin.forms eventually imports User.
from django.contrib.admin.forms import AdminAuthenticationForm
context = dict(
self.each_context(request),
title=_('Log in'),
app_path=request.get_full_path(),
username=request.user.get_username(),
)
if (REDIRECT_FIELD_NAME not in request.GET and
REDIRECT_FIELD_NAME not in request.POST):
context[REDIRECT_FIELD_NAME] = reverse('admin:index', current_app=self.name)
context.update(extra_context or {})
defaults = {
'extra_context': context,
'authentication_form': self.login_form or AdminAuthenticationForm,
'template_name': self.login_template or 'admin/login.html',
}
request.current_app = self.name
return login(request, **defaults)
def _build_app_dict(self, request, label=None):
"""
Builds the app dictionary. Takes an optional label parameters to filter
models of a specific app.
"""
app_dict = {}
if label:
models = {
m: m_a for m, m_a in self._registry.items()
if m._meta.app_label == label
}
else:
models = self._registry
for model, model_admin in models.items():
app_label = model._meta.app_label
has_module_perms = model_admin.has_module_permission(request)
if not has_module_perms:
if label:
raise PermissionDenied
continue
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True not in perms.values():
continue
info = (app_label, model._meta.model_name)
model_dict = {
'name': capfirst(model._meta.verbose_name_plural),
'object_name': model._meta.object_name,
'perms': perms,
}
if perms.get('change'):
try:
model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
except NoReverseMatch:
pass
if perms.get('add'):
try:
model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
except NoReverseMatch:
pass
if app_label in app_dict:
app_dict[app_label]['models'].append(model_dict)
else:
app_dict[app_label] = {
'name': apps.get_app_config(app_label).verbose_name,
'app_label': app_label,
'app_url': reverse(
'admin:app_list',
kwargs={'app_label': app_label},
current_app=self.name,
),
'has_module_perms': has_module_perms,
'models': [model_dict],
}
if label:
return app_dict.get(label)
return app_dict
def get_app_list(self, request):
"""
Returns a sorted list of all the installed apps that have been
registered in this site.
"""
app_dict = self._build_app_dict(request)
# Sort the apps alphabetically.
app_list = sorted(app_dict.values(), key=lambda x: x['name'].lower())
# Sort the models alphabetically within each app.
for app in app_list:
app['models'].sort(key=lambda x: x['name'])
return app_list
@never_cache
def index(self, request, extra_context=None):
"""
Displays the main admin index page, which lists all of the installed
apps that have been registered in this site.
"""
app_list = self.get_app_list(request)
context = dict(
self.each_context(request),
title=self.index_title,
app_list=app_list,
)
context.update(extra_context or {})
request.current_app = self.name
return TemplateResponse(request, self.index_template or 'admin/index.html', context)
def app_index(self, request, app_label, extra_context=None):
app_dict = self._build_app_dict(request, app_label)
if not app_dict:
raise Http404('The requested admin page does not exist.')
# Sort the models alphabetically within each app.
app_dict['models'].sort(key=lambda x: x['name'])
app_name = apps.get_app_config(app_label).verbose_name
context = dict(
self.each_context(request),
title=_('%(app)s administration') % {'app': app_name},
app_list=[app_dict],
app_label=app_label,
)
context.update(extra_context or {})
request.current_app = self.name
return TemplateResponse(request, self.app_index_template or [
'admin/%s/app_index.html' % app_label,
'admin/app_index.html'
], context)
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
site = AdminSite()
|
alshedivat/tensorflow
|
refs/heads/master
|
tensorflow/contrib/keras/api/keras/datasets/imdb/__init__.py
|
39
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""IMDB movie review sentiment classification dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras.datasets.imdb import get_word_index
from tensorflow.python.keras.datasets.imdb import load_data
del absolute_import
del division
del print_function
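# Usage sketch added for clarity (illustrative only; num_words is one commonly
# used keyword, see the underlying load_data implementation for the full
# signature):
#   (x_train, y_train), (x_test, y_test) = load_data(num_words=10000)
#   word_index = get_word_index()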
|
lokeshpancharia/BuildingMachineLearningSystemsWithPython
|
refs/heads/master
|
ch06/04_sent.py
|
22
|
# This code is supporting material for the book
# Building Machine Learning Systems with Python
# by Willi Richert and Luis Pedro Coelho
# published by PACKT Publishing
#
# It is made available under the MIT License
#
# This script tries to tweak hyperparameters to improve P/R AUC
#
import time
start_time = time.time()
import re
import nltk
import numpy as np
from sklearn.metrics import precision_recall_curve, roc_curve, auc
from sklearn.cross_validation import ShuffleSplit
from utils import plot_pr
from utils import load_sanders_data
from utils import tweak_labels
from utils import log_false_positives
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.pipeline import Pipeline, FeatureUnion
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import f1_score
from sklearn.base import BaseEstimator
from sklearn.naive_bayes import MultinomialNB
from utils import load_sent_word_net
sent_word_net = load_sent_word_net()
phase = "04"
import json
poscache_filename = "poscache.json"
try:
poscache = json.load(open(poscache_filename, "r"))
except IOError:
poscache = {}
class LinguisticVectorizer(BaseEstimator):
def get_feature_names(self):
return np.array(['sent_neut', 'sent_pos', 'sent_neg',
'nouns', 'adjectives', 'verbs', 'adverbs',
'allcaps', 'exclamation', 'question'])
def fit(self, documents, y=None):
return self
def _get_sentiments(self, d):
# http://www.ling.upenn.edu/courses/Fall_2003/ling001/penn_treebank_pos.html
sent = tuple(nltk.word_tokenize(d))
if poscache is not None:
if d in poscache:
tagged = poscache[d]
else:
poscache[d] = tagged = nltk.pos_tag(sent)
else:
tagged = nltk.pos_tag(sent)
pos_vals = []
neg_vals = []
nouns = 0.
adjectives = 0.
verbs = 0.
adverbs = 0.
for w, t in tagged:
p, n = 0, 0
sent_pos_type = None
if t.startswith("NN"):
sent_pos_type = "n"
nouns += 1
elif t.startswith("JJ"):
sent_pos_type = "a"
adjectives += 1
elif t.startswith("VB"):
sent_pos_type = "v"
verbs += 1
elif t.startswith("RB"):
sent_pos_type = "r"
adverbs += 1
if sent_pos_type is not None:
sent_word = "%s/%s" % (sent_pos_type, w)
if sent_word in sent_word_net:
p, n = sent_word_net[sent_word]
pos_vals.append(p)
neg_vals.append(n)
l = len(sent)
avg_pos_val = np.mean(pos_vals)
avg_neg_val = np.mean(neg_vals)
return [1 - avg_pos_val - avg_neg_val, avg_pos_val, avg_neg_val,
nouns / l, adjectives / l, verbs / l, adverbs / l]
def transform(self, documents):
obj_val, pos_val, neg_val, nouns, adjectives, verbs, adverbs = np.array(
[self._get_sentiments(d) for d in documents]).T
allcaps = []
exclamation = []
question = []
for d in documents:
allcaps.append(
np.sum([t.isupper() for t in d.split() if len(t) > 2]))
exclamation.append(d.count("!"))
question.append(d.count("?"))
result = np.array(
[obj_val, pos_val, neg_val, nouns, adjectives, verbs, adverbs, allcaps,
exclamation, question]).T
return result
emo_repl = {
# positive emoticons
"<3": " good ",
":d": " good ", # :D in lower case
":dd": " good ", # :DD in lower case
"8)": " good ",
":-)": " good ",
":)": " good ",
";)": " good ",
"(-:": " good ",
"(:": " good ",
# negative emoticons:
":/": " bad ",
":>": " sad ",
":')": " sad ",
":-(": " bad ",
":(": " bad ",
":S": " bad ",
":-S": " bad ",
}
emo_repl_order = [k for (k_len, k) in reversed(
sorted([(len(k), k) for k in list(emo_repl.keys())]))]
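# The keys are sorted by length, longest first, so that multi-character
# emoticons such as ":dd" are replaced before their shorter prefixes like
# ":d"; iterating over the dict directly would not guarantee that order.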
re_repl = {
r"\br\b": "are",
r"\bu\b": "you",
r"\bhaha\b": "ha",
r"\bhahaha\b": "ha",
r"\bdon't\b": "do not",
r"\bdoesn't\b": "does not",
r"\bdidn't\b": "did not",
r"\bhasn't\b": "has not",
r"\bhaven't\b": "have not",
r"\bhadn't\b": "had not",
r"\bwon't\b": "will not",
r"\bwouldn't\b": "would not",
r"\bcan't\b": "can not",
r"\bcannot\b": "can not",
}
def create_union_model(params=None):
def preprocessor(tweet):
tweet = tweet.lower()
for k in emo_repl_order:
tweet = tweet.replace(k, emo_repl[k])
for r, repl in re_repl.items():
tweet = re.sub(r, repl, tweet)
return tweet.replace("-", " ").replace("_", " ")
tfidf_ngrams = TfidfVectorizer(preprocessor=preprocessor,
analyzer="word")
ling_stats = LinguisticVectorizer()
all_features = FeatureUnion(
[('ling', ling_stats), ('tfidf', tfidf_ngrams)])
#all_features = FeatureUnion([('tfidf', tfidf_ngrams)])
#all_features = FeatureUnion([('ling', ling_stats)])
clf = MultinomialNB()
pipeline = Pipeline([('all', all_features), ('clf', clf)])
if params:
pipeline.set_params(**params)
return pipeline
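# Minimal usage sketch (labels are illustrative, and it assumes the NLTK
# tagger and the SentiWordNet file loaded above are available):
#
#   pipeline = create_union_model()
#   pipeline.fit(np.asarray(["I love it :)", "this is awful :("]),
#                np.asarray([1, 0]))
#   pipeline.predict(np.asarray(["so good!"]))
#
# get_best_model() below builds the same pipeline with a tuned parameter set.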
def __grid_search_model(clf_factory, X, Y):
cv = ShuffleSplit(
n=len(X), n_iter=10, test_size=0.3, random_state=0)
param_grid = dict(vect__ngram_range=[(1, 1), (1, 2), (1, 3)],
vect__min_df=[1, 2],
vect__smooth_idf=[False, True],
vect__use_idf=[False, True],
vect__sublinear_tf=[False, True],
vect__binary=[False, True],
clf__alpha=[0, 0.01, 0.05, 0.1, 0.5, 1],
)
grid_search = GridSearchCV(clf_factory(),
param_grid=param_grid,
cv=cv,
score_func=f1_score,
verbose=10)
grid_search.fit(X, Y)
clf = grid_search.best_estimator_
print(clf)
return clf
def train_model(clf, X, Y, name="NB ngram", plot=False):
# create it again for plotting
cv = ShuffleSplit(
n=len(X), n_iter=10, test_size=0.3, random_state=0)
train_errors = []
test_errors = []
scores = []
pr_scores = []
precisions, recalls, thresholds = [], [], []
clfs = [] # just to later get the median
for train, test in cv:
X_train, y_train = X[train], Y[train]
X_test, y_test = X[test], Y[test]
clf.fit(X_train, y_train)
clfs.append(clf)
train_score = clf.score(X_train, y_train)
test_score = clf.score(X_test, y_test)
train_errors.append(1 - train_score)
test_errors.append(1 - test_score)
scores.append(test_score)
proba = clf.predict_proba(X_test)
fpr, tpr, roc_thresholds = roc_curve(y_test, proba[:, 1])
precision, recall, pr_thresholds = precision_recall_curve(
y_test, proba[:, 1])
pr_scores.append(auc(recall, precision))
precisions.append(precision)
recalls.append(recall)
thresholds.append(pr_thresholds)
if plot:
scores_to_sort = pr_scores
median = np.argsort(scores_to_sort)[len(scores_to_sort) / 2]
plot_pr(pr_scores[median], name, phase, precisions[median],
recalls[median], label=name)
log_false_positives(clfs[median], X_test, y_test, name)
summary = (np.mean(scores), np.std(scores),
np.mean(pr_scores), np.std(pr_scores))
print("%.3f\t%.3f\t%.3f\t%.3f\t" % summary)
return np.mean(train_errors), np.mean(test_errors)
def print_incorrect(clf, X, Y):
Y_hat = clf.predict(X)
wrong_idx = Y_hat != Y
X_wrong = X[wrong_idx]
Y_wrong = Y[wrong_idx]
Y_hat_wrong = Y_hat[wrong_idx]
for idx in range(len(X_wrong)):
print("clf.predict('%s')=%i instead of %i" %
(X_wrong[idx], Y_hat_wrong[idx], Y_wrong[idx]))
def get_best_model():
best_params = dict(all__tfidf__ngram_range=(1, 2),
all__tfidf__min_df=1,
all__tfidf__stop_words=None,
all__tfidf__smooth_idf=False,
all__tfidf__use_idf=False,
all__tfidf__sublinear_tf=True,
all__tfidf__binary=False,
clf__alpha=0.01,
)
best_clf = create_union_model(best_params)
return best_clf
if __name__ == "__main__":
X_orig, Y_orig = load_sanders_data()
#from sklearn.utils import shuffle
# print "shuffle, sample"
#X_orig, Y_orig = shuffle(X_orig, Y_orig)
#X_orig = X_orig[:100,]
#Y_orig = Y_orig[:100,]
classes = np.unique(Y_orig)
for c in classes:
print("#%s: %i" % (c, sum(Y_orig == c)))
print("== Pos vs. neg ==")
pos_neg = np.logical_or(Y_orig == "positive", Y_orig == "negative")
X = X_orig[pos_neg]
Y = Y_orig[pos_neg]
Y = tweak_labels(Y, ["positive"])
train_model(get_best_model(), X, Y, name="pos vs neg", plot=True)
print("== Pos/neg vs. irrelevant/neutral ==")
X = X_orig
Y = tweak_labels(Y_orig, ["positive", "negative"])
# best_clf = grid_search_model(create_union_model, X, Y, name="sent vs
# rest", plot=True)
train_model(get_best_model(), X, Y, name="pos+neg vs rest", plot=True)
print("== Pos vs. rest ==")
X = X_orig
Y = tweak_labels(Y_orig, ["positive"])
train_model(get_best_model(), X, Y, name="pos vs rest",
plot=True)
print("== Neg vs. rest ==")
X = X_orig
Y = tweak_labels(Y_orig, ["negative"])
train_model(get_best_model(), X, Y, name="neg vs rest",
plot=True)
print("time spent:", time.time() - start_time)
json.dump(poscache, open(poscache_filename, "w"))
|
henrysher/aws-cloudinit
|
refs/heads/v0.7.2
|
cloudinit/url_helper.py
|
1
|
# vi: ts=4 expandtab
#
# Copyright (C) 2012 Canonical Ltd.
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# Copyright (C) 2012 Yahoo! Inc.
# Copyright (C) 2014 Amazon.com, Inc. or its affiliates.
#
# Author: Scott Moser <scott.moser@canonical.com>
# Author: Juerg Haefliger <juerg.haefliger@hp.com>
# Author: Joshua Harlow <harlowja@yahoo-inc.com>
# Author: Andrew Jorgensen <ajorgens@amazon.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import time
import warnings
import requests
from requests import exceptions
from urlparse import (urlparse, urlunparse)
from cloudinit import log as logging
from cloudinit import version
LOG = logging.getLogger(__name__)
# Check if requests has ssl support (added in requests >= 0.8.8)
SSL_ENABLED = False
CONFIG_ENABLED = False # This was added in 0.7 (but taken out in >=1.0)
try:
from distutils.version import LooseVersion
with warnings.catch_warnings():
warnings.simplefilter("ignore", UserWarning)
import pkg_resources
_REQ = pkg_resources.get_distribution('requests')
_REQ_VER = LooseVersion(_REQ.version) # pylint: disable=E1103
if _REQ_VER >= LooseVersion('0.8.8'):
SSL_ENABLED = True
if _REQ_VER >= LooseVersion('0.7.0') and _REQ_VER < LooseVersion('1.0.0'):
CONFIG_ENABLED = True
except:
pass
def _cleanurl(url):
parsed_url = list(urlparse(url, scheme='http')) # pylint: disable=E1123
if not parsed_url[1] and parsed_url[2]:
# Swap these since this seems to be a common
# occurrence when given urls like 'www.google.com'
parsed_url[1] = parsed_url[2]
parsed_url[2] = ''
return urlunparse(parsed_url)
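# For example, _cleanurl('www.google.com') returns 'http://www.google.com':
# urlparse puts a bare hostname into the path slot, and the swap above moves
# it back into netloc before the URL is re-assembled.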
class UrlResponse(object):
def __init__(self, response):
self._response = response
@property
def contents(self):
return self._response.content
@property
def url(self):
return self._response.url
def ok(self, redirects_ok=False):
upper = 300
if redirects_ok:
upper = 400
if self.code >= 200 and self.code < upper:
return True
else:
return False
@property
def headers(self):
return self._response.headers
@property
def code(self):
return self._response.status_code
def __str__(self):
return self.contents
class UrlError(IOError):
def __init__(self, cause, code=None, headers=None):
IOError.__init__(self, str(cause))
self.cause = cause
self.code = code
self.headers = headers
if self.headers is None:
self.headers = {}
def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
headers=None, headers_cb=None, ssl_details=None,
check_status=True, allow_redirects=True):
url = _cleanurl(url)
req_args = {
'url': url,
}
scheme = urlparse(url).scheme # pylint: disable=E1101
if scheme == 'https' and ssl_details:
if not SSL_ENABLED:
LOG.warn("SSL is not enabled, cert. verification can not occur!")
else:
if 'ca_certs' in ssl_details and ssl_details['ca_certs']:
req_args['verify'] = ssl_details['ca_certs']
else:
req_args['verify'] = True
if 'cert_file' in ssl_details and 'key_file' in ssl_details:
req_args['cert'] = [ssl_details['cert_file'],
ssl_details['key_file']]
elif 'cert_file' in ssl_details:
req_args['cert'] = str(ssl_details['cert_file'])
req_args['allow_redirects'] = allow_redirects
req_args['method'] = 'GET'
if timeout is not None:
req_args['timeout'] = max(float(timeout), 0)
if data:
req_args['method'] = 'POST'
# The 'config' keyword only exists in a narrow range of requests versions
# (it is missing from both older and newer releases), so when it is not
# available we need to do the retries manually ourselves...
if CONFIG_ENABLED:
req_config = {
'store_cookies': False,
}
# Don't use the retry support built-in
# since it doesn't allow for 'sleep_times'
# in between tries....
# if retries:
# req_config['max_retries'] = max(int(retries), 0)
req_args['config'] = req_config
manual_tries = 1
if retries:
manual_tries = max(int(retries) + 1, 1)
if not headers:
headers = {
'User-Agent': 'Cloud-Init/%s' % (version.version_string()),
}
if not headers_cb:
def _cb(url):
return headers
headers_cb = _cb
if data:
# Do this after the log (it might be large)
req_args['data'] = data
if sec_between is None:
sec_between = -1
excps = []
# Handle retrying ourselves since the built-in support
# doesn't handle sleeping between tries...
for i in range(0, manual_tries):
try:
req_args['headers'] = headers_cb(url)
filtered_req_args = {}
for (k, v) in req_args.items():
if k == 'data':
continue
filtered_req_args[k] = v
LOG.debug("[%s/%s] open '%s' with %s configuration", i,
manual_tries, url, filtered_req_args)
r = requests.request(**req_args)
if check_status:
r.raise_for_status() # pylint: disable=E1103
LOG.debug("Read from %s (%s, %sb) after %s attempts", url,
r.status_code, len(r.content), # pylint: disable=E1103
(i + 1))
# Doesn't seem like we can make it use a different
# subclass for responses, so add our own backward-compat
# attrs
return UrlResponse(r)
except exceptions.RequestException as e:
if (isinstance(e, (exceptions.HTTPError))
and hasattr(e, 'response') # This appeared in v 0.10.8
and hasattr(e.response, 'status_code')):
excps.append(UrlError(e, code=e.response.status_code,
headers=e.response.headers))
else:
excps.append(UrlError(e))
if SSL_ENABLED and isinstance(e, exceptions.SSLError):
# ssl exceptions are not going to get fixed by waiting a
# few seconds
break
if i + 1 < manual_tries and sec_between > 0:
LOG.debug("Please wait %s seconds while we wait to try again",
sec_between)
time.sleep(sec_between)
if excps:
raise excps[-1]
return None # Should throw before this...
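# Illustrative call (the URL path and numbers are made up): try a metadata
# URL up to three times, sleeping two seconds between attempts; the last
# UrlError is raised if every attempt fails.
#
#   resp = readurl("http://169.254.169.254/latest/meta-data/",
#                  timeout=5, retries=2, sec_between=2)
#   if resp.ok():
#       print(resp.contents)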
def wait_for_url(urls, max_wait=None, timeout=None,
status_cb=None, headers_cb=None, sleep_time=1,
exception_cb=None):
"""
urls: a list of urls to try
max_wait: roughly the maximum time to wait before giving up
The max time is *actually* len(urls)*timeout as each url will
be tried once and given the timeout provided.
timeout: the timeout provided to urlopen
status_cb: call method with string message when a url is not available
headers_cb: call method with single argument of url to get headers
for request.
exception_cb: call method with 2 arguments 'msg' (per status_cb) and
'exception', the exception that occurred.
the idea of this routine is to wait for the EC2 metadata service to
come up. On both Eucalyptus and EC2 we have seen the case where
the instance hit the MD before the MD service was up. EC2 seems
to have permanently fixed this, though.
In openstack, the metadata service might be painfully slow, and
unable to avoid hitting a timeout of even up to 10 seconds or more
(LP: #894279) for a simple GET.
Offset those needs with the need to not hang forever (and block boot)
on a system where cloud-init is configured to look for EC2 Metadata
service but is not going to find one. It is possible that the instance
data host (169.254.169.254) may be firewalled off entirely for a system,
meaning that the connection will block forever unless a timeout is set.
"""
start_time = time.time()
def log_status_cb(msg, exc=None):
LOG.debug(msg)
if status_cb is None:
status_cb = log_status_cb
def timeup(max_wait, start_time):
return ((max_wait <= 0 or max_wait is None) or
(time.time() - start_time > max_wait))
loop_n = 0
while True:
sleep_time = int(loop_n / 5) + 1
for url in urls:
now = time.time()
if loop_n != 0:
if timeup(max_wait, start_time):
break
if timeout and (now + timeout > (start_time + max_wait)):
# shorten timeout to not run way over max_time
timeout = int((start_time + max_wait) - now)
reason = ""
e = None
try:
if headers_cb is not None:
headers = headers_cb(url)
else:
headers = {}
response = readurl(url, headers=headers, timeout=timeout,
check_status=False)
if not response.contents:
reason = "empty response [%s]" % (response.code)
e = UrlError(ValueError(reason),
code=response.code, headers=response.headers)
elif not response.ok():
reason = "bad status code [%s]" % (response.code)
e = UrlError(ValueError(reason),
code=response.code, headers=response.headers)
else:
return url
except UrlError as e:
reason = "request error [%s]" % e
except Exception as e:
reason = "unexpected error [%s]" % e
time_taken = int(time.time() - start_time)
status_msg = "Calling '%s' failed [%s/%ss]: %s" % (url,
time_taken,
max_wait,
reason)
status_cb(status_msg)
if exception_cb:
# This can be used to alter the headers that will be sent
# in the future, for example this is what the MAAS datasource
# does.
exception_cb(msg=status_msg, exception=e)
if timeup(max_wait, start_time):
break
loop_n = loop_n + 1
LOG.debug("Please wait %s seconds while we wait to try again",
sleep_time)
time.sleep(sleep_time)
return False
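# Illustrative call (URLs are made up): poll two candidate metadata endpoints
# for up to 120 seconds with a 10 second per-request timeout; the first URL
# that answers is returned, or False if none ever did.
#
#   url = wait_for_url(["http://169.254.169.254/2009-04-04/meta-data/",
#                       "http://instance-data/2009-04-04/meta-data/"],
#                      max_wait=120, timeout=10)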
|
UmassJin/Leetcode
|
refs/heads/master
|
Array/Gray_Code.py
|
1
|
```
The gray code is a binary numeral system where two successive values differ in only one bit.
Given a non-negative integer n representing the total number of bits in the code, print the sequence of gray code. A gray code sequence must begin with 0.
For example, given n = 2, return [0,1,3,2]. Its gray code sequence is:
00 - 0
01 - 1
11 - 3
10 - 2
Note:
For a given n, a gray code sequence is not uniquely defined.
For example, [0,2,3,1] is also a valid gray code sequence according to the above definition.
For now, the judge is able to judge based on one instance of gray code sequence. Sorry about that.
```
class Solution:
# @param {integer} n
# @return {integer[]}
def grayCode(self, n):
result = []
for i in xrange(2**n):
result.append(i>>1 ^ i)
return result
def graycode( n):
return [ (i >> 1 ^ i )for i in xrange(1<<n)]
print graycode(3)
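# Why i ^ (i >> 1) works: output bit k is (bit k of i) XOR (bit k+1 of i),
# so incrementing i flips exactly one bit of the result. Worked out for n = 2:
#   i=0: 00 ^ 00 = 00 -> 0
#   i=1: 01 ^ 00 = 01 -> 1
#   i=2: 10 ^ 01 = 11 -> 3
#   i=3: 11 ^ 01 = 10 -> 2
# which matches the [0, 1, 3, 2] sequence from the problem statement above.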
#Convert from Gray Code to Normal Code:
def graycode(num):
result = []
for number in num:
mask = number >> 1
while mask != 0:
number = number ^ mask
mask = mask >> 1
result.append(number)
return result
print graycode([0, 1, 3, 2, 6, 7, 5, 4])
#Reference: http://en.wikipedia.org/wiki/Gray_code
|
ruimashita/django-haystack
|
refs/heads/master
|
haystack/__init__.py
|
9
|
# encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from haystack.constants import DEFAULT_ALIAS
from haystack import signals
from haystack.utils import loading
__author__ = 'Daniel Lindsley'
__version__ = (2, 4, 0)
# Setup default logging.
log = logging.getLogger('haystack')
stream = logging.StreamHandler()
stream.setLevel(logging.INFO)
log.addHandler(stream)
# Help people clean up from 1.X.
if hasattr(settings, 'HAYSTACK_SITECONF'):
raise ImproperlyConfigured('The HAYSTACK_SITECONF setting is no longer used & can be removed.')
if hasattr(settings, 'HAYSTACK_SEARCH_ENGINE'):
raise ImproperlyConfigured('The HAYSTACK_SEARCH_ENGINE setting has been replaced with HAYSTACK_CONNECTIONS.')
if hasattr(settings, 'HAYSTACK_ENABLE_REGISTRATIONS'):
raise ImproperlyConfigured('The HAYSTACK_ENABLE_REGISTRATIONS setting is no longer used & can be removed.')
if hasattr(settings, 'HAYSTACK_INCLUDE_SPELLING'):
raise ImproperlyConfigured('The HAYSTACK_INCLUDE_SPELLING setting is now a per-backend setting & belongs in HAYSTACK_CONNECTIONS.')
# Check the 2.X+ bits.
if not hasattr(settings, 'HAYSTACK_CONNECTIONS'):
raise ImproperlyConfigured('The HAYSTACK_CONNECTIONS setting is required.')
if DEFAULT_ALIAS not in settings.HAYSTACK_CONNECTIONS:
raise ImproperlyConfigured("The default alias '%s' must be included in the HAYSTACK_CONNECTIONS setting." % DEFAULT_ALIAS)
# Load the connections.
connections = loading.ConnectionHandler(settings.HAYSTACK_CONNECTIONS)
# Load the router(s).
connection_router = loading.ConnectionRouter()
if hasattr(settings, 'HAYSTACK_ROUTERS'):
if not isinstance(settings.HAYSTACK_ROUTERS, (list, tuple)):
raise ImproperlyConfigured("The HAYSTACK_ROUTERS setting must be either a list or tuple.")
connection_router = loading.ConnectionRouter(settings.HAYSTACK_ROUTERS)
# Setup the signal processor.
signal_processor_path = getattr(settings, 'HAYSTACK_SIGNAL_PROCESSOR', 'haystack.signals.BaseSignalProcessor')
signal_processor_class = loading.import_class(signal_processor_path)
signal_processor = signal_processor_class(connections, connection_router)
# Per-request, reset the ghetto query log.
# Probably not extraordinarily thread-safe but should only matter when
# DEBUG = True.
def reset_search_queries(**kwargs):
for conn in connections.all():
conn.reset_queries()
if settings.DEBUG:
from django.core import signals as django_signals
django_signals.request_started.connect(reset_search_queries)
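# A minimal settings sketch that satisfies the checks above (the backend is
# illustrative; any installed Haystack engine works):
#
#   HAYSTACK_CONNECTIONS = {
#       'default': {
#           'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
#       },
#   }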
|
abdulbaqi/quranf
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/requests/compat.py
|
571
|
# -*- coding: utf-8 -*-
"""
pythoncompat
"""
from .packages import chardet
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
#: Python 3.0.x
is_py30 = (is_py3 and _ver[1] == 0)
#: Python 3.1.x
is_py31 = (is_py3 and _ver[1] == 1)
#: Python 3.2.x
is_py32 = (is_py3 and _ver[1] == 2)
#: Python 3.3.x
is_py33 = (is_py3 and _ver[1] == 3)
#: Python 3.4.x
is_py34 = (is_py3 and _ver[1] == 4)
#: Python 2.7.x
is_py27 = (is_py2 and _ver[1] == 7)
#: Python 2.6.x
is_py26 = (is_py2 and _ver[1] == 6)
#: Python 2.5.x
is_py25 = (is_py2 and _ver[1] == 5)
#: Python 2.4.x
is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice.
# ---------
# Platforms
# ---------
# Syntax sugar.
_ver = sys.version.lower()
is_pypy = ('pypy' in _ver)
is_jython = ('jython' in _ver)
is_ironpython = ('iron' in _ver)
# Assume CPython, if nothing else.
is_cpython = not any((is_pypy, is_jython, is_ironpython))
# Windows-based system.
is_windows = 'win32' in str(sys.platform).lower()
# Standard Linux 2+ system.
is_linux = ('linux' in str(sys.platform).lower())
is_osx = ('darwin' in str(sys.platform).lower())
is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess.
is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess.
try:
import simplejson as json
except ImportError:
import json
# ---------
# Specifics
# ---------
if is_py2:
from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
from urllib2 import parse_http_list
import cookielib
from Cookie import Morsel
from StringIO import StringIO
from .packages.urllib3.packages.ordered_dict import OrderedDict
from httplib import IncompleteRead
builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list, getproxies, proxy_bypass
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO
from collections import OrderedDict
from http.client import IncompleteRead
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
|
waldyrious/GraphUI
|
refs/heads/master
|
gui/draw/rounded.py
|
3
|
# Copyright (c) 2007 Enough Project.
# See LICENSE for details.
import pygame
import backend
from math import pi
def _rounded_rect(surface, color, rect, width, corner_radius):
backend.lock(surface)
rect.height -= width//2
rect.width -= width//2
diameter = corner_radius * 2
for offset, angles, corner in (((0, 0), (pi/2, pi), rect.topleft,),
((-diameter, 0), (0, pi/2), rect.topright),
((-diameter,-diameter), (3./2*pi, 2*pi), rect.bottomright),
((0, -diameter), (pi, 3./2*pi), rect.bottomleft),
):
corner = [a+b for a,b in zip(offset, corner)]
corner_rect = pygame.Rect(corner[0], corner[1], diameter, diameter)
backend.arc(surface, color, corner_rect, angles[0], angles[1], width)
line_dist = corner_radius
for p1, p2 in (((rect.topleft[0] + line_dist, rect.topleft[1]),
(rect.topright[0] - line_dist, rect.topright[1])),
((rect.topright[0], rect.topright[1] + line_dist),
(rect.bottomright[0], rect.bottomright[1] - line_dist)),
((rect.bottomright[0] - line_dist, rect.bottomright[1]),
(rect.bottomleft[0] + line_dist, rect.bottomleft[1])),
((rect.bottomleft[0], rect.bottomleft[1] - line_dist),
(rect.topleft[0], rect.topleft[1] + line_dist)),
):
backend.line(surface, color, p1, p2, width)
backend.unlock(surface)
import offset
def rounded_rect(surface, color, rect, width, corner_radius):
rect = offset.rect_offset(rect)
_rounded_rect(surface, color, rect, width, corner_radius)
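# Illustrative call (surface, colour and geometry are made up): draw a 2px
# border with 8px rounded corners around a 100x50 rectangle:
#
#   rounded_rect(screen, (255, 0, 0), pygame.Rect(10, 10, 100, 50), 2, 8)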
|
iniverno/RnR-LLC
|
refs/heads/master
|
gen-scripts/workloads.py
|
1
|
# checkpoint name/path pairs
all_checkpoints = [
("/simics/checkpoints-u3/barnes/barnes-512", "barnes_512", [1, 2, 4, 8, 16, 32]),
("/simics/checkpoints-u3/barnes/barnes-16k", "barnes_16k", [1, 2, 4, 8, 16, 32]),
("/simics/checkpoints-u3/barnes/barnes-64k", "barnes_64k", [1, 2, 4, 8, 16, 32]),
("/simics/checkpoints-u3/barnes/barnes-128k", "barnes_128k", [1, 2, 4, 8, 16, 32]),
("/simics/checkpoints-u3/ocean/ocean-66", "ocean_66", [1, 2, 4, 8, 16, 32]),
("/simics/checkpoints-u3/ocean/ocean-258", "ocean_258", [1, 2, 4, 8, 16, 32]),
("/simics/checkpoints-u3/ocean/ocean-514", "ocean_514", [1, 2, 4, 8, 16, 32]),
("/simics/checkpoints-u3/ocean/ocean-1026", "ocean_1026", [1, 2, 4, 8, 16, 32]),
("/simics/checkpoints-u3/jbb/jbb", "jbb", [1, 2, 4, 8, 16]),
("/simics/checkpoints-u3/jbb/jbb_warm", "jbb", [1, 2, 4, 8, 16]),
("/simics/checkpoints-u3/oltp/oltp_warm", "oltp", [1, 2, 4, 8, 16]),
("/simics/checkpoints-u3/apache/apache_warm", "apache", [1, 2, 4, 8, 16]),
("/simics/checkpoints-u3/zeus/zeus_warm", "zeus", [1, 2, 4, 8, 16]),
]
regress_list = [
("/simics/checkpoints-u3/jbb/jbb_warm", "jbb", 100, 1, 500, None),
("/simics/checkpoints-u3/oltp/oltp_warm", "oltp", 2, 1, 500, None),
("/simics/checkpoints-u3/apache/apache_warm", "apache", 8, 1, 500, None),
("/simics/checkpoints-u3/zeus/zeus_warm", "zeus", 8, 1, 500, None),
]
## checkpoint path | workload name | trans | dump_int | memory(MB) | cache warmup file
test_runs = [
("jbb/jbb_warm", "jbb", 100, 1, 500, None),
("oltp/oltp_warm", "oltp", 2, 1, 500, None),
("apache/apache_warm", "apache", 8, 1, 500, None),
("zeus/zeus_warm", "zeus", 8, 1, 500, None),
]
short_runs = [
("barnes/barnes-512", "barnes_512", 1, 1, 500, None),
("ocean/ocean-66", "ocean_66", 1, 1, 500, None),
("jbb/jbb_warm", "jbb", 1000,100, 500, None),
("ecperf/ecperf", "ecperf", 0, 1, 500, None),
("SPEC2K/gcc", "gcc", 0, 1, 500, None),
("SPEC2K/equake", "equake", 0, 1, 500, None),
("oltp/oltp_warm", "oltp", 5, 1, 500, None),
("apache/apache_warm", "apache", 50, 1, 500, None),
("zeus/zeus_warm", "zeus", 50, 1, 500, None),
]
half_runs = [
("barnes/barnes-16k", "barnes_16k", 1, 1, 500, None),
("ocean/ocean-514", "ocean_514", 1, 1, 500, None),
("jbb/jbb_warm", "jbb", 100000,1000,1000, None),
("ecperf/ecperf", "ecperf", 4, 1,1000, None),
("oltp/oltp_warm", "oltp", 1000, 10,1000, None),
("apache/apache_warm", "apache", 10000, 100,1000, None),
("zeus/zeus_warm", "zeus", 10000, 100,1000, None),
]
full_runs = [
("barnes/barnes-64k", "barnes_64k", 1, 1, 500, None),
("ocean/ocean-1026", "ocean_1026", 1, 1, 500, None),
("jbb/jbb_warm", "jbb", 200000, 2000, 1000, None),
("ecperf/ecperf", "ecperf", 10, 1, 1000, None),
("oltp/oltp_warm", "oltp", 2000, 20, 1000, None),
("apache/apache_warm", "apache", 20000, 200, 1000, None),
("zeus/zeus_warm", "zeus", 20000, 200, 1000, None),
]
# all commercial checkpoint sets use the "_warm" checkpoints
# these checkpoints will also load the warmup cache files (.caches.gz)
warm_runs = [
("jbb/jbb_warm", "jbb", 100000, 1000, 1900, "yes"),
("oltp/oltp_warm", "oltp", 1000, 10, 1900, "yes"),
("apache/apache_warm", "apache", 1000, 10, 1900, "yes"),
("zeus/zeus_warm", "zeus", 1000, 10, 1900, "yes"),
]
jbb_nuca_runs = [
("jbb-nuca/jbb-HBOLocks", "jbb_HBO", 1000, 1, 500, ""),
("jbb-nuca/jbb-TATASLocks", "jbb_TATAS", 1000, 1, 500, "")
]
## checkpoint path | workload name | trans | dump_int | memory(MB) | cache warmup file | mbench_arg_prefix | mbench_arg_string
transactional_runs = [
("deque", "deque", 2000, 1, 500, "", "2000ops-32bkoff", "2000 32"),
("btree", "btree", 1000, 1, 500, "", "priv-alloc-20pct", "20"),
("prioqueue", "prioqueue", 8192, 1, 500, "", "8192ops", ""),
("sortedList", "sortedList", 500, 1, 500, "", "500ops-64len", "500 64"),
("isolation-test", "isolation-test", 1, 1, 500, "", "", ""),
("logging-test", "logging-test", 1, 1, 500, "", "", ""),
("partial-rollback", "partial-rollback", 1, 1, 500, "", "", "2048 4"),
]
# Currently these only work for eager VM TM systems
eagervm_transactional_runs = [
("compensation", "compensation", 1, 1, 500, "", "", ""),
("commit-action", "commit-action", 1, 1, 500, "", "", ""),
]
stamp_runs = [
("vacation", "vacation", 1, 1, 500, "", "TM-n8-q10-u80-r65536-t4096", "-n8 -q10 -u80 -r65536 -t4096"),
("delaunay", "delaunay", 1, 1, 500, "", "TM-gen4.2-m30", "-i inputs/gen4.2 -m 30"),
("genome", "genome", 1, 1, 500, "", "TM-g256-s16-n8192", "-g256 -s16 -n8192"),
("kmeans", "kmeans", 1, 1, 500, "", "TM-m20-n20-t0.05", "-m20 -n20 -t0.05 -i inputs/random1000_12"),
("bayes", "bayes", 1, 1, 500, "", "TM-v16-r384-n3-p20-s0", "-v16 -r384 -n3 -p20 -s0"),
("labyrinth", "labyrinth", 1, 1, 500, "", "TM-x32-y32-z3-n32", "random-x32-y32-z3-n32.txt")
]
base_runs = [
#("ocean-locks/ocean-locks-66", "ocean-locks_66", 1, 1, 500, None),
#("final-checkpoints/ocean-locks/ocean-locks-base-66", "ocean-locks-base-66", 1, 1, 500, None),
#("final-checkpoints/ocean-locks/ocean-locks-base-130", "ocean-locks-base-130", 1, 1, 500, None),
#("final-checkpoints/ocean-locks/ocean-locks-base-258", "ocean-locks-base-258", 1, 1, 500, None),
#("ocean-locks/ocean-locks-258", "ocean-locks_258", 1, 1, 500, None),
]
nested_transactional_runs = [
("raytrace/raytrace-nested-trans-opt-teapot", "raytrace-nested-trans-opt-teapot", 1, 1, 500, None),
("radiosity/radiosity-nested-trans", "radiosity-nested-trans", 1, 1, 500 , None),
("cholesky/cholesky-nested-trans-14", "cholesky-nested-trans-14", 1, 1, 500, None),
]
mcs_lock_runs = [
("raytrace/raytrace-nested-mcs-opt-teapot", "raytrace-nested-mcs-opt-teapot", 1, 1, 500, None),
("radiosity/radiosity-nested-mcs", "radiosity-nested-mcs", 1, 1, 500 , None),
]
###############################################################################
# Environment Variables Manipulation
###############################################################################
import string, os
# if you are going to assume the value is present in mfacet.py, you should
# make that "required = 1" here.
# NOTE: an empty env variable "" will be treated like a None; this is because
# python 2.1 does not support unsetenv (or "del os.environ["key"]"), so we
# lose the ability to pass "" as a parameter until the lab upgrades to 2.2.
g_env_list = (
# (string) name, (string) default value , required?
("INTERACTIVE", None, 0),
("OPAL_CONFIG_NAME", None, 0),
("OPAL_CONFIG_FILE", None, 0),
("CHECKPOINT_AT_END", None, 0),
("GENERATE_TRACE", None, 0),
("CACHE_DATA_DUMP", None, 0),
("PROTOCOL_OPTION", None, 0),
("NETWORK_TOPOLOGY", None, 0),
("WARMUP_FILE", None, 0),
("RANDOM_SEED", None, 0),
("WORKLOAD", None, 1),
("BANDWIDTH", None, 1),
("CHECKPOINT_DIR", None, 1),
("CHECKPOINT", None, 1),
("BENCHMARK", None, 0),
("LOCK_TYPE", None, 0),
("READ_SET", None, 0),
("MICROBENCH_DIR", "microbenchmarks/transactional", 0),
("MAX_DEPTH", "1", 0),
("ENABLE_TOURMALINE", "0", 0),
("XACT_LAZY_VM", "0", 0),
("XACT_EAGER_CD", "1", 0),
("XACT_VISUALIZER", "0", 0),
("XACT_STORE_PREDICTOR_ENTRIES", "0", 0),
("XACT_STORE_PREDICTOR_HISTORY", "0", 0),
("XACT_STORE_PREDICTOR_THRESHOLD", "0", 0),
("XACT_COMMIT_TOKEN_LATENCY", "0", 0),
("XACT_LOG_BUFFER_SIZE", "0", 0),
("XACT_NO_BACKOFF", "0", 0),
("XACT_FIRST_ACCESS_COST", "0", 0),
("XACT_FIRST_PAGE_ACCESS_COST", "0", 0),
("XACT_CONFLICT_RES", "BASE", 0),
("ENABLE_MAGIC_WAITING", "0", 0),
("XACT_ENABLE_VIRTUALIZATION_LOGTM_SE", "0", 0),
("PROFILE_EXCEPTIONS", "0", 0),
("XACT_DEBUG", "0", 0),
("PROFILE_XACT", "0", 0),
("PROFILE_NONXACT", "0", 0),
("ENABLE_WATCHPOINT", "0", 0),
("PROTOCOL", None, 1),
("PROCESSORS", None, 1),
("CHIPS", None, 1),
("PROCS_PER_CHIP", "1", 1),
("SMT_THREADS", 1, 1),
("NUM_L2_BANKS", "0", 0),
("RESULTS_DIR", None, 1),
("TRANSACTIONS", None, 1),
("DUMP_INTERVAL", "1", 1),
("CONDORCLUSTER", "1", 0),
("CONDORPROCESS", "1", 0),
("MBENCH_ARG_STRING", None, 0),
("MBENCH_ARG_PREFIX", None, 0),
("READ_WRITE_FILTER", "Perfect_", 0),
("VIRTUAL_READ_WRITE_FILTER", "Perfect_", 0),
("SUMMARY_READ_WRITE_FILTER", "Perfect_", 0),
("BATCH_SIZE", None, 0),
("USE_LOCAL_MIRROR", None, 0),
("LM_LICENSE_FILE", "/p/multifacet/projects/simics/licenses/license.dat", 1),
("SIMICS_EXTRA_LIB", "./modules", 1),
#("SIMICS_HOST", "x86-linux", 1),
("LD_ASSUME_KERNEL", "2.4.1", 1),
("MACHTYPE", "i386", 1),
("SHELL", "/bin/tcsh", 1),
("PATH", "s/std/bin:/usr/afsws/bin:/usr/ccs/bin:/usr/ucb:/bin:/usr/bin:/usr/X11R6/bin:/unsup/condor/bin:.", 1),
)
# check invariants of a dictionary w.r.t. g_env_list
def check_requirements(env_dict):
assert(env_dict)
# make sure all variables are defined
for i in g_env_list:
if i[2] == 0:
# make sure it at least is in the dictionary
if (not env_dict.has_key(i[0])):
print "Error: %s is not in the dictionary"%i[0]
assert(0)
elif i[2] == 1:
# make sure the key is in the dictionary and not None
if not (env_dict.has_key(i[0]) and env_dict[i[0]] != None):
print "Error: required key %s missing"%i[0]
assert(0)
else:
assert(0)
# make sure no extra variables are defined
assert(len(g_env_list) == len(env_dict))
return
# returns a dictionary containing all env vars and their default values
def prepare_env_dictionary(simics):
env_dict = {}
for i in g_env_list:
key = string.upper(i[0])
assert(not env_dict.has_key(key))
if(simics == 1):
# get values from system ENV, both not_set & "" get None
env_dict[key] = os.environ.get(key, i[1])
if (env_dict[key] == ""): env_dict[key] = None
else:
# set default value
env_dict[key] = i[1]
return env_dict
# set some key's value after initialization
# note: the value is converted to a string before being inserted into the dictionary
def set_var(env_dict, key, value):
assert(env_dict and env_dict.has_key(key))
if(value == None or str(value) == ""):
env_dict[key] = None
else:
env_dict[key] = str(value)
return
# get some key's value after initialization
def get_var(env_dict, key):
assert(env_dict and env_dict.has_key(key))
assert(env_dict[key] != "")
return env_dict[key]
# return condor env string
def get_condor_env_string(env_dict):
li = []
for k in env_dict.keys():
if(env_dict[k] != None):
# only output not None values, since in the condor, nothing else could
# mess-up the env values
li.append("%s=%s"%(k, env_dict[k]))
return string.join(li, ';')
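# For illustration (values are hypothetical): a dictionary holding WORKLOAD=jbb
# and PROCESSORS=16 comes back as the single string "WORKLOAD=jbb;PROCESSORS=16";
# keys whose value is None are omitted entirely.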
# return shell setenv string
def get_shell_setenv_string(env_dict):
li = []
for k in env_dict.keys():
# make sure we overwrite the old env value
if(env_dict[k] != None):
# km - ugly hack
if(k == "MBENCH_ARG_STRING"):
li.append("export %s='%s'"%(k, env_dict[k]))
else:
li.append("export %s=%s"%(k, env_dict[k]))
else:
li.append("export %s=%s"%(k, ""))
return string.join(li, '\n')
# put all variables to system environment
def update_system_env(env_dict):
check_requirements(env_dict)
for k in env_dict.keys():
# make sure we overwrite the old env value
if(env_dict[k] != None):
os.environ[k] = env_dict[k]
else:
if os.environ.has_key(k):
os.environ[k] = ""
del os.environ[k]
return
# print all variables to stdout
def print_all_variables(env_dict):
#check_requirements(env_dict)
for k in env_dict.keys():
print "%30s "%k,
print env_dict[k]
return
# get output filename prefix from environment variables
def get_output_file_name_prefix(env_dict, condor):
workload_name = get_var(env_dict, "WORKLOAD")
chips = int(get_var(env_dict, "CHIPS"))
procs_per_chip = int(get_var(env_dict, "PROCS_PER_CHIP"))
smt_threads = int(get_var(env_dict, "SMT_THREADS"))
opal_config_name = get_var(env_dict, "OPAL_CONFIG_NAME")
protocol = get_var(env_dict, "PROTOCOL")
protocol_options = get_var(env_dict, "PROTOCOL_OPTION")
bandwidth = int(get_var(env_dict, "BANDWIDTH"))
if(condor == 1):
return "%s/%s-%dc-%dp-%dt-%s-%s-%s-%d-$(Cluster)-$(Process)" % (workload_name, workload_name, chips, procs_per_chip, smt_threads, protocol, protocol_options, opal_config_name, bandwidth)
else:
condor_cluster = int(get_var(env_dict, "CONDORCLUSTER"))
condor_process = int(get_var(env_dict, "CONDORPROCESS"))
return "%s/%s-%dc-%dp-%dt-%s-%s-%s-%d-%d-%d" % (workload_name, workload_name, chips, procs_per_chip, smt_threads, protocol, protocol_options, opal_config_name, bandwidth, condor_cluster, condor_process)
def get_script_file_name(env_dict):
workload_name = get_var(env_dict, "WORKLOAD")
chips = int(get_var(env_dict, "CHIPS"))
procs_per_chip = int(get_var(env_dict, "PROCS_PER_CHIP"))
smt_threads = int(get_var(env_dict, "SMT_THREADS"))
opal_config_name = get_var(env_dict, "OPAL_CONFIG_NAME")
protocol = get_var(env_dict, "PROTOCOL")
protocol_options = get_var(env_dict, "PROTOCOL_OPTION")
bandwidth = int(get_var(env_dict, "BANDWIDTH"))
script_filename = "%s_%dc_%dp_%dt_%s_%s_%s_%s.sh" % (
workload_name,
chips,
procs_per_chip,
smt_threads,
protocol,
protocol_options,
opal_config_name,
bandwidth)
return script_filename
def get_microbench_output_file_name_prefix(env_dict, condor):
workload_name = get_var(env_dict, "WORKLOAD")
chips = int(get_var(env_dict, "CHIPS"))
processors = int(get_var(env_dict, "PROCESSORS"))
procs_per_chip = int(get_var(env_dict, "PROCS_PER_CHIP"))
smt_threads = int(get_var(env_dict, "SMT_THREADS"))
filter_config = get_var(env_dict, "READ_WRITE_FILTER")
virtual_filter_config = get_var(env_dict, "VIRTUAL_READ_WRITE_FILTER")
summary_filter_config = get_var(env_dict, "SUMMARY_READ_WRITE_FILTER")
opal_config_name = get_var(env_dict, "OPAL_CONFIG_NAME")
protocol_option = get_var(env_dict, "PROTOCOL_OPTION")
bandwidth = int(get_var(env_dict, "BANDWIDTH"))
arg_prefix = get_var(env_dict, "MBENCH_ARG_PREFIX")
if(condor == 1):
return "%s-%s/%s-%s-%dc-%dp-%dt-%s-%s-%s-%s-%s-%d-$(Cluster)-$(Process)" % (workload_name, arg_prefix, workload_name, arg_prefix, chips, procs_per_chip, smt_threads,opal_config_name, protocol_option, filter_config, virtual_filter_config, summary_filter_config, bandwidth)
else:
condor_cluster = int(get_var(env_dict, "CONDORCLUSTER"))
condor_process = int(get_var(env_dict, "CONDORPROCESS"))
return "%s-%s/%s-%s-%dp-%dt-%s-%s-%s-%s-%s-%d-%d-%d" % (workload_name, arg_prefix, workload_name, arg_prefix, processors, smt_threads, opal_config_name, protocol_option, filter_config, virtual_filter_config, summary_filter_config, bandwidth, condor_cluster, condor_process)
def get_microbench_script_file_name(env_dict):
workload_name = get_var(env_dict, "WORKLOAD")
arg_prefix = get_var(env_dict, "MBENCH_ARG_PREFIX")
chips = int(get_var(env_dict, "CHIPS"))
procs_per_chip = int(get_var(env_dict, "PROCS_PER_CHIP"))
smt_threads = int(get_var(env_dict, "SMT_THREADS"))
filter_config = get_var(env_dict, "READ_WRITE_FILTER")
virtual_filter_config = get_var(env_dict, "VIRTUAL_READ_WRITE_FILTER")
summary_filter_config = get_var(env_dict, "SUMMARY_READ_WRITE_FILTER")
opal_config_name = get_var(env_dict, "OPAL_CONFIG_NAME")
protocol_option = get_var(env_dict, "PROTOCOL_OPTION")
bandwidth = int(get_var(env_dict, "BANDWIDTH"))
return "%s-%s-%dc-%dp-%dt-%s-%s-%s-%s-%s-%d.sh" % (workload_name, arg_prefix, chips, procs_per_chip, smt_threads, opal_config_name, protocol_option, filter_config, virtual_filter_config, summary_filter_config, bandwidth)
# test this module
def test():
env_dict = prepare_env_dictionary(simics = 0)
set_var(env_dict, "WORKLOAD", 100)
set_var(env_dict, "BANDWIDTH", 100)
set_var(env_dict, "CHECKPOINT", 100)
set_var(env_dict, "PROTOCOL", 100)
set_var(env_dict, "CHIPS", 1)
set_var(env_dict, "PROCS_PER_CHP", 100)
set_var(env_dict, "SMT_THREADS", 1)
set_var(env_dict, "RESULTS_DIR", 100)
set_var(env_dict, "TRANSACTIONS", 100)
set_var(env_dict, "DUMP_INTERVAL", 100)
set_var(env_dict, "CONDORCLUSTER", 100)
set_var(env_dict, "CONDORPROCESS", 100)
print get_var(env_dict, "DUMP_INTERVAL")
print get_condor_env_string(env_dict)
print get_shell_setenv_string(env_dict)
set_var(env_dict, "DUMP_INTERVAL", 222)
set_var(env_dict, "INTERACTIVE", "")
update_system_env(env_dict)
env_dict = prepare_env_dictionary(simics = 1)
print_all_variables(env_dict)
print get_var(env_dict, "DUMP_INTERVAL")
print get_condor_env_string(env_dict)
print get_shell_setenv_string(env_dict)
# for debug
#test()
|
rafadev/django-guardian
|
refs/heads/master
|
docs/exts.py
|
21
|
def setup(app):
app.add_crossref_type(
directivename = "admin",
rolename = "admin",
indextemplate = "pair: %s; admin",
)
app.add_crossref_type(
directivename = "command",
rolename = "command",
indextemplate = "pair: %s; command",
)
app.add_crossref_type(
directivename = "form",
rolename = "form",
indextemplate = "pair: %s; form",
)
app.add_crossref_type(
directivename = "manager",
rolename = "manager",
indextemplate = "pair: %s; manager",
)
app.add_crossref_type(
directivename = "model",
rolename = "model",
indextemplate = "pair: %s; model",
)
app.add_crossref_type(
directivename = "setting",
rolename = "setting",
indextemplate = "pair: %s; setting",
)
app.add_crossref_type(
directivename = "shortcut",
rolename = "shortcut",
indextemplate = "pair: %s; shortcut",
)
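# Once registered, each cross-reference type behaves like any other Sphinx
# role/directive pair, e.g. (target name is illustrative):
#
#   .. setting:: ANONYMOUS_USER_ID
#
# defines a target that prose elsewhere can link to with
# :setting:`ANONYMOUS_USER_ID`.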
|
ossdemura/django-miniblog
|
refs/heads/dev
|
Lib/site-packages/pip/_vendor/requests/packages/chardet/euctwfreq.py
|
3132
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# EUCTW frequency table
# Converted from big5 work
# by Taiwan's Mandarin Promotion Council
# <http:#www.edu.tw:81/mandr/>
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ratio = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
# Char to FreqOrder table
EUCTW_TABLE_SIZE = 8102
EUCTWCharToFreqOrder = (
1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
#Everything below is of no interest for detection purpose
2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
# flake8: noqa
|
Lilywei123/tempest
|
refs/heads/master
|
tempest/api/compute/floating_ips/test_floating_ips_actions_negative.py
|
2
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest.api.compute.floating_ips import base
from tempest.common.utils import data_utils
from tempest import config
from tempest import exceptions
from tempest import test
CONF = config.CONF
class FloatingIPsNegativeTestJSON(base.BaseFloatingIPsTest):
server_id = None
@classmethod
def resource_setup(cls):
super(FloatingIPsNegativeTestJSON, cls).resource_setup()
cls.client = cls.floating_ips_client
# Server creation
resp, server = cls.create_test_server(wait_until='ACTIVE')
cls.server_id = server['id']
# Generating a nonexistent floatingIP id
cls.floating_ip_ids = []
resp, body = cls.client.list_floating_ips()
for i in range(len(body)):
cls.floating_ip_ids.append(body[i]['id'])
while True:
cls.non_exist_id = data_utils.rand_int_id(start=999)
if CONF.service_available.neutron:
cls.non_exist_id = str(uuid.uuid4())
if cls.non_exist_id not in cls.floating_ip_ids:
break
@test.attr(type=['negative', 'gate'])
@test.services('network')
def test_allocate_floating_ip_from_nonexistent_pool(self):
# Negative test:Allocation of a new floating IP from a nonexistent_pool
# to a project should fail
self.assertRaises(exceptions.NotFound,
self.client.create_floating_ip,
"non_exist_pool")
@test.attr(type=['negative', 'gate'])
@test.services('network')
def test_delete_nonexistent_floating_ip(self):
# Negative test:Deletion of a nonexistent floating IP
# from project should fail
# Deleting the non existent floating IP
self.assertRaises(exceptions.NotFound, self.client.delete_floating_ip,
self.non_exist_id)
@test.attr(type=['negative', 'gate'])
@test.services('network')
def test_associate_nonexistent_floating_ip(self):
# Negative test:Association of a non existent floating IP
# to specific server should fail
# Associating non existent floating IP
self.assertRaises(exceptions.NotFound,
self.client.associate_floating_ip_to_server,
"0.0.0.0", self.server_id)
@test.attr(type=['negative', 'gate'])
@test.services('network')
def test_dissociate_nonexistent_floating_ip(self):
# Negative test:Dissociation of a non existent floating IP should fail
# Dissociating non existent floating IP
self.assertRaises(exceptions.NotFound,
self.client.disassociate_floating_ip_from_server,
"0.0.0.0", self.server_id)
@test.attr(type=['negative', 'gate'])
@test.services('network')
def test_associate_ip_to_server_without_passing_floating_ip(self):
# Negative test:Association of empty floating IP to specific server
# should raise NotFound exception
self.assertRaises(exceptions.NotFound,
self.client.associate_floating_ip_to_server,
'', self.server_id)
|
goku1997/bitcoin
|
refs/heads/master
|
qa/rpc-tests/zapwallettxes.py
|
25
|
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class ZapWalletTXesTest (BitcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 3)
def setup_network(self, split=False):
self.nodes = start_nodes(3, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.is_network_split=False
self.sync_all()
def run_test (self):
print "Mining blocks..."
self.nodes[0].generate(1)
self.sync_all()
self.nodes[1].generate(101)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), 50)
txid0 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
txid1 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
txid2 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
txid3 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
tx0 = self.nodes[0].gettransaction(txid0)
assert_equal(tx0['txid'], txid0) #tx0 must be available (confirmed)
tx1 = self.nodes[0].gettransaction(txid1)
assert_equal(tx1['txid'], txid1) #tx1 must be available (confirmed)
tx2 = self.nodes[0].gettransaction(txid2)
assert_equal(tx2['txid'], txid2) #tx2 must be available (unconfirmed)
tx3 = self.nodes[0].gettransaction(txid3)
assert_equal(tx3['txid'], txid3) #tx3 must be available (unconfirmed)
#restart bitcoind
self.nodes[0].stop()
bitcoind_processes[0].wait()
self.nodes[0] = start_node(0,self.options.tmpdir)
tx3 = self.nodes[0].gettransaction(txid3)
        assert_equal(tx3['txid'], txid3) #tx3 must still be available (unconfirmed)
self.nodes[0].stop()
bitcoind_processes[0].wait()
#restart bitcoind with zapwallettxes
self.nodes[0] = start_node(0,self.options.tmpdir, ["-zapwallettxes=1"])
aException = False
try:
tx3 = self.nodes[0].gettransaction(txid3)
except JSONRPCException,e:
print e
aException = True
        assert_equal(aException, True) #there must be an exception because the unconfirmed wallet tx3 must be gone by now
tx0 = self.nodes[0].gettransaction(txid0)
assert_equal(tx0['txid'], txid0) #tx0 (confirmed) must still be available because it was confirmed
if __name__ == '__main__':
ZapWalletTXesTest ().main ()
|
codingforentrepreneurs/try-django-19
|
refs/heads/master
|
src/trydjango19/wsgi.py
|
4
|
"""
WSGI config for trydjango19 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "trydjango19.settings")
application = get_wsgi_application()
|
youdonghai/intellij-community
|
refs/heads/master
|
python/testData/intentions/PyConvertToFStringIntentionTest/percentOperatorDynamicWidth.py
|
31
|
"%*s" % (5, 'spam')
|
fumen/gae-fumen
|
refs/heads/master
|
lib/requests/auth.py
|
68
|
# -*- coding: utf-8 -*-
"""
requests.auth
~~~~~~~~~~~~~
This module contains the authentication handlers for Requests.
"""
import os
import re
import time
import hashlib
import threading
import warnings
from base64 import b64encode
from .compat import urlparse, str, basestring
from .cookies import extract_cookies_to_jar
from ._internal_utils import to_native_string
from .utils import parse_dict_header
from .status_codes import codes
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
def _basic_auth_str(username, password):
"""Returns a Basic Auth string."""
# "I want us to put a big-ol' comment on top of it that
# says that this behaviour is dumb but we need to preserve
# it because people are relying on it."
# - Lukasa
#
# These are here solely to maintain backwards compatibility
# for things like ints. This will be removed in 3.0.0.
if not isinstance(username, basestring):
warnings.warn(
"Non-string usernames will no longer be supported in Requests "
"3.0.0. Please convert the object you've passed in ({0!r}) to "
"a string or bytes object in the near future to avoid "
"problems.".format(username),
category=DeprecationWarning,
)
username = str(username)
if not isinstance(password, basestring):
warnings.warn(
"Non-string passwords will no longer be supported in Requests "
"3.0.0. Please convert the object you've passed in ({0!r}) to "
"a string or bytes object in the near future to avoid "
"problems.".format(password),
category=DeprecationWarning,
)
password = str(password)
# -- End Removal --
if isinstance(username, str):
username = username.encode('latin1')
if isinstance(password, str):
password = password.encode('latin1')
authstr = 'Basic ' + to_native_string(
b64encode(b':'.join((username, password))).strip()
)
return authstr
class AuthBase(object):
"""Base class that all auth implementations derive from"""
def __call__(self, r):
raise NotImplementedError('Auth hooks must be callable.')
class HTTPBasicAuth(AuthBase):
"""Attaches HTTP Basic Authentication to the given Request object."""
def __init__(self, username, password):
self.username = username
self.password = password
def __eq__(self, other):
return all([
self.username == getattr(other, 'username', None),
self.password == getattr(other, 'password', None)
])
def __ne__(self, other):
return not self == other
def __call__(self, r):
r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
return r
class HTTPProxyAuth(HTTPBasicAuth):
"""Attaches HTTP Proxy Authentication to a given Request object."""
def __call__(self, r):
r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
return r
class HTTPDigestAuth(AuthBase):
"""Attaches HTTP Digest Authentication to the given Request object."""
def __init__(self, username, password):
self.username = username
self.password = password
# Keep state in per-thread local storage
self._thread_local = threading.local()
def init_per_thread_state(self):
# Ensure state is initialized just once per-thread
if not hasattr(self._thread_local, 'init'):
self._thread_local.init = True
self._thread_local.last_nonce = ''
self._thread_local.nonce_count = 0
self._thread_local.chal = {}
self._thread_local.pos = None
self._thread_local.num_401_calls = None
def build_digest_header(self, method, url):
"""
:rtype: str
"""
realm = self._thread_local.chal['realm']
nonce = self._thread_local.chal['nonce']
qop = self._thread_local.chal.get('qop')
algorithm = self._thread_local.chal.get('algorithm')
opaque = self._thread_local.chal.get('opaque')
hash_utf8 = None
if algorithm is None:
_algorithm = 'MD5'
else:
_algorithm = algorithm.upper()
# lambdas assume digest modules are imported at the top level
if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
def md5_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.md5(x).hexdigest()
hash_utf8 = md5_utf8
elif _algorithm == 'SHA':
def sha_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.sha1(x).hexdigest()
hash_utf8 = sha_utf8
KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
if hash_utf8 is None:
return None
# XXX not implemented yet
entdig = None
p_parsed = urlparse(url)
#: path is request-uri defined in RFC 2616 which should not be empty
path = p_parsed.path or "/"
if p_parsed.query:
path += '?' + p_parsed.query
A1 = '%s:%s:%s' % (self.username, realm, self.password)
A2 = '%s:%s' % (method, path)
HA1 = hash_utf8(A1)
HA2 = hash_utf8(A2)
if nonce == self._thread_local.last_nonce:
self._thread_local.nonce_count += 1
else:
self._thread_local.nonce_count = 1
ncvalue = '%08x' % self._thread_local.nonce_count
s = str(self._thread_local.nonce_count).encode('utf-8')
s += nonce.encode('utf-8')
s += time.ctime().encode('utf-8')
s += os.urandom(8)
cnonce = (hashlib.sha1(s).hexdigest()[:16])
if _algorithm == 'MD5-SESS':
HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
if not qop:
respdig = KD(HA1, "%s:%s" % (nonce, HA2))
elif qop == 'auth' or 'auth' in qop.split(','):
noncebit = "%s:%s:%s:%s:%s" % (
nonce, ncvalue, cnonce, 'auth', HA2
)
respdig = KD(HA1, noncebit)
else:
# XXX handle auth-int.
return None
self._thread_local.last_nonce = nonce
# XXX should the partial digests be encoded too?
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
'response="%s"' % (self.username, realm, nonce, path, respdig)
if opaque:
base += ', opaque="%s"' % opaque
if algorithm:
base += ', algorithm="%s"' % algorithm
if entdig:
base += ', digest="%s"' % entdig
if qop:
base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
return 'Digest %s' % (base)
def handle_redirect(self, r, **kwargs):
"""Reset num_401_calls counter on redirects."""
if r.is_redirect:
self._thread_local.num_401_calls = 1
def handle_401(self, r, **kwargs):
"""
Takes the given response and tries digest-auth, if needed.
:rtype: requests.Response
"""
if self._thread_local.pos is not None:
# Rewind the file position indicator of the body to where
# it was to resend the request.
r.request.body.seek(self._thread_local.pos)
s_auth = r.headers.get('www-authenticate', '')
if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
self._thread_local.num_401_calls += 1
pat = re.compile(r'digest ', flags=re.IGNORECASE)
self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
# Consume content and release the original connection
# to allow our new request to reuse the same one.
r.content
r.close()
prep = r.request.copy()
extract_cookies_to_jar(prep._cookies, r.request, r.raw)
prep.prepare_cookies(prep._cookies)
prep.headers['Authorization'] = self.build_digest_header(
prep.method, prep.url)
_r = r.connection.send(prep, **kwargs)
_r.history.append(r)
_r.request = prep
return _r
self._thread_local.num_401_calls = 1
return r
def __call__(self, r):
# Initialize per-thread state, if needed
self.init_per_thread_state()
# If we have a saved nonce, skip the 401
if self._thread_local.last_nonce:
r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
try:
self._thread_local.pos = r.body.tell()
except AttributeError:
# In the case of HTTPDigestAuth being reused and the body of
# the previous request was a file-like object, pos has the
# file position of the previous body. Ensure it's set to
# None.
self._thread_local.pos = None
r.register_hook('response', self.handle_401)
r.register_hook('response', self.handle_redirect)
self._thread_local.num_401_calls = 1
return r
def __eq__(self, other):
return all([
self.username == getattr(other, 'username', None),
self.password == getattr(other, 'password', None)
])
def __ne__(self, other):
return not self == other
|
markbrough/maedi-projects
|
refs/heads/master
|
maediprojects/views/users.py
|
1
|
from flask import Flask, render_template, flash, request, Markup, \
session, redirect, url_for, escape, Response, abort, send_file, \
current_app
from flask.ext.login import (LoginManager, current_user, login_required,
login_user, logout_user, UserMixin,
confirm_login,
fresh_login_required)
from flask.ext.babel import gettext
from maediprojects import app, db, models
from maediprojects.query import user as quser
from maediprojects.lib import codelists
login_manager = LoginManager()
login_manager.setup_app(app)
login_manager.login_view = "login"
login_manager.login_message = gettext(u"Please log in to access this page.")
login_manager.login_message_category = "danger"
@login_manager.user_loader
def load_user(id):
return quser.user(id)
@app.route("/users/")
@login_required
def users():
users = quser.user()
return render_template("users.html",
users = users,
loggedinuser=current_user)
@app.route("/users/new/", methods=["GET", "POST"])
@login_required
def users_new():
if request.method=="GET":
user = {}
return render_template("user.html",
user = user,
loggedinuser=current_user,
codelists = codelists.get_codelists())
elif request.method == "POST":
if quser.addUser(request.form):
flash(gettext(u"Successfully created user!"), "success")
else:
flash(gettext(u"Sorry, couldn't create that user!"), "danger")
return redirect(url_for("users"))
@app.route("/users/<user_id>/", methods=["GET", "POST"])
@login_required
def users_edit(user_id):
if request.method=="GET":
user = quser.user(user_id)
return render_template("user.html",
user = user,
loggedinuser=current_user,
codelists = codelists.get_codelists())
elif request.method == "POST":
data = request.form.to_dict()
data["id"] = user_id
if quser.updateUser(data):
flash(gettext(u"Successfully updated user!"), "success")
else:
flash(gettext(u"Sorry, couldn't update that user!"), "danger")
return redirect(url_for("users"))
@app.route("/login/", methods=["GET", "POST"])
def login():
if request.method == "POST" and "username" in request.form:
user = quser.user_by_username(request.form["username"])
if (user and user.check_password(request.form["password"])):
if login_user(user):
flash(gettext(u"Logged in!"), "success")
if request.args.get("next"):
redir_url = request.script_root + request.args.get("next")
else:
redir_url = url_for("dashboard")
return redirect(redir_url)
else:
flash(gettext(u"Sorry, but you could not log in."), "danger")
else:
flash(gettext(u"Invalid username or password."), "danger")
return render_template("login.html",
loggedinuser=current_user)
@app.route('/logout/')
@login_required
def logout():
logout_user()
flash(gettext(u'Logged out'), 'success')
redir_url = url_for("login")
return redirect(redir_url)
|
mrrrgn/olympia
|
refs/heads/master
|
apps/users/helpers.py
|
14
|
import random
from django.utils.encoding import smart_unicode
import jinja2
from jingo import register, env
from tower import ugettext as _
import amo
@register.function
def emaillink(email, title=None, klass=None):
if not email:
return ""
fallback = email[::-1] # reverse
# inject junk somewhere
i = random.randint(0, len(email) - 1)
fallback = u"%s%s%s" % (jinja2.escape(fallback[:i]),
u'<span class="i">null</span>',
jinja2.escape(fallback[i:]))
# replace @ and .
    fallback = fallback.replace('@', '&#x0040;').replace('.', '&#x002E;')
if title:
title = jinja2.escape(title)
else:
title = '<span class="emaillink">%s</span>' % fallback
node = (u'<a%s href="#">%s</a><span class="emaillink js-hidden">%s</span>'
% ((' class="%s"' % klass) if klass else '', title, fallback))
return jinja2.Markup(node)
@register.filter
def user_link(user):
if not user:
return ''
return jinja2.Markup(_user_link(user))
@register.function
def users_list(users, size=None, max_text_length=None):
if not users:
return ''
tail = []
if size and size < len(users):
users = users[:size]
tail = [_('others', 'user_list_others')]
if max_text_length:
user_list = [_user_link(user, max_text_length) for user in users]
else:
user_list = map(_user_link, users)
return jinja2.Markup(', '.join(user_list + tail))
@register.inclusion_tag('users/helpers/addon_users_list.html')
@jinja2.contextfunction
def addon_users_list(context, addon):
ctx = dict(context.items())
ctx.update(addon=addon, amo=amo)
return ctx
def _user_link(user, max_text_length=None):
if isinstance(user, basestring):
return user
username = user.name
if max_text_length and len(user.name) > max_text_length:
username = user.name[:max_text_length].strip() + '...'
return u'<a href="%s" title="%s">%s</a>' % (
user.get_url_path(), jinja2.escape(user.name),
jinja2.escape(smart_unicode(username)))
@register.filter
@jinja2.contextfilter
def user_vcard(context, user, table_class='person-info', is_profile=False):
c = dict(context.items())
c.update({
'profile': user,
'table_class': table_class,
'is_profile': is_profile
})
t = env.get_template('users/vcard.html').render(c)
return jinja2.Markup(t)
@register.inclusion_tag('users/report_abuse.html')
@jinja2.contextfunction
def user_report_abuse(context, hide, profile):
new = dict(context.items())
new.update({'hide': hide, 'profile': profile,
'abuse_form': context['abuse_form']})
return new
@register.filter
def contribution_type(type):
return amo.CONTRIB_TYPES[type]
@register.function
def user_data(amo_user):
anonymous, currency, email = True, 'USD', ''
if hasattr(amo_user, 'is_anonymous'):
anonymous = amo_user.is_anonymous()
if not anonymous:
email = amo_user.email
return {'anonymous': anonymous, 'currency': currency, 'email': email}
|
pitah81/android_kernel_elephone_p8000
|
refs/heads/master
|
tools/perf/scripts/python/failed-syscalls-by-pid.py
|
11180
|
# failed system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, ret):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
if ret < 0:
try:
syscalls[common_comm][common_pid][id][ret] += 1
except TypeError:
syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
if for_comm is not None:
print "\nsyscall errors for %s:\n\n" % (for_comm),
else:
print "\nsyscall errors:\n\n",
print "%-30s %10s\n" % ("comm [pid]", "count"),
print "%-30s %10s\n" % ("------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id in id_keys:
print " syscall: %-16s\n" % syscall_name(id),
ret_keys = syscalls[comm][pid][id].keys()
for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
print " err = %-20s %10d\n" % (strerror(ret), val),
|
mhbu50/erpnext
|
refs/heads/develop
|
erpnext/setup/utils.py
|
3
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, add_days
from frappe.utils import get_datetime_str, nowdate
from erpnext import get_default_company
def get_root_of(doctype):
"""Get root element of a DocType with a tree structure"""
result = frappe.db.sql_list("""select name from `tab%s`
where lft=1 and rgt=(select max(rgt) from `tab%s` where docstatus < 2)""" %
(doctype, doctype))
return result[0] if result else None
def get_ancestors_of(doctype, name):
"""Get ancestor elements of a DocType with a tree structure"""
lft, rgt = frappe.db.get_value(doctype, name, ["lft", "rgt"])
result = frappe.db.sql_list("""select name from `tab%s`
where lft<%s and rgt>%s order by lft desc""" % (doctype, "%s", "%s"), (lft, rgt))
return result or []
def before_tests():
frappe.clear_cache()
# complete setup if missing
from frappe.desk.page.setup_wizard.setup_wizard import setup_complete
if not frappe.get_list("Company"):
setup_complete({
"currency" :"USD",
"full_name" :"Test User",
"company_name" :"Wind Power LLC",
"timezone" :"America/New_York",
"company_abbr" :"WP",
"industry" :"Manufacturing",
"country" :"United States",
"fy_start_date" :"2011-01-01",
"fy_end_date" :"2011-12-31",
"language" :"english",
"company_tagline" :"Testing",
"email" :"test@erpnext.com",
"password" :"test",
"chart_of_accounts" : "Standard",
"domains" : ["Manufacturing"],
})
frappe.db.sql("delete from `tabLeave Allocation`")
frappe.db.sql("delete from `tabLeave Application`")
frappe.db.sql("delete from `tabSalary Slip`")
frappe.db.sql("delete from `tabItem Price`")
frappe.db.set_value("Stock Settings", None, "auto_insert_price_list_rate_if_missing", 0)
enable_all_roles_and_domains()
frappe.db.commit()
@frappe.whitelist()
def get_exchange_rate(from_currency, to_currency, transaction_date=None, args=None):
if not (from_currency and to_currency):
	# manqala 19/09/2016: Should this be an empty return or should it throw an exception?
return
if from_currency == to_currency:
return 1
if not transaction_date:
transaction_date = nowdate()
currency_settings = frappe.get_doc("Accounts Settings").as_dict()
allow_stale_rates = currency_settings.get("allow_stale")
filters = [
["date", "<=", get_datetime_str(transaction_date)],
["from_currency", "=", from_currency],
["to_currency", "=", to_currency]
]
if args == "for_buying":
filters.append(["for_buying", "=", "1"])
elif args == "for_selling":
filters.append(["for_selling", "=", "1"])
if not allow_stale_rates:
stale_days = currency_settings.get("stale_days")
checkpoint_date = add_days(transaction_date, -stale_days)
filters.append(["date", ">", get_datetime_str(checkpoint_date)])
# cksgb 19/09/2016: get last entry in Currency Exchange with from_currency and to_currency.
entries = frappe.get_all(
"Currency Exchange", fields=["exchange_rate"], filters=filters, order_by="date desc",
limit=1)
if entries:
return flt(entries[0].exchange_rate)
try:
cache = frappe.cache()
key = "currency_exchange_rate:{0}:{1}".format(from_currency, to_currency)
value = cache.get(key)
if not value:
import requests
api_url = "https://frankfurter.app/{0}".format(transaction_date)
response = requests.get(api_url, params={
"base": from_currency,
"symbols": to_currency
})
# expire in 6 hours
response.raise_for_status()
value = response.json()["rates"][to_currency]
cache.setex(key, value, 6 * 60 * 60)
return flt(value)
except:
frappe.msgprint(_("Unable to find exchange rate for {0} to {1} for key date {2}. Please create a Currency Exchange record manually").format(from_currency, to_currency, transaction_date))
return 0.0
def enable_all_roles_and_domains():
""" enable all roles and domain for testing """
# add all roles to users
domains = frappe.get_all("Domain")
if not domains:
return
from frappe.desk.page.setup_wizard.setup_wizard import add_all_roles_to
frappe.get_single('Domain Settings').set_active_domains(\
[d.name for d in domains])
add_all_roles_to('Administrator')
def insert_record(records):
for r in records:
doc = frappe.new_doc(r.get("doctype"))
doc.update(r)
try:
doc.insert(ignore_permissions=True)
except frappe.DuplicateEntryError as e:
# pass DuplicateEntryError and continue
if e.args and e.args[0]==doc.doctype and e.args[1]==doc.name:
# make sure DuplicateEntryError is for the exact same doc and not a related doc
pass
else:
raise
def welcome_email():
site_name = get_default_company()
title = _("Welcome to {0}".format(site_name))
return title
|
ekzhu/datasketch
|
refs/heads/master
|
examples/hyperloglog_examples.py
|
3
|
'''
Some examples for HyperLogLog
'''
from datasketch.hyperloglog import HyperLogLog
data1 = ['hyperloglog', 'is', 'a', 'probabilistic', 'data', 'structure', 'for',
'estimating', 'the', 'cardinality', 'of', 'dataset', 'dataset', 'a']
data2 = ['hyperloglog', 'is', 'a', 'probabilistic', 'DATA', 'structure', 'for',
'estimating', 'the', 'number', 'of', 'distinct', 'values', 'of',
'dataset', 'dataset', 'a']
def eg1():
h = HyperLogLog()
for d in data1:
h.update(d.encode('utf8'))
print("Estimated cardinality is", h.count())
s1 = set(data1)
print("Actual cardinality is", len(s1))
def eg2():
h1 = HyperLogLog()
h2 = HyperLogLog()
for d in data1:
h1.update(d.encode('utf8'))
for d in data2:
h2.update(d.encode('utf8'))
u = HyperLogLog.union(h1, h2)
print("Estimated union cardinality is", u.count())
s1 = set(data1)
s2 = set(data2)
su = s1.union(s2)
print("Actual union cardinality is", len(su))
if __name__ == "__main__":
eg1()
eg2()
|
gt945/CodeIgniter
|
refs/heads/3.0-stable
|
user_guide_src/source/conf.py
|
1
|
# -*- coding: utf-8 -*-
#
# CodeIgniter documentation build configuration file, created by
# sphinx-quickstart on Sun Aug 28 07:24:38 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.ifconfig', 'sphinxcontrib.phpdomain']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'CodeIgniter'
copyright = u'2014 - 2016, British Columbia Institute of Technology'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '3.1.0'
# The full version, including alpha/beta/rc tags.
release = '3.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :php:func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. php:function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
highlight_language = 'ci'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# Specifying a few options; just a starting point & we can play with it.
html_theme_options = {
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["./_themes"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'images/ci-icon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'CodeIgniterdoc'
html_copy_source = False
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'CodeIgniter.tex', u'CodeIgniter Documentation',
u'British Columbia Institute of Technology', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'codeigniter', u'CodeIgniter Documentation',
[u'British Columbia Institute of Technology'], 1)
]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'CodeIgniter'
epub_author = u'British Columbia Institute of Technology'
epub_publisher = u'British Columbia Institute of Technology'
epub_copyright = u'2014 - 2016, British Columbia Institute of Technology'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
|
brijeshkesariya/odoo
|
refs/heads/8.0
|
addons/account/project/project.py
|
273
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_analytic_journal(osv.osv):
_name = 'account.analytic.journal'
_description = 'Analytic Journal'
_columns = {
'name': fields.char('Journal Name', required=True),
'code': fields.char('Journal Code', size=8),
'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the analytic journal without removing it."),
        'type': fields.selection([('sale','Sale'), ('purchase','Purchase'), ('cash','Cash'), ('general','General'), ('situation','Situation')], 'Type', required=True, help="Gives the type of the analytic journal. When a document (e.g. an invoice) needs to create analytic entries, Odoo will look for a matching journal of the same type."),
'line_ids': fields.one2many('account.analytic.line', 'journal_id', 'Lines', copy=False),
'company_id': fields.many2one('res.company', 'Company', required=True),
}
_defaults = {
'active': True,
'type': 'general',
'company_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id,
}
class account_journal(osv.osv):
_inherit="account.journal"
_columns = {
'analytic_journal_id':fields.many2one('account.analytic.journal','Analytic Journal', help="Journal for analytic entries"),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
tswicegood/ns.py
|
refs/heads/master
|
tests.py
|
1
|
import os
import sys
sys.path.insert(0, os.path.dirname(__file__))
sys.path[0:0] = [os.path.join(os.path.dirname(__file__), "support", a) \
for a in ["a", "b", "y", "z"]]
import unittest
class BasicTestCase(unittest.TestCase):
def test_bar_is_imported_from_b(self):
from foo import bar
self.assertEqual("a", bar.__file__.split("/")[-3])
def test_baz_is_imported_from_b(self):
from foo import baz
self.assertEqual("b", baz.__file__.split("/")[-3])
class NestedTestCase(unittest.TestCase):
def test_baz_is_imported_from_y(self):
from xyz.biz import baz
self.assertEqual("y", baz.__file__.split("/")[-4])
def test_biz_is_imported_from_z(self):
from xyz.biz import biz
self.assertEqual("z", biz.__file__.split("/")[-4])
if __name__ == "__main__":
unittest.main()
|
terrelln/python-zstandard
|
refs/heads/master
|
tests/test_module_attributes.py
|
1
|
from __future__ import unicode_literals
import unittest
import zstandard as zstd
from . common import (
make_cffi,
)
@make_cffi
class TestModuleAttributes(unittest.TestCase):
def test_version(self):
self.assertEqual(zstd.ZSTD_VERSION, (1, 2, 0))
def test_constants(self):
self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
def test_hasattr(self):
attrs = (
'COMPRESSION_RECOMMENDED_INPUT_SIZE',
'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
'MAGIC_NUMBER',
'WINDOWLOG_MIN',
'WINDOWLOG_MAX',
'CHAINLOG_MIN',
'CHAINLOG_MAX',
'HASHLOG_MIN',
'HASHLOG_MAX',
'HASHLOG3_MAX',
'SEARCHLOG_MIN',
'SEARCHLOG_MAX',
'SEARCHLENGTH_MIN',
'SEARCHLENGTH_MAX',
'TARGETLENGTH_MIN',
'TARGETLENGTH_MAX',
'STRATEGY_FAST',
'STRATEGY_DFAST',
'STRATEGY_GREEDY',
'STRATEGY_LAZY',
'STRATEGY_LAZY2',
'STRATEGY_BTLAZY2',
'STRATEGY_BTOPT',
)
for a in attrs:
self.assertTrue(hasattr(zstd, a), a)
|
rdjdejong/LerenEnBeslissen-2017
|
refs/heads/master
|
main.py
|
1
|
open('__init__.py', 'a')
import os
from getAuthors import makeSeperateDir, seperatePortraits, makeAuthorDirFromFile
from readtsv import addAuthorToImage
from makeDir import createAuthorFolders
from affine import makeAffine
import csv
from skimage import io
import cv2
# import demos.classifier as dcl
if __name__ == '__main__':
print "Welcome to the book cover parser \nChoose one of the following options \n1: Seperate the portrets \n2: Sort authors into files with their name"
print "3: Do an afine transformation and place in folders\n4: Generate embeddings for neural net"
print "5: Train neural net\n6: Test neural net"
choose = raw_input()
if choose == "1":
makeSeperateDir()
seperatePortraits()
elif choose == "2":
addAuthorToImage()
makeAuthorDirFromFile()
createAuthorFolders()
elif choose == "3":
if not os.path.exists("./aligned-images/"):
os.makedirs("./aligned-images/")
with open('onlyAuthor.csv','rb') as csvfile:
read = csv.reader(csvfile, delimiter=',')
for row in read:
img = io.imread(row[1])
afineImg = makeAffine(img)
authorDir = "./aligned-images/" + row[0]
if not os.path.exists(authorDir):
os.makedirs(authorDir)
dirImg = row[1].split("/")
nameImg = authorDir + "/aligned-" + dirImg[-1]
image_to_write = cv2.cvtColor(afineImg, cv2.COLOR_RGB2BGR)
print nameImg
cv2.imwrite(nameImg, image_to_write)
elif choose == "4":
if not os.path.exists('./generated-embeddings'):
os.mkdir('generated-embeddings')
os.system('./batch-represent/main.lua' +
' -outDir ./generated-embeddings/' +
' -data ./aligned-images/ ')
elif choose == '5':
os.system('./demos/classifier.py train ./generated-embeddings/')
elif choose == '6':
print "what is the name of the folder containting the test cases?"
directory = raw_input()
if os.path.exists('./{}/'.format(directory)):
image_paths = \
[os.path.join(directory, f) for f in os.listdir('./{}/'\
.format(directory))]
for photo in image_paths:
os.system("./demos/classifier.py infer ./generated-embeddings/classifier.pkl " + photo)
# dcl.infer({'classifierModel':'./generated-embeddings/classifier.pkl', 'imgs':photo})
else:
print "Directory does not exist"
|
fumitoh/modelx
|
refs/heads/master
|
modelx/tests/core/api/test_traceback.py
|
1
|
import modelx as mx
from modelx.core.errors import (
FormulaError,
DeepReferenceError,
NoneReturnedError)
import pytest
from textwrap import dedent
@pytest.fixture(scope="module")
def errormodel():
m = mx.new_model("ErrorModel")
s = m.new_space("ErrorSpace")
@mx.defcells
def foo(x):
if x > 0:
return foo(x-1) + 1
else:
raise ValueError
@mx.defcells
def bar(x):
if x > 0:
return bar(x-1)
else:
return None
@mx.defcells
def infinite(x):
return infinite(x-1)
@mx.defcells
def listcomp(t):
if t > 0:
return sum([listcomp(t - i) for i in range(1, 2)])
else:
raise ValueError()
s.new_cells("lam", formula=lambda x: qux(x-1) if x > 0 else 1/0)
@mx.defcells
def qux(x):
return lam(x)
@mx.defcells
def quux(t):
def my_sum(*args):
return sum(args)
return my_sum('a')
return m
def test_value_error(errormodel):
cells = errormodel.ErrorSpace.foo
with pytest.raises(FormulaError) as errinfo:
cells(1)
errmsg = dedent("""\
Error raised during formula execution
ValueError
Formula traceback:
0: ErrorModel.ErrorSpace.foo(x=1), line 3
1: ErrorModel.ErrorSpace.foo(x=0), line 5
Formula source:
def foo(x):
if x > 0:
return foo(x-1) + 1
else:
raise ValueError
""")
assert errinfo.value.args[0] == errmsg
assert isinstance(mx.get_error(), ValueError)
assert mx.get_traceback() == [(cells.node(1), 3),
(cells.node(0), 5)]
def test_none_returned_error(errormodel):
cells = errormodel.ErrorSpace.bar
with pytest.raises(FormulaError) as errinfo:
cells(1)
errmsg = dedent("""\
Error raised during formula execution
modelx.core.errors.NoneReturnedError: ErrorModel.ErrorSpace.bar(x=0)
Formula traceback:
0: ErrorModel.ErrorSpace.bar(x=1), line 3
1: ErrorModel.ErrorSpace.bar(x=0)
Formula source:
def bar(x):
if x > 0:
return bar(x-1)
else:
return None
""")
assert errinfo.value.args[0] == errmsg
assert isinstance(mx.get_error(), NoneReturnedError)
assert mx.get_traceback() == [(cells.node(1), 3),
(cells.node(0), 0)]
def test_deep_reference_error(errormodel):
cells = errormodel.ErrorSpace.infinite
saved = mx.get_recursion()
try:
mx.set_recursion(3)
with pytest.raises(FormulaError) as errinfo:
cells(3)
finally:
mx.set_recursion(saved)
errmsg = dedent("""\
Error raised during formula execution
modelx.core.errors.DeepReferenceError: Formula chain exceeded the 3 limit
Formula traceback:
0: ErrorModel.ErrorSpace.infinite(x=3), line 2
1: ErrorModel.ErrorSpace.infinite(x=2), line 2
2: ErrorModel.ErrorSpace.infinite(x=1), line 2
3: ErrorModel.ErrorSpace.infinite(x=0), line 2
Formula source:
def infinite(x):
return infinite(x-1)
""")
assert errinfo.value.args[0] == errmsg
assert isinstance(mx.get_error(), DeepReferenceError)
assert mx.get_traceback() == [(cells.node(3), 2),
(cells.node(2), 2),
(cells.node(1), 2),
(cells.node(0), 2)]
def test_listcomp_error(errormodel):
# https://github.com/fumitoh/modelx/issues/31
cells = errormodel.ErrorSpace.listcomp
with pytest.raises(FormulaError) as errinfo:
cells(1)
errmsg = dedent("""\
Error raised during formula execution
ValueError
Formula traceback:
0: ErrorModel.ErrorSpace.listcomp(t=1), line 3
1: ErrorModel.ErrorSpace.listcomp(t=0), line 5
Formula source:
def listcomp(t):
if t > 0:
return sum([listcomp(t - i) for i in range(1, 2)])
else:
raise ValueError()
""")
assert errinfo.value.args[0] == errmsg
assert isinstance(mx.get_error(), ValueError)
assert mx.get_traceback() == [(cells.node(1), 3),
(cells.node(0), 5)]
def test_lambda_error(errormodel):
cells = errormodel.ErrorSpace.lam
qux = errormodel.ErrorSpace.qux
with pytest.raises(FormulaError) as errinfo:
cells(1)
errmsg = dedent("""\
Error raised during formula execution
ZeroDivisionError: division by zero
Formula traceback:
0: ErrorModel.ErrorSpace.lam(x=1), line 1
1: ErrorModel.ErrorSpace.qux(x=0), line 2
2: ErrorModel.ErrorSpace.lam(x=0), line 1
Formula source:
lambda x: qux(x-1) if x > 0 else 1/0""")
assert errinfo.value.args[0] == errmsg
assert isinstance(mx.get_error(), ZeroDivisionError)
assert mx.get_traceback() == [(cells.node(1), 1),
(qux.node(0), 2),
(cells.node(0), 1)]
def test_nested_def_error(errormodel):
cells = errormodel.ErrorSpace.quux
with pytest.raises(FormulaError) as errinfo:
cells(1)
errmsg = dedent("""\
Error raised during formula execution
TypeError: unsupported operand type(s) for +: 'int' and 'str'
Formula traceback:
0: ErrorModel.ErrorSpace.quux(t=1), line 4
Formula source:
def quux(t):
def my_sum(*args):
return sum(args)
return my_sum('a')
""")
assert errinfo.value.args[0] == errmsg
assert isinstance(mx.get_error(), TypeError)
assert mx.get_traceback() == [(cells.node(1), 4)]
|
vgan/soiqbot
|
refs/heads/master
|
oauthlib/common.py
|
16
|
# -*- coding: utf-8 -*-
"""
oauthlib.common
~~~~~~~~~~~~~~~
This module provides data structures and utilities common
to all implementations of OAuth.
"""
from __future__ import absolute_import, unicode_literals
import collections
import datetime
import logging
import random
import re
import sys
import time
try:
from urllib import quote as _quote
from urllib import unquote as _unquote
from urllib import urlencode as _urlencode
except ImportError:
from urllib.parse import quote as _quote
from urllib.parse import unquote as _unquote
from urllib.parse import urlencode as _urlencode
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
UNICODE_ASCII_CHARACTER_SET = ('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789')
CLIENT_ID_CHARACTER_SET = (r' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMN'
'OPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}')
PASSWORD_PATTERN = re.compile(r'password=[^&]+')
INVALID_HEX_PATTERN = re.compile(r'%[^0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]')
always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'abcdefghijklmnopqrstuvwxyz'
'0123456789' '_.-')
log = logging.getLogger('oauthlib')
PY3 = sys.version_info[0] == 3
if PY3:
unicode_type = str
bytes_type = bytes
else:
unicode_type = unicode
bytes_type = str
# 'safe' must be bytes (Python 2.6 requires bytes, other versions allow either)
def quote(s, safe=b'/'):
s = s.encode('utf-8') if isinstance(s, unicode_type) else s
s = _quote(s, safe)
# PY3 always returns unicode. PY2 may return either, depending on whether
# it had to modify the string.
if isinstance(s, bytes_type):
s = s.decode('utf-8')
return s
def unquote(s):
s = _unquote(s)
# PY3 always returns unicode. PY2 seems to always return what you give it,
# which differs from quote's behavior. Just to be safe, make sure it is
# unicode before we return.
if isinstance(s, bytes_type):
s = s.decode('utf-8')
return s
def urlencode(params):
utf8_params = encode_params_utf8(params)
urlencoded = _urlencode(utf8_params)
if isinstance(urlencoded, unicode_type): # PY3 returns unicode
return urlencoded
else:
return urlencoded.decode("utf-8")
def encode_params_utf8(params):
"""Ensures that all parameters in a list of 2-element tuples are encoded to
    bytestrings using UTF-8.
"""
encoded = []
for k, v in params:
encoded.append((
k.encode('utf-8') if isinstance(k, unicode_type) else k,
v.encode('utf-8') if isinstance(v, unicode_type) else v))
return encoded
def decode_params_utf8(params):
"""Ensures that all parameters in a list of 2-element tuples are decoded to
unicode using UTF-8.
"""
decoded = []
for k, v in params:
decoded.append((
k.decode('utf-8') if isinstance(k, bytes_type) else k,
v.decode('utf-8') if isinstance(v, bytes_type) else v))
return decoded
urlencoded = set(always_safe) | set('=&;%+~,*@!')
def urldecode(query):
"""Decode a query string in x-www-form-urlencoded format into a sequence
of two-element tuples.
    Unlike urlparse.parse_qsl(..., strict_parsing=True), urldecode will enforce
    correct formatting of the query string by validating it. If validation
    fails, a ValueError will be raised. urllib.parse_qsl will only raise errors
    if any of the name-value pairs omits the equals sign.
"""
# Check if query contains invalid characters
if query and not set(query) <= urlencoded:
error = ("Error trying to decode a non urlencoded string. "
"Found invalid characters: %s "
"in the string: '%s'. "
"Please ensure the request/response body is "
"x-www-form-urlencoded.")
raise ValueError(error % (set(query) - urlencoded, query))
# Check for correctly hex encoded values using a regular expression
# All encoded values begin with % followed by two hex characters
# correct = %00, %A0, %0A, %FF
# invalid = %G0, %5H, %PO
if INVALID_HEX_PATTERN.search(query):
raise ValueError('Invalid hex encoding in query string.')
# We encode to utf-8 prior to parsing because parse_qsl behaves
# differently on unicode input in python 2 and 3.
# Python 2.7
# >>> urlparse.parse_qsl(u'%E5%95%A6%E5%95%A6')
# u'\xe5\x95\xa6\xe5\x95\xa6'
# Python 2.7, non unicode input gives the same
# >>> urlparse.parse_qsl('%E5%95%A6%E5%95%A6')
# '\xe5\x95\xa6\xe5\x95\xa6'
# but now we can decode it to unicode
# >>> urlparse.parse_qsl('%E5%95%A6%E5%95%A6').decode('utf-8')
# u'\u5566\u5566'
# Python 3.3 however
# >>> urllib.parse.parse_qsl(u'%E5%95%A6%E5%95%A6')
# u'\u5566\u5566'
query = query.encode(
'utf-8') if not PY3 and isinstance(query, unicode_type) else query
# We want to allow queries such as "c2" whereas urlparse.parse_qsl
# with the strict_parsing flag will not.
params = urlparse.parse_qsl(query, keep_blank_values=True)
# unicode all the things
return decode_params_utf8(params)
def extract_params(raw):
"""Extract parameters and return them as a list of 2-tuples.
Will successfully extract parameters from urlencoded query strings,
dicts, or lists of 2-tuples. Empty strings/dicts/lists will return an
empty list of parameters. Any other input will result in a return
value of None.
"""
if isinstance(raw, bytes_type) or isinstance(raw, unicode_type):
try:
params = urldecode(raw)
except ValueError:
params = None
elif hasattr(raw, '__iter__'):
try:
dict(raw)
except ValueError:
params = None
except TypeError:
params = None
else:
params = list(raw.items() if isinstance(raw, dict) else raw)
params = decode_params_utf8(params)
else:
params = None
return params
def generate_nonce():
"""Generate pseudorandom nonce that is unlikely to repeat.
Per `section 3.3`_ of the OAuth 1 RFC 5849 spec.
Per `section 3.2.1`_ of the MAC Access Authentication spec.
A random 64-bit number is appended to the epoch timestamp for both
randomness and to decrease the likelihood of collisions.
.. _`section 3.2.1`: http://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1
.. _`section 3.3`: http://tools.ietf.org/html/rfc5849#section-3.3
"""
return unicode_type(unicode_type(random.getrandbits(64)) + generate_timestamp())
def generate_timestamp():
"""Get seconds since epoch (UTC).
Per `section 3.3`_ of the OAuth 1 RFC 5849 spec.
Per `section 3.2.1`_ of the MAC Access Authentication spec.
.. _`section 3.2.1`: http://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1
.. _`section 3.3`: http://tools.ietf.org/html/rfc5849#section-3.3
"""
return unicode_type(int(time.time()))
def generate_token(length=30, chars=UNICODE_ASCII_CHARACTER_SET):
"""Generates a non-guessable OAuth token
OAuth (1 and 2) does not specify the format of tokens except that they
    should be strings of random characters. Tokens should not be guessable,
    and entropy when generating the random characters is important, which is
    why SystemRandom is used instead of the default random.choice method.
"""
rand = random.SystemRandom()
return ''.join(rand.choice(chars) for x in range(length))
def generate_signed_token(private_pem, request):
import jwt
now = datetime.datetime.utcnow()
claims = {
'scope': request.scope,
'exp': now + datetime.timedelta(seconds=request.expires_in)
}
claims.update(request.claims)
token = jwt.encode(claims, private_pem, 'RS256')
token = to_unicode(token, "UTF-8")
return token
def verify_signed_token(public_pem, token):
import jwt
return jwt.decode(token, public_pem, algorithms=['RS256'])
def generate_client_id(length=30, chars=CLIENT_ID_CHARACTER_SET):
"""Generates an OAuth client_id
    OAuth 2 specifies the format of client_id in
http://tools.ietf.org/html/rfc6749#appendix-A.
"""
return generate_token(length, chars)
def add_params_to_qs(query, params):
"""Extend a query with a list of two-tuples."""
if isinstance(params, dict):
params = params.items()
queryparams = urlparse.parse_qsl(query, keep_blank_values=True)
queryparams.extend(params)
return urlencode(queryparams)
def add_params_to_uri(uri, params, fragment=False):
"""Add a list of two-tuples to the uri query components."""
sch, net, path, par, query, fra = urlparse.urlparse(uri)
if fragment:
fra = add_params_to_qs(fra, params)
else:
query = add_params_to_qs(query, params)
return urlparse.urlunparse((sch, net, path, par, query, fra))
def safe_string_equals(a, b):
""" Near-constant time string comparison.
Used in order to avoid timing attacks on sensitive information such
as secret keys during request verification (`rootLabs`_).
.. _`rootLabs`: http://rdist.root.org/2010/01/07/timing-independent-array-comparison/
"""
if len(a) != len(b):
return False
result = 0
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
def to_unicode(data, encoding='UTF-8'):
"""Convert a number of different types of objects to unicode."""
if isinstance(data, unicode_type):
return data
if isinstance(data, bytes_type):
return unicode_type(data, encoding=encoding)
if hasattr(data, '__iter__'):
try:
dict(data)
except TypeError:
pass
except ValueError:
# Assume it's a one dimensional data structure
return (to_unicode(i, encoding) for i in data)
else:
# We support 2.6 which lacks dict comprehensions
if hasattr(data, 'items'):
data = data.items()
return dict(((to_unicode(k, encoding), to_unicode(v, encoding)) for k, v in data))
return data
class CaseInsensitiveDict(dict):
"""Basic case insensitive dict with strings only keys."""
proxy = {}
def __init__(self, data):
self.proxy = dict((k.lower(), k) for k in data)
for k in data:
self[k] = data[k]
def __contains__(self, k):
return k.lower() in self.proxy
def __delitem__(self, k):
key = self.proxy[k.lower()]
super(CaseInsensitiveDict, self).__delitem__(key)
del self.proxy[k.lower()]
def __getitem__(self, k):
key = self.proxy[k.lower()]
return super(CaseInsensitiveDict, self).__getitem__(key)
def get(self, k, default=None):
return self[k] if k in self else default
def __setitem__(self, k, v):
super(CaseInsensitiveDict, self).__setitem__(k, v)
self.proxy[k.lower()] = k
class Request(object):
"""A malleable representation of a signable HTTP request.
Body argument may contain any data, but parameters will only be decoded if
they are one of:
* urlencoded query string
* dict
* list of 2-tuples
Anything else will be treated as raw body data to be passed through
unmolested.
"""
def __init__(self, uri, http_method='GET', body=None, headers=None,
encoding='utf-8'):
# Convert to unicode using encoding if given, else assume unicode
encode = lambda x: to_unicode(x, encoding) if encoding else x
self.uri = encode(uri)
self.http_method = encode(http_method)
self.headers = CaseInsensitiveDict(encode(headers or {}))
self.body = encode(body)
self.decoded_body = extract_params(self.body)
self.oauth_params = []
self.validator_log = {}
self._params = {
"access_token": None,
"client": None,
"client_id": None,
"client_secret": None,
"code": None,
"extra_credentials": None,
"grant_type": None,
"redirect_uri": None,
"refresh_token": None,
"response_type": None,
"scope": None,
"scopes": None,
"state": None,
"token": None,
"user": None,
"token_type_hint": None,
}
self._params.update(dict(urldecode(self.uri_query)))
self._params.update(dict(self.decoded_body or []))
self._params.update(self.headers)
def __getattr__(self, name):
if name in self._params:
return self._params[name]
else:
raise AttributeError(name)
def __repr__(self):
body = self.body
if body and 'password=' in body:
body = PASSWORD_PATTERN.sub('password=***', body)
return '<oauthlib.Request url="%s", http_method="%s", headers="%s", body="%s">' % (
self.uri, self.http_method, self.headers, body)
@property
def uri_query(self):
return urlparse.urlparse(self.uri).query
@property
def uri_query_params(self):
if not self.uri_query:
return []
return urlparse.parse_qsl(self.uri_query, keep_blank_values=True,
strict_parsing=True)
@property
def duplicate_params(self):
seen_keys = collections.defaultdict(int)
all_keys = (p[0]
for p in (self.decoded_body or []) + self.uri_query_params)
for k in all_keys:
seen_keys[k] += 1
return [k for k, c in seen_keys.items() if c > 1]
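# Illustrative usage sketch (editorial addition, not part of the original
# oauthlib module). It only exercises helpers defined above; the literal
# values are invented for demonstration.
if __name__ == '__main__':  # pragma: no cover
    # Decode an x-www-form-urlencoded query into a list of unicode 2-tuples.
    print(urldecode('grant_type=authorization_code&code=abc%20123'))
    # extract_params accepts urlencoded strings, dicts or lists of 2-tuples.
    print(extract_params({'scope': 'read write', 'state': 'xyz'}))
    # A random, non-guessable token drawn from SystemRandom.
    print(generate_token(length=20))
    # Request exposes query, body and header parameters as attributes.
    req = Request('https://example.com/token?state=xyz',
                  http_method='POST',
                  body='grant_type=client_credentials',
                  headers={'Content-Type': 'application/x-www-form-urlencoded'})
    print(req.state)
    print(req.grant_type)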
|
cloudcomputinghust/IoT
|
refs/heads/master
|
platform_manager/api/migrations/0002_auto_20170224_0831.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-02-24 08:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='PlatformAssignmentModel',
fields=[
('id', models.IntegerField(primary_key=True, serialize=False)),
('sensor_id', models.TextField(unique=True)),
('platform_id', models.TextField(unique=True)),
('assign_status', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='PlatformDeploymentModel',
fields=[
('id', models.IntegerField(primary_key=True, serialize=False)),
('platform_id', models.TextField(unique=True)),
('platform_ip', models.TextField()),
],
),
migrations.DeleteModel(
name='Deployment',
),
]
|
Xeralux/tensorflow
|
refs/heads/master
|
tensorflow/contrib/slim/python/slim/model_analyzer.py
|
166
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tools for analyzing the operations and variables in a TensorFlow graph.
To analyze the operations in a graph:
images, labels = LoadData(...)
predictions = MyModel(images)
slim.model_analyzer.analyze_ops(tf.get_default_graph(), print_info=True)
To analyze the model variables in a graph:
variables = tf.model_variables()
slim.model_analyzer.analyze_vars(variables, print_info=False)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def tensor_description(var):
"""Returns a compact and informative string about a tensor.
Args:
var: A tensor variable.
Returns:
a string with type and size, e.g.: (float32 1x8x8x1024).
"""
description = '(' + str(var.dtype.name) + ' '
sizes = var.get_shape()
for i, size in enumerate(sizes):
description += str(size)
if i < len(sizes) - 1:
description += 'x'
description += ')'
return description
def analyze_ops(graph, print_info=False):
"""Compute the estimated size of the ops.outputs in the graph.
Args:
graph: the graph containing the operations.
print_info: Optional, if true print ops and their outputs.
Returns:
total size of the ops.outputs
"""
if print_info:
print('---------')
print('Operations: name -> (type shapes) [size]')
print('---------')
total_size = 0
for op in graph.get_operations():
op_size = 0
shapes = []
for output in op.outputs:
# if output.num_elements() is None or [] assume size 0.
output_size = output.get_shape().num_elements() or 0
if output.get_shape():
shapes.append(tensor_description(output))
op_size += output_size
if print_info:
print(op.name, '\t->', ', '.join(shapes), '[' + str(op_size) + ']')
total_size += op_size
return total_size
def analyze_vars(variables, print_info=False):
"""Prints the names and shapes of the variables.
Args:
variables: list of variables, for example tf.global_variables().
print_info: Optional, if true print variables and their shape.
Returns:
(total size of the variables, total bytes of the variables)
"""
if print_info:
print('---------')
print('Variables: name (type shape) [size]')
print('---------')
total_size = 0
total_bytes = 0
for var in variables:
# if var.num_elements() is None or [] assume size 0.
var_size = var.get_shape().num_elements() or 0
var_bytes = var_size * var.dtype.size
total_size += var_size
total_bytes += var_bytes
if print_info:
print(var.name, tensor_description(var), '[%d, bytes: %d]' %
(var_size, var_bytes))
if print_info:
print('Total size of variables: %d' % total_size)
print('Total bytes of variables: %d' % total_bytes)
return total_size, total_bytes
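# Illustrative usage sketch (editorial addition, not part of the original
# file). With a TF1-style graph already built, the two entry points above
# are typically invoked as:
#
#     import tensorflow as tf
#     total_output_size = analyze_ops(tf.get_default_graph(), print_info=True)
#     var_count, var_bytes = analyze_vars(tf.global_variables(),
#                                         print_info=True)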
|
henaras/sahara
|
refs/heads/master
|
sahara/plugins/hdp/confighints_helper.py
|
7
|
# Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_serialization import jsonutils as json
from sahara.service.edp.oozie.workflow_creator import workflow_factory
from sahara.utils import files as pkg
def get_possible_hive_config_from(file_name):
'''Return the possible configs, args, params for a Hive job.'''
config = {
'configs': load_hadoop_json_for_tag(file_name, 'hive-site.xml'),
'params': {}
}
return config
def get_possible_mapreduce_config_from(file_name):
'''Return the possible configs, args, params for a MapReduce job.'''
config = {
'configs': get_possible_pig_config_from(file_name).get('configs')
}
config['configs'] += workflow_factory.get_possible_mapreduce_configs()
return config
def get_possible_pig_config_from(file_name):
'''Return the possible configs, args, params for a Pig job.'''
config = {
'configs': load_hadoop_json_for_tag(file_name, 'mapred-site.xml'),
'args': [],
'params': {}
}
return config
def get_properties_for_tag(configurations, tag_name):
'''Get the properties for a tag
Given a list of configurations, return the properties for the named tag.
    If the named tag cannot be found, an empty list is returned.
'''
for obj in configurations:
if obj.get('tag') == tag_name:
return obj.get('properties')
return []
def load_hadoop_json_for_tag(file_name, tag_name):
'''Given a file name and a tag, return the configs from that tag.'''
full_json = load_json_file(file_name)
properties = get_properties_for_tag(full_json['configurations'], tag_name)
configs = []
for prop in properties:
configs.append({
'name': prop.get('name'),
'value': prop.get('default_value'),
'description': prop.get('description')
})
return configs
def load_json_file(file_name):
'''Given a package relative json file name, return the json.'''
ftext = pkg.get_file_text(file_name)
loaded_json = json.loads(ftext)
return loaded_json
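# Illustrative example (editorial addition, not part of the original module):
# get_properties_for_tag() scans a list of configuration blocks and returns
# the 'properties' of the block whose 'tag' matches, or [] when none does.
#
#     _sample_configurations = [
#         {'tag': 'hive-site.xml',
#          'properties': [{'name': 'hive.exec.parallel',
#                          'default_value': 'false',
#                          'description': 'Run stages in parallel'}]},
#         {'tag': 'mapred-site.xml', 'properties': []},
#     ]
#     get_properties_for_tag(_sample_configurations, 'hive-site.xml')
#     # -> the single-element 'properties' list above
#     get_properties_for_tag(_sample_configurations, 'core-site.xml')
#     # -> []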
|
barachka/odoo
|
refs/heads/master
|
addons/payment_paypal/models/res_company.py
|
422
|
# -*- coding: utf-8 -*-
from openerp.osv import fields, osv
class ResCompany(osv.Model):
_inherit = "res.company"
def _get_paypal_account(self, cr, uid, ids, name, arg, context=None):
Acquirer = self.pool['payment.acquirer']
company_id = self.pool['res.users'].browse(cr, uid, uid, context=context).company_id.id
paypal_ids = Acquirer.search(cr, uid, [
('website_published', '=', True),
('name', 'ilike', 'paypal'),
('company_id', '=', company_id),
], limit=1, context=context)
if paypal_ids:
paypal = Acquirer.browse(cr, uid, paypal_ids[0], context=context)
return dict.fromkeys(ids, paypal.paypal_email_account)
return dict.fromkeys(ids, False)
def _set_paypal_account(self, cr, uid, id, name, value, arg, context=None):
Acquirer = self.pool['payment.acquirer']
company_id = self.pool['res.users'].browse(cr, uid, uid, context=context).company_id.id
paypal_account = self.browse(cr, uid, id, context=context).paypal_account
paypal_ids = Acquirer.search(cr, uid, [
('website_published', '=', True),
('paypal_email_account', '=', paypal_account),
('company_id', '=', company_id),
], context=context)
if paypal_ids:
Acquirer.write(cr, uid, paypal_ids, {'paypal_email_account': value}, context=context)
return True
_columns = {
'paypal_account': fields.function(
_get_paypal_account,
fnct_inv=_set_paypal_account,
nodrop=True,
type='char', string='Paypal Account',
help="Paypal username (usually email) for receiving online payments."
),
}
|
iradul/qtwebkit
|
refs/heads/pjs
|
Tools/Scripts/webkitpy/bindings/__init__.py
|
6014
|
# Required for Python to search this directory for module files
|
jinluyuan/osf.io
|
refs/heads/develop
|
scripts/tests/test_dataverse_migrate_to_external_account.py
|
47
|
from nose.tools import *
from scripts.dataverse.migrate_to_external_account import do_migration, get_targets
from framework.auth import Auth
from tests.base import OsfTestCase
from tests.factories import ProjectFactory, UserFactory
from website.addons.dataverse.model import AddonDataverseUserSettings
class TestDatasetMigration(OsfTestCase):
def test_migration_no_project(self):
user = UserFactory()
api_token = 'api-token-2345'
user.add_addon('dataverse')
user_addon = user.get_addon('dataverse')
user_addon.api_token = api_token
user_addon.save()
do_migration([user_addon], dry=False)
user_addon.reload()
assert_is_none(user_addon.api_token)
assert_equal(len(user_addon.external_accounts), 1)
account = user_addon.external_accounts[0]
assert_equal(account.provider, 'dataverse')
assert_equal(account.oauth_key, 'dataverse.harvard.edu')
assert_equal(account.oauth_secret, api_token)
def test_migration_includes_project(self):
user = UserFactory()
project = ProjectFactory(creator=user)
api_token = 'api-token-2345'
user.add_addon('dataverse', auth=Auth(user))
user_addon = user.get_addon('dataverse')
user_addon.api_token = api_token
user_addon.save()
project.add_addon('dataverse', auth=Auth(user))
node_addon = project.get_addon('dataverse')
node_addon.user_settings = user_addon
node_addon.save()
do_migration([user_addon], dry=False)
user_addon.reload()
node_addon.reload()
account = user_addon.external_accounts[0]
assert_equal(account, node_addon.external_account)
def test_migration_multiple_users(self):
user1 = UserFactory()
user2 = UserFactory()
api_token = 'api-token-2345'
user1.add_addon('dataverse')
user1_addon = user1.get_addon('dataverse')
user1_addon.api_token = api_token
user1_addon.save()
user2.add_addon('dataverse')
user2_addon = user2.get_addon('dataverse')
user2_addon.api_token = api_token
user2_addon.save()
do_migration([user1_addon, user2_addon], dry=False)
user1_addon.reload()
user2_addon.reload()
assert_equal(
user1_addon.external_accounts[0],
user2_addon.external_accounts[0],
)
def test_get_targets(self):
AddonDataverseUserSettings.remove()
addons = [
AddonDataverseUserSettings(),
AddonDataverseUserSettings(api_token='api-token-1234'),
]
for addon in addons:
addon.save()
targets = get_targets()
assert_equal(targets.count(), 1)
assert_equal(targets[0]._id, addons[-1]._id)
|
opennode/nodeconductor-assembly-waldur
|
refs/heads/develop
|
src/waldur_pid/exceptions.py
|
2
|
class DataciteException(Exception):
pass
|
MCDong/barbican
|
refs/heads/master
|
barbican/api/__init__.py
|
2
|
# Copyright (c) 2013-2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
API handler for Cloudkeep's Barbican
"""
import pkgutil
from oslo_policy import policy
from oslo_serialization import jsonutils as json
import pecan
from barbican.common import config
from barbican.common import exception
from barbican.common import utils
from barbican import i18n as u
LOG = utils.getLogger(__name__)
CONF = config.CONF
class ApiResource(object):
"""Base class for API resources."""
pass
def load_body(req, resp=None, validator=None):
"""Helper function for loading an HTTP request body from JSON.
    This body is placed into a Python dictionary.
:param req: The HTTP request instance to load the body from.
:param resp: The HTTP response instance.
:param validator: The JSON validator to enforce.
:return: A dict of values from the JSON request.
"""
try:
body = req.body_file.read(CONF.max_allowed_request_size_in_bytes)
req.body_file.seek(0)
except IOError:
LOG.exception(u._LE("Problem reading request JSON stream."))
pecan.abort(500, u._('Read Error'))
try:
# TODO(jwood): Investigate how to get UTF8 format via openstack
# jsonutils:
# parsed_body = json.loads(raw_json, 'utf-8')
parsed_body = json.loads(body)
strip_whitespace(parsed_body)
except ValueError:
LOG.exception(u._LE("Problem loading request JSON."))
pecan.abort(400, u._('Malformed JSON'))
if validator:
try:
parsed_body = validator.validate(parsed_body)
except exception.BarbicanHTTPException as e:
LOG.exception(e.message)
pecan.abort(e.status_code, e.client_message)
return parsed_body
def generate_safe_exception_message(operation_name, excep):
"""Generates an exception message that is 'safe' for clients to consume.
A 'safe' message is one that doesn't contain sensitive information that
could be used for (say) cryptographic attacks on Barbican. That generally
    means that em.CryptoXxxx exceptions should be caught here, with a simple
    message created on their behalf.
:param operation_name: Name of attempted operation, with a 'Verb noun'
        format (e.g. 'Create Secret').
:param excep: The Exception instance that halted the operation.
:return: (status, message) where 'status' is one of the webob.exc.HTTP_xxx
codes, and 'message' is the sanitized message
associated with the error.
"""
message = None
reason = None
status = 500
try:
raise excep
except policy.PolicyNotAuthorized:
message = u._(
'{operation} attempt not allowed - '
'please review your '
'user/project privileges').format(operation=operation_name)
status = 403
except exception.BarbicanHTTPException as http_exception:
reason = http_exception.client_message
status = http_exception.status_code
except Exception:
message = u._('{operation} failure seen - please contact site '
'administrator.').format(operation=operation_name)
if reason:
message = u._('{operation} issue seen - {reason}.').format(
operation=operation_name, reason=reason)
return status, message
@pkgutil.simplegeneric
def get_items(obj):
"""This is used to get items from either a list or a dictionary.
    A while-False generator (yielding nothing) is needed to process scalar objects.
"""
while False:
yield None
@get_items.register(dict)
def _json_object(obj):
return obj.iteritems()
@get_items.register(list)
def _json_array(obj):
return enumerate(obj)
def strip_whitespace(json_data):
"""Recursively trim values from the object passed in using get_items()."""
for key, value in get_items(json_data):
if hasattr(value, 'strip'):
json_data[key] = value.strip()
else:
strip_whitespace(value)
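# Illustrative example (editorial addition, not part of the original module):
# strip_whitespace() walks dicts and lists in place via get_items() and trims
# every string value it finds, e.g.
#
#     _payload = {'name': '  my secret  ',
#                 'metadata': [' a ', {'algorithm': ' aes '}]}
#     strip_whitespace(_payload)
#     # _payload is now {'name': 'my secret',
#     #                  'metadata': ['a', {'algorithm': 'aes'}]}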
|
sandeepkbhat/pylearn2
|
refs/heads/master
|
pylearn2/models/rbm.py
|
44
|
"""
Implementations of Restricted Boltzmann Machines and associated sampling
strategies.
"""
# Standard library imports
import logging
# Third-party imports
import numpy
N = numpy
np = numpy
from theano.compat import six
from theano.compat.six.moves import xrange
import theano
from theano import tensor
from theano.compat.six.moves import zip as izip
T = tensor
from theano.tensor import nnet
# Local imports
from pylearn2.costs.cost import Cost
from pylearn2.blocks import Block, StackedBlocks
from pylearn2.utils import as_floatX, safe_update, sharedX
from pylearn2.models import Model
from pylearn2.expr.nnet import inverse_sigmoid_numpy
from pylearn2.linear.matrixmul import MatrixMul
from pylearn2.space import VectorSpace
from pylearn2.utils import safe_union
from pylearn2.utils.exc import reraise_as
from pylearn2.utils.rng import make_np_rng, make_theano_rng
theano.config.warn.sum_div_dimshuffle_bug = False
logger = logging.getLogger(__name__)
if 0:
logger.warning('using SLOW rng')
RandomStreams = tensor.shared_randomstreams.RandomStreams
else:
import theano.sandbox.rng_mrg
RandomStreams = theano.sandbox.rng_mrg.MRG_RandomStreams
def training_updates(visible_batch, model, sampler, optimizer):
"""
Combine together updates from various sources for RBM training.
Parameters
----------
visible_batch : tensor_like
Theano symbolic representing a minibatch on the visible units,
with the first dimension indexing training examples and the second
indexing data dimensions.
model : object
An instance of `RBM` or a derived class, or one implementing
the RBM interface.
sampler : object
An instance of `Sampler` or a derived class, or one implementing
the sampler interface.
optimizer : object
An instance of `_Optimizer` or a derived class, or one implementing
the optimizer interface (typically an `_SGDOptimizer`).
Returns
-------
WRITEME
"""
# TODO: the Optimizer object got deprecated, and this is the only
# functionality that requires it. We moved the Optimizer
# here with an _ before its name.
# We should figure out how best to refactor the code.
# Optimizer was problematic because people kept using SGDOptimizer
# instead of training_algorithms.sgd.
# Compute negative phase updates.
sampler_updates = sampler.updates()
# Compute SML gradients.
pos_v = visible_batch
#neg_v = sampler_updates[sampler.particles]
neg_v = sampler.particles
grads = model.ml_gradients(pos_v, neg_v)
# Build updates dictionary combining (gradient, sampler) updates.
ups = optimizer.updates(gradients=grads)
safe_update(ups, sampler_updates)
return ups
class Sampler(object):
"""
A sampler is responsible for implementing a sampling strategy on top of
an RBM, which may include retaining state e.g. the negative particles for
Persistent Contrastive Divergence.
Parameters
----------
rbm : object
An instance of `RBM` or a derived class, or one implementing
the `gibbs_step_for_v` interface.
particles : numpy.ndarray
        An initial state for the set of persistent Markov chain particles
that will be updated at every step of learning.
rng : RandomState object
NumPy random number generator object used to initialize a
RandomStreams object used in training.
"""
def __init__(self, rbm, particles, rng):
self.__dict__.update(rbm=rbm)
rng = make_np_rng(rng, which_method="randn")
seed = int(rng.randint(2 ** 30))
self.s_rng = make_theano_rng(seed, which_method="binomial")
self.particles = sharedX(particles, name='particles')
def updates(self):
"""
Get the dictionary of updates for the sampler's persistent state
at each step.
Returns
-------
updates : dict
Dictionary with shared variable instances as keys and symbolic
expressions indicating how they should be updated as values.
Notes
-----
In the `Sampler` base class, this is simply a stub.
"""
raise NotImplementedError()
class BlockGibbsSampler(Sampler):
"""
Implements a persistent Markov chain based on block gibbs sampling
for use with Persistent Contrastive
    Divergence, a.k.a. stochastic maximum likelihood, as described in [1].
.. [1] T. Tieleman. "Training Restricted Boltzmann Machines using
approximations to the likelihood gradient". Proceedings of the 25th
International Conference on Machine Learning, Helsinki, Finland,
2008. http://www.cs.toronto.edu/~tijmen/pcd/pcd.pdf
Parameters
----------
rbm : object
An instance of `RBM` or a derived class, or one implementing
the `gibbs_step_for_v` interface.
particles : ndarray
An initial state for the set of persistent Markov chain particles
that will be updated at every step of learning.
rng : RandomState object
NumPy random number generator object used to initialize a
RandomStreams object used in training.
steps : int, optional
Number of Gibbs steps to run the Markov chain for at each
iteration.
particles_clip : None or (min, max) pair, optional
The values of the returned particles will be clipped between
min and max.
"""
def __init__(self, rbm, particles, rng, steps=1, particles_clip=None):
super(BlockGibbsSampler, self).__init__(rbm, particles, rng)
self.steps = steps
self.particles_clip = particles_clip
def updates(self, particles_clip=None):
"""
Get the dictionary of updates for the sampler's persistent state
at each step.
Parameters
----------
particles_clip : WRITEME
Returns
-------
updates : dict
Dictionary with shared variable instances as keys and symbolic
expressions indicating how they should be updated as values.
"""
steps = self.steps
particles = self.particles
# TODO: do this with scan?
for i in xrange(steps):
particles, _locals = self.rbm.gibbs_step_for_v(
particles,
self.s_rng
)
assert particles.type.dtype == self.particles.type.dtype
if self.particles_clip is not None:
p_min, p_max = self.particles_clip
# The clipped values should still have the same type
dtype = particles.dtype
p_min = tensor.as_tensor_variable(p_min)
if p_min.dtype != dtype:
p_min = tensor.cast(p_min, dtype)
p_max = tensor.as_tensor_variable(p_max)
if p_max.dtype != dtype:
p_max = tensor.cast(p_max, dtype)
particles = tensor.clip(particles, p_min, p_max)
if not hasattr(self.rbm, 'h_sample'):
self.rbm.h_sample = sharedX(numpy.zeros((0, 0)), 'h_sample')
return {
self.particles: particles,
# TODO: self.rbm.h_sample is never used, why is that here?
# Moreover, it does not make sense for things like ssRBM.
self.rbm.h_sample: _locals['h_mean']
}
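# Editorial sketch (not part of the original file): a BlockGibbsSampler is
# normally wired up the way RBM.redo_theano() does further below --
# persistent particles initialised at 0.5, a few Gibbs steps per update:
#
#     sampler = BlockGibbsSampler(rbm,
#                                 0.5 + np.zeros((rbm.nchains,
#                                                 rbm.get_input_dim())),
#                                 rbm.rng, steps=rbm.sml_gibbs_steps)
#     updates = training_updates(visible_batch=minibatch, model=rbm,
#                                sampler=sampler, optimizer=optimizer)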
class RBM(Block, Model):
"""
A base interface for RBMs, implementing the binary-binary case.
Parameters
----------
nvis : int, optional
Number of visible units in the model.
(Specifying this implies that the model acts on a vector,
i.e. it sets vis_space = pylearn2.space.VectorSpace(nvis) )
nhid : int, optional
Number of hidden units in the model.
(Specifying this implies that the model acts on a vector)
vis_space : pylearn2.space.Space, optional
Space object describing what kind of vector space the RBM acts
        on. Don't specify if you used nvis / nhid
hid_space: pylearn2.space.Space, optional
Space object describing what kind of vector space the RBM's
hidden units live in. Don't specify if you used nvis / nhid
transformer : WRITEME
irange : float, optional
The size of the initial interval around 0 for weights.
rng : RandomState object or seed, optional
NumPy RandomState object to use when initializing parameters
of the model, or (integer) seed to use to create one.
init_bias_vis : array_like, optional
Initial value of the visible biases, broadcasted as necessary.
init_bias_vis_marginals : pylearn2.datasets.dataset.Dataset or None
Optional. Dataset used to initialize the visible biases to the
inverse sigmoid of the data marginals
init_bias_hid : array_like, optional
initial value of the hidden biases, broadcasted as necessary.
base_lr : float, optional
The base learning rate
anneal_start : int, optional
Number of steps after which to start annealing on a 1/t schedule
nchains : int, optional
Number of negative chains
sml_gibbs_steps : int, optional
Number of gibbs steps to take per update
random_patches_src : pylearn2.datasets.dataset.Dataset or None
Optional. Dataset from which to draw random patches in order to
initialize the weights. Patches will be multiplied by irange.
monitor_reconstruction : bool, optional
If True, will request a monitoring channel to monitor
reconstruction error
Notes
-----
The `RBM` class is redundant now that we have a `DBM` class, since
an RBM is just a DBM with one hidden layer. Users of pylearn2 should
use single-layer DBMs when possible. Not all RBM functionality has
been ported to the DBM framework yet, so this is not always possible.
(Examples: spike-and-slab RBMs, score matching, denoising score matching)
pylearn2 developers should not add new features to the RBM class or
add new RBM subclasses. pylearn2 developers should only add documentation
and bug fixes to the RBM class and subclasses. pylearn2 developers should
finish porting all RBM functionality to the DBM framework, then turn
the RBM class into a thin wrapper around the DBM class that allocates
a single layer DBM.
"""
def __init__(self, nvis = None, nhid = None,
vis_space = None,
hid_space = None,
transformer = None,
irange=0.5, rng=None, init_bias_vis = None,
init_bias_vis_marginals = None, init_bias_hid=0.0,
base_lr = 1e-3, anneal_start = None, nchains = 100,
sml_gibbs_steps = 1,
random_patches_src = None,
monitor_reconstruction = False):
Model.__init__(self)
Block.__init__(self)
if init_bias_vis_marginals is not None:
assert init_bias_vis is None
X = init_bias_vis_marginals.X
assert X.min() >= 0.0
assert X.max() <= 1.0
marginals = X.mean(axis=0)
#rescale the marginals a bit to avoid NaNs
init_bias_vis = inverse_sigmoid_numpy(.01 + .98 * marginals)
if init_bias_vis is None:
init_bias_vis = 0.0
rng = make_np_rng(rng, 1001, which_method="uniform")
self.rng = rng
if vis_space is None:
#if we don't specify things in terms of spaces and a transformer,
#assume dense matrix multiplication and work off of nvis, nhid
assert hid_space is None
assert transformer is None or isinstance(transformer,MatrixMul)
assert nvis is not None
assert nhid is not None
if transformer is None:
if random_patches_src is None:
W = rng.uniform(-irange, irange, (nvis, nhid))
else:
if hasattr(random_patches_src, '__array__'):
W = irange * random_patches_src.T
assert W.shape == (nvis, nhid)
else:
W = irange * random_patches_src.get_batch_design(
nhid).T
self.transformer = MatrixMul( sharedX(
W,
name='W',
borrow=True
)
)
else:
self.transformer = transformer
self.vis_space = VectorSpace(nvis)
self.hid_space = VectorSpace(nhid)
else:
assert hid_space is not None
assert transformer is not None
assert nvis is None
assert nhid is None
self.vis_space = vis_space
self.hid_space = hid_space
self.transformer = transformer
try:
b_vis = self.vis_space.get_origin()
b_vis += init_bias_vis
except ValueError:
reraise_as(ValueError("bad shape or value for init_bias_vis"))
self.bias_vis = sharedX(b_vis, name='bias_vis', borrow=True)
try:
b_hid = self.hid_space.get_origin()
b_hid += init_bias_hid
except ValueError:
reraise_as(ValueError('bad shape or value for init_bias_hid'))
self.bias_hid = sharedX(b_hid, name='bias_hid', borrow=True)
self.random_patches_src = random_patches_src
self.register_names_to_del(['random_patches_src'])
self.__dict__.update(nhid=nhid, nvis=nvis)
self._params = safe_union(self.transformer.get_params(),
[self.bias_vis, self.bias_hid])
self.base_lr = base_lr
self.anneal_start = anneal_start
self.nchains = nchains
self.sml_gibbs_steps = sml_gibbs_steps
def get_default_cost(self):
"""
.. todo::
WRITEME
"""
raise NotImplementedError("The RBM class predates the current "
"Cost-based training algorithms (SGD and BGD). To train "
"the RBM with PCD, use DefaultTrainingAlgorithm rather "
"than SGD or BGD. Some RBM subclassess may also be "
"trained with SGD or BGD by using the "
"Cost classes defined in pylearn2.costs.ebm_estimation. "
"Note that it is also possible to make an RBM by allocating "
"a DBM with only one hidden layer. The DBM class is newer "
"and supports training with SGD / BGD. In the long run we "
"should remove the old RBM class and turn it into a wrapper "
"around the DBM class that makes a 1-layer DBM.")
def get_input_dim(self):
"""
Returns
-------
dim : int
The number of elements in the input, if the input is a vector.
"""
if not isinstance(self.vis_space, VectorSpace):
raise TypeError("Can't describe " + str(type(self.vis_space))
+ " as a dimensionality number.")
return self.vis_space.dim
def get_output_dim(self):
"""
Returns
-------
dim : int
The number of elements in the output, if the output is a vector.
"""
if not isinstance(self.hid_space, VectorSpace):
raise TypeError("Can't describe " + str(type(self.hid_space))
+ " as a dimensionality number.")
return self.hid_space.dim
def get_input_space(self):
"""
.. todo::
WRITEME
"""
return self.vis_space
def get_output_space(self):
"""
.. todo::
WRITEME
"""
return self.hid_space
def get_params(self):
"""
.. todo::
WRITEME
"""
return [param for param in self._params]
def get_weights(self, borrow=False):
"""
.. todo::
WRITEME
"""
        weights, = self.transformer.get_params()
return weights.get_value(borrow=borrow)
def get_weights_topo(self):
"""
.. todo::
WRITEME
"""
return self.transformer.get_weights_topo()
def get_weights_format(self):
"""
.. todo::
WRITEME
"""
return ['v', 'h']
def get_monitoring_channels(self, data):
"""
.. todo::
WRITEME
"""
V = data
theano_rng = make_theano_rng(None, 42, which_method="binomial")
H = self.mean_h_given_v(V)
h = H.mean(axis=0)
return { 'bias_hid_min' : T.min(self.bias_hid),
'bias_hid_mean' : T.mean(self.bias_hid),
'bias_hid_max' : T.max(self.bias_hid),
'bias_vis_min' : T.min(self.bias_vis),
'bias_vis_mean' : T.mean(self.bias_vis),
'bias_vis_max': T.max(self.bias_vis),
'h_min' : T.min(h),
'h_mean': T.mean(h),
'h_max' : T.max(h),
'reconstruction_error' : self.reconstruction_error(V,
theano_rng) }
def get_monitoring_data_specs(self):
"""
Get the data_specs describing the data for get_monitoring_channel.
This implementation returns specification corresponding to unlabeled
inputs.
Returns
-------
WRITEME
"""
return (self.get_input_space(), self.get_input_source())
def ml_gradients(self, pos_v, neg_v):
"""
Get the contrastive gradients given positive and negative phase
visible units.
Parameters
----------
pos_v : tensor_like
Theano symbolic representing a minibatch on the visible units,
with the first dimension indexing training examples and the
second indexing data dimensions (usually actual training data).
neg_v : tensor_like
Theano symbolic representing a minibatch on the visible units,
with the first dimension indexing training examples and the
second indexing data dimensions (usually reconstructions of the
data or sampler particles from a persistent Markov chain).
Returns
-------
grads : list
List of Theano symbolic variables representing gradients with
respect to model parameters, in the same order as returned by
`params()`.
Notes
-----
`pos_v` and `neg_v` need not have the same first dimension, i.e.
minibatch size.
"""
# taking the mean over each term independently allows for different
# mini-batch sizes in the positive and negative phase.
ml_cost = (self.free_energy_given_v(pos_v).mean() -
self.free_energy_given_v(neg_v).mean())
grads = tensor.grad(ml_cost, self.get_params(),
consider_constant=[pos_v, neg_v])
return grads
def train_batch(self, dataset, batch_size):
"""
.. todo::
WRITEME properly
A default learning rule based on SML
"""
self.learn_mini_batch(dataset.get_batch_design(batch_size))
return True
def learn_mini_batch(self, X):
"""
.. todo::
WRITEME
A default learning rule based on SML
"""
if not hasattr(self, 'learn_func'):
self.redo_theano()
rval = self.learn_func(X)
return rval
def redo_theano(self):
"""
Compiles the theano function for the default learning rule
"""
init_names = dir(self)
minibatch = tensor.matrix()
optimizer = _SGDOptimizer(self, self.base_lr, self.anneal_start)
        sampler = BlockGibbsSampler(self, 0.5 + np.zeros((
            self.nchains, self.get_input_dim())), self.rng,
            steps=self.sml_gibbs_steps)
updates = training_updates(visible_batch=minibatch, model=self,
sampler=sampler, optimizer=optimizer)
self.learn_func = theano.function([minibatch], updates=updates)
final_names = dir(self)
self.register_names_to_del([name for name in final_names
if name not in init_names])
def gibbs_step_for_v(self, v, rng):
"""
Do a round of block Gibbs sampling given visible configuration
Parameters
----------
v : tensor_like
Theano symbolic representing the hidden unit states for a batch
of training examples (or negative phase particles), with the
first dimension indexing training examples and the second
indexing data dimensions.
rng : RandomStreams object
Random number generator to use for sampling the hidden and
visible units.
Returns
-------
v_sample : tensor_like
Theano symbolic representing the new visible unit state after one
round of Gibbs sampling.
locals : dict
Contains the following auxiliary state as keys (all symbolics
except shape tuples):
* `h_mean`: the returned value from `mean_h_given_v`
* `h_mean_shape`: shape tuple indicating the size of
`h_mean` and `h_sample`
* `h_sample`: the stochastically sampled hidden units
* `v_mean_shape`: shape tuple indicating the shape of
`v_mean` and `v_sample`
* `v_mean`: the returned value from `mean_v_given_h`
* `v_sample`: the stochastically sampled visible units
"""
h_mean = self.mean_h_given_v(v)
assert h_mean.type.dtype == v.type.dtype
# For binary hidden units
# TODO: factor further to extend to other kinds of hidden units
# (e.g. spike-and-slab)
h_sample = rng.binomial(size = h_mean.shape, n = 1 , p = h_mean,
dtype=h_mean.type.dtype)
assert h_sample.type.dtype == v.type.dtype
# v_mean is always based on h_sample, not h_mean, because we don't
# want h transmitting more than one bit of information per unit.
v_mean = self.mean_v_given_h(h_sample)
assert v_mean.type.dtype == v.type.dtype
v_sample = self.sample_visibles([v_mean], v_mean.shape, rng)
assert v_sample.type.dtype == v.type.dtype
return v_sample, locals()
def sample_visibles(self, params, shape, rng):
"""
Stochastically sample the visible units given hidden unit
configurations for a set of training examples.
Parameters
----------
params : list
List of the necessary parameters to sample :math:`p(v|h)`. In the
case of a binary-binary RBM this is a single-element list
containing the symbolic representing :math:`p(v|h)`, as returned
by `mean_v_given_h`.
Returns
-------
vprime : tensor_like
Theano symbolic representing stochastic samples from :math:`p(v|h)`
"""
v_mean = params[0]
return as_floatX(rng.uniform(size=shape) < v_mean)
def input_to_h_from_v(self, v):
"""
Compute the affine function (linear map plus bias) that serves as
input to the hidden layer in an RBM.
Parameters
----------
v : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the one or several
minibatches on the visible units, with the first dimension
indexing training examples and the second indexing data dimensions.
Returns
-------
a : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the input to each
hidden unit for each training example.
"""
if isinstance(v, tensor.Variable):
return self.bias_hid + self.transformer.lmul(v)
else:
return [self.input_to_h_from_v(vis) for vis in v]
def input_to_v_from_h(self, h):
"""
Compute the affine function (linear map plus bias) that serves as
input to the visible layer in an RBM.
Parameters
----------
h : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the one or several
minibatches on the hidden units, with the first dimension
indexing training examples and the second indexing data dimensions.
Returns
-------
a : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the input to each
visible unit for each row of h.
"""
if isinstance(h, tensor.Variable):
return self.bias_vis + self.transformer.lmul_T(h)
else:
return [self.input_to_v_from_h(hid) for hid in h]
def upward_pass(self, v):
"""
Wrapper around mean_h_given_v method. Called when RBM is accessed
by mlp.HiddenLayer.
"""
return self.mean_h_given_v(v)
def mean_h_given_v(self, v):
"""
Compute the mean activation of the hidden units given visible unit
configurations for a set of training examples.
Parameters
----------
v : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the hidden unit
states for a batch (or several) of training examples, with the
first dimension indexing training examples and the second
indexing data dimensions.
Returns
-------
h : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the mean
(deterministic) hidden unit activations given the visible units.
"""
if isinstance(v, tensor.Variable):
return nnet.sigmoid(self.input_to_h_from_v(v))
else:
return [self.mean_h_given_v(vis) for vis in v]
def mean_v_given_h(self, h):
"""
Compute the mean activation of the visibles given hidden unit
configurations for a set of training examples.
Parameters
----------
h : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the hidden unit
states for a batch (or several) of training examples, with the
first dimension indexing training examples and the second
indexing hidden units.
Returns
-------
vprime : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the mean
(deterministic) reconstruction of the visible units given the
hidden units.
"""
if isinstance(h, tensor.Variable):
return nnet.sigmoid(self.input_to_v_from_h(h))
else:
return [self.mean_v_given_h(hid) for hid in h]
def free_energy_given_v(self, v):
"""
Calculate the free energy of a visible unit configuration by
marginalizing over the hidden units.
Parameters
----------
v : tensor_like
Theano symbolic representing the hidden unit states for a batch
of training examples, with the first dimension indexing training
examples and the second indexing data dimensions.
Returns
-------
f : tensor_like
1-dimensional tensor (vector) representing the free energy
associated with each row of v.
"""
sigmoid_arg = self.input_to_h_from_v(v)
return (-tensor.dot(v, self.bias_vis) -
nnet.softplus(sigmoid_arg).sum(axis=1))
def free_energy(self, V):
return self.free_energy_given_v(V)
def free_energy_given_h(self, h):
"""
Calculate the free energy of a hidden unit configuration by
marginalizing over the visible units.
Parameters
----------
h : tensor_like
Theano symbolic representing the hidden unit states, with the
first dimension indexing training examples and the second
indexing data dimensions.
Returns
-------
f : tensor_like
1-dimensional tensor (vector) representing the free energy
associated with each row of v.
"""
sigmoid_arg = self.input_to_v_from_h(h)
return (-tensor.dot(h, self.bias_hid) -
nnet.softplus(sigmoid_arg).sum(axis=1))
def __call__(self, v):
"""
Forward propagate (symbolic) input through this module, obtaining
a representation to pass on to layers above.
This just aliases the `mean_h_given_v()` function for syntactic
sugar/convenience.
"""
return self.mean_h_given_v(v)
def reconstruction_error(self, v, rng):
"""
Compute the mean-squared error (mean over examples, sum over units)
across a minibatch after a Gibbs step starting from the training data.
Parameters
----------
v : tensor_like
            Theano symbolic representing the visible unit states for a batch
of training examples, with the first dimension indexing training
examples and the second indexing data dimensions.
rng : RandomStreams object
Random number generator to use for sampling the hidden and
visible units.
Returns
-------
mse : tensor_like
0-dimensional tensor (essentially a scalar) indicating the mean
reconstruction error across the minibatch.
Notes
-----
The reconstruction used to assess error samples only the hidden
units. For the visible units, it uses the conditional mean. No sampling
of the visible units is done, to reduce noise in the estimate.
"""
sample, _locals = self.gibbs_step_for_v(v, rng)
return ((_locals['v_mean'] - v) ** 2).sum(axis=1).mean()
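# Illustrative usage sketch (the variable names here are hypothetical, not part
# of this module): given an RBM instance `rbm`, a Theano matrix `X` of training
# examples and a Theano RandomStreams object `theano_rng`, the symbolic
# per-minibatch reconstruction error could be built as
#   mse = rbm.reconstruction_error(X, theano_rng)
# and compiled with theano.function for monitoring during training.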
class GaussianBinaryRBM(RBM):
"""
An RBM with Gaussian visible units and binary hidden units.
Parameters
----------
energy_function_class : WRITEME
nvis : int, optional
Number of visible units in the model.
nhid : int, optional
Number of hidden units in the model.
vis_space : WRITEME
hid_space : WRITEME
irange : float, optional
The size of the initial interval around 0 for weights.
rng : RandomState object or seed, optional
NumPy RandomState object to use when initializing parameters
of the model, or (integer) seed to use to create one.
mean_vis : bool, optional
        If True, don't actually sample the visibles; make the sample
        method simply return the conditional mean.
init_sigma : float or numpy.ndarray, optional
Initial value of the sigma variable. If init_sigma is a scalar
        and sigma is not, it will be broadcast.
learn_sigma : bool, optional
WRITEME
sigma_lr_scale : float, optional
WRITEME
init_bias_hid : scalar or 1-d array of length `nhid`
Initial value for the biases on hidden units.
min_sigma, max_sigma : float, float, optional
Elements of sigma are clipped to this range during learning
"""
def __init__(self, energy_function_class,
nvis = None,
nhid = None,
vis_space = None,
hid_space = None,
transformer = None,
irange=0.5, rng=None,
mean_vis=False, init_sigma=2., learn_sigma=False,
sigma_lr_scale=1., init_bias_hid=0.0,
min_sigma = .1, max_sigma = 10.):
super(GaussianBinaryRBM, self).__init__(nvis = nvis, nhid = nhid,
transformer = transformer,
vis_space = vis_space,
hid_space = hid_space,
irange = irange, rng = rng,
init_bias_hid = init_bias_hid)
self.learn_sigma = learn_sigma
self.init_sigma = init_sigma
self.sigma_lr_scale = float(sigma_lr_scale)
if energy_function_class.supports_vector_sigma():
base = N.ones(nvis)
else:
base = 1
self.sigma_driver = sharedX(
base * init_sigma / self.sigma_lr_scale,
name='sigma_driver',
borrow=True
)
self.sigma = self.sigma_driver * self.sigma_lr_scale
self.min_sigma = min_sigma
self.max_sigma = max_sigma
if self.learn_sigma:
self._params.append(self.sigma_driver)
self.mean_vis = mean_vis
self.energy_function = energy_function_class(
transformer = self.transformer,
sigma=self.sigma,
bias_vis=self.bias_vis,
bias_hid=self.bias_hid
)
def _modify_updates(self, updates):
"""
.. todo::
WRITEME
"""
if self.sigma_driver in updates:
assert self.learn_sigma
updates[self.sigma_driver] = T.clip(
updates[self.sigma_driver],
self.min_sigma / self.sigma_lr_scale,
self.max_sigma / self.sigma_lr_scale
)
def score(self, V):
"""
.. todo::
WRITEME
"""
return self.energy_function.score(V)
def P_H_given_V(self, V):
"""
.. todo::
WRITEME
"""
return self.energy_function.mean_H_given_V(V)
def mean_h_given_v(self, v):
"""
.. todo::
WRITEME
"""
return self.P_H_given_V(v)
def mean_v_given_h(self, h):
"""
Compute the mean activation of the visibles given hidden unit
configurations for a set of training examples.
Parameters
----------
h : tensor_like
Theano symbolic representing the hidden unit states for a batch
of training examples, with the first dimension indexing training
examples and the second indexing hidden units.
Returns
-------
vprime : tensor_like
Theano symbolic representing the mean (deterministic)
reconstruction of the visible units given the hidden units.
"""
return self.energy_function.mean_V_given_H(h)
#return self.bias_vis + self.sigma * tensor.dot(h, self.weights.T)
def free_energy_given_v(self, V):
"""
Calculate the free energy of a visible unit configuration by
marginalizing over the hidden units.
Parameters
----------
v : tensor_like
            Theano symbolic representing the visible unit states for a batch
of training examples, with the first dimension indexing training
examples and the second indexing data dimensions.
Returns
-------
f : tensor_like
1-dimensional tensor representing the free energy of the visible
unit configuration for each example in the batch
"""
"""hid_inp = self.input_to_h_from_v(v)
squared_term = ((self.bias_vis - v) ** 2.) / (2. * self.sigma)
rval = squared_term.sum(axis=1) - nnet.softplus(hid_inp).sum(axis=1)
assert len(rval.type.broadcastable) == 1"""
return self.energy_function.free_energy(V)
def free_energy(self, V):
"""
.. todo::
WRITEME
"""
return self.energy_function.free_energy(V)
def sample_visibles(self, params, shape, rng):
"""
Stochastically sample the visible units given hidden unit
configurations for a set of training examples.
Parameters
----------
params : list
List of the necessary parameters to sample :math:`p(v|h)`.
In the case of a Gaussian-binary RBM this is a single-element
list containing the conditional mean.
shape : WRITEME
rng : WRITEME
Returns
-------
vprime : tensor_like
Theano symbolic representing stochastic samples from
:math:`p(v|h)`
Notes
-----
If `mean_vis` is specified as `True` in the constructor, this is
equivalent to a call to `mean_v_given_h`.
"""
v_mean = params[0]
if self.mean_vis:
return v_mean
else:
# zero mean, std sigma noise
zero_mean = rng.normal(size=shape) * self.sigma
return zero_mean + v_mean
class mu_pooled_ssRBM(RBM):
"""
.. todo::
WRITEME
Parameters
----------
alpha : WRITEME
Vector of length nslab, diagonal precision term on s.
b : WRITEME
Vector of length nhid, hidden unit bias.
B : WRITEME
Vector of length nvis, diagonal precision on v. Lambda in ICML2011
paper.
Lambda : WRITEME
Matrix of shape nvis x nhid, whose i-th column encodes a diagonal
precision on v, conditioned on h_i. phi in ICML2011 paper.
log_alpha : WRITEME
Vector of length nslab, precision on s.
mu : WRITEME
Vector of length nslab, mean parameter on s.
W : WRITEME
Matrix of shape nvis x nslab, weights of the nslab linear filters s.
"""
def __init__(self, nvis, nhid, n_s_per_h,
batch_size,
alpha0, alpha_irange,
b0,
B0,
Lambda0, Lambda_irange,
mu0,
W_irange=None,
rng=None):
rng = make_np_rng(rng, 1001, which_method="rand")
self.nhid = nhid
self.nslab = nhid * n_s_per_h
self.n_s_per_h = n_s_per_h
self.nvis = nvis
self.batch_size = batch_size
# configure \alpha: precision parameter on s
alpha_init = numpy.zeros(self.nslab) + alpha0
if alpha_irange > 0:
alpha_init += (2 * rng.rand(self.nslab) - 1) * alpha_irange
self.log_alpha = sharedX(numpy.log(alpha_init), name='log_alpha')
self.alpha = tensor.exp(self.log_alpha)
self.alpha.name = 'alpha'
self.mu = sharedX(
numpy.zeros(self.nslab) + mu0,
name='mu', borrow=True)
self.b = sharedX(
numpy.zeros(self.nhid) + b0,
name='b', borrow=True)
if W_irange is None:
            # Derived close to Xavier Glorot's magic formula
W_irange = 2 / numpy.sqrt(nvis * nhid)
self.W = sharedX(
(.5 - rng.rand(self.nvis, self.nslab)) * 2 * W_irange,
name='W', borrow=True)
# THE BETA IS IGNORED DURING TRAINING - FIXED AT MARGINAL DISTRIBUTION
self.B = sharedX(numpy.zeros(self.nvis) + B0, name='B', borrow=True)
if Lambda_irange > 0:
L = (rng.rand(self.nvis, self.nhid) * Lambda_irange
+ Lambda0)
else:
L = numpy.zeros((self.nvis, self.nhid)) + Lambda0
self.Lambda = sharedX(L, name='Lambda', borrow=True)
self._params = [
self.mu,
self.B,
self.Lambda,
self.W,
self.b,
self.log_alpha]
#def ml_gradients(self, pos_v, neg_v):
# inherited version is OK.
def gibbs_step_for_v(self, v, rng):
"""
.. todo::
WRITEME
"""
# Sometimes, the number of examples in the data set is not a
# multiple of self.batch_size.
batch_size = v.shape[0]
# sample h given v
h_mean = self.mean_h_given_v(v)
h_mean_shape = (batch_size, self.nhid)
h_sample = rng.binomial(size=h_mean_shape,
n = 1, p = h_mean, dtype = h_mean.dtype)
# sample s given (v,h)
s_mu, s_var = self.mean_var_s_given_v_h1(v)
s_mu_shape = (batch_size, self.nslab)
s_sample = s_mu + rng.normal(size=s_mu_shape) * tensor.sqrt(s_var)
#s_sample=(s_sample.reshape()*h_sample.dimshuffle(0,1,'x')).flatten(2)
# sample v given (s,h)
v_mean, v_var = self.mean_var_v_given_h_s(h_sample, s_sample)
v_mean_shape = (batch_size, self.nvis)
v_sample = rng.normal(size=v_mean_shape) * tensor.sqrt(v_var) + v_mean
del batch_size
return v_sample, locals()
## TODO?
def sample_visibles(self, params, shape, rng):
"""
.. todo::
WRITEME
"""
raise NotImplementedError('mu_pooled_ssRBM.sample_visibles')
def input_to_h_from_v(self, v):
"""
.. todo::
WRITEME
"""
D = self.Lambda
alpha = self.alpha
def sum_s(x):
return x.reshape((
-1,
self.nhid,
self.n_s_per_h)).sum(axis=2)
return tensor.add(
self.b,
-0.5 * tensor.dot(v * v, D),
sum_s(self.mu * tensor.dot(v, self.W)),
sum_s(0.5 * tensor.sqr(tensor.dot(v, self.W)) / alpha))
#def mean_h_given_v(self, v):
# inherited version is OK:
# return nnet.sigmoid(self.input_to_h_from_v(v))
def mean_var_v_given_h_s(self, h, s):
"""
.. todo::
WRITEME
"""
v_var = 1 / (self.B + tensor.dot(h, self.Lambda.T))
s3 = s.reshape((
-1,
self.nhid,
self.n_s_per_h))
hs = h.dimshuffle(0, 1, 'x') * s3
v_mu = tensor.dot(hs.flatten(2), self.W.T) * v_var
return v_mu, v_var
def mean_var_s_given_v_h1(self, v):
"""
.. todo::
WRITEME
"""
alpha = self.alpha
return (self.mu + tensor.dot(v, self.W) / alpha,
1.0 / alpha)
## TODO?
def mean_v_given_h(self, h):
"""
.. todo::
WRITEME
"""
raise NotImplementedError('mu_pooled_ssRBM.mean_v_given_h')
def free_energy_given_v(self, v):
"""
.. todo::
WRITEME
"""
sigmoid_arg = self.input_to_h_from_v(v)
return tensor.add(
0.5 * (self.B * (v ** 2)).sum(axis=1),
-tensor.nnet.softplus(sigmoid_arg).sum(axis=1))
#def __call__(self, v):
# inherited version is OK
#def reconstruction_error:
# inherited version should be OK
#def params(self):
# inherited version is OK.
def build_stacked_RBM(nvis, nhids, batch_size, vis_type='binary',
input_mean_vis=None, irange=1e-3, rng=None):
"""
.. todo::
WRITEME properly
Note from IG:
This method doesn't seem to work correctly with Gaussian RBMs.
In general, this is a difficult function to support, because it
    needs to pass the right arguments to the constructor of many kinds
of RBMs. It would probably be better to just construct an instance
of pylearn2.models.mlp.MLP with its hidden layers set to instances
of pylearn2.models.mlp.RBM_Layer. If anyone is working on this kind
of problem, a PR replacing this function with a helper function to
make such an MLP would be very welcome.
Allocate a StackedBlocks containing RBMs.
The visible units of the input RBM can be either binary or gaussian,
the other ones are all binary.
"""
#TODO: not sure this is the right way of dealing with mean_vis.
layers = []
assert vis_type in ['binary', 'gaussian']
if vis_type == 'binary':
assert input_mean_vis is None
elif vis_type == 'gaussian':
assert input_mean_vis in (True, False)
# The number of visible units in each layer is the initial input
# size and the first k-1 hidden unit sizes.
nviss = [nvis] + nhids[:-1]
seq = izip(
xrange(len(nhids)),
nhids,
nviss,
)
for k, nhid, nvis in seq:
if k == 0 and vis_type == 'gaussian':
rbm = GaussianBinaryRBM(nvis=nvis, nhid=nhid,
batch_size=batch_size,
irange=irange,
rng=rng,
mean_vis=input_mean_vis)
else:
            rbm = RBM(nvis=nvis, nhid=nhid,
batch_size=batch_size,
irange=irange,
rng=rng)
layers.append(rbm)
# Create the stack
return StackedBlocks(layers)
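# Illustrative call (the argument values below are made up for the example):
#   stack = build_stacked_RBM(nvis=784, nhids=[500, 200], batch_size=20,
#                             vis_type='binary', irange=1e-3)
# allocates a StackedBlocks holding two binary RBMs with layer sizes
# 784 -> 500 -> 200.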
class L1_ActivationCost(Cost):
"""
.. todo::
WRITEME
Parameters
----------
target : WRITEME
eps : WRITEME
coeff : WRITEME
"""
def __init__(self, target, eps, coeff):
self.__dict__.update(locals())
del self.self
def expr(self, model, data, ** kwargs):
"""
.. todo::
WRITEME
"""
self.get_data_specs(model)[0].validate(data)
X = data
H = model.P_H_given_V(X)
h = H.mean(axis=0)
err = abs(h - self.target)
dead = T.maximum(err - self.eps, 0.)
assert dead.ndim == 1
rval = self.coeff * dead.mean()
return rval
def get_data_specs(self, model):
"""
.. todo::
WRITEME
"""
return (model.get_input_space(), model.get_input_source())
# The following functionality was deprecated, but is evidently
# still needed to make the RBM work
class _Optimizer(object):
"""
Basic abstract class for computing parameter updates of a model.
"""
def updates(self):
"""Return symbolic updates to apply."""
raise NotImplementedError()
class _SGDOptimizer(_Optimizer):
"""
Compute updates by stochastic gradient descent on mini-batches.
Supports constant learning rates, or decreasing like 1/t after an initial
period.
Parameters
----------
params : object or list
Either a Model object with a .get_params() method, or a list of
parameters to be optimized.
base_lr : float
The base learning rate before annealing or parameter-specific
scaling.
anneal_start : int, optional
Number of steps after which to start annealing the learning
rate at a 1/t schedule, where t is the number of stochastic
gradient updates.
use_adagrad : bool, optional
        If set to True, the 'adagrad' adaptive learning rate scheme is
        used, with base_lr serving as e0.
kwargs : dict
WRITEME
Notes
-----
The formula to compute the effective learning rate on a parameter is:
<paramname>_lr * max(0.0, min(base_lr, lr_anneal_start/(iteration+1)))
Parameter-specific learning rates can be set by passing keyword
arguments <name>_lr, where name is the .name attribute of a given
parameter.
Parameter-specific bounding values can be specified by passing
keyword arguments <param>_clip, which should be a (min, max) pair.
Adagrad is recommended with sparse inputs. It normalizes the base
learning rate of a parameter theta_i by the accumulated 2-norm of its
gradient: e{ti} = e0 / sqrt( sum_t (dL_t / dtheta_i)^2 )
"""
def __init__(self, params, base_lr, anneal_start=None, use_adagrad=False,
** kwargs):
if hasattr(params, '__iter__'):
self.params = params
elif hasattr(params, 'get_params') and hasattr(
params.get_params, '__call__'):
self.params = params.get_params()
else:
raise ValueError("SGDOptimizer couldn't figure out what to do "
"with first argument: '%s'" % str(params))
        if anneal_start is None:
self.anneal_start = None
else:
self.anneal_start = as_floatX(anneal_start)
# Create accumulators and epsilon0's
self.use_adagrad = use_adagrad
if self.use_adagrad:
self.accumulators = {}
self.e0s = {}
for param in self.params:
self.accumulators[param] = theano.shared(
value=as_floatX(0.), name='acc_%s' % param.name)
self.e0s[param] = as_floatX(base_lr)
# Set up the clipping values
self.clipping_values = {}
# Keep track of names already seen
clip_names_seen = set()
for parameter in self.params:
clip_name = '%s_clip' % parameter.name
if clip_name in kwargs:
if clip_name in clip_names_seen:
logger.warning('In SGDOptimizer, at least two parameters '
'have the same name. Both will be affected '
'by the keyword argument '
'{0}.'.format(clip_name))
clip_names_seen.add(clip_name)
p_min, p_max = kwargs[clip_name]
assert p_min <= p_max
self.clipping_values[parameter] = (p_min, p_max)
# Check that no ..._clip keyword is being ignored
for clip_name in clip_names_seen:
kwargs.pop(clip_name)
for kw in six.iterkeys(kwargs):
if kw[-5:] == '_clip':
logger.warning('In SGDOptimizer, keyword argument {0} '
'will be ignored, because no parameter '
'was found with name {1}.'.format(kw, kw[:-5]))
self.learning_rates_setup(base_lr, **kwargs)
def learning_rates_setup(self, base_lr, **kwargs):
"""
Initializes parameter-specific learning rate dictionary and shared
variables for the annealed base learning rate and iteration number.
Parameters
----------
base_lr : float
The base learning rate before annealing or parameter-specific
scaling.
kwargs : dict
WRITEME
Notes
-----
Parameter-specific learning rates can be set by passing keyword
arguments <name>_lr, where name is the .name attribute of a given
parameter.
"""
# Take care of learning rate scales for individual parameters
self.learning_rates = {}
# Base learning rate per example.
self.base_lr = theano._asarray(base_lr, dtype=theano.config.floatX)
# Keep track of names already seen
lr_names_seen = set()
for parameter in self.params:
lr_name = '%s_lr' % parameter.name
if lr_name in lr_names_seen:
logger.warning('In SGDOptimizer, '
'at least two parameters have the same name. '
'Both will be affected by the keyword argument '
'{0}.'.format(lr_name))
lr_names_seen.add(lr_name)
thislr = kwargs.get(lr_name, 1.)
self.learning_rates[parameter] = sharedX(thislr, lr_name)
# Verify that no ..._lr keyword argument is ignored
for lr_name in lr_names_seen:
if lr_name in kwargs:
kwargs.pop(lr_name)
for kw in six.iterkeys(kwargs):
if kw[-3:] == '_lr':
logger.warning('In SGDOptimizer, keyword argument {0} '
'will be ignored, because no parameter '
'was found with name {1}.'.format(kw, kw[:-3]))
# A shared variable for storing the iteration number.
self.iteration = sharedX(theano._asarray(0, dtype='int32'),
name='iter')
# A shared variable for storing the annealed base learning rate, used
# to lower the learning rate gradually after a certain amount of time.
self.annealed = sharedX(base_lr, 'annealed')
def learning_rate_updates(self, gradients):
"""
Compute a dictionary of shared variable updates related to annealing
the learning rate.
Parameters
----------
gradients : WRITEME
Returns
-------
updates : dict
A dictionary with the shared variables representing SGD metadata
as keys and a symbolic expression of how they are to be updated as
values.
"""
ups = {}
if self.use_adagrad:
learn_rates = []
for param, gp in zip(self.params, gradients):
acc = self.accumulators[param]
ups[acc] = acc + (gp ** 2).sum()
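                # Note: the accumulator stores the squared 2-norm of the whole
                # gradient tensor, so every element of this parameter shares a
                # single Adagrad scale e0 / sqrt(sum_t ||grad_t||^2).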
learn_rates.append(self.e0s[param] / (ups[acc] ** .5))
else:
# Annealing coefficient. Here we're using a formula of
# min(base_lr, anneal_start / (iteration + 1))
if self.anneal_start is None:
annealed = sharedX(self.base_lr)
else:
frac = self.anneal_start / (self.iteration + 1.)
annealed = tensor.minimum(
as_floatX(frac),
self.base_lr # maximum learning rate
)
# Update the shared variable for the annealed learning rate.
ups[self.annealed] = annealed
ups[self.iteration] = self.iteration + 1
# Calculate the learning rates for each parameter, in the order
# they appear in self.params
learn_rates = [annealed * self.learning_rates[p] for p in
self.params]
return ups, learn_rates
def updates(self, gradients):
"""
Return symbolic updates to apply given a set of gradients
on the parameters being optimized.
Parameters
----------
gradients : list of tensor_likes
List of symbolic gradients for the parameters contained
in self.params, in the same order as in self.params.
Returns
-------
updates : dict
A dictionary with the shared variables in self.params as keys
and a symbolic expression of how they are to be updated each
SGD step as values.
Notes
-----
`cost_updates` is a convenient helper function that takes all
necessary gradients with respect to a given symbolic cost.
"""
ups = {}
# Add the learning rate/iteration updates
l_ups, learn_rates = self.learning_rate_updates(gradients)
safe_update(ups, l_ups)
        # Get the updates from sgd_updates, defined as a helper method below.
p_up = dict(self.sgd_updates(self.params, gradients, learn_rates))
# Add the things in p_up to ups
safe_update(ups, p_up)
# Clip the values if needed.
# We do not want the clipping values to force an upcast
# of the update: updates should have the same type as params
for param, (p_min, p_max) in six.iteritems(self.clipping_values):
p_min = tensor.as_tensor(p_min)
p_max = tensor.as_tensor(p_max)
dtype = param.dtype
if p_min.dtype != dtype:
p_min = tensor.cast(p_min, dtype)
if p_max.dtype != dtype:
p_max = tensor.cast(p_max, dtype)
ups[param] = tensor.clip(ups[param], p_min, p_max)
# Return the updates dictionary.
return ups
def cost_updates(self, cost):
"""
Return symbolic updates to apply given a cost function.
Parameters
----------
cost : tensor_like
Symbolic cost with respect to which the gradients of
the parameters should be taken. Should be 0-dimensional
(scalar valued).
Returns
-------
updates : dict
A dictionary with the shared variables in self.params as keys
and a symbolic expression of how they are to be updated each
SGD step as values.
"""
grads = [tensor.grad(cost, p) for p in self.params]
return self.updates(gradients=grads)
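    # Illustrative training-loop wiring (the names below are hypothetical):
    #   optimizer = _SGDOptimizer(model.get_params(), base_lr=1e-2)
    #   train = theano.function([X], cost, updates=optimizer.cost_updates(cost))
    # Each call to train(X_batch) then applies one (optionally annealed) SGD step.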
def sgd_updates(self, params, grads, stepsizes):
"""
        Return a list of (parameter, update) pairs that can be used
        as updates in theano.function to
        implement stochastic gradient descent.
Parameters
----------
params : list of Variable
variables to adjust in order to minimize some cost
grads : list of Variable
the gradient on each param (with respect to some cost)
stepsizes : symbolic scalar or list of one symbolic scalar per param
step by this amount times the negative gradient on each iteration
"""
try:
iter(stepsizes)
except Exception:
stepsizes = [stepsizes for p in params]
if len(params) != len(grads):
raise ValueError('params and grads have different lens')
updates = [(p, p - step * gp) for (step, p, gp)
in zip(stepsizes, params, grads)]
return updates
def sgd_momentum_updates(self, params, grads, stepsizes, momentum=0.9):
"""
.. todo::
WRITEME
"""
# if stepsizes is just a scalar, expand it to match params
try:
iter(stepsizes)
except Exception:
stepsizes = [stepsizes for p in params]
try:
iter(momentum)
except Exception:
momentum = [momentum for p in params]
if len(params) != len(grads):
raise ValueError('params and grads have different lens')
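        # Each parameter gets a "heading" (velocity) buffer h, updated as
        # h <- momentum * h - (1 - momentum) * grad, while the parameter
        # itself moves by stepsize * h on every call.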
headings = [theano.shared(numpy.zeros_like(p.get_value(borrow=True)))
for p in params]
updates = []
for s, p, gp, m, h in zip(stepsizes, params, grads, momentum,
headings):
updates.append((p, p + s * h))
updates.append((h, m * h - (1.0 - m) * gp))
return updates
|
plamut/ggrc-core
|
refs/heads/develop
|
src/ggrc_basic_permissions/migrations/versions/20131010221316_3adc42b4f6b9_rename_admin_role.py
|
7
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Rename admin role.
Revision ID: 3adc42b4f6b9
Revises: 10adeac7b693
Create Date: 2013-10-10 22:13:16.470076
"""
# revision identifiers, used by Alembic.
revision = '3adc42b4f6b9'
down_revision = '10adeac7b693'
import json
import sqlalchemy as sa
from alembic import op
from datetime import datetime
from sqlalchemy.sql import table, column
roles_table = table('roles',
column('id', sa.Integer),
column('name', sa.String),
column('updated_at', sa.DateTime),
)
def upgrade():
op.execute(roles_table.update()\
.where(roles_table.c.name == 'System Administrator')\
.values(name = 'gGRC Admin'))
def downgrade():
op.execute(roles_table.update()\
.where(roles_table.c.name == 'gGRC Admin')\
.values(name = 'System Administrator'))
|
willingc/oh-mainline
|
refs/heads/master
|
vendor/packages/twisted/twisted/internet/threads.py
|
25
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Extended thread dispatching support.
For basic support see reactor threading API docs.
Maintainer: Itamar Shtull-Trauring
"""
import Queue
from twisted.python import failure
from twisted.internet import defer
def deferToThreadPool(reactor, threadpool, f, *args, **kwargs):
"""
Call the function C{f} using a thread from the given threadpool and return
the result as a Deferred.
This function is only used by client code which is maintaining its own
threadpool. To run a function in the reactor's threadpool, use
C{deferToThread}.
@param reactor: The reactor in whose main thread the Deferred will be
invoked.
@param threadpool: An object which supports the C{callInThreadWithCallback}
method of C{twisted.python.threadpool.ThreadPool}.
@param f: The function to call.
@param *args: positional arguments to pass to f.
@param **kwargs: keyword arguments to pass to f.
@return: A Deferred which fires a callback with the result of f, or an
errback with a L{twisted.python.failure.Failure} if f throws an
exception.
"""
d = defer.Deferred()
def onResult(success, result):
if success:
reactor.callFromThread(d.callback, result)
else:
reactor.callFromThread(d.errback, result)
threadpool.callInThreadWithCallback(onResult, f, *args, **kwargs)
return d
def deferToThread(f, *args, **kwargs):
"""
Run a function in a thread and return the result as a Deferred.
@param f: The function to call.
@param *args: positional arguments to pass to f.
@param **kwargs: keyword arguments to pass to f.
@return: A Deferred which fires a callback with the result of f,
or an errback with a L{twisted.python.failure.Failure} if f throws
an exception.
"""
from twisted.internet import reactor
return deferToThreadPool(reactor, reactor.getThreadPool(),
f, *args, **kwargs)
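# Illustrative use (the callback names are hypothetical):
#     d = deferToThread(some_blocking_call, arg, key=value)
#     d.addCallback(handleResult)
#     d.addErrback(handleError)
# some_blocking_call runs in the reactor's thread pool; the Deferred fires in
# the reactor thread with its return value (or a Failure if it raised).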
def _runMultiple(tupleList):
"""
Run a list of functions.
"""
for f, args, kwargs in tupleList:
f(*args, **kwargs)
def callMultipleInThread(tupleList):
"""
Run a list of functions in the same thread.
tupleList should be a list of (function, argsList, kwargsDict) tuples.
"""
from twisted.internet import reactor
reactor.callInThread(_runMultiple, tupleList)
def blockingCallFromThread(reactor, f, *a, **kw):
"""
Run a function in the reactor from a thread, and wait for the result
synchronously. If the function returns a L{Deferred}, wait for its
result and return that.
@param reactor: The L{IReactorThreads} provider which will be used to
schedule the function call.
@param f: the callable to run in the reactor thread
@type f: any callable.
@param a: the arguments to pass to C{f}.
@param kw: the keyword arguments to pass to C{f}.
@return: the result of the L{Deferred} returned by C{f}, or the result
of C{f} if it returns anything other than a L{Deferred}.
@raise: If C{f} raises a synchronous exception,
C{blockingCallFromThread} will raise that exception. If C{f}
returns a L{Deferred} which fires with a L{Failure},
C{blockingCallFromThread} will raise that failure's exception (see
L{Failure.raiseException}).
"""
queue = Queue.Queue()
def _callFromThread():
result = defer.maybeDeferred(f, *a, **kw)
result.addBoth(queue.put)
reactor.callFromThread(_callFromThread)
result = queue.get()
if isinstance(result, failure.Failure):
result.raiseException()
return result
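# Illustrative call from a worker thread other than the reactor thread
# (the fetch function name is hypothetical):
#     page = blockingCallFromThread(reactor, fetch_page, url)
# blocks the calling thread until the Deferred returned by fetch_page fires,
# then returns its result or re-raises its failure.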
__all__ = ["deferToThread", "deferToThreadPool", "callMultipleInThread",
"blockingCallFromThread"]
|
vasiliykochergin/euca2ools
|
refs/heads/master
|
euca2ools/commands/s3/deletebucket.py
|
6
|
# Copyright 2013-2014 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from euca2ools.commands.s3 import (S3Request,
validate_generic_bucket_name)
from requestbuilder import Arg
class DeleteBucket(S3Request):
DESCRIPTION = 'Delete a bucket'
ARGS = [Arg('bucket', route_to=None, help='name of the bucket to delete')]
def configure(self):
S3Request.configure(self)
validate_generic_bucket_name(self.args['bucket'])
def preprocess(self):
self.method = 'DELETE'
self.path = self.args['bucket']
|
christoph-buente/phantomjs
|
refs/heads/master
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/bot/botinfo.py
|
127
|
# Copyright (c) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# FIXME: We should consider hanging one of these off the tool object.
class BotInfo(object):
def __init__(self, tool, port_name):
self._tool = tool
self._port_name = port_name
def summary_text(self):
# bot_id is also stored on the options dictionary on the tool.
bot_id = self._tool.status_server.bot_id
bot_id_string = "Bot: %s " % (bot_id) if bot_id else ""
return "%sPort: %s Platform: %s" % (bot_id_string, self._port_name, self._tool.platform.display_name())
|
AVADOLearning/moodle-ubiquitous
|
refs/heads/master
|
_states/mssql_login.py
|
2
|
#
# Ubiquitous Moodle
#
# @author Luke Carrier <luke@carrier.im>
# @copyright 2018 The Ubiquitous Authors
#
def present(name, **kwargs):
ret = {
'name': name,
'result': False,
'changes': {},
'comment': '',
}
if __salt__['mssql.login_exists'](name, **kwargs):
ret['result'] = True
ret['comment'] = 'Login "{}" already exists'.format(name)
return ret
if __salt__['mssql.login_create'](name, **kwargs):
ret['result'] = True
ret['comment'] = 'Login "{}" created'.format(name)
ret['changes'][name] = 'Present'
return ret
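# Illustrative SLS usage (the state id and login name are made up):
#
#   moodle-login:
#     mssql_login.present:
#       - name: moodle
#
# Extra keyword arguments are passed straight through to the mssql execution
# module. The state is idempotent: it reports success without changes when the
# login already exists, and records a change only when it has to create it.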
|
hortonworks/hortonworks-sandbox
|
refs/heads/master
|
desktop/core/ext-py/Django-1.2.3/django/contrib/gis/tests/relatedapp/tests.py
|
25
|
import os, unittest
from django.contrib.gis.geos import *
from django.contrib.gis.db.models import Collect, Count, Extent, F, Union
from django.contrib.gis.geometry.backend import Geometry
from django.contrib.gis.tests.utils import mysql, oracle, postgis, spatialite, no_mysql, no_oracle, no_spatialite
from django.conf import settings
from models import City, Location, DirectoryEntry, Parcel, Book, Author, Article
cities = (('Aurora', 'TX', -97.516111, 33.058333),
('Roswell', 'NM', -104.528056, 33.387222),
('Kecksburg', 'PA', -79.460734, 40.18476),
)
class RelatedGeoModelTest(unittest.TestCase):
def test01_setup(self):
"Setting up for related model tests."
for name, state, lon, lat in cities:
loc = Location.objects.create(point=Point(lon, lat))
c = City.objects.create(name=name, state=state, location=loc)
def test02_select_related(self):
"Testing `select_related` on geographic models (see #7126)."
qs1 = City.objects.all()
qs2 = City.objects.select_related()
qs3 = City.objects.select_related('location')
for qs in (qs1, qs2, qs3):
for ref, c in zip(cities, qs):
nm, st, lon, lat = ref
self.assertEqual(nm, c.name)
self.assertEqual(st, c.state)
self.assertEqual(Point(lon, lat), c.location.point)
@no_mysql
def test03_transform_related(self):
"Testing the `transform` GeoQuerySet method on related geographic models."
# All the transformations are to state plane coordinate systems using
# US Survey Feet (thus a tolerance of 0 implies error w/in 1 survey foot).
tol = 0
def check_pnt(ref, pnt):
self.assertAlmostEqual(ref.x, pnt.x, tol)
self.assertAlmostEqual(ref.y, pnt.y, tol)
self.assertEqual(ref.srid, pnt.srid)
# Each city transformed to the SRID of their state plane coordinate system.
transformed = (('Kecksburg', 2272, 'POINT(1490553.98959621 314792.131023984)'),
('Roswell', 2257, 'POINT(481902.189077221 868477.766629735)'),
('Aurora', 2276, 'POINT(2269923.2484839 7069381.28722222)'),
)
for name, srid, wkt in transformed:
            # Doing this implicitly sets `select_related` to select the location.
# TODO: Fix why this breaks on Oracle.
qs = list(City.objects.filter(name=name).transform(srid, field_name='location__point'))
check_pnt(GEOSGeometry(wkt, srid), qs[0].location.point)
@no_mysql
@no_spatialite
def test04a_related_extent_aggregate(self):
"Testing the `extent` GeoQuerySet aggregates on related geographic models."
# This combines the Extent and Union aggregates into one query
aggs = City.objects.aggregate(Extent('location__point'))
# One for all locations, one that excludes Roswell.
all_extent = (-104.528060913086, 33.0583305358887,-79.4607315063477, 40.1847610473633)
txpa_extent = (-97.51611328125, 33.0583305358887,-79.4607315063477, 40.1847610473633)
e1 = City.objects.extent(field_name='location__point')
e2 = City.objects.exclude(name='Roswell').extent(field_name='location__point')
e3 = aggs['location__point__extent']
# The tolerance value is to four decimal places because of differences
# between the Oracle and PostGIS spatial backends on the extent calculation.
tol = 4
for ref, e in [(all_extent, e1), (txpa_extent, e2), (all_extent, e3)]:
for ref_val, e_val in zip(ref, e): self.assertAlmostEqual(ref_val, e_val, tol)
@no_mysql
def test04b_related_union_aggregate(self):
"Testing the `unionagg` GeoQuerySet aggregates on related geographic models."
# This combines the Extent and Union aggregates into one query
aggs = City.objects.aggregate(Union('location__point'))
# These are the points that are components of the aggregate geographic
# union that is returned.
p1 = Point(-104.528056, 33.387222)
p2 = Point(-97.516111, 33.058333)
p3 = Point(-79.460734, 40.18476)
# Creating the reference union geometry depending on the spatial backend,
# as Oracle will have a different internal ordering of the component
# geometries than PostGIS. The second union aggregate is for a union
# query that includes limiting information in the WHERE clause (in other
# words a `.filter()` precedes the call to `.unionagg()`).
if oracle:
ref_u1 = MultiPoint(p3, p1, p2, srid=4326)
ref_u2 = MultiPoint(p3, p2, srid=4326)
else:
ref_u1 = MultiPoint(p1, p2, p3, srid=4326)
ref_u2 = MultiPoint(p2, p3, srid=4326)
u1 = City.objects.unionagg(field_name='location__point')
u2 = City.objects.exclude(name='Roswell').unionagg(field_name='location__point')
u3 = aggs['location__point__union']
self.assertEqual(ref_u1, u1)
self.assertEqual(ref_u2, u2)
self.assertEqual(ref_u1, u3)
def test05_select_related_fk_to_subclass(self):
"Testing that calling select_related on a query over a model with an FK to a model subclass works"
# Regression test for #9752.
l = list(DirectoryEntry.objects.all().select_related())
def test06_f_expressions(self):
"Testing F() expressions on GeometryFields."
# Constructing a dummy parcel border and getting the City instance for
# assigning the FK.
b1 = GEOSGeometry('POLYGON((-97.501205 33.052520,-97.501205 33.052576,-97.501150 33.052576,-97.501150 33.052520,-97.501205 33.052520))', srid=4326)
pcity = City.objects.get(name='Aurora')
# First parcel has incorrect center point that is equal to the City;
# it also has a second border that is different from the first as a
# 100ft buffer around the City.
c1 = pcity.location.point
c2 = c1.transform(2276, clone=True)
b2 = c2.buffer(100)
p1 = Parcel.objects.create(name='P1', city=pcity, center1=c1, center2=c2, border1=b1, border2=b2)
# Now creating a second Parcel where the borders are the same, just
# in different coordinate systems. The center points are also the
# the same (but in different coordinate systems), and this time they
# actually correspond to the centroid of the border.
c1 = b1.centroid
c2 = c1.transform(2276, clone=True)
p2 = Parcel.objects.create(name='P2', city=pcity, center1=c1, center2=c2, border1=b1, border2=b1)
# Should return the second Parcel, which has the center within the
# border.
qs = Parcel.objects.filter(center1__within=F('border1'))
self.assertEqual(1, len(qs))
self.assertEqual('P2', qs[0].name)
if not mysql:
# This time center2 is in a different coordinate system and needs
# to be wrapped in transformation SQL.
qs = Parcel.objects.filter(center2__within=F('border1'))
self.assertEqual(1, len(qs))
self.assertEqual('P2', qs[0].name)
# Should return the first Parcel, which has the center point equal
# to the point in the City ForeignKey.
qs = Parcel.objects.filter(center1=F('city__location__point'))
self.assertEqual(1, len(qs))
self.assertEqual('P1', qs[0].name)
if not mysql:
# This time the city column should be wrapped in transformation SQL.
qs = Parcel.objects.filter(border2__contains=F('city__location__point'))
self.assertEqual(1, len(qs))
self.assertEqual('P1', qs[0].name)
def test07_values(self):
"Testing values() and values_list() and GeoQuerySets."
# GeoQuerySet and GeoValuesQuerySet, and GeoValuesListQuerySet respectively.
gqs = Location.objects.all()
gvqs = Location.objects.values()
gvlqs = Location.objects.values_list()
        # Iterating through each of the models, dictionaries, and tuples
# returned by the different types of GeoQuerySets.
for m, d, t in zip(gqs, gvqs, gvlqs):
# The values should be Geometry objects and not raw strings returned
# by the spatial database.
self.failUnless(isinstance(d['point'], Geometry))
self.failUnless(isinstance(t[1], Geometry))
self.assertEqual(m.point, d['point'])
self.assertEqual(m.point, t[1])
def test08_defer_only(self):
"Testing defer() and only() on Geographic models."
qs = Location.objects.all()
def_qs = Location.objects.defer('point')
for loc, def_loc in zip(qs, def_qs):
self.assertEqual(loc.point, def_loc.point)
def test09_pk_relations(self):
"Ensuring correct primary key column is selected across relations. See #10757."
# Adding two more cities, but this time making sure that their location
# ID values do not match their City ID values.
loc1 = Location.objects.create(point='POINT (-95.363151 29.763374)')
loc2 = Location.objects.create(point='POINT (-96.801611 32.782057)')
dallas = City.objects.create(name='Dallas', state='TX', location=loc2)
houston = City.objects.create(name='Houston', state='TX', location=loc1)
# The expected ID values -- notice the last two location IDs
# are out of order. We want to make sure that the related
# location ID column is selected instead of ID column for
# the city.
city_ids = (1, 2, 3, 4, 5)
loc_ids = (1, 2, 3, 5, 4)
ids_qs = City.objects.order_by('id').values('id', 'location__id')
for val_dict, c_id, l_id in zip(ids_qs, city_ids, loc_ids):
self.assertEqual(val_dict['id'], c_id)
self.assertEqual(val_dict['location__id'], l_id)
def test10_combine(self):
"Testing the combination of two GeoQuerySets. See #10807."
buf1 = City.objects.get(name='Aurora').location.point.buffer(0.1)
buf2 = City.objects.get(name='Kecksburg').location.point.buffer(0.1)
qs1 = City.objects.filter(location__point__within=buf1)
qs2 = City.objects.filter(location__point__within=buf2)
combined = qs1 | qs2
names = [c.name for c in combined]
self.assertEqual(2, len(names))
self.failUnless('Aurora' in names)
self.failUnless('Kecksburg' in names)
def test11_geoquery_pickle(self):
"Ensuring GeoQuery objects are unpickled correctly. See #10839."
import pickle
from django.contrib.gis.db.models.sql import GeoQuery
qs = City.objects.all()
q_str = pickle.dumps(qs.query)
q = pickle.loads(q_str)
self.assertEqual(GeoQuery, q.__class__)
# TODO: fix on Oracle -- get the following error because the SQL is ordered
# by a geometry object, which Oracle apparently doesn't like:
# ORA-22901: cannot compare nested table or VARRAY or LOB attributes of an object type
@no_oracle
def test12a_count(self):
"Testing `Count` aggregate use with the `GeoManager` on geo-fields."
# Creating a new City, 'Fort Worth', that uses the same location
# as Dallas.
dallas = City.objects.get(name='Dallas')
ftworth = City.objects.create(name='Fort Worth', state='TX', location=dallas.location)
# Count annotation should be 2 for the Dallas location now.
loc = Location.objects.annotate(num_cities=Count('city')).get(id=dallas.location.id)
self.assertEqual(2, loc.num_cities)
def test12b_count(self):
"Testing `Count` aggregate use with the `GeoManager` on non geo-fields. See #11087."
# Creating some data for the Book/Author non-geo models that
# use GeoManager. See #11087.
tp = Author.objects.create(name='Trevor Paglen')
Book.objects.create(title='Torture Taxi', author=tp)
Book.objects.create(title='I Could Tell You But Then You Would Have to be Destroyed by Me', author=tp)
Book.objects.create(title='Blank Spots on the Map', author=tp)
wp = Author.objects.create(name='William Patry')
Book.objects.create(title='Patry on Copyright', author=wp)
# Should only be one author (Trevor Paglen) returned by this query, and
# the annotation should have 3 for the number of books. Also testing
# with a `GeoValuesQuerySet` (see #11489).
qs = Author.objects.annotate(num_books=Count('books')).filter(num_books__gt=1)
vqs = Author.objects.values('name').annotate(num_books=Count('books')).filter(num_books__gt=1)
self.assertEqual(1, len(qs))
self.assertEqual(3, qs[0].num_books)
self.assertEqual(1, len(vqs))
self.assertEqual(3, vqs[0]['num_books'])
# TODO: The phantom model does appear on Oracle.
@no_oracle
def test13_select_related_null_fk(self):
"Testing `select_related` on a nullable ForeignKey via `GeoManager`. See #11381."
no_author = Book.objects.create(title='Without Author')
b = Book.objects.select_related('author').get(title='Without Author')
# Should be `None`, and not a 'dummy' model.
self.assertEqual(None, b.author)
@no_mysql
@no_oracle
@no_spatialite
def test14_collect(self):
"Testing the `collect` GeoQuerySet method and `Collect` aggregate."
# Reference query:
# SELECT AsText(ST_Collect("relatedapp_location"."point")) FROM "relatedapp_city" LEFT OUTER JOIN
# "relatedapp_location" ON ("relatedapp_city"."location_id" = "relatedapp_location"."id")
# WHERE "relatedapp_city"."state" = 'TX';
ref_geom = fromstr('MULTIPOINT(-97.516111 33.058333,-96.801611 32.782057,-95.363151 29.763374,-96.801611 32.782057)')
c1 = City.objects.filter(state='TX').collect(field_name='location__point')
c2 = City.objects.filter(state='TX').aggregate(Collect('location__point'))['location__point__collect']
for coll in (c1, c2):
# Even though Dallas and Ft. Worth share same point, Collect doesn't
# consolidate -- that's why 4 points in MultiPoint.
self.assertEqual(4, len(coll))
self.assertEqual(ref_geom, coll)
def test15_invalid_select_related(self):
"Testing doing select_related on the related name manager of a unique FK. See #13934."
qs = Article.objects.select_related('author__article')
# This triggers TypeError when `get_default_columns` has no `local_only`
# keyword. The TypeError is swallowed if QuerySet is actually
# evaluated as list generation swallows TypeError in CPython.
sql = str(qs.query)
# TODO: Related tests for KML, GML, and distance lookups.
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(RelatedGeoModelTest))
return s
|
ettm2012/MissionPlanner
|
refs/heads/master
|
LogAnalyzer/UnitTest.py
|
160
|
#!/usr/bin/env python
#
#
# Unit and regression tests for the LogAnalyzer code
#
#
# TODO: implement more unit+regression tests
import DataflashLog
import traceback
try:
# test DataflashLog reading 1
logdata = DataflashLog.DataflashLog()
logdata.read("examples/robert_lefebvre_octo_PM.log", ignoreBadlines=False)
assert(logdata.filename == "examples/robert_lefebvre_octo_PM.log")
assert(logdata.vehicleType == "ArduCopter")
assert(logdata.firmwareVersion == "V3.0.1")
assert(logdata.firmwareHash == "5c6503e2")
assert(logdata.freeRAM == 1331)
assert(logdata.hardwareType == "APM 2")
assert(len(logdata.formats) == 27)
assert(logdata.formats['GPS'].labels == ['Status', 'Time', 'NSats', 'HDop', 'Lat', 'Lng', 'RelAlt', 'Alt', 'Spd', 'GCrs'])
assert(logdata.formats['ATT'].labels == ['RollIn', 'Roll', 'PitchIn', 'Pitch', 'YawIn', 'Yaw', 'NavYaw'])
assert(logdata.parameters == {'RC7_REV': 1.0, 'MNT_MODE': 3.0, 'LOITER_LON_P': 1.0, 'FLTMODE1': 1.0, 'FLTMODE3': 0.0, 'FLTMODE2': 6.0, 'TUNE_HIGH': 10000.0, 'FLTMODE4': 5.0, 'FLTMODE6': 2.0, 'SYSID_SW_TYPE': 10.0, 'LOITER_LON_D': 0.0, 'RC5_REV': 1.0, 'THR_RATE_IMAX': 300.0, 'MNT_RC_IN_PAN': 0.0, 'RC2_MIN': 1110.0, 'LOITER_LON_I': 0.5, 'HLD_LON_P': 1.0, 'STB_RLL_I': 0.0, 'LOW_VOLT': 10.5, 'MNT_CONTROL_Y': 0.0, 'MNT_CONTROL_X': 0.0, 'FRAME': 1.0, 'MNT_CONTROL_Z': 0.0, 'OF_PIT_IMAX': 100.0, 'AHRS_ORIENTATION': 0.0, 'SIMPLE': 0.0, 'RC2_MAX': 1929.0, 'MNT_JSTICK_SPD': 0.0, 'RC8_FUNCTION': 0.0, 'INS_ACCSCAL_X': 0.992788, 'ACRO_P': 4.5, 'MNT_ANGMIN_ROL': -4500.0, 'OF_RLL_P': 2.5, 'STB_RLL_P': 3.5, 'STB_YAW_P': 3.0, 'SR0_RAW_SENS': 2.0, 'FLTMODE5': 0.0, 'RATE_YAW_I': 0.02, 'MAG_ENABLE': 1.0, 'MNT_RETRACT_Y': 0.0, 'MNT_RETRACT_X': 0.0, 'RATE_YAW_IMAX': 800.0, 'WPNAV_SPEED_DN': 150.0, 'WP_YAW_BEHAVIOR': 2.0, 'RC11_REV': 1.0, 'SYSID_THISMAV': 1.0, 'SR0_EXTRA1': 10.0, 'SR0_EXTRA2': 10.0, 'ACRO_BAL_PITCH': 200.0, 'STB_YAW_I': 0.0, 'INS_ACCSCAL_Z': 0.97621, 'INS_ACCSCAL_Y': 1.00147, 'LED_MODE': 9.0, 'FS_GCS_ENABLE': 0.0, 'MNT_RC_IN_ROLL': 0.0, 'INAV_TC_Z': 8.0, 'RATE_PIT_IMAX': 4500.0, 'HLD_LON_IMAX': 3000.0, 'THR_RATE_I': 0.0, 'SR3_EXTRA1': 0.0, 'STB_PIT_IMAX': 800.0, 'AHRS_TRIM_Z': 0.0, 'RC2_REV': 1.0, 'INS_MPU6K_FILTER': 20.0, 'THR_MIN': 130.0, 'AHRS_TRIM_Y': 0.021683, 'RC11_DZ': 0.0, 'THR_MAX': 1000.0, 'SR3_EXTRA2': 0.0, 'MNT_NEUTRAL_Z': 0.0, 'THR_MID': 300.0, 'MNT_NEUTRAL_X': 0.0, 'AMP_PER_VOLT': 18.002001, 'SR0_POSITION': 3.0, 'MNT_STAB_PAN': 0.0, 'FS_BATT_ENABLE': 0.0, 'LAND_SPEED': 50.0, 'OF_PIT_D': 0.12, 'SR0_PARAMS': 50.0, 'COMPASS_ORIENT': 0.0, 'WPNAV_ACCEL': 200.0, 'THR_ACCEL_IMAX': 5000.0, 'SR3_POSITION': 0.0, 'WPNAV_RADIUS': 100.0, 'WP_TOTAL': 14.0, 'RC8_MAX': 1856.0, 'OF_PIT_P': 2.5, 'SR3_RAW_SENS': 0.0, 'RTL_ALT_FINAL': 0.0, 'SR3_PARAMS': 0.0, 'SR0_EXTRA3': 2.0, 'LOITER_LAT_I': 0.5, 'RC6_DZ': 0.0, 'RC4_TRIM': 1524.0, 'RATE_RLL_P': 0.07, 'LOITER_LAT_D': 0.0, 'STB_PIT_P': 3.5, 'OF_PIT_I': 0.5, 'RATE_RLL_I': 1.0, 'AHRS_TRIM_X': 0.003997, 'RC3_REV': 1.0, 'STB_PIT_I': 0.0, 'FS_THR_ENABLE': 0.0, 'LOITER_LAT_P': 1.0, 'AHRS_RP_P': 0.1, 'FENCE_ACTION': 1.0, 'TOY_RATE': 1.0, 'RATE_RLL_D': 0.006, 'RC5_MIN': 1151.0, 'RC5_TRIM': 1676.0, 'STB_RLL_IMAX': 800.0, 'RC4_DZ': 40.0, 'AHRS_YAW_P': 0.1, 'RC11_TRIM': 1500.0, 'MOT_TCRV_ENABLE': 1.0, 'CAM_TRIGG_TYPE': 1.0, 'STB_YAW_IMAX': 800.0, 'RC4_MAX': 1942.0, 'LOITER_LAT_IMAX': 400.0, 'CH7_OPT': 9.0, 'RC11_FUNCTION': 7.0, 'SR0_EXT_STAT': 2.0, 'SONAR_TYPE': 0.0, 'RC3_MAX': 1930.0, 'RATE_YAW_D': 0.0, 'FENCE_ALT_MAX': 30.0, 'COMPASS_MOT_Y': 0.0, 'AXIS_ENABLE': 1.0, 'FENCE_ENABLE': 0.0, 'RC10_DZ': 0.0, 'PILOT_VELZ_MAX': 250.0, 'BATT_CAPACITY': 1760.0, 'FS_THR_VALUE': 975.0, 'RC4_MIN': 1115.0, 'MNT_ANGMAX_TIL': 4500.0, 'RTL_LOIT_TIME': 5000.0, 'ARMING_CHECK': 1.0, 'THR_RATE_P': 6.0, 'OF_RLL_IMAX': 100.0, 'RC6_MIN': 971.0, 'SR0_RAW_CTRL': 0.0, 'RC6_MAX': 2078.0, 'RC5_MAX': 1829.0, 'LOITER_LON_IMAX': 400.0, 'MNT_STAB_TILT': 0.0, 'MOT_TCRV_MIDPCT': 52.0, 'COMPASS_OFS_Z': -5.120774, 'COMPASS_OFS_Y': 46.709824, 'COMPASS_OFS_X': -20.490345, 'THR_ALT_I': 0.0, 'RC10_TRIM': 1500.0, 'INS_PRODUCT_ID': 88.0, 'RC11_MIN': 1100.0, 'FS_GPS_ENABLE': 1.0, 'HLD_LAT_IMAX': 3000.0, 'RC3_TRIM': 1476.0, 'RC6_FUNCTION': 0.0, 'TRIM_THROTTLE': 260.0, 'MNT_STAB_ROLL': 0.0, 'INAV_TC_XY': 2.5, 'RC1_DZ': 30.0, 'MNT_RETRACT_Z': 0.0, 'THR_ACC_ENABLE': 1.0, 'LOG_BITMASK': 830.0, 'TUNE_LOW': 0.0, 'CIRCLE_RATE': 5.0, 'CAM_DURATION': 10.0, 'MNT_NEUTRAL_Y': 0.0, 'RC10_MIN': 1100.0, 'INS_ACCOFFS_X': 
-0.019376, 'THR_RATE_D': 0.0, 'INS_ACCOFFS_Z': 1.370947, 'RC4_REV': 1.0, 'CIRCLE_RADIUS': 10.0, 'RATE_RLL_IMAX': 4500.0, 'HLD_LAT_P': 1.0, 'AHRS_GPS_MINSATS': 6.0, 'FLOW_ENABLE': 0.0, 'RC8_REV': 1.0, 'SONAR_GAIN': 0.2, 'RC2_TRIM': 1521.0, 'WP_INDEX': 0.0, 'RC1_REV': 1.0, 'RC7_DZ': 0.0, 'AHRS_GPS_USE': 1.0, 'MNT_ANGMIN_PAN': -4500.0, 'SR3_RC_CHAN': 0.0, 'COMPASS_LEARN': 0.0, 'ACRO_TRAINER': 1.0, 'CAM_SERVO_OFF': 1100.0, 'RC5_DZ': 0.0, 'SCHED_DEBUG': 0.0, 'RC11_MAX': 1900.0, 'AHRS_WIND_MAX': 0.0, 'SR3_EXT_STAT': 0.0, 'MNT_ANGMAX_PAN': 4500.0, 'MNT_ANGMAX_ROL': 4500.0, 'RC_SPEED': 490.0, 'SUPER_SIMPLE': 0.0, 'VOLT_DIVIDER': 10.0, 'COMPASS_MOTCT': 0.0, 'SR3_RAW_CTRL': 0.0, 'SONAR_ENABLE': 0.0, 'INS_ACCOFFS_Y': 0.362242, 'SYSID_SW_MREV': 120.0, 'WPNAV_LOIT_SPEED': 1000.0, 'BATT_MONITOR': 4.0, 'MNT_RC_IN_TILT': 8.0, 'CH8_OPT': 0.0, 'RTL_ALT': 1000.0, 'SR0_RC_CHAN': 2.0, 'RC1_MIN': 1111.0, 'RSSI_PIN': -1.0, 'MOT_TCRV_MAXPCT': 93.0, 'GND_ABS_PRESS': 101566.97, 'RC1_MAX': 1936.0, 'FENCE_TYPE': 3.0, 'RC5_FUNCTION': 0.0, 'OF_RLL_D': 0.12, 'BATT_VOLT_PIN': 13.0, 'WPNAV_SPEED': 1000.0, 'RC7_MAX': 1884.0, 'CAM_SERVO_ON': 1300.0, 'RATE_PIT_I': 1.0, 'RC7_MIN': 969.0, 'AHRS_COMP_BETA': 0.1, 'OF_RLL_I': 0.5, 'COMPASS_DEC': 0.0, 'RC3_MIN': 1113.0, 'RC2_DZ': 30.0, 'FENCE_RADIUS': 30.0, 'HLD_LON_I': 0.0, 'ACRO_BAL_ROLL': 200.0, 'COMPASS_AUTODEC': 1.0, 'SR3_EXTRA3': 0.0, 'COMPASS_USE': 1.0, 'RC10_MAX': 1900.0, 'RATE_PIT_P': 0.07, 'GND_TEMP': 21.610104, 'RC7_TRIM': 970.0, 'RC10_REV': 1.0, 'RATE_YAW_P': 0.2, 'THR_ALT_P': 1.0, 'RATE_PIT_D': 0.006, 'ESC': 0.0, 'MNT_ANGMIN_TIL': -4500.0, 'SERIAL3_BAUD': 57.0, 'RC8_MIN': 968.0, 'THR_ALT_IMAX': 300.0, 'SYSID_MYGCS': 255.0, 'INS_GYROFFS_Y': 0.581989, 'TUNE': 0.0, 'RC8_TRIM': 970.0, 'RC3_DZ': 30.0, 'AHRS_GPS_GAIN': 1.0, 'THR_ACCEL_D': 0.0, 'TELEM_DELAY': 0.0, 'THR_ACCEL_I': 0.5, 'COMPASS_MOT_X': 0.0, 'COMPASS_MOT_Z': 0.0, 'RC10_FUNCTION': 0.0, 'INS_GYROFFS_X': -0.001698, 'INS_GYROFFS_Z': 0.01517, 'RC6_TRIM': 1473.0, 'THR_ACCEL_P': 1.2, 'RC8_DZ': 0.0, 'HLD_LAT_I': 0.0, 'RC7_FUNCTION': 0.0, 'RC6_REV': 1.0, 'BATT_CURR_PIN': 12.0, 'WPNAV_SPEED_UP': 250.0, 'RC1_TRIM': 1524.0})
assert(logdata.messages == {})
assert(logdata.modeChanges == {2204: ('LOITER', 269), 4594: ('STABILIZE', 269), 644: ('ALT_HOLD', 269), 4404: ('ALT_HOLD', 269)})
assert(logdata.channels['GPS']['NSats'].min() == 6)
assert(logdata.channels['GPS']['NSats'].max() == 8)
assert(logdata.channels['GPS']['HDop'].listData[0] == (552, 4.68))
assert(logdata.channels['GPS']['HDop'].listData[44] == (768, 4.67))
assert(logdata.channels['GPS']['HDop'].listData[157] == (1288, 2.28))
assert(logdata.channels['CTUN']['ThrOut'].listData[5] == (321, 139))
assert(logdata.channels['CTUN']['ThrOut'].listData[45] == (409, 242))
assert(logdata.channels['CTUN']['ThrOut'].listData[125] == (589, 266))
assert(logdata.channels['CTUN']['CRate'].listData[3] == (317, 35))
assert(logdata.channels['CTUN']['CRate'].listData[51] == (421, 31))
assert(logdata.channels['CTUN']['CRate'].listData[115] == (563, -8))
assert(int(logdata.filesizeKB) == 302)
assert(logdata.durationSecs == 155)
assert(logdata.lineCount == 4750)
# test LogIterator class
lit = DataflashLog.LogIterator(logdata)
assert(lit.currentLine == 0)
assert(lit.iterators == {'CURR': (0, 310), 'ERR': (0, 307), 'NTUN': (0, 2206), 'CTUN': (0, 308), 'GPS': (0, 552), 'CMD': (0, 607), 'D32': (0, 305), 'ATT': (0, 311), 'EV': (0, 306), 'DU32': (0, 309), 'PM': (0, 479)})
lit.jump(500)
assert(lit.iterators == {'CURR': (9, 514), 'ERR': (1, 553), 'NTUN': (0, 2206), 'CTUN': (87, 500), 'GPS': (0, 552), 'CMD': (0, 607), 'D32': (0, 305), 'ATT': (83, 501), 'EV': (4, 606), 'DU32': (9, 513), 'PM': (1, 719)})
assert(lit['CTUN']['ThrIn'] == 450)
assert(lit['ATT']['RollIn'] == 11.19)
assert(lit['CURR']['CurrTot'] == 25.827288)
assert(lit['D32']['Value'] == 11122)
lit.next()
assert(lit.iterators == {'CURR': (9, 514), 'ERR': (1, 553), 'NTUN': (0, 2206), 'CTUN': (88, 502), 'GPS': (0, 552), 'CMD': (0, 607), 'D32': (0, 305), 'ATT': (83, 501), 'EV': (4, 606), 'DU32': (9, 513), 'PM': (1, 719)})
lit.jump(4750)
lit.next()
assert(lit.currentLine == 4751)
assert(lit['ATT']['Roll'] == 2.99)
# TODO: unit test DataflashLog reading 2
# ...
# TODO: unit test the log test classes
# ...
print "All unit/regression tests GOOD\n"
except Exception as e:
print "Error found: " + traceback.format_exc()
print "UNIT TEST FAILED\n"
|
kbourgoin/hiicart
|
refs/heads/master
|
hiicart/gateway/paypal2/errors.py
|
2
|
from hiicart.gateway.base import GatewayError
class Paypal2GatewayError(GatewayError):
pass
|
simpeg/simpegem
|
refs/heads/master
|
simpegEM/Tests/test_TDEM_forward_Analytic.py
|
2
|
import unittest
from SimPEG import *
import simpegEM as EM
from scipy.constants import mu_0
import matplotlib.pyplot as plt
try:
from pymatsolver import MumpsSolver
except ImportError, e:
MumpsSolver = SolverLU
def halfSpaceProblemAnaDiff(meshType, sig_half=1e-2, rxOffset=50., bounds=[1e-5,1e-3], showIt=False):
if meshType == 'CYL':
cs, ncx, ncz, npad = 5., 30, 10, 15
hx = [(cs,ncx), (cs,npad,1.3)]
hz = [(cs,npad,-1.3), (cs,ncz), (cs,npad,1.3)]
mesh = Mesh.CylMesh([hx,1,hz], '00C')
elif meshType == 'TENSOR':
cs, nc, npad = 20., 13, 5
hx = [(cs,npad,-1.3), (cs,nc), (cs,npad,1.3)]
hy = [(cs,npad,-1.3), (cs,nc), (cs,npad,1.3)]
hz = [(cs,npad,-1.3), (cs,nc), (cs,npad,1.3)]
mesh = Mesh.TensorMesh([hx,hy,hz], 'CCC')
active = mesh.vectorCCz<0.
actMap = Maps.ActiveCells(mesh, active, np.log(1e-8), nC=mesh.nCz)
mapping = Maps.ExpMap(mesh) * Maps.Vertical1DMap(mesh) * actMap
rx = EM.TDEM.RxTDEM(np.array([[rxOffset, 0., 0.]]), np.logspace(-5,-4, 21), 'bz')
src = EM.TDEM.SrcTDEM_VMD_MVP([rx], loc=np.array([0., 0., 0.]))
# src = EM.TDEM.SrcTDEM([rx], loc=np.array([0., 0., 0.]))
survey = EM.TDEM.SurveyTDEM([src])
prb = EM.TDEM.ProblemTDEM_b(mesh, mapping=mapping)
prb.Solver = MumpsSolver
prb.timeSteps = [(1e-06, 40), (5e-06, 40), (1e-05, 40), (5e-05, 40), (0.0001, 40), (0.0005, 40)]
sigma = np.ones(mesh.nCz)*1e-8
sigma[active] = sig_half
sigma = np.log(sigma[active])
prb.pair(survey)
bz_ana = mu_0*EM.Analytics.hzAnalyticDipoleT(rx.locs[0][0]+1e-3, rx.times, sig_half)
bz_calc = survey.dpred(sigma)
ind = np.logical_and(rx.times > bounds[0],rx.times < bounds[1])
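    # Relative misfit between the simulated and analytic bz responses, taken on
    # log10 amplitudes and restricted to the time window given by `bounds`.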
log10diff = np.linalg.norm(np.log10(np.abs(bz_calc[ind])) - np.log10(np.abs(bz_ana[ind])))/np.linalg.norm(np.log10(np.abs(bz_ana[ind])))
print 'Difference: ', log10diff
if showIt == True:
plt.loglog(rx.times[bz_calc>0], bz_calc[bz_calc>0], 'r', rx.times[bz_calc<0], -bz_calc[bz_calc<0], 'r--')
plt.loglog(rx.times, abs(bz_ana), 'b*')
plt.title('sig_half = %e'%sig_half)
plt.show()
return log10diff
class TDEM_bTests(unittest.TestCase):
def test_analytic_p2_CYL_50m(self):
self.assertTrue(halfSpaceProblemAnaDiff('CYL', rxOffset=50., sig_half=1e+2) < 0.01)
def test_analytic_p1_CYL_50m(self):
self.assertTrue(halfSpaceProblemAnaDiff('CYL', rxOffset=50., sig_half=1e+1) < 0.01)
def test_analytic_p0_CYL_50m(self):
self.assertTrue(halfSpaceProblemAnaDiff('CYL', rxOffset=50., sig_half=1e+0) < 0.01)
def test_analytic_m1_CYL_50m(self):
self.assertTrue(halfSpaceProblemAnaDiff('CYL', rxOffset=50., sig_half=1e-1) < 0.01)
def test_analytic_m2_CYL_50m(self):
self.assertTrue(halfSpaceProblemAnaDiff('CYL', rxOffset=50., sig_half=1e-2) < 0.01)
def test_analytic_m3_CYL_50m(self):
self.assertTrue(halfSpaceProblemAnaDiff('CYL', rxOffset=50., sig_half=1e-3) < 0.02)
def test_analytic_p0_CYL_1m(self):
self.assertTrue(halfSpaceProblemAnaDiff('CYL', rxOffset=1.0, sig_half=1e+0) < 0.01)
def test_analytic_m1_CYL_1m(self):
self.assertTrue(halfSpaceProblemAnaDiff('CYL', rxOffset=1.0, sig_half=1e-1) < 0.01)
def test_analytic_m2_CYL_1m(self):
self.assertTrue(halfSpaceProblemAnaDiff('CYL', rxOffset=1.0, sig_half=1e-2) < 0.01)
def test_analytic_m3_CYL_1m(self):
self.assertTrue(halfSpaceProblemAnaDiff('CYL', rxOffset=1.0, sig_half=1e-3) < 0.02)
if __name__ == '__main__':
unittest.main()
|
DEVSENSE/PTVS
|
refs/heads/master
|
Python/Tests/TestData/Classification/MultiLineString.py
|
7
|
x = '''
contents = open(%(filename)r, 'rb').read().replace("\\r\\n", "\\n")
'''
|
cclauss/Pythonista_ui
|
refs/heads/master
|
ValidatingView.py
|
1
|
# coding: utf-8
import ui
# See: https://forum.omz-software.com/topic/2499/textfield-validation-example
class ValidatingView(ui.View):
def __init__(self):
for name in 'lower upper title numeric'.split():
text_field = ui.TextField(name=name)
text_field.delegate = self
text_field.height = 24
text_field.text = name if name != 'numeric' else 'a1b2c3d4e5f'
self.add_subview(text_field)
self.textfield_did_change(text_field)
self.present()
def layout(self):
for i, subview in enumerate(self.subviews):
subview.width = self.width
subview.y = (i + 1) * (subview.height + 10)
def textfield_did_change(self, textfield):
if textfield.name == 'lower':
textfield.text = textfield.text.lower()
elif textfield.name == 'upper':
textfield.text = textfield.text.upper()
elif textfield.name == 'title':
textfield.text = textfield.text.title()
elif textfield.name == 'numeric':
textfield.text = ''.join(c for c in textfield.text if c.isdigit())
ValidatingView()
|
LogicalDash/kivy
|
refs/heads/master
|
kivy/effects/dampedscroll.py
|
70
|
'''
Damped scroll effect
====================
.. versionadded:: 1.7.0
This damped scroll effect will use the
:attr:`~kivy.effects.scroll.ScrollEffect.overscroll` to calculate the scroll
value, slowing the return to the upper or lower limit.
'''
__all__ = ('DampedScrollEffect',)
from kivy.effects.scroll import ScrollEffect
from kivy.properties import NumericProperty, BooleanProperty
from kivy.metrics import sp
class DampedScrollEffect(ScrollEffect):
'''DampedScrollEffect class. See the module documentation for more
information.
'''
edge_damping = NumericProperty(0.25)
'''Edge damping.
:attr:`edge_damping` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0.25
'''
spring_constant = NumericProperty(2.0)
'''Spring constant.
:attr:`spring_constant` is a :class:`~kivy.properties.NumericProperty` and
defaults to 2.0
'''
min_overscroll = NumericProperty(.5)
'''An overscroll less than this amount will be normalized to 0.
.. versionadded:: 1.8.0
:attr:`min_overscroll` is a :class:`~kivy.properties.NumericProperty` and
defaults to .5.
'''
round_value = BooleanProperty(True)
'''If True, when the motion stops, :attr:`value` is rounded to the nearest
integer.
.. versionadded:: 1.8.0
:attr:`round_value` is a :class:`~kivy.properties.BooleanProperty` and
defaults to True.
'''
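    # Note added for clarity (not in the original source): update_velocity below
    # applies a simple damped-spring model while overscrolled -- on top of the
    # usual friction term, the total force gains velocity * edge_damping plus
    # overscroll * spring_constant, which pulls the scroll value back towards the
    # nearest bound.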
def update_velocity(self, dt):
if abs(self.velocity) <= self.min_velocity and self.overscroll == 0:
self.velocity = 0
# why does this need to be rounded? For now refactored it.
if self.round_value:
self.value = round(self.value)
return
total_force = self.velocity * self.friction
if abs(self.overscroll) > self.min_overscroll:
total_force += self.velocity * self.edge_damping
total_force += self.overscroll * self.spring_constant
else:
self.overscroll = 0
stop_overscroll = ''
if not self.is_manual:
if self.overscroll > 0 and self.velocity < 0:
stop_overscroll = 'max'
elif self.overscroll < 0 and self.velocity > 0:
stop_overscroll = 'min'
self.velocity = self.velocity - total_force
if not self.is_manual:
self.apply_distance(self.velocity * dt)
if stop_overscroll == 'min' and self.value > self.min:
self.value = self.min
self.velocity = 0
return
if stop_overscroll == 'max' and self.value < self.max:
self.value = self.max
self.velocity = 0
return
self.trigger_velocity_update()
def on_value(self, *args):
scroll_min = self.min
scroll_max = self.max
if scroll_min > scroll_max:
scroll_min, scroll_max = scroll_max, scroll_min
if self.value < scroll_min:
self.overscroll = self.value - scroll_min
elif self.value > scroll_max:
self.overscroll = self.value - scroll_max
else:
self.overscroll = 0
self.scroll = self.value
def on_overscroll(self, *args):
self.trigger_velocity_update()
def apply_distance(self, distance):
os = abs(self.overscroll)
if os:
distance /= 1. + os / sp(200.)
super(DampedScrollEffect, self).apply_distance(distance)
|
colinnewell/odoo
|
refs/heads/8.0
|
addons/project/tests/test_project_flow.py
|
198
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2013-TODAY OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from .test_project_base import TestProjectBase
from openerp.exceptions import AccessError
from openerp.tools import mute_logger
EMAIL_TPL = """Return-Path: <whatever-2a840@postmaster.twitter.com>
X-Original-To: {email_to}
Delivered-To: {email_to}
To: {email_to}
Received: by mail1.openerp.com (Postfix, from userid 10002)
id 5DF9ABFB2A; Fri, 10 Aug 2012 16:16:39 +0200 (CEST)
Message-ID: {msg_id}
Date: Tue, 29 Nov 2011 12:43:21 +0530
From: {email_from}
MIME-Version: 1.0
Subject: {subject}
Content-Type: text/plain; charset=ISO-8859-1; format=flowed
Hello,
This email should create a new entry in your module. Please check that it
effectively works.
Thanks,
--
Raoul Boitempoils
Integrator at Agrolait"""
class TestProjectFlow(TestProjectBase):
@mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
def test_00_project_process(self):
""" Testing project management """
cr, uid, user_projectuser_id, user_projectmanager_id, project_pigs_id = self.cr, self.uid, self.user_projectuser_id, self.user_projectmanager_id, self.project_pigs_id
# ProjectUser: set project as template -> raise
self.assertRaises(AccessError, self.project_project.set_template, cr, user_projectuser_id, [project_pigs_id])
# Other tests are done using a ProjectManager
project = self.project_project.browse(cr, user_projectmanager_id, project_pigs_id)
self.assertNotEqual(project.state, 'template', 'project: incorrect state, should not be a template')
# Set test project as template
self.project_project.set_template(cr, user_projectmanager_id, [project_pigs_id])
project.refresh()
self.assertEqual(project.state, 'template', 'project: set_template: project state should be template')
self.assertEqual(len(project.tasks), 0, 'project: set_template: project tasks should have been set inactive')
# Duplicate template
new_template_act = self.project_project.duplicate_template(cr, user_projectmanager_id, [project_pigs_id])
new_project = self.project_project.browse(cr, user_projectmanager_id, new_template_act['res_id'])
self.assertEqual(new_project.state, 'open', 'project: incorrect duplicate_template')
self.assertEqual(len(new_project.tasks), 2, 'project: duplicating a project template should duplicate its tasks')
# Convert into real project
self.project_project.reset_project(cr, user_projectmanager_id, [project_pigs_id])
project.refresh()
        self.assertEqual(project.state, 'open', 'project: reset project should be in open state')
self.assertEqual(len(project.tasks), 2, 'project: reset_project: project tasks should have been set active')
# Put as pending
self.project_project.set_pending(cr, user_projectmanager_id, [project_pigs_id])
project.refresh()
self.assertEqual(project.state, 'pending', 'project: should be in pending state')
# Re-open
self.project_project.set_open(cr, user_projectmanager_id, [project_pigs_id])
project.refresh()
self.assertEqual(project.state, 'open', 'project: reopened project should be in open state')
# Close project
self.project_project.set_done(cr, user_projectmanager_id, [project_pigs_id])
project.refresh()
self.assertEqual(project.state, 'close', 'project: closed project should be in close state')
# Re-open
self.project_project.set_open(cr, user_projectmanager_id, [project_pigs_id])
project.refresh()
# Re-convert into a template and schedule tasks
self.project_project.set_template(cr, user_projectmanager_id, [project_pigs_id])
self.project_project.schedule_tasks(cr, user_projectmanager_id, [project_pigs_id])
# Copy the project
new_project_id = self.project_project.copy(cr, user_projectmanager_id, project_pigs_id)
new_project = self.project_project.browse(cr, user_projectmanager_id, new_project_id)
self.assertEqual(len(new_project.tasks), 2, 'project: copied project should have copied task')
# Cancel the project
self.project_project.set_cancel(cr, user_projectmanager_id, [project_pigs_id])
self.assertEqual(project.state, 'cancelled', 'project: cancelled project should be in cancel state')
def test_10_task_process(self):
""" Testing task creation and management """
cr, uid, user_projectuser_id, user_projectmanager_id, project_pigs_id = self.cr, self.uid, self.user_projectuser_id, self.user_projectmanager_id, self.project_pigs_id
def format_and_process(template, email_to='project+pigs@mydomain.com, other@gmail.com', subject='Frogs',
email_from='Patrick Ratatouille <patrick.ratatouille@agrolait.com>',
msg_id='<1198923581.41972151344608186760.JavaMail@agrolait.com>'):
self.assertEqual(self.project_task.search(cr, uid, [('name', '=', subject)]), [])
mail = template.format(email_to=email_to, subject=subject, email_from=email_from, msg_id=msg_id)
self.mail_thread.message_process(cr, uid, None, mail)
return self.project_task.search(cr, uid, [('name', '=', subject)])
# Do: incoming mail from an unknown partner on an alias creates a new task 'Frogs'
frogs = format_and_process(EMAIL_TPL)
# Test: one task created by mailgateway administrator
self.assertEqual(len(frogs), 1, 'project: message_process: a new project.task should have been created')
task = self.project_task.browse(cr, user_projectuser_id, frogs[0])
res = self.project_task.get_metadata(cr, uid, [task.id])[0].get('create_uid') or [None]
self.assertEqual(res[0], uid,
'project: message_process: task should have been created by uid as alias_user_id is False on the alias')
# Test: messages
self.assertEqual(len(task.message_ids), 3,
                         'project: message_process: newly created task should have 3 messages: creation, assignment and email')
self.assertEqual(task.message_ids[2].subtype_id.name, 'Task Created',
'project: message_process: first message of new task should have Task Created subtype')
self.assertEqual(task.message_ids[1].subtype_id.name, 'Task Assigned',
                         'project: message_process: second message of new task should have Task Assigned subtype')
self.assertEqual(task.message_ids[0].author_id.id, self.email_partner_id,
'project: message_process: second message should be the one from Agrolait (partner failed)')
self.assertEqual(task.message_ids[0].subject, 'Frogs',
'project: message_process: second message should be the one from Agrolait (subject failed)')
# Test: task content
self.assertEqual(task.name, 'Frogs', 'project_task: name should be the email subject')
self.assertEqual(task.project_id.id, self.project_pigs_id, 'project_task: incorrect project')
self.assertEqual(task.stage_id.sequence, 1, 'project_task: should have a stage with sequence=1')
# Open the delegation wizard
delegate_id = self.project_task_delegate.create(cr, user_projectuser_id, {
'user_id': user_projectuser_id,
'planned_hours': 12.0,
'planned_hours_me': 2.0,
}, {'active_id': task.id})
self.project_task_delegate.delegate(cr, user_projectuser_id, [delegate_id], {'active_id': task.id})
# Check delegation details
task.refresh()
self.assertEqual(task.planned_hours, 2, 'project_task_delegate: planned hours is not correct after delegation')
|
Dark-Hacker/horizon
|
refs/heads/master
|
openstack_dashboard/contrib/sahara/content/data_processing/data_sources/tests.py
|
11
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox3.mox import IsA # noqa
import six
from openstack_dashboard.contrib.sahara import api
from openstack_dashboard.test import helpers as test
INDEX_URL = reverse('horizon:project:data_processing.data_sources:index')
DETAILS_URL = reverse(
'horizon:project:data_processing.data_sources:details', args=['id'])
CREATE_URL = reverse(
'horizon:project:data_processing.data_sources:create-data-source')
EDIT_URL = reverse(
'horizon:project:data_processing.data_sources:edit-data-source',
args=['id'])
class DataProcessingDataSourceTests(test.TestCase):
@test.create_stubs({api.sahara: ('data_source_list',)})
def test_index(self):
api.sahara.data_source_list(IsA(http.HttpRequest)) \
.AndReturn(self.data_sources.list())
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(
res, 'project/data_processing.data_sources/data_sources.html')
self.assertContains(res, 'Data Sources')
self.assertContains(res, 'Name')
self.assertContains(res, 'sampleOutput')
self.assertContains(res, 'sampleOutput2')
@test.create_stubs({api.sahara: ('data_source_get',)})
def test_details(self):
api.sahara.data_source_get(IsA(http.HttpRequest), IsA(six.text_type)) \
.MultipleTimes().AndReturn(self.data_sources.first())
self.mox.ReplayAll()
res = self.client.get(DETAILS_URL)
self.assertTemplateUsed(
res, 'project/data_processing.data_sources/details.html')
self.assertContains(res, 'sampleOutput')
self.assertContains(res, 'Data Source Details')
@test.create_stubs({api.sahara: ('data_source_list',
'data_source_delete')})
def test_delete(self):
data_source = self.data_sources.first()
api.sahara.data_source_list(IsA(http.HttpRequest)) \
.AndReturn(self.data_sources.list())
api.sahara.data_source_delete(IsA(http.HttpRequest), data_source.id)
self.mox.ReplayAll()
form_data = {'action': 'data_sources__delete__%s' % data_source.id}
res = self.client.post(INDEX_URL, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.assertMessageCount(success=1)
@test.create_stubs({api.sahara: ('data_source_create',)})
def test_create(self):
data_source = self.data_sources.first()
api.sahara.data_source_create(IsA(http.HttpRequest),
data_source.name,
data_source.description,
data_source.type,
data_source.url,
"",
"") \
.AndReturn(self.data_sources.first())
self.mox.ReplayAll()
form_data = {
'data_source_url': data_source.url,
'data_source_name': data_source.name,
'data_source_description': data_source.description,
'data_source_type': data_source.type
}
res = self.client.post(CREATE_URL, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.assertMessageCount(success=1)
@test.create_stubs({api.sahara: ('data_source_update',
'data_source_get',)})
def test_edit(self):
data_source = self.data_sources.first()
api_data = {
'url': data_source.url,
'credentials': {'user': '', 'pass': ''},
'type': data_source.type,
'name': data_source.name,
'description': data_source.description
}
api.sahara.data_source_get(IsA(http.HttpRequest),
IsA(unicode)) \
.AndReturn(self.data_sources.first())
api.sahara.data_source_update(IsA(http.HttpRequest),
IsA(unicode),
api_data) \
.AndReturn(self.data_sources.first())
self.mox.ReplayAll()
form_data = {
'data_source_url': data_source.url,
'data_source_name': data_source.name,
'data_source_description': data_source.description,
'data_source_type': data_source.type
}
res = self.client.post(EDIT_URL, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.assertMessageCount(success=1)
|
ibinti/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyCompatibilityInspection/ellipsisInSubscriptionPy2.py
|
83
|
import numpy
x = numpy.zeros((3, 4, 5))
y = x[..., 0] # pass
y = x[..., 0, :] # pass
|
py-geek/City-Air
|
refs/heads/master
|
venv/lib/python2.7/site-packages/django/contrib/contenttypes/views.py
|
115
|
from __future__ import unicode_literals
from django import http
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site, get_current_site
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
def shortcut(request, content_type_id, object_id):
"""
Redirect to an object's page based on a content-type ID and an object ID.
"""
# Look up the object, making sure it's got a get_absolute_url() function.
try:
content_type = ContentType.objects.get(pk=content_type_id)
if not content_type.model_class():
raise http.Http404(_("Content type %(ct_id)s object has no associated model") %
{'ct_id': content_type_id})
obj = content_type.get_object_for_this_type(pk=object_id)
except (ObjectDoesNotExist, ValueError):
raise http.Http404(_("Content type %(ct_id)s object %(obj_id)s doesn't exist") %
{'ct_id': content_type_id, 'obj_id': object_id})
try:
get_absolute_url = obj.get_absolute_url
except AttributeError:
raise http.Http404(_("%(ct_name)s objects don't have a get_absolute_url() method") %
{'ct_name': content_type.name})
absurl = get_absolute_url()
# Try to figure out the object's domain, so we can do a cross-site redirect
# if necessary.
# If the object actually defines a domain, we're done.
if absurl.startswith('http://') or absurl.startswith('https://'):
return http.HttpResponseRedirect(absurl)
# Otherwise, we need to introspect the object's relationships for a
# relation to the Site object
object_domain = None
if Site._meta.installed:
opts = obj._meta
        # First, look for a many-to-many relationship to Site.
for field in opts.many_to_many:
if field.rel.to is Site:
try:
# Caveat: In the case of multiple related Sites, this just
# selects the *first* one, which is arbitrary.
object_domain = getattr(obj, field.name).all()[0].domain
except IndexError:
pass
if object_domain is not None:
break
# Next, look for a many-to-one relationship to Site.
if object_domain is None:
for field in obj._meta.fields:
if field.rel and field.rel.to is Site:
try:
object_domain = getattr(obj, field.name).domain
except Site.DoesNotExist:
pass
if object_domain is not None:
break
# Fall back to the current site (if possible).
if object_domain is None:
try:
object_domain = get_current_site(request).domain
except Site.DoesNotExist:
pass
# If all that malarkey found an object domain, use it. Otherwise, fall back
# to whatever get_absolute_url() returned.
if object_domain is not None:
protocol = 'https' if request.is_secure() else 'http'
return http.HttpResponseRedirect('%s://%s%s'
% (protocol, object_domain, absurl))
else:
return http.HttpResponseRedirect(absurl)
|
yuvadm/django-storages
|
refs/heads/master
|
storages/backends/couchdb.py
|
11
|
"""
This is a Custom Storage System for Django with CouchDB backend.
Created by Christian Klein.
(c) Copyright 2009 HUDORA GmbH. All Rights Reserved.
"""
import os
from cStringIO import StringIO
from urlparse import urljoin
from urllib import quote_plus
from django.conf import settings
from django.core.files import File
from django.core.files.storage import Storage
from django.core.exceptions import ImproperlyConfigured
try:
import couchdb
except ImportError:
raise ImproperlyConfigured("Could not load couchdb dependency.\
\nSee http://code.google.com/p/couchdb-python/")
DEFAULT_SERVER= getattr(settings, 'COUCHDB_DEFAULT_SERVER', 'http://couchdb.local:5984')
STORAGE_OPTIONS= getattr(settings, 'COUCHDB_STORAGE_OPTIONS', {})
class CouchDBStorage(Storage):
"""
CouchDBStorage - a Django Storage class for CouchDB.
The CouchDBStorage can be configured in settings.py, e.g.::
COUCHDB_STORAGE_OPTIONS = {
'server': "http://example.org",
'database': 'database_name'
}
Alternatively, the configuration can be passed as a dictionary.
"""
def __init__(self, **kwargs):
kwargs.update(STORAGE_OPTIONS)
self.base_url = kwargs.get('server', DEFAULT_SERVER)
server = couchdb.client.Server(self.base_url)
self.db = server[kwargs.get('database')]
def _put_file(self, name, content):
self.db[name] = {'size': len(content)}
self.db.put_attachment(self.db[name], content, filename='content')
return name
def get_document(self, name):
return self.db.get(name)
def _open(self, name, mode='rb'):
couchdb_file = CouchDBFile(name, self, mode=mode)
return couchdb_file
def _save(self, name, content):
content.open()
if hasattr(content, 'chunks'):
content_str = ''.join(chunk for chunk in content.chunks())
else:
content_str = content.read()
name = name.replace('/', '-')
return self._put_file(name, content_str)
def exists(self, name):
return name in self.db
def size(self, name):
doc = self.get_document(name)
if doc:
return doc['size']
return 0
def url(self, name):
return urljoin(self.base_url,
os.path.join(quote_plus(self.db.name),
quote_plus(name),
'content'))
def delete(self, name):
try:
del self.db[name]
except couchdb.client.ResourceNotFound:
raise IOError("File not found: %s" % name)
#def listdir(self, name):
# _all_docs?
# pass
class CouchDBFile(File):
"""
CouchDBFile - a Django File-like class for CouchDB documents.
"""
def __init__(self, name, storage, mode):
self._name = name
self._storage = storage
self._mode = mode
self._is_dirty = False
try:
self._doc = self._storage.get_document(name)
tmp, ext = os.path.split(name)
if ext:
filename = "content." + ext
else:
filename = "content"
attachment = self._storage.db.get_attachment(self._doc, filename=filename)
self.file = StringIO(attachment)
except couchdb.client.ResourceNotFound:
if 'r' in self._mode:
raise ValueError("The file cannot be reopened.")
else:
self.file = StringIO()
self._is_dirty = True
@property
def size(self):
return self._doc['size']
def write(self, content):
if 'w' not in self._mode:
raise AttributeError("File was opened for read-only access.")
self.file = StringIO(content)
self._is_dirty = True
def close(self):
if self._is_dirty:
self._storage._put_file(self._name, self.file.getvalue())
self.file.close()
|
drnextgis/QGIS
|
refs/heads/master
|
python/plugins/processing/algs/qgis/Eliminate.py
|
2
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
Eliminate.py
---------------------
Date : August 2012
Copyright : (C) 2013 by Bernhard Ströbl
Email : bernhard.stroebl@jena.de
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from builtins import str
from builtins import range
__author__ = 'Bernhard Ströbl'
__date__ = 'September 2013'
__copyright__ = '(C) 2013, Bernhard Ströbl'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.PyQt.QtCore import QLocale, QDate, QVariant
from qgis.core import QgsFeatureRequest, QgsFeature, QgsGeometry
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.core.ProcessingLog import ProcessingLog
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterTableField
from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterSelection
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class Eliminate(GeoAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
MODE = 'MODE'
KEEPSELECTION = 'KEEPSELECTION'
ATTRIBUTE = 'ATTRIBUTE'
COMPARISONVALUE = 'COMPARISONVALUE'
COMPARISON = 'COMPARISON'
MODE_LARGEST_AREA = 0
MODE_SMALLEST_AREA = 1
MODE_BOUNDARY = 2
def getIcon(self):
return QIcon(os.path.join(pluginPath, 'images', 'ftools', 'eliminate.png'))
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Eliminate sliver polygons')
self.group, self.i18n_group = self.trAlgorithm('Vector geometry tools')
self.modes = [self.tr('Largest area'),
self.tr('Smallest Area'),
self.tr('Largest common boundary')]
self.addParameter(ParameterVector(self.INPUT,
self.tr('Input layer'), [dataobjects.TYPE_VECTOR_POLYGON]))
self.addParameter(ParameterBoolean(self.KEEPSELECTION,
self.tr('Use current selection in input layer (works only if called from toolbox)'), False))
self.addParameter(ParameterTableField(self.ATTRIBUTE,
self.tr('Selection attribute'), self.INPUT))
self.comparisons = [
'==',
'!=',
'>',
'>=',
'<',
'<=',
'begins with',
'contains',
]
self.addParameter(ParameterSelection(self.COMPARISON,
self.tr('Comparison'), self.comparisons, default=0))
self.addParameter(ParameterString(self.COMPARISONVALUE,
self.tr('Value'), default='0'))
self.addParameter(ParameterSelection(self.MODE,
self.tr('Merge selection with the neighbouring polygon with the'),
self.modes))
self.addOutput(OutputVector(self.OUTPUT, self.tr('Cleaned'), datatype=[dataobjects.TYPE_VECTOR_POLYGON]))
def processAlgorithm(self, progress):
inLayer = dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT))
boundary = self.getParameterValue(self.MODE) == self.MODE_BOUNDARY
smallestArea = self.getParameterValue(self.MODE) == self.MODE_SMALLEST_AREA
keepSelection = self.getParameterValue(self.KEEPSELECTION)
processLayer = vector.duplicateInMemory(inLayer)
if not keepSelection:
# Make a selection with the values provided
attribute = self.getParameterValue(self.ATTRIBUTE)
comparison = self.comparisons[self.getParameterValue(self.COMPARISON)]
comparisonvalue = self.getParameterValue(self.COMPARISONVALUE)
selectindex = vector.resolveFieldIndex(processLayer, attribute)
selectType = processLayer.fields()[selectindex].type()
selectionError = False
if selectType in [QVariant.Int, QVariant.LongLong, QVariant.UInt, QVariant.ULongLong]:
try:
y = int(comparisonvalue)
except ValueError:
selectionError = True
msg = self.tr('Cannot convert "%s" to integer' % str(comparisonvalue))
elif selectType == QVariant.Double:
try:
y = float(comparisonvalue)
except ValueError:
selectionError = True
msg = self.tr('Cannot convert "%s" to float' % str(comparisonvalue))
elif selectType == QVariant.String:
# 10: string, boolean
try:
y = str(comparisonvalue)
except ValueError:
selectionError = True
msg = self.tr('Cannot convert "%s" to unicode' % str(comparisonvalue))
elif selectType == QVariant.Date:
# date
dateAndFormat = comparisonvalue.split(' ')
if len(dateAndFormat) == 1:
# QDate object
y = QLocale.system().toDate(dateAndFormat[0])
if y.isNull():
msg = self.tr('Cannot convert "%s" to date with system date format %s' % (str(dateAndFormat), QLocale.system().dateFormat()))
elif len(dateAndFormat) == 2:
y = QDate.fromString(dateAndFormat[0], dateAndFormat[1])
if y.isNull():
msg = self.tr('Cannot convert "%s" to date with format string "%s"' % (str(dateAndFormat[0]), dateAndFormat[1]))
else:
y = QDate()
msg = ''
if y.isNull():
                    # Conversion was unsuccessful
selectionError = True
msg += self.tr('Enter the date and the date format, e.g. "07.26.2011" "MM.dd.yyyy".')
if (comparison == 'begins with' or comparison == 'contains') \
and selectType != QVariant.String:
selectionError = True
msg = self.tr('"%s" can only be used with string fields' % comparison)
selected = []
if selectionError:
raise GeoAlgorithmExecutionException(
self.tr('Error in selection input: %s' % msg))
else:
for feature in processLayer.getFeatures():
aValue = feature.attributes()[selectindex]
if aValue is None:
continue
if selectType in [QVariant.Int, QVariant.LongLong, QVariant.UInt, QVariant.ULongLong]:
x = int(aValue)
elif selectType == QVariant.Double:
x = float(aValue)
elif selectType == QVariant.String:
# 10: string, boolean
x = str(aValue)
elif selectType == QVariant.Date:
# date
x = aValue # should be date
match = False
if comparison == '==':
match = x == y
elif comparison == '!=':
match = x != y
elif comparison == '>':
match = x > y
elif comparison == '>=':
match = x >= y
elif comparison == '<':
match = x < y
elif comparison == '<=':
match = x <= y
elif comparison == 'begins with':
match = x.startswith(y)
elif comparison == 'contains':
match = x.find(y) >= 0
if match:
selected.append(feature.id())
processLayer.selectByIds(selected)
if processLayer.selectedFeatureCount() == 0:
ProcessingLog.addToLog(ProcessingLog.LOG_WARNING,
self.tr('%s: (No selection in input layer "%s")' % (self.commandLineName(), self.getParameterValue(self.INPUT))))
# Keep references to the features to eliminate
featToEliminate = []
for aFeat in processLayer.selectedFeatures():
featToEliminate.append(aFeat)
# Delete all features to eliminate in processLayer (we won't save this)
processLayer.startEditing()
processLayer.deleteSelectedFeatures()
# ANALYZE
if len(featToEliminate) > 0: # Prevent zero division
start = 20.00
add = 80.00 / len(featToEliminate)
else:
start = 100
progress.setPercentage(start)
madeProgress = True
        # We go through the list and try to find, for each polygon to eliminate,
        # a neighbouring polygon it can be merged with. Polygons that could not
        # be merged are kept for another pass; as long as at least one merge
        # succeeded we run the whole pass again.
while madeProgress: # Check if we made any progress
madeProgress = False
featNotEliminated = []
# Iterate over the polygons to eliminate
for i in range(len(featToEliminate)):
feat = featToEliminate.pop()
geom2Eliminate = feat.geometry()
bbox = geom2Eliminate.boundingBox()
fit = processLayer.getFeatures(
QgsFeatureRequest().setFilterRect(bbox).setSubsetOfAttributes([]))
mergeWithFid = None
mergeWithGeom = None
max = 0
min = -1
selFeat = QgsFeature()
# use prepared geometries for faster intersection tests
engine = QgsGeometry.createGeometryEngine(geom2Eliminate.geometry())
engine.prepareGeometry()
while fit.nextFeature(selFeat):
selGeom = selFeat.geometry()
if engine.intersects(selGeom.geometry()):
# We have a candidate
iGeom = geom2Eliminate.intersection(selGeom)
if not iGeom:
continue
if boundary:
selValue = iGeom.length()
else:
# area. We need a common boundary in
# order to merge
if 0 < iGeom.length():
selValue = selGeom.area()
else:
selValue = -1
if -1 != selValue:
useThis = True
if smallestArea:
if -1 == min:
min = selValue
else:
if selValue < min:
min = selValue
else:
useThis = False
else:
if selValue > max:
max = selValue
else:
useThis = False
if useThis:
mergeWithFid = selFeat.id()
mergeWithGeom = QgsGeometry(selGeom)
# End while fit
if mergeWithFid is not None:
# A successful candidate
newGeom = mergeWithGeom.combine(geom2Eliminate)
if processLayer.changeGeometry(mergeWithFid, newGeom):
madeProgress = True
else:
raise GeoAlgorithmExecutionException(
self.tr('Could not replace geometry of feature with id %s' % mergeWithFid))
start = start + add
progress.setPercentage(start)
else:
featNotEliminated.append(feat)
# End for featToEliminate
featToEliminate = featNotEliminated
# End while
# Create output
output = self.getOutputFromName(self.OUTPUT)
writer = output.getVectorWriter(processLayer.fields(),
processLayer.wkbType(), processLayer.crs())
# Write all features that are left over to output layer
iterator = processLayer.getFeatures()
for feature in iterator:
writer.addFeature(feature)
# Leave processLayer untouched
processLayer.rollBack()
for feature in featNotEliminated:
writer.addFeature(feature)
|
hahnicity/ucdpv_vent_infrastructure
|
refs/heads/master
|
raspi/scripts/__init__.py
|
4
|
"""
ucdpv_vent_infrastructure "Platform for collecting, aggregating, and storing ventilator data"
Copyright (C) 2017 Gregory Rehm
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
|
redhat-openstack/django
|
refs/heads/epel7-patches
|
django/contrib/gis/tests/geo3d/tests.py
|
109
|
from __future__ import absolute_import, unicode_literals
import os
import re
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.geos import HAS_GEOS
from django.contrib.gis.tests.utils import postgis
from django.test import TestCase
from django.utils._os import upath
from django.utils.unittest import skipUnless
if HAS_GEOS:
from django.contrib.gis.db.models import Union, Extent3D
from django.contrib.gis.geos import GEOSGeometry, LineString, Point, Polygon
from .models import (City3D, Interstate2D, Interstate3D, InterstateProj2D,
InterstateProj3D, Point2D, Point3D, MultiPoint3D, Polygon2D, Polygon3D)
if HAS_GDAL:
from django.contrib.gis.utils import LayerMapping, LayerMapError
data_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), '..', 'data'))
city_file = os.path.join(data_path, 'cities', 'cities.shp')
vrt_file = os.path.join(data_path, 'test_vrt', 'test_vrt.vrt')
# The coordinates of each city, with Z values corresponding to their
# altitude in meters.
city_data = (
('Houston', (-95.363151, 29.763374, 18)),
('Dallas', (-96.801611, 32.782057, 147)),
('Oklahoma City', (-97.521157, 34.464642, 380)),
('Wellington', (174.783117, -41.315268, 14)),
('Pueblo', (-104.609252, 38.255001, 1433)),
('Lawrence', (-95.235060, 38.971823, 251)),
('Chicago', (-87.650175, 41.850385, 181)),
('Victoria', (-123.305196, 48.462611, 15)),
)
# Reference mapping of city name to its altitude (Z value).
city_dict = dict((name, coords) for name, coords in city_data)
# 3D freeway data derived from the National Elevation Dataset:
# http://seamless.usgs.gov/products/9arc.php
interstate_data = (
('I-45',
'LINESTRING(-95.3708481 29.7765870 11.339,-95.3694580 29.7787980 4.536,-95.3690305 29.7797359 9.762,-95.3691886 29.7812450 12.448,-95.3696447 29.7850144 10.457,-95.3702511 29.7868518 9.418,-95.3706724 29.7881286 14.858,-95.3711632 29.7896157 15.386,-95.3714525 29.7936267 13.168,-95.3717848 29.7955007 15.104,-95.3717719 29.7969804 16.516,-95.3717305 29.7982117 13.923,-95.3717254 29.8000778 14.385,-95.3719875 29.8013539 15.160,-95.3720575 29.8026785 15.544,-95.3721321 29.8040912 14.975,-95.3722074 29.8050998 15.688,-95.3722779 29.8060430 16.099,-95.3733818 29.8076750 15.197,-95.3741563 29.8103686 17.268,-95.3749458 29.8129927 19.857,-95.3763564 29.8144557 15.435)',
( 11.339, 4.536, 9.762, 12.448, 10.457, 9.418, 14.858,
15.386, 13.168, 15.104, 16.516, 13.923, 14.385, 15.16 ,
15.544, 14.975, 15.688, 16.099, 15.197, 17.268, 19.857,
15.435),
),
)
# Bounding box polygon for inner-loop of Houston (in projected coordinate
# system 32140), with elevation values from the National Elevation Dataset
# (see above).
bbox_data = (
'POLYGON((941527.97 4225693.20,962596.48 4226349.75,963152.57 4209023.95,942051.75 4208366.38,941527.97 4225693.20))',
(21.71, 13.21, 9.12, 16.40, 21.71)
)
@skipUnless(HAS_GEOS and HAS_GDAL and postgis, "Geos, GDAL and postgis are required.")
class Geo3DTest(TestCase):
"""
Only a subset of the PostGIS routines are 3D-enabled, and this TestCase
tries to test the features that can handle 3D and that are also
available within GeoDjango. For more information, see the PostGIS docs
on the routines that support 3D:
http://postgis.refractions.net/documentation/manual-1.4/ch08.html#PostGIS_3D_Functions
"""
def _load_interstate_data(self):
# Interstate (2D / 3D and Geographic/Projected variants)
for name, line, exp_z in interstate_data:
line_3d = GEOSGeometry(line, srid=4269)
line_2d = LineString([l[:2] for l in line_3d.coords], srid=4269)
# Creating a geographic and projected version of the
# interstate in both 2D and 3D.
Interstate3D.objects.create(name=name, line=line_3d)
InterstateProj3D.objects.create(name=name, line=line_3d)
Interstate2D.objects.create(name=name, line=line_2d)
InterstateProj2D.objects.create(name=name, line=line_2d)
def _load_city_data(self):
for name, pnt_data in city_data:
City3D.objects.create(name=name, point=Point(*pnt_data, srid=4326))
def _load_polygon_data(self):
bbox_wkt, bbox_z = bbox_data
bbox_2d = GEOSGeometry(bbox_wkt, srid=32140)
bbox_3d = Polygon(tuple((x, y, z) for (x, y), z in zip(bbox_2d[0].coords, bbox_z)), srid=32140)
Polygon2D.objects.create(name='2D BBox', poly=bbox_2d)
Polygon3D.objects.create(name='3D BBox', poly=bbox_3d)
def test_3d_hasz(self):
"""
Make sure data is 3D and has expected Z values -- shouldn't change
because of coordinate system.
"""
self._load_interstate_data()
for name, line, exp_z in interstate_data:
interstate = Interstate3D.objects.get(name=name)
interstate_proj = InterstateProj3D.objects.get(name=name)
for i in [interstate, interstate_proj]:
self.assertTrue(i.line.hasz)
self.assertEqual(exp_z, tuple(i.line.z))
self._load_city_data()
for name, pnt_data in city_data:
city = City3D.objects.get(name=name)
z = pnt_data[2]
self.assertTrue(city.point.hasz)
self.assertEqual(z, city.point.z)
def test_3d_polygons(self):
"""
Test the creation of polygon 3D models.
"""
self._load_polygon_data()
p3d = Polygon3D.objects.get(name='3D BBox')
self.assertTrue(p3d.poly.hasz)
self.assertIsInstance(p3d.poly, Polygon)
self.assertEqual(p3d.poly.srid, 32140)
def test_3d_layermapping(self):
"""
Testing LayerMapping on 3D models.
"""
point_mapping = {'point' : 'POINT'}
mpoint_mapping = {'mpoint' : 'MULTIPOINT'}
# The VRT is 3D, but should still be able to map sans the Z.
lm = LayerMapping(Point2D, vrt_file, point_mapping, transform=False)
lm.save()
self.assertEqual(3, Point2D.objects.count())
# The city shapefile is 2D, and won't be able to fill the coordinates
# in the 3D model -- thus, a LayerMapError is raised.
self.assertRaises(LayerMapError, LayerMapping,
Point3D, city_file, point_mapping, transform=False)
# 3D model should take 3D data just fine.
lm = LayerMapping(Point3D, vrt_file, point_mapping, transform=False)
lm.save()
self.assertEqual(3, Point3D.objects.count())
# Making sure LayerMapping.make_multi works right, by converting
# a Point25D into a MultiPoint25D.
lm = LayerMapping(MultiPoint3D, vrt_file, mpoint_mapping, transform=False)
lm.save()
self.assertEqual(3, MultiPoint3D.objects.count())
def test_kml(self):
"""
Test GeoQuerySet.kml() with Z values.
"""
self._load_city_data()
h = City3D.objects.kml(precision=6).get(name='Houston')
# KML should be 3D.
# `SELECT ST_AsKML(point, 6) FROM geo3d_city3d WHERE name = 'Houston';`
ref_kml_regex = re.compile(r'^<Point><coordinates>-95.363\d+,29.763\d+,18</coordinates></Point>$')
self.assertTrue(ref_kml_regex.match(h.kml))
def test_geojson(self):
"""
Test GeoQuerySet.geojson() with Z values.
"""
self._load_city_data()
h = City3D.objects.geojson(precision=6).get(name='Houston')
# GeoJSON should be 3D
# `SELECT ST_AsGeoJSON(point, 6) FROM geo3d_city3d WHERE name='Houston';`
ref_json_regex = re.compile(r'^{"type":"Point","coordinates":\[-95.363151,29.763374,18(\.0+)?\]}$')
self.assertTrue(ref_json_regex.match(h.geojson))
def test_union(self):
"""
Testing the Union aggregate of 3D models.
"""
# PostGIS query that returned the reference EWKT for this test:
# `SELECT ST_AsText(ST_Union(point)) FROM geo3d_city3d;`
self._load_city_data()
ref_ewkt = 'SRID=4326;MULTIPOINT(-123.305196 48.462611 15,-104.609252 38.255001 1433,-97.521157 34.464642 380,-96.801611 32.782057 147,-95.363151 29.763374 18,-95.23506 38.971823 251,-87.650175 41.850385 181,174.783117 -41.315268 14)'
ref_union = GEOSGeometry(ref_ewkt)
union = City3D.objects.aggregate(Union('point'))['point__union']
self.assertTrue(union.hasz)
self.assertEqual(ref_union, union)
def test_extent(self):
"""
Testing the Extent3D aggregate for 3D models.
"""
self._load_city_data()
# `SELECT ST_Extent3D(point) FROM geo3d_city3d;`
        ref_extent3d = (-123.305196, -41.315268, 14, 174.783117, 48.462611, 1433)
extent1 = City3D.objects.aggregate(Extent3D('point'))['point__extent3d']
extent2 = City3D.objects.extent3d()
def check_extent3d(extent3d, tol=6):
for ref_val, ext_val in zip(ref_extent3d, extent3d):
self.assertAlmostEqual(ref_val, ext_val, tol)
for e3d in [extent1, extent2]:
check_extent3d(e3d)
def test_perimeter(self):
"""
Testing GeoQuerySet.perimeter() on 3D fields.
"""
self._load_polygon_data()
# Reference query for values below:
# `SELECT ST_Perimeter3D(poly), ST_Perimeter2D(poly) FROM geo3d_polygon3d;`
ref_perim_3d = 76859.2620451
ref_perim_2d = 76859.2577803
tol = 6
self.assertAlmostEqual(ref_perim_2d,
Polygon2D.objects.perimeter().get(name='2D BBox').perimeter.m,
tol)
self.assertAlmostEqual(ref_perim_3d,
Polygon3D.objects.perimeter().get(name='3D BBox').perimeter.m,
tol)
def test_length(self):
"""
Testing GeoQuerySet.length() on 3D fields.
"""
        # ST_Length_Spheroid is Z-aware, and thus does not need to use
# a separate function internally.
# `SELECT ST_Length_Spheroid(line, 'SPHEROID["GRS 1980",6378137,298.257222101]')
# FROM geo3d_interstate[2d|3d];`
self._load_interstate_data()
tol = 3
ref_length_2d = 4368.1721949481
ref_length_3d = 4368.62547052088
self.assertAlmostEqual(ref_length_2d,
Interstate2D.objects.length().get(name='I-45').length.m,
tol)
self.assertAlmostEqual(ref_length_3d,
Interstate3D.objects.length().get(name='I-45').length.m,
tol)
        # Making sure `ST_Length3D` is used for a projected
# and 3D model rather than `ST_Length`.
# `SELECT ST_Length(line) FROM geo3d_interstateproj2d;`
ref_length_2d = 4367.71564892392
# `SELECT ST_Length3D(line) FROM geo3d_interstateproj3d;`
ref_length_3d = 4368.16897234101
self.assertAlmostEqual(ref_length_2d,
InterstateProj2D.objects.length().get(name='I-45').length.m,
tol)
self.assertAlmostEqual(ref_length_3d,
InterstateProj3D.objects.length().get(name='I-45').length.m,
tol)
def test_scale(self):
"""
Testing GeoQuerySet.scale() on Z values.
"""
self._load_city_data()
# Mapping of City name to reference Z values.
zscales = (-3, 4, 23)
for zscale in zscales:
for city in City3D.objects.scale(1.0, 1.0, zscale):
self.assertEqual(city_dict[city.name][2] * zscale, city.scale.z)
def test_translate(self):
"""
Testing GeoQuerySet.translate() on Z values.
"""
self._load_city_data()
ztranslations = (5.23, 23, -17)
for ztrans in ztranslations:
for city in City3D.objects.translate(0, 0, ztrans):
self.assertEqual(city_dict[city.name][2] + ztrans, city.translate.z)
|
proxysh/Safejumper-for-Mac
|
refs/heads/master
|
buildmac/Resources/env/lib/python2.7/site-packages/twisted/internet/test/test_threads.py
|
13
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for implementations of L{IReactorThreads}.
"""
from __future__ import division, absolute_import
__metaclass__ = type
from weakref import ref
import gc, threading
from twisted.python.threadable import isInIOThread
from twisted.internet.test.reactormixins import ReactorBuilder
from twisted.python.threadpool import ThreadPool
from twisted.internet.interfaces import IReactorThreads
class ThreadTestsBuilder(ReactorBuilder):
"""
Builder for defining tests relating to L{IReactorThreads}.
"""
requiredInterfaces = (IReactorThreads,)
def test_getThreadPool(self):
"""
C{reactor.getThreadPool()} returns an instance of L{ThreadPool} which
starts when C{reactor.run()} is called and stops before it returns.
"""
state = []
reactor = self.buildReactor()
pool = reactor.getThreadPool()
self.assertIsInstance(pool, ThreadPool)
self.assertFalse(
pool.started, "Pool should not start before reactor.run")
def f():
# Record the state for later assertions
state.append(pool.started)
state.append(pool.joined)
reactor.stop()
reactor.callWhenRunning(f)
self.runReactor(reactor, 2)
self.assertTrue(
state[0], "Pool should start after reactor.run")
self.assertFalse(
state[1], "Pool should not be joined before reactor.stop")
self.assertTrue(
pool.joined,
"Pool should be stopped after reactor.run returns")
def test_suggestThreadPoolSize(self):
"""
C{reactor.suggestThreadPoolSize()} sets the maximum size of the reactor
threadpool.
"""
reactor = self.buildReactor()
reactor.suggestThreadPoolSize(17)
pool = reactor.getThreadPool()
self.assertEqual(pool.max, 17)
def test_delayedCallFromThread(self):
"""
A function scheduled with L{IReactorThreads.callFromThread} invoked
from a delayed call is run immediately in the next reactor iteration.
When invoked from the reactor thread, previous implementations of
L{IReactorThreads.callFromThread} would skip the pipe/socket based wake
up step, assuming the reactor would wake up on its own. However, this
resulted in the reactor not noticing an insert into the thread queue at
the right time (in this case, after the thread queue has been processed
for that reactor iteration).
"""
reactor = self.buildReactor()
def threadCall():
reactor.stop()
# Set up the use of callFromThread being tested.
reactor.callLater(0, reactor.callFromThread, threadCall)
before = reactor.seconds()
self.runReactor(reactor, 60)
after = reactor.seconds()
# We specified a timeout of 60 seconds. The timeout code in runReactor
# probably won't actually work, though. If the reactor comes out of
# the event notification API just a little bit early, say after 59.9999
# seconds instead of after 60 seconds, then the queued thread call will
# get processed but the timeout delayed call runReactor sets up won't!
# Then the reactor will stop and runReactor will return without the
# timeout firing. As it turns out, select() and poll() are quite
# likely to return *slightly* earlier than we ask them to, so the
# timeout will rarely happen, even if callFromThread is broken. So,
# instead we'll measure the elapsed time and make sure it's something
# less than about half of the timeout we specified. This is heuristic.
# It assumes that select() won't ever return after 30 seconds when we
# asked it to timeout after 60 seconds. And of course like all
# time-based tests, it's slightly non-deterministic. If the OS doesn't
# schedule this process for 30 seconds, then the test might fail even
# if callFromThread is working.
self.assertTrue(after - before < 30)
def test_callFromThread(self):
"""
A function scheduled with L{IReactorThreads.callFromThread} invoked
from another thread is run in the reactor thread.
"""
reactor = self.buildReactor()
result = []
def threadCall():
result.append(threading.currentThread())
reactor.stop()
reactor.callLater(0, reactor.callInThread,
reactor.callFromThread, threadCall)
self.runReactor(reactor, 5)
self.assertEqual(result, [threading.currentThread()])
def test_stopThreadPool(self):
"""
When the reactor stops, L{ReactorBase._stopThreadPool} drops the
reactor's direct reference to its internal threadpool and removes
the associated startup and shutdown triggers.
This is the case of the thread pool being created before the reactor
is run.
"""
reactor = self.buildReactor()
threadpool = ref(reactor.getThreadPool())
reactor.callWhenRunning(reactor.stop)
self.runReactor(reactor)
gc.collect()
self.assertIsNone(threadpool())
def test_stopThreadPoolWhenStartedAfterReactorRan(self):
"""
We must handle the case of shutting down the thread pool when it was
started after the reactor was run in a special way.
Some implementation background: The thread pool is started with
callWhenRunning, which only returns a system trigger ID when it is
invoked before the reactor is started.
This is the case of the thread pool being created after the reactor
is started.
"""
reactor = self.buildReactor()
threadPoolRefs = []
def acquireThreadPool():
threadPoolRefs.append(ref(reactor.getThreadPool()))
reactor.stop()
reactor.callWhenRunning(acquireThreadPool)
self.runReactor(reactor)
gc.collect()
self.assertIsNone(threadPoolRefs[0]())
def test_cleanUpThreadPoolEvenBeforeReactorIsRun(self):
"""
When the reactor has its shutdown event fired before it is run, the
thread pool is completely destroyed.
For what it's worth, the reason we support this behavior at all is
because Trial does this.
This is the case of the thread pool being created without the reactor
        being started at all.
"""
reactor = self.buildReactor()
threadPoolRef = ref(reactor.getThreadPool())
reactor.fireSystemEvent("shutdown")
if reactor.__class__.__name__ == "AsyncioSelectorReactor":
self.assertIsNone(reactor.threadpool)
else:
gc.collect()
self.assertIsNone(threadPoolRef())
def test_isInIOThread(self):
"""
The reactor registers itself as the I/O thread when it runs so that
L{twisted.python.threadable.isInIOThread} returns C{True} if it is
called in the thread the reactor is running in.
"""
results = []
reactor = self.buildReactor()
def check():
results.append(isInIOThread())
reactor.stop()
reactor.callWhenRunning(check)
self.runReactor(reactor)
self.assertEqual([True], results)
def test_isNotInIOThread(self):
"""
The reactor registers itself as the I/O thread when it runs so that
L{twisted.python.threadable.isInIOThread} returns C{False} if it is
called in a different thread than the reactor is running in.
"""
results = []
reactor = self.buildReactor()
def check():
results.append(isInIOThread())
reactor.callFromThread(reactor.stop)
reactor.callInThread(check)
self.runReactor(reactor)
self.assertEqual([False], results)
globals().update(ThreadTestsBuilder.makeTestCaseClasses())
|
ruben2020/codequery
|
refs/heads/master
|
scintilla/scripts/Face.py
|
4
|
# Face.py - module for reading and parsing Scintilla.iface file
# Implemented 2000 by Neil Hodgson neilh@scintilla.org
# Released to the public domain.
# Requires Python 2.5 or later
def sanitiseLine(line):
if line[-1:] == '\n': line = line[:-1]
if line.find("##") != -1:
line = line[:line.find("##")]
line = line.strip()
return line
def decodeFunction(featureVal):
retType, rest = featureVal.split(" ", 1)
nameIdent, params = rest.split("(")
name, value = nameIdent.split("=")
params, rest = params.split(")")
param1, param2 = params.split(",")
return retType, name, value, param1, param2
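# Illustrative example (added note; the line below is hypothetical, not taken from
# Scintilla.iface): for a feature value such as
#   "position PositionFromPoint=2022(int x, int y)"
# decodeFunction returns ('position', 'PositionFromPoint', '2022', 'int x', ' int y');
# the leading space on the second parameter is stripped later by decodeParam.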
def decodeEvent(featureVal):
retType, rest = featureVal.split(" ", 1)
nameIdent, params = rest.split("(")
name, value = nameIdent.split("=")
return retType, name, value
def decodeParam(p):
param = p.strip()
type = ""
name = ""
value = ""
if " " in param:
type, nv = param.split(" ")
if "=" in nv:
name, value = nv.split("=")
else:
name = nv
return type, name, value
def IsEnumeration(t):
return t[:1].isupper()
class Face:
def __init__(self):
self.order = []
self.features = {}
self.values = {}
self.events = {}
self.aliases = {}
def ReadFromFile(self, name):
currentCategory = ""
currentComment = []
currentCommentFinished = 0
file = open(name)
for line in file.readlines():
line = sanitiseLine(line)
if line:
if line[0] == "#":
if line[1] == " ":
if currentCommentFinished:
currentComment = []
currentCommentFinished = 0
currentComment.append(line[2:])
else:
currentCommentFinished = 1
featureType, featureVal = line.split(" ", 1)
if featureType in ["fun", "get", "set"]:
try:
retType, name, value, param1, param2 = decodeFunction(featureVal)
except ValueError:
print("Failed to decode %s" % line)
raise
p1 = decodeParam(param1)
p2 = decodeParam(param2)
self.features[name] = {
"FeatureType": featureType,
"ReturnType": retType,
"Value": value,
"Param1Type": p1[0], "Param1Name": p1[1], "Param1Value": p1[2],
"Param2Type": p2[0], "Param2Name": p2[1], "Param2Value": p2[2],
"Category": currentCategory, "Comment": currentComment
}
if value in self.values:
raise Exception("Duplicate value " + value + " " + name)
self.values[value] = 1
self.order.append(name)
currentComment = []
elif featureType == "evt":
retType, name, value = decodeEvent(featureVal)
self.features[name] = {
"FeatureType": featureType,
"ReturnType": retType,
"Value": value,
"Category": currentCategory, "Comment": currentComment
}
if value in self.events:
raise Exception("Duplicate event " + value + " " + name)
self.events[value] = 1
self.order.append(name)
elif featureType == "cat":
currentCategory = featureVal
elif featureType == "val":
try:
name, value = featureVal.split("=", 1)
except ValueError:
print("Failure %s" % featureVal)
raise Exception()
self.features[name] = {
"FeatureType": featureType,
"Category": currentCategory,
"Value": value }
self.order.append(name)
elif featureType == "enu" or featureType == "lex":
name, value = featureVal.split("=", 1)
self.features[name] = {
"FeatureType": featureType,
"Category": currentCategory,
"Value": value,
"Comment": currentComment }
self.order.append(name)
currentComment = []
elif featureType == "ali":
# Enumeration alias
name, value = featureVal.split("=", 1)
self.aliases[name] = value
currentComment = []
|
CameronLonsdale/sec-tools
|
refs/heads/master
|
python2/lib/python2.7/site-packages/setuptools/namespaces.py
|
31
|
import os
from distutils import log
import itertools
from six.moves import map
flatten = itertools.chain.from_iterable
class Installer:
nspkg_ext = '-nspkg.pth'
def install_namespaces(self):
nsp = self._get_all_ns_packages()
if not nsp:
return
filename, ext = os.path.splitext(self._get_target())
filename += self.nspkg_ext
self.outputs.append(filename)
log.info("Installing %s", filename)
lines = map(self._gen_nspkg_line, nsp)
if self.dry_run:
# always generate the lines, even in dry run
list(lines)
return
with open(filename, 'wt') as f:
f.writelines(lines)
def uninstall_namespaces(self):
filename, ext = os.path.splitext(self._get_target())
filename += self.nspkg_ext
if not os.path.exists(filename):
return
log.info("Removing %s", filename)
os.remove(filename)
def _get_target(self):
return self.target
_nspkg_tmpl = (
"import sys, types, os",
"has_mfs = sys.version_info > (3, 5)",
"p = os.path.join(%(root)s, *%(pth)r)",
"importlib = has_mfs and __import__('importlib.util')",
"has_mfs and __import__('importlib.machinery')",
"m = has_mfs and "
"sys.modules.setdefault(%(pkg)r, "
"importlib.util.module_from_spec("
"importlib.machinery.PathFinder.find_spec(%(pkg)r, "
"[os.path.dirname(p)])))",
"m = m or not has_mfs and "
"sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
"mp = (m or []) and m.__dict__.setdefault('__path__',[])",
"(p not in mp) and mp.append(p)",
)
"lines for the namespace installer"
_nspkg_tmpl_multi = (
'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
)
"additional line(s) when a parent package is indicated"
def _get_root(self):
return "sys._getframe(1).f_locals['sitedir']"
def _gen_nspkg_line(self, pkg):
# ensure pkg is not a unicode string under Python 2.7
pkg = str(pkg)
pth = tuple(pkg.split('.'))
root = self._get_root()
tmpl_lines = self._nspkg_tmpl
parent, sep, child = pkg.rpartition('.')
if parent:
tmpl_lines += self._nspkg_tmpl_multi
return ';'.join(tmpl_lines) % locals() + '\n'
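    # Note added for clarity (not in the original module): the returned string is
    # the template above joined with ';' and interpolated, yielding a single
    # "import ..." line in the generated -nspkg.pth file; site.py executes such
    # lines at interpreter startup, registering the namespace package in
    # sys.modules and appending its directory to the package __path__.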
def _get_all_ns_packages(self):
"""Return sorted list of all package namespaces"""
pkgs = self.distribution.namespace_packages or []
return sorted(flatten(map(self._pkg_names, pkgs)))
@staticmethod
def _pkg_names(pkg):
"""
Given a namespace package, yield the components of that
package.
>>> names = Installer._pkg_names('a.b.c')
>>> set(names) == set(['a', 'a.b', 'a.b.c'])
True
"""
parts = pkg.split('.')
while parts:
yield '.'.join(parts)
parts.pop()
class DevelopInstaller(Installer):
def _get_root(self):
return repr(str(self.egg_path))
def _get_target(self):
return self.egg_link
|
nordaux/tornado
|
refs/heads/master
|
tornado/test/escape_test.py
|
38
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, with_statement
import tornado.escape
from tornado.escape import utf8, xhtml_escape, xhtml_unescape, url_escape, url_unescape, to_unicode, json_decode, json_encode, squeeze, recursive_unicode
from tornado.util import u, unicode_type
from tornado.test.util import unittest
linkify_tests = [
# (input, linkify_kwargs, expected_output)
("hello http://world.com/!", {},
u('hello <a href="http://world.com/">http://world.com/</a>!')),
("hello http://world.com/with?param=true&stuff=yes", {},
u('hello <a href="http://world.com/with?param=true&stuff=yes">http://world.com/with?param=true&stuff=yes</a>')),
# an opened paren followed by many chars killed Gruber's regex
("http://url.com/w(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", {},
u('<a href="http://url.com/w">http://url.com/w</a>(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')),
# as did too many dots at the end
("http://url.com/withmany.......................................", {},
u('<a href="http://url.com/withmany">http://url.com/withmany</a>.......................................')),
("http://url.com/withmany((((((((((((((((((((((((((((((((((a)", {},
u('<a href="http://url.com/withmany">http://url.com/withmany</a>((((((((((((((((((((((((((((((((((a)')),
# some examples from http://daringfireball.net/2009/11/liberal_regex_for_matching_urls
# plus a fex extras (such as multiple parentheses).
("http://foo.com/blah_blah", {},
u('<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>')),
("http://foo.com/blah_blah/", {},
u('<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>')),
("(Something like http://foo.com/blah_blah)", {},
u('(Something like <a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>)')),
("http://foo.com/blah_blah_(wikipedia)", {},
u('<a href="http://foo.com/blah_blah_(wikipedia)">http://foo.com/blah_blah_(wikipedia)</a>')),
("http://foo.com/blah_(blah)_(wikipedia)_blah", {},
u('<a href="http://foo.com/blah_(blah)_(wikipedia)_blah">http://foo.com/blah_(blah)_(wikipedia)_blah</a>')),
("(Something like http://foo.com/blah_blah_(wikipedia))", {},
u('(Something like <a href="http://foo.com/blah_blah_(wikipedia)">http://foo.com/blah_blah_(wikipedia)</a>)')),
("http://foo.com/blah_blah.", {},
u('<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>.')),
("http://foo.com/blah_blah/.", {},
u('<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>.')),
("<http://foo.com/blah_blah>", {},
u('<<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>>')),
("<http://foo.com/blah_blah/>", {},
u('<<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>>')),
("http://foo.com/blah_blah,", {},
u('<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>,')),
("http://www.example.com/wpstyle/?p=364.", {},
u('<a href="http://www.example.com/wpstyle/?p=364">http://www.example.com/wpstyle/?p=364</a>.')),
("rdar://1234",
{"permitted_protocols": ["http", "rdar"]},
u('<a href="rdar://1234">rdar://1234</a>')),
("rdar:/1234",
{"permitted_protocols": ["rdar"]},
u('<a href="rdar:/1234">rdar:/1234</a>')),
("http://userid:password@example.com:8080", {},
u('<a href="http://userid:password@example.com:8080">http://userid:password@example.com:8080</a>')),
("http://userid@example.com", {},
u('<a href="http://userid@example.com">http://userid@example.com</a>')),
("http://userid@example.com:8080", {},
u('<a href="http://userid@example.com:8080">http://userid@example.com:8080</a>')),
("http://userid:password@example.com", {},
u('<a href="http://userid:password@example.com">http://userid:password@example.com</a>')),
("message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e",
{"permitted_protocols": ["http", "message"]},
u('<a href="message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e">message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e</a>')),
(u("http://\u27a1.ws/\u4a39"), {},
u('<a href="http://\u27a1.ws/\u4a39">http://\u27a1.ws/\u4a39</a>')),
("<tag>http://example.com</tag>", {},
u('&lt;tag&gt;<a href="http://example.com">http://example.com</a>&lt;/tag&gt;')),
("Just a www.example.com link.", {},
u('Just a <a href="http://www.example.com">www.example.com</a> link.')),
("Just a www.example.com link.",
{"require_protocol": True},
u('Just a www.example.com link.')),
("A http://reallylong.com/link/that/exceedsthelenglimit.html",
{"require_protocol": True, "shorten": True},
u('A <a href="http://reallylong.com/link/that/exceedsthelenglimit.html" title="http://reallylong.com/link/that/exceedsthelenglimit.html">http://reallylong.com/link...</a>')),
("A http://reallylongdomainnamethatwillbetoolong.com/hi!",
{"shorten": True},
u('A <a href="http://reallylongdomainnamethatwillbetoolong.com/hi" title="http://reallylongdomainnamethatwillbetoolong.com/hi">http://reallylongdomainnametha...</a>!')),
("A file:///passwords.txt and http://web.com link", {},
u('A file:///passwords.txt and <a href="http://web.com">http://web.com</a> link')),
("A file:///passwords.txt and http://web.com link",
{"permitted_protocols": ["file"]},
u('A <a href="file:///passwords.txt">file:///passwords.txt</a> and http://web.com link')),
("www.external-link.com",
{"extra_params": 'rel="nofollow" class="external"'},
u('<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a>')),
("www.external-link.com and www.internal-link.com/blogs extra",
{"extra_params": lambda href: 'class="internal"' if href.startswith("http://www.internal-link.com") else 'rel="nofollow" class="external"'},
u('<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a> and <a href="http://www.internal-link.com/blogs" class="internal">www.internal-link.com/blogs</a> extra')),
("www.external-link.com",
{"extra_params": lambda href: ' rel="nofollow" class="external" '},
u('<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a>')),
]
class EscapeTestCase(unittest.TestCase):
def test_linkify(self):
for text, kwargs, html in linkify_tests:
linked = tornado.escape.linkify(text, **kwargs)
self.assertEqual(linked, html)
def test_xhtml_escape(self):
tests = [
("<foo>", "<foo>"),
(u("<foo>"), u("<foo>")),
(b"<foo>", b"<foo>"),
("<>&\"'", "<>&"'"),
("&", "&amp;"),
(u("<\u00e9>"), u("<\u00e9>")),
(b"<\xc3\xa9>", b"<\xc3\xa9>"),
]
for unescaped, escaped in tests:
self.assertEqual(utf8(xhtml_escape(unescaped)), utf8(escaped))
self.assertEqual(utf8(unescaped), utf8(xhtml_unescape(escaped)))
def test_url_escape_unicode(self):
tests = [
# byte strings are passed through as-is
(u('\u00e9').encode('utf8'), '%C3%A9'),
(u('\u00e9').encode('latin1'), '%E9'),
# unicode strings become utf8
(u('\u00e9'), '%C3%A9'),
]
for unescaped, escaped in tests:
self.assertEqual(url_escape(unescaped), escaped)
def test_url_unescape_unicode(self):
tests = [
('%C3%A9', u('\u00e9'), 'utf8'),
('%C3%A9', u('\u00c3\u00a9'), 'latin1'),
('%C3%A9', utf8(u('\u00e9')), None),
]
for escaped, unescaped, encoding in tests:
# input strings to url_unescape should only contain ascii
# characters, but make sure the function accepts both byte
# and unicode strings.
self.assertEqual(url_unescape(to_unicode(escaped), encoding), unescaped)
self.assertEqual(url_unescape(utf8(escaped), encoding), unescaped)
def test_url_escape_quote_plus(self):
unescaped = '+ #%'
plus_escaped = '%2B+%23%25'
escaped = '%2B%20%23%25'
self.assertEqual(url_escape(unescaped), plus_escaped)
self.assertEqual(url_escape(unescaped, plus=False), escaped)
self.assertEqual(url_unescape(plus_escaped), unescaped)
self.assertEqual(url_unescape(escaped, plus=False), unescaped)
self.assertEqual(url_unescape(plus_escaped, encoding=None),
utf8(unescaped))
self.assertEqual(url_unescape(escaped, encoding=None, plus=False),
utf8(unescaped))
def test_escape_return_types(self):
# On python2 the escape methods should generally return the same
# type as their argument
self.assertEqual(type(xhtml_escape("foo")), str)
self.assertEqual(type(xhtml_escape(u("foo"))), unicode_type)
def test_json_decode(self):
# json_decode accepts both bytes and unicode, but strings it returns
# are always unicode.
self.assertEqual(json_decode(b'"foo"'), u("foo"))
self.assertEqual(json_decode(u('"foo"')), u("foo"))
# Non-ascii bytes are interpreted as utf8
self.assertEqual(json_decode(utf8(u('"\u00e9"'))), u("\u00e9"))
def test_json_encode(self):
# json deals with strings, not bytes. On python 2 byte strings will
# convert automatically if they are utf8; on python 3 byte strings
# are not allowed.
self.assertEqual(json_decode(json_encode(u("\u00e9"))), u("\u00e9"))
if bytes is str:
self.assertEqual(json_decode(json_encode(utf8(u("\u00e9")))), u("\u00e9"))
self.assertRaises(UnicodeDecodeError, json_encode, b"\xe9")
def test_squeeze(self):
self.assertEqual(squeeze(u('sequences of whitespace chars'))
, u('sequences of whitespace chars'))
def test_recursive_unicode(self):
tests = {
'dict': {b"foo": b"bar"},
'list': [b"foo", b"bar"],
'tuple': (b"foo", b"bar"),
'bytes': b"foo"
}
self.assertEqual(recursive_unicode(tests['dict']), {u("foo"): u("bar")})
self.assertEqual(recursive_unicode(tests['list']), [u("foo"), u("bar")])
self.assertEqual(recursive_unicode(tests['tuple']), (u("foo"), u("bar")))
self.assertEqual(recursive_unicode(tests['bytes']), u("foo"))
|
elkingtowa/pyrake
|
refs/heads/master
|
tests/test_logformatter.py
|
1
|
import unittest
from pyrake.spider import Spider
from pyrake.http import Request, Response
from pyrake.item import Item, Field
from pyrake.logformatter import LogFormatter
class CustomItem(Item):
name = Field()
def __str__(self):
return "name: %s" % self['name']
class LoggingContribTest(unittest.TestCase):
def setUp(self):
self.formatter = LogFormatter()
self.spider = Spider('default')
def test_crawled(self):
req = Request("http://www.example.com")
res = Response("http://www.example.com")
logkws = self.formatter.crawled(req, res, self.spider)
logline = logkws['format'] % logkws
self.assertEqual(logline,
"Crawled (200) <GET http://www.example.com> (referer: None)")
req = Request("http://www.example.com", headers={'referer': 'http://example.com'})
res = Response("http://www.example.com", flags=['cached'])
logkws = self.formatter.crawled(req, res, self.spider)
logline = logkws['format'] % logkws
self.assertEqual(logline,
"Crawled (200) <GET http://www.example.com> (referer: http://example.com) ['cached']")
def test_dropped(self):
item = {}
exception = Exception(u"\u2018")
response = Response("http://www.example.com")
logkws = self.formatter.dropped(item, exception, response, self.spider)
logline = logkws['format'] % logkws
lines = logline.splitlines()
assert all(isinstance(x, unicode) for x in lines)
self.assertEqual(lines, [u"Dropped: \u2018", '{}'])
def test_scraped(self):
item = CustomItem()
item['name'] = u'\xa3'
response = Response("http://www.example.com")
logkws = self.formatter.scraped(item, response, self.spider)
logline = logkws['format'] % logkws
lines = logline.splitlines()
assert all(isinstance(x, unicode) for x in lines)
self.assertEqual(lines, [u"Scraped from <200 http://www.example.com>", u'name: \xa3'])
if __name__ == "__main__":
unittest.main()
|
pichuang/ryu
|
refs/heads/master
|
ryu/ofproto/oxx_fields.py
|
10
|
# Copyright (C) 2015 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# there are two representations of value and mask this module deal with.
#
# "user"
# (value, mask) or value. the latter means no mask.
# value and mask are strings.
#
# "internal"
# value and mask are on-wire bytes.
# mask is None if no mask.
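# Illustrative example (made-up values, not from the spec): for an IPv4
# address match the "user" form might be ('192.168.0.0', '255.255.0.0'),
# or just '192.168.0.1' when unmasked, while the "internal" form would be
# the packed on-wire bytes, e.g. b'\xc0\xa8\x00\x00' with mask
# b'\xff\xff\x00\x00' (mask is None when absent).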
import six
import struct
from ryu.ofproto import ofproto_common
from ryu.lib.pack_utils import msg_pack_into
from ryu.lib import type_desc
if six.PY3:
_ord = int
else:
_ord = ord
# 'OFPXXC_EXPERIMENTER' has no corresponding field in the specification.
# It is a transparent value used as the Experimenter class ID for OXM/OXS.
OFPXXC_EXPERIMENTER = 0xffff
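# For orientation (paraphrasing the parsing code below, not the spec text):
# the class sits in the top 16 bits of the 32-bit OXM/OXS header, so an
# experimenter field is recognised when that value equals 0xffff.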
def _get_field_info_by_name(oxx, name_to_field, name):
try:
f = name_to_field[name]
t = f.type
num = f.num
except KeyError:
t = type_desc.UnknownType
if name.startswith('field_'):
num = int(name.split('_')[1])
else:
raise KeyError('unknown %s field: %s' % (oxx.upper(), name))
return num, t
def _from_user_header(oxx, name_to_field, name):
(num, t) = _get_field_info_by_name(oxx, name_to_field, name)
return num
def _from_user(oxx, name_to_field, name, user_value):
(num, t) = _get_field_info_by_name(oxx, name_to_field, name)
# the 'list' case below is a bit hack; json.dumps silently maps
# python tuples into json lists.
if oxx == 'oxm' and isinstance(user_value, (tuple, list)):
(value, mask) = user_value
else:
value = user_value
mask = None
if value is not None:
value = t.from_user(value)
if mask is not None:
mask = t.from_user(mask)
return num, value, mask
def _get_field_info_by_number(oxx, num_to_field, n):
try:
f = num_to_field[n]
t = f.type
name = f.name
except KeyError:
t = type_desc.UnknownType
if isinstance(n, six.integer_types):
name = 'field_%d' % (n,)
else:
raise KeyError('unknown %s field number: %s' % (oxx.upper(), n))
return name, t
def _to_user_header(oxx, num_to_field, n):
(name, t) = _get_field_info_by_number(oxx, num_to_field, n)
return name
def _to_user(oxx, num_to_field, n, v, m):
(name, t) = _get_field_info_by_number(oxx, num_to_field, n)
if v is not None:
if isinstance(v, (tuple, list)):
v_len = len(v) * len(v[0])
else:
v_len = len(v)
if hasattr(t, 'size') and t.size != v_len:
raise Exception(
'Unexpected %s payload length %d for %s (expected %d)'
% (oxx.upper(), v_len, name, t.size))
value = t.to_user(v)
else:
value = None
if m is None:
user_value = value
else:
user_value = (value, t.to_user(m))
return name, user_value
def _field_desc(num_to_field, n):
return num_to_field[n]
def _normalize_user(oxx, mod, k, uv):
try:
from_user = getattr(mod, oxx + '_from_user')
(n, v, m) = from_user(k, uv)
except:
return (k, uv)
# apply mask
if m is not None:
v = b''.join(six.int2byte(_ord(x) & _ord(y)) for (x, y) in zip(v, m))
try:
to_user = getattr(mod, oxx + '_to_user')
(k2, uv2) = to_user(n, v, m)
except:
return (k, uv)
assert k2 == k
return (k2, uv2)
def _parse_header_impl(mod, buf, offset):
hdr_pack_str = '!I'
(header, ) = struct.unpack_from(hdr_pack_str, buf, offset)
hdr_len = struct.calcsize(hdr_pack_str)
oxx_type = header >> 9 # class|field
oxm_hasmask = mod.oxm_tlv_header_extract_hasmask(header)
oxx_class = oxx_type >> 7
oxx_length = header & 0xff
if oxx_class == OFPXXC_EXPERIMENTER:
# Experimenter OXMs/OXSs have 64-bit header.
# (vs 32-bit for other OXMs/OXSs)
exp_hdr_pack_str = '!I' # experimenter_id
(exp_id, ) = struct.unpack_from(exp_hdr_pack_str, buf,
offset + hdr_len)
exp_hdr_len = struct.calcsize(exp_hdr_pack_str)
assert exp_hdr_len == 4
oxx_field = oxx_type & 0x7f
if exp_id == ofproto_common.ONF_EXPERIMENTER_ID and oxx_field == 0:
# XXX
# This block implements EXT-256 style experimenter OXM.
onf_exp_type_pack_str = '!H'
(exp_type, ) = struct.unpack_from(onf_exp_type_pack_str, buf,
offset + hdr_len + exp_hdr_len)
exp_hdr_len += struct.calcsize(onf_exp_type_pack_str)
assert exp_hdr_len == 4 + 2
num = (exp_id, exp_type)
else:
num = (exp_id, oxx_type)
else:
num = oxx_type
exp_hdr_len = 0
value_len = oxx_length - exp_hdr_len
if oxm_hasmask:
value_len //= 2
assert value_len > 0
field_len = hdr_len + oxx_length
total_hdr_len = hdr_len + exp_hdr_len
return num, total_hdr_len, oxm_hasmask, value_len, field_len
def _parse_header(mod, buf, offset):
(oxx_type_num, total_hdr_len, hasmask, value_len,
field_len) = _parse_header_impl(mod, buf, offset)
return oxx_type_num, field_len - value_len
def _parse(mod, buf, offset):
(oxx_type_num, total_hdr_len, hasmask, value_len,
field_len) = _parse_header_impl(mod, buf, offset)
# Note: OXM/OXS payload length (oxx_len) includes Experimenter ID
# (exp_hdr_len) for experimenter OXMs/OXSs.
value_offset = offset + total_hdr_len
value_pack_str = '!%ds' % value_len
assert struct.calcsize(value_pack_str) == value_len
(value, ) = struct.unpack_from(value_pack_str, buf, value_offset)
if hasmask:
(mask, ) = struct.unpack_from(value_pack_str, buf,
value_offset + value_len)
else:
mask = None
return oxx_type_num, value, mask, field_len
def _make_exp_hdr(oxx, mod, n):
exp_hdr = bytearray()
try:
get_desc = getattr(mod, '_' + oxx + '_field_desc')
desc = get_desc(n)
except KeyError:
return n, exp_hdr
if desc._class == OFPXXC_EXPERIMENTER:
(exp_id, exp_type) = n
assert desc.experimenter_id == exp_id
oxx_type = getattr(desc, oxx + '_type')
if desc.exp_type == 2560:
# XXX
# This block implements EXT-256 style experimenter OXM.
exp_hdr_pack_str = '!IH' # experimenter_id, exp_type
msg_pack_into(exp_hdr_pack_str, exp_hdr, 0,
desc.experimenter_id, desc.exp_type)
else:
assert oxx_type == exp_type | (OFPXXC_EXPERIMENTER << 7)
exp_hdr_pack_str = '!I' # experimenter_id
msg_pack_into(exp_hdr_pack_str, exp_hdr, 0,
desc.experimenter_id)
assert len(exp_hdr) == struct.calcsize(exp_hdr_pack_str)
n = oxx_type
assert (n >> 7) == OFPXXC_EXPERIMENTER
return n, exp_hdr
def _serialize_header(oxx, mod, n, buf, offset):
try:
get_desc = getattr(mod, '_' + oxx + '_field_desc')
desc = get_desc(n)
value_len = desc.type.size
except KeyError:
value_len = 0
n, exp_hdr = _make_exp_hdr(oxx, mod, n)
exp_hdr_len = len(exp_hdr)
pack_str = "!I%ds" % (exp_hdr_len,)
msg_pack_into(pack_str, buf, offset,
(n << 9) | (0 << 8) | (exp_hdr_len + value_len),
bytes(exp_hdr))
return struct.calcsize(pack_str)
def _serialize(oxx, mod, n, value, mask, buf, offset):
n, exp_hdr = _make_exp_hdr(oxx, mod, n)
exp_hdr_len = len(exp_hdr)
value_len = len(value)
if mask:
assert value_len == len(mask)
pack_str = "!I%ds%ds%ds" % (exp_hdr_len, value_len, len(mask))
msg_pack_into(pack_str, buf, offset,
(n << 9) | (1 << 8) | (exp_hdr_len + value_len * 2),
bytes(exp_hdr), value, mask)
else:
pack_str = "!I%ds%ds" % (exp_hdr_len, value_len,)
msg_pack_into(pack_str, buf, offset,
(n << 9) | (0 << 8) | (exp_hdr_len + value_len),
bytes(exp_hdr), value)
return struct.calcsize(pack_str)
|
wallnerryan/flocker-profiles
|
refs/heads/profile_metadata
|
flocker/provision/_libcloud.py
|
8
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Helpers for using libcloud.
"""
from zope.interface import (
Attribute as InterfaceAttribute, Interface, implementer)
from characteristic import attributes, Attribute
from twisted.conch.ssh.keys import Key
from flocker.provision._ssh import run_remotely, run_from_args
def get_size(driver, size_id):
"""
Return a ``NodeSize`` corresponding to a given id.
:param driver: The libcloud driver to query for sizes.
"""
try:
return [s for s in driver.list_sizes() if s.id == size_id][0]
except IndexError:
raise ValueError("Unknown size.", size_id)
def get_image(driver, image_name):
"""
Return a ``NodeImage`` corresponding to a given name.
:param driver: The libcloud driver to query for images.
"""
try:
return [s for s in driver.list_images() if s.name == image_name][0]
except IndexError:
raise ValueError("Unknown image.", image_name)
class INode(Interface):
"""
Interface for node for running acceptance tests.
"""
address = InterfaceAttribute('Public IP address for node')
private_address = InterfaceAttribute('Private IP address for node')
distribution = InterfaceAttribute('distribution on node')
def get_default_username():
"""
Return the username available by default on a system.
Some cloud systems (e.g. AWS) provide a specific username, which
depends on the OS distribution started. This method returns
the username based on the node distribution.
"""
def provision(package_source, variants):
"""
Provision flocker on this node.
:param PackageSource package_source: The source from which to install
flocker.
:param set variants: The set of variant configurations to use when
provisioning
"""
@implementer(INode)
@attributes([
# _node gets updated, so we can't make this immutable.
Attribute('_node'),
Attribute('_provisioner'),
'address',
'private_address',
'distribution',
])
class LibcloudNode(object):
"""
A node created with libcloud.
:ivar Node _node: The libcloud node object.
:ivar LibcloudProvisioner _provisioner: The provisioner that created this
node.
:ivar bytes address: The IP address of the node.
:ivar str distribution: The distribution installed on the node.
:ivar bytes name: The name of the node.
"""
def destroy(self):
"""
Destroy the node.
"""
self._node.destroy()
def reboot(self):
"""
Reboot the node.
:return Effect:
"""
def do_reboot(_):
self._node.reboot()
self._node, self.addresses = (
self._node.driver.wait_until_running(
[self._node], wait_period=15)[0])
return
return run_remotely(
username="root",
address=self.address,
commands=run_from_args(["sync"])
).on(success=do_reboot)
def get_default_username(self):
"""
Return the default username on this provisioner.
"""
return self._provisioner.get_default_user(self.distribution)
def provision(self, package_source, variants=()):
"""
Provision flocker on this node.
:param PackageSource package_source: The source from which to install
flocker.
:param set variants: The set of variant configurations to use when
provisioning
"""
return self._provisioner.provision(
node=self,
package_source=package_source,
distribution=self.distribution,
variants=variants,
).on(success=lambda _: self.address)
@property
def name(self):
return self._node.name
class CloudKeyNotFound(Exception):
"""
Raised if the cloud provider doesn't have a ssh-key with a given name.
"""
@attributes([
Attribute('_driver'),
Attribute('_keyname'),
Attribute('image_names'),
Attribute('_create_node_arguments'),
Attribute('provision'),
Attribute('default_size'),
Attribute('get_default_user'),
Attribute('use_private_addresses', instance_of=bool, default_value=False),
], apply_immutable=True)
class LibcloudProvisioner(object):
"""
:ivar libcloud.compute.base.NodeDriver driver: The libcloud driver to use.
:ivar bytes _keyname: The name of an existing ssh public key configured
with the cloud provider. The provision step assumes the corresponding
private key is available from an agent.
:ivar dict image_names: Dictionary mapping distributions to cloud image
names.
:ivar callable _create_node_arguments: Extra arguments to pass to
libcloud's ``create_node``.
:ivar callable provision: Function to call to provision a node.
:ivar str default_size: Name of the default size of node to create.
:ivar callable get_default_user: Function to provide the default
username on the node.
:ivar bool use_private_addresses: Whether the `private_address` of nodes
should be populated. This should be specified if the cluster nodes
use the private address for inter-node communication.
"""
def get_ssh_key(self):
"""
Return the public key associated with the provided keyname.
:return Key: The ssh public key or ``None`` if it can't be determined.
"""
try:
key_pair = self._driver.get_key_pair(self._keyname)
except Exception:
raise CloudKeyNotFound(self._keyname)
if key_pair.public_key is not None:
return Key.fromString(key_pair.public_key, type='public_openssh')
else:
# EC2 only provides the SSH2 fingerprint (for uploaded keys)
# or the SHA-1 hash of the private key (for EC2 generated keys)
# https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_KeyPairInfo.html
return None
def create_node(self, name, distribution,
size=None, disk_size=8,
metadata={}):
"""
Create a node.
:param str name: The name of the node.
:param str distribution: The name of the distribution to
install on the node.
:param str size: The name of the size to use.
:param int disk_size: The size of disk to allocate.
:param dict metadata: Metadata to associate with the node.
:return libcloud.compute.base.Node: The created node.
"""
if size is None:
size = self.default_size
image_name = self.image_names[distribution]
create_node_arguments = self._create_node_arguments(
disk_size=disk_size)
node = self._driver.create_node(
name=name,
image=get_image(self._driver, image_name),
size=get_size(self._driver, size),
ex_keyname=self._keyname,
ex_metadata=metadata,
**create_node_arguments
)
node, addresses = self._driver.wait_until_running(
[node], wait_period=15)[0]
public_address = addresses[0]
if self.use_private_addresses:
private_address = node.private_ips[0]
else:
private_address = None
return LibcloudNode(
provisioner=self,
node=node, address=public_address,
private_address=private_address,
distribution=distribution)
|
isrohutamahopetechnik/MissionPlanner
|
refs/heads/master
|
Lib/netrc.py
|
55
|
"""An object-oriented interface to .netrc files."""
# Module and documentation by Eric S. Raymond, 21 Dec 1998
import os, shlex
__all__ = ["netrc", "NetrcParseError"]
class NetrcParseError(Exception):
"""Exception raised on syntax errors in the .netrc file."""
def __init__(self, msg, filename=None, lineno=None):
self.filename = filename
self.lineno = lineno
self.msg = msg
Exception.__init__(self, msg)
def __str__(self):
return "%s (%s, line %s)" % (self.msg, self.filename, self.lineno)
class netrc:
def __init__(self, file=None):
if file is None:
try:
file = os.path.join(os.environ['HOME'], ".netrc")
except KeyError:
raise IOError("Could not find .netrc: $HOME is not set")
self.hosts = {}
self.macros = {}
with open(file) as fp:
self._parse(file, fp)
def _parse(self, file, fp):
lexer = shlex.shlex(fp)
lexer.wordchars += r"""!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"""
lexer.commenters = lexer.commenters.replace('#', '')
while 1:
# Look for a machine, default, or macdef top-level keyword
toplevel = tt = lexer.get_token()
if not tt:
break
elif tt[0] == '#':
# seek to beginning of comment, in case reading the token put
# us on a new line, and then skip the rest of the line.
pos = len(tt) + 1
lexer.instream.seek(-pos, 1)
lexer.instream.readline()
continue
elif tt == 'machine':
entryname = lexer.get_token()
elif tt == 'default':
entryname = 'default'
elif tt == 'macdef': # Just skip to end of macdefs
entryname = lexer.get_token()
self.macros[entryname] = []
lexer.whitespace = ' \t'
while 1:
line = lexer.instream.readline()
if not line or line == '\012':
lexer.whitespace = ' \t\r\n'
break
self.macros[entryname].append(line)
continue
else:
raise NetrcParseError(
"bad toplevel token %r" % tt, file, lexer.lineno)
# We're looking at start of an entry for a named machine or default.
login = ''
account = password = None
self.hosts[entryname] = {}
while 1:
tt = lexer.get_token()
if (tt.startswith('#') or
tt in {'', 'machine', 'default', 'macdef'}):
if password:
self.hosts[entryname] = (login, account, password)
lexer.push_token(tt)
break
else:
raise NetrcParseError(
"malformed %s entry %s terminated by %s"
% (toplevel, entryname, repr(tt)),
file, lexer.lineno)
elif tt == 'login' or tt == 'user':
login = lexer.get_token()
elif tt == 'account':
account = lexer.get_token()
elif tt == 'password':
password = lexer.get_token()
else:
raise NetrcParseError("bad follower token %r" % tt,
file, lexer.lineno)
def authenticators(self, host):
"""Return a (user, account, password) tuple for given host."""
if host in self.hosts:
return self.hosts[host]
elif 'default' in self.hosts:
return self.hosts['default']
else:
return None
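# Illustrative usage (hypothetical host and credentials, not part of the
# module): netrc().authenticators('example.com') -> ('joe', None, 'secret')
# assuming ~/.netrc contains: machine example.com login joe password secret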
def __repr__(self):
"""Dump the class data in the format of a .netrc file."""
rep = ""
for host in self.hosts.keys():
attrs = self.hosts[host]
rep = rep + "machine "+ host + "\n\tlogin " + repr(attrs[0]) + "\n"
if attrs[1]:
rep = rep + "account " + repr(attrs[1])
rep = rep + "\tpassword " + repr(attrs[2]) + "\n"
for macro in self.macros.keys():
rep = rep + "macdef " + macro + "\n"
for line in self.macros[macro]:
rep = rep + line
rep = rep + "\n"
return rep
if __name__ == '__main__':
print netrc()
|
dfalt974/SickRage
|
refs/heads/master
|
lib/babelfish/converters/countryname.py
|
89
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 the BabelFish authors. All rights reserved.
# Use of this source code is governed by the 3-clause BSD license
# that can be found in the LICENSE file.
#
from __future__ import unicode_literals
from . import CountryReverseConverter, CaseInsensitiveDict
from ..country import COUNTRY_MATRIX
from ..exceptions import CountryConvertError, CountryReverseError
class CountryNameConverter(CountryReverseConverter):
def __init__(self):
self.codes = set()
self.to_name = {}
self.from_name = CaseInsensitiveDict()
for country in COUNTRY_MATRIX:
self.codes.add(country.name)
self.to_name[country.alpha2] = country.name
self.from_name[country.name] = country.alpha2
def convert(self, alpha2):
if alpha2 not in self.to_name:
raise CountryConvertError(alpha2)
return self.to_name[alpha2]
def reverse(self, name):
if name not in self.from_name:
raise CountryReverseError(name)
return self.from_name[name]
|
shownotes/snotes20-restapi
|
refs/heads/master
|
manage.py
|
1
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shownotes.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
kenshay/ImageScripter
|
refs/heads/master
|
ProgramData/SystemFiles/Python/Lib/test/test_file_eintr.py
|
96
|
# Written to test interrupted system calls interfering with our many buffered
# IO implementations. http://bugs.python.org/issue12268
#
# This tests the '_io' module. Similar tests for Python 2.x's older
# default file I/O implementation exist within test_file2k.py.
#
# It was suggested that this code could be merged into test_io and the tests
# made to work using the same method as the existing signal tests in test_io.
# I was unable to get single process tests using alarm or setitimer that way
# to reproduce the EINTR problems. This process based test suite reproduces
# the problems prior to the issue12268 patch reliably on Linux and OSX.
# - gregory.p.smith
import os
import select
import signal
import subprocess
import sys
from test.test_support import run_unittest
import time
import unittest
# Test import all of the things we're about to try testing up front.
from _io import FileIO
@unittest.skipUnless(os.name == 'posix', 'tests requires a posix system.')
class TestFileIOSignalInterrupt(unittest.TestCase):
def setUp(self):
self._process = None
def tearDown(self):
if self._process and self._process.poll() is None:
try:
self._process.kill()
except OSError:
pass
def _generate_infile_setup_code(self):
"""Returns the infile = ... line of code for the reader process.
Subclasses should override this to test different IO objects.
"""
return ('import _io ;'
'infile = _io.FileIO(sys.stdin.fileno(), "rb")')
def fail_with_process_info(self, why, stdout=b'', stderr=b'',
communicate=True):
"""A common way to cleanup and fail with useful debug output.
Kills the process if it is still running, collects remaining output
and fails the test with an error message including the output.
Args:
why: Text to go after "Error from IO process" in the message.
stdout, stderr: standard output and error from the process so
far to include in the error message.
communicate: bool, when True we call communicate() on the process
after killing it to gather additional output.
"""
if self._process.poll() is None:
time.sleep(0.1) # give it time to finish printing the error.
try:
self._process.terminate() # Ensure it dies.
except OSError:
pass
if communicate:
stdout_end, stderr_end = self._process.communicate()
stdout += stdout_end
stderr += stderr_end
self.fail('Error from IO process %s:\nSTDOUT:\n%sSTDERR:\n%s\n' %
(why, stdout.decode(), stderr.decode()))
def _test_reading(self, data_to_write, read_and_verify_code):
"""Generic buffered read method test harness to validate EINTR behavior.
Also validates that Python signal handlers are run during the read.
Args:
data_to_write: String to write to the child process for reading
before sending it a signal, confirming the signal was handled,
writing a final newline and closing the infile pipe.
read_and_verify_code: Single "line" of code to read from a file
object named 'infile' and validate the result. This will be
executed as part of a python subprocess fed data_to_write.
"""
infile_setup_code = self._generate_infile_setup_code()
# Total pipe IO in this function is smaller than the minimum posix OS
# pipe buffer size of 512 bytes. No writer should block.
assert len(data_to_write) < 512, 'data_to_write must fit in pipe buf.'
# Start a subprocess to call our read method while handling a signal.
self._process = subprocess.Popen(
[sys.executable, '-u', '-c',
'import io, signal, sys ;'
'signal.signal(signal.SIGINT, '
'lambda s, f: sys.stderr.write("$\\n")) ;'
+ infile_setup_code + ' ;' +
'sys.stderr.write("Worm Sign!\\n") ;'
+ read_and_verify_code + ' ;' +
'infile.close()'
],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# Wait for the signal handler to be installed.
worm_sign = self._process.stderr.read(len(b'Worm Sign!\n'))
if worm_sign != b'Worm Sign!\n': # See also, Dune by Frank Herbert.
self.fail_with_process_info('while awaiting a sign',
stderr=worm_sign)
self._process.stdin.write(data_to_write)
signals_sent = 0
rlist = []
# We don't know when the read_and_verify_code in our child is actually
# executing within the read system call we want to interrupt. This
# loop waits for a bit before sending the first signal to increase
# the likelihood of that. Implementations without correct EINTR
# and signal handling usually fail this test.
while not rlist:
rlist, _, _ = select.select([self._process.stderr], (), (), 0.05)
self._process.send_signal(signal.SIGINT)
signals_sent += 1
if signals_sent > 200:
self._process.kill()
self.fail('reader process failed to handle our signals.')
# This assumes anything unexpected that writes to stderr will also
# write a newline. That is true of the traceback printing code.
signal_line = self._process.stderr.readline()
if signal_line != b'$\n':
self.fail_with_process_info('while awaiting signal',
stderr=signal_line)
# We append a newline to our input so that a readline call can
# end on its own before the EOF is seen and so that we're testing
# the read call that was interrupted by a signal before the end of
# the data stream has been reached.
stdout, stderr = self._process.communicate(input=b'\n')
if self._process.returncode:
self.fail_with_process_info(
'exited rc=%d' % self._process.returncode,
stdout, stderr, communicate=False)
# PASS!
# String format for the read_and_verify_code used by read methods.
_READING_CODE_TEMPLATE = (
'got = infile.{read_method_name}() ;'
'expected = {expected!r} ;'
'assert got == expected, ('
'"{read_method_name} returned wrong data.\\n"'
'"got data %r\\nexpected %r" % (got, expected))'
)
def test_readline(self):
"""readline() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello, world!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='readline',
expected=b'hello, world!\n'))
def test_readlines(self):
"""readlines() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='readlines',
expected=[b'hello\n', b'world!\n']))
def test_readall(self):
"""readall() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='readall',
expected=b'hello\nworld!\n'))
# read() is the same thing as readall().
self._test_reading(
data_to_write=b'hello\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='read',
expected=b'hello\nworld!\n'))
class TestBufferedIOSignalInterrupt(TestFileIOSignalInterrupt):
def _generate_infile_setup_code(self):
"""Returns the infile = ... line of code to make a BufferedReader."""
return ('infile = io.open(sys.stdin.fileno(), "rb") ;'
'import _io ;assert isinstance(infile, _io.BufferedReader)')
def test_readall(self):
"""BufferedReader.read() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='read',
expected=b'hello\nworld!\n'))
class TestTextIOSignalInterrupt(TestFileIOSignalInterrupt):
def _generate_infile_setup_code(self):
"""Returns the infile = ... line of code to make a TextIOWrapper."""
return ('infile = io.open(sys.stdin.fileno(), "rt", newline=None) ;'
'import _io ;assert isinstance(infile, _io.TextIOWrapper)')
def test_readline(self):
"""readline() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello, world!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='readline',
expected='hello, world!\n'))
def test_readlines(self):
"""readlines() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello\r\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='readlines',
expected=['hello\n', 'world!\n']))
def test_readall(self):
"""read() must handle signals and not lose data."""
self._test_reading(
data_to_write=b'hello\nworld!',
read_and_verify_code=self._READING_CODE_TEMPLATE.format(
read_method_name='read',
expected="hello\nworld!\n"))
def test_main():
test_cases = [
tc for tc in globals().values()
if isinstance(tc, type) and issubclass(tc, unittest.TestCase)]
run_unittest(*test_cases)
if __name__ == '__main__':
test_main()
|
indictranstech/trufil-frappe
|
refs/heads/develop
|
frappe/website/doctype/blog_category/blog_category.py
|
45
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.website.website_generator import WebsiteGenerator
from frappe.website.render import clear_cache
class BlogCategory(WebsiteGenerator):
def autoname(self):
# to override autoname of WebsiteGenerator
self.name = self.category_name
def on_update(self):
WebsiteGenerator.on_update(self)
clear_cache()
def validate(self):
self.parent_website_route = "blog"
super(BlogCategory, self).validate()
|
mrow4a/smashbox
|
refs/heads/master
|
lib/test_dirDelete.py
|
4
|
from smashbox.utilities import *
__doc__ = """ This test creates a deeply nested directory structure and then removes it
"""
import os.path
NESTING_LEVELS = config.get('dirDel_nestingLevels', 50)
nfiles = int(config.get('dirDel_nfiles', 100))
TEST_FILES = ['test%02d.dat'%i for i in range(nfiles)]
@add_worker
def workerA(step):
#cleanup remote and local test environment - this should be run once by one worker only
reset_owncloud_account()
reset_rundir()
step(0,'create initial content and sync')
# this will be our syncdir (this is different for every worker)
syncdir = make_workdir()
# create a folder and some files in it
path = "0"
for i in xrange(1, NESTING_LEVELS):
path = path + "/" + str(i)
d1 = mkdir(os.path.join(syncdir, path))
for f in TEST_FILES:
fn = os.path.join(d1,f)
createfile(fn,'0',count=1,bs=1000)
run_ocsync(syncdir)
step(2,'delete the folder and sync')
topLevelDir = path.split("/", 1)[0]
d2 = os.path.join(syncdir, topLevelDir)
remove_tree(d2)
#createfile(os.path.join(syncdir,'touch'),'0',count=1,bs=1)
expect_webdav_exist(topLevelDir)
run_ocsync(syncdir)
expect_does_not_exist(d2)
expect_webdav_does_not_exist(topLevelDir)
|
ryfeus/lambda-packs
|
refs/heads/master
|
Skimage_numpy/source/scipy/weave/examples/md5_speed.py
|
100
|
"""
Storing actual strings instead of their md5 value appears to
be about 10 times faster.
>>> md5_speed.run(200,50000)
md5 build(len,sec): 50000 0.870999932289
md5 retrv(len,sec): 50000 0.680999994278
std build(len,sec): 50000 0.259999990463
std retrv(len,sec): 50000 0.0599999427795
This test actually takes several minutes to generate the random
keys used to populate the dictionaries. Here is a smaller run,
but with longer keys.
>>> md5_speed.run(1000,4000)
md5 build(len,sec,per): 4000 0.129999995232 3.24999988079e-005
md5 retrv(len,sec,per): 4000 0.129999995232 3.24999988079e-005
std build(len,sec,per): 4000 0.0500000715256 1.25000178814e-005
std retrv(len,sec,per): 4000 0.00999999046326 2.49999761581e-006
Results are similar, though not statistically reliable because of
the short times used and the available clock resolution.
Still, I think it is safe to say that, for speed, it is better
to store entire strings instead of using md5 versions of
their strings. Yeah, the expected result, but it never hurts
to check...
"""
from __future__ import absolute_import, print_function
import random
import md5
import time
import cStringIO
def speed(n,m):
s = 'a'*n
t1 = time.time()
for i in range(m):
q = md5.new(s).digest()
t2 = time.time()
print((t2 - t1) / m)
#speed(50,1e6)
def generate_random(avg_length,count):
all_str = []
alphabet = 'abcdefghijklmnopqrstuvwxyz'
lo,hi = [30,avg_length*2+30]
for i in range(count):
new_str = cStringIO.StringIO()
l = random.randrange(lo,hi)
for i in range(l):
new_str.write(random.choice(alphabet))
all_str.append(new_str.getvalue())
return all_str
def md5_dict(lst):
catalog = {}
t1 = time.time()
for s in lst:
key = md5.new(s).digest()
catalog[key] = None
t2 = time.time()
print('md5 build(len,sec,per):', len(lst), t2 - t1, (t2-t1)/len(lst))
t1 = time.time()
for s in lst:
key = md5.new(s).digest()
val = catalog[key]
t2 = time.time()
print('md5 retrv(len,sec,per):', len(lst), t2 - t1, (t2-t1)/len(lst))
def std_dict(lst):
catalog = {}
t1 = time.time()
for s in lst:
catalog[s] = None
t2 = time.time()
print('std build(len,sec,per):', len(lst), t2 - t1, (t2-t1)/len(lst))
t1 = time.time()
for s in lst:
val = catalog[s]
t2 = time.time()
print('std retrv(len,sec,per):', len(lst), t2 - t1, (t2-t1)/len(lst))
def run(m=200,n=10):
lst = generate_random(m,n)
md5_dict(lst)
std_dict(lst)
run(2000,100)
|
40223125/40223125-2
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/_testcapi.py
|
742
|
CHAR_MAX = 127
CHAR_MIN = -128
DBL_MAX = 1.7976931348623157e+308
DBL_MIN = 2.2250738585072014e-308
FLT_MAX = 3.4028234663852886e+38
FLT_MIN = 1.1754943508222875e-38
INT_MAX = 2147483647
INT_MIN = -2147483648
LLONG_MAX = 9223372036854775807
LLONG_MIN = -9223372036854775808
LONG_MAX = 2147483647
LONG_MIN = -2147483648
PY_SSIZE_T_MAX = 2147483647
PY_SSIZE_T_MIN = -2147483648
SHRT_MAX = 32767
SHRT_MIN = -32768
SIZEOF_PYGC_HEAD = 16
UCHAR_MAX = 255
UINT_MAX = 4294967295
ULLONG_MAX = 18446744073709551615
ULONG_MAX = 4294967295
USHRT_MAX = 65535
__loader__ = "<_frozen_importlib.ExtensionFileLoader object at 0x00C98DD0>"
def _pending_threadfunc(*args,**kw):
pass
class _test_structmembersType(object):
pass
def _test_thread_state(*args,**kw):
pass
def argparsing(*args,**kw):
pass
def code_newempty(*args,**kw):
pass
def codec_incrementaldecoder(*args,**kw):
pass
def codec_incrementalencoder(*args,**kw):
pass
def crash_no_current_thread(*args,**kw):
pass
class error(Exception):
pass
def exception_print(*args,**kw):
pass
def getargs_B(*args,**kw):
pass
def getargs_H(*args,**kw):
pass
def getargs_I(*args,**kw):
pass
def getargs_K(*args,**kw):
pass
def getargs_L(*args,**kw):
pass
def getargs_Z(*args,**kw):
pass
def getargs_Z_hash(*args,**kw):
pass
def getargs_b(*args,**kw):
pass
def getargs_c(*args,**kw):
pass
def getargs_h(*args,**kw):
pass
def getargs_i(*args,**kw):
pass
def getargs_k(*args,**kw):
pass
def getargs_keyword_only(*args,**kw):
pass
def getargs_keywords(*args,**kw):
pass
def getargs_l(*args,**kw):
pass
def getargs_n(*args,**kw):
pass
def getargs_p(*args,**kw):
pass
def getargs_s(*args,**kw):
pass
def getargs_s_hash(*args,**kw):
pass
def getargs_s_star(*args,**kw):
pass
def getargs_tuple(*args,**kw):
pass
def getargs_u(*args,**kw):
pass
def getargs_u_hash(*args,**kw):
pass
def getargs_w_star(*args,**kw):
pass
def getargs_y(*args,**kw):
pass
def getargs_y_hash(*args,**kw):
pass
def getargs_y_star(*args,**kw):
pass
def getargs_z(*args,**kw):
pass
def getargs_z_hash(*args,**kw):
pass
def getargs_z_star(*args,**kw):
pass
class instancemethod(object):
pass
def make_exception_with_doc(*args,**kw):
pass
def make_memoryview_from_NULL_pointer(*args,**kw):
pass
def parse_tuple_and_keywords(*args,**kw):
pass
def pytime_object_to_time_t(*args,**kw):
pass
def pytime_object_to_timespec(*args,**kw):
pass
def pytime_object_to_timeval(*args,**kw):
pass
def raise_exception(*args,**kw):
pass
def raise_memoryerror(*args,**kw):
pass
def run_in_subinterp(*args,**kw):
pass
def set_exc_info(*args,**kw):
pass
def test_L_code(*args,**kw):
pass
def test_Z_code(*args,**kw):
pass
def test_capsule(*args,**kw):
pass
def test_config(*args,**kw):
pass
def test_datetime_capi(*args,**kw):
pass
def test_dict_iteration(*args,**kw):
pass
def test_empty_argparse(*args,**kw):
pass
def test_k_code(*args,**kw):
pass
def test_lazy_hash_inheritance(*args,**kw):
pass
def test_list_api(*args,**kw):
pass
def test_long_and_overflow(*args,**kw):
pass
def test_long_api(*args,**kw):
pass
def test_long_as_double(*args,**kw):
pass
def test_long_as_size_t(*args,**kw):
pass
def test_long_long_and_overflow(*args,**kw):
pass
def test_long_numbits(*args,**kw):
pass
def test_longlong_api(*args,**kw):
pass
def test_null_strings(*args,**kw):
pass
def test_s_code(*args,**kw):
pass
def test_string_from_format(*args,**kw):
pass
def test_string_to_double(*args,**kw):
pass
def test_u_code(*args,**kw):
pass
def test_unicode_compare_with_ascii(*args,**kw):
pass
def test_widechar(*args,**kw):
pass
def test_with_docstring(*args,**kw):
"""This is a pretty normal docstring."""
pass
def traceback_print(*args,**kw):
pass
def unicode_aswidechar(*args,**kw):
pass
def unicode_aswidecharstring(*args,**kw):
pass
def unicode_encodedecimal(*args,**kw):
pass
def unicode_transformdecimaltoascii(*args,**kw):
pass
|
rs2/bokeh
|
refs/heads/master
|
examples/models/file/gauges.py
|
12
|
from __future__ import print_function
from math import pi, sin, cos
from bokeh.document import Document
from bokeh.embed import file_html
from bokeh.resources import INLINE
from bokeh.util.browser import view
from bokeh.models.glyphs import Circle, Arc, Ray, Text
from bokeh.models import ColumnDataSource, Range1d, Plot
xdr = Range1d(start=-1.25, end=1.25)
ydr = Range1d(start=-1.25, end=1.25)
plot = Plot(x_range=xdr, y_range=ydr, plot_width=600, plot_height=600)
plot.title.text = "Speedometer"
plot.toolbar_location = None
start_angle = pi + pi/4
end_angle = -pi/4
max_kmh = 250
max_mph = max_kmh*0.621371
major_step, minor_step = 25, 5
plot.add_glyph(Circle(x=0, y=0, radius=1.00, fill_color="white", line_color="black"))
plot.add_glyph(Circle(x=0, y=0, radius=0.05, fill_color="gray", line_color="black"))
plot.add_glyph(Text(x=0, y=+0.15, text=["km/h"], text_color="red", text_align="center", text_baseline="bottom", text_font_style="bold"))
plot.add_glyph(Text(x=0, y=-0.15, text=["mph"], text_color="blue", text_align="center", text_baseline="top", text_font_style="bold"))
def data(value):
"""Shorthand to override default units with "data", for e.g. `Ray.length`. """
return dict(value=value, units="data")
def speed_to_angle(speed, units):
max_speed = max_kmh if units == "kmh" else max_mph
speed = min(max(speed, 0), max_speed)
total_angle = start_angle - end_angle
angle = total_angle*float(speed)/max_speed
return start_angle - angle
def add_needle(speed, units):
angle = speed_to_angle(speed, units)
plot.add_glyph(Ray(x=0, y=0, length=data(0.75), angle=angle, line_color="black", line_width=3))
plot.add_glyph(Ray(x=0, y=0, length=data(0.10), angle=angle-pi, line_color="black", line_width=3))
def polar_to_cartesian(r, alpha):
return r*cos(alpha), r*sin(alpha)
def add_gauge(radius, max_value, length, direction, color, major_step, minor_step):
major_angles, minor_angles = [], []
major_labels, minor_labels = [], []
total_angle = start_angle - end_angle
major_angle_step = float(major_step)/max_value*total_angle
minor_angle_step = float(minor_step)/max_value*total_angle
major_angle = 0
while major_angle <= total_angle:
major_angles.append(start_angle - major_angle)
major_angle += major_angle_step
minor_angle = 0
while minor_angle <= total_angle:
minor_angles.append(start_angle - minor_angle)
minor_angle += minor_angle_step
major_labels = [ major_step*i for i, _ in enumerate(major_angles) ]
minor_labels = [ minor_step*i for i, _ in enumerate(minor_angles) ]
n = major_step/minor_step
minor_angles = [ x for i, x in enumerate(minor_angles) if i % n != 0 ]
minor_labels = [ x for i, x in enumerate(minor_labels) if i % n != 0 ]
glyph = Arc(x=0, y=0, radius=radius, start_angle=start_angle, end_angle=end_angle, direction="clock", line_color=color, line_width=2)
plot.add_glyph(glyph)
rotation = 0 if direction == 1 else -pi
x, y = zip(*[ polar_to_cartesian(radius, angle) for angle in major_angles ])
angles = [ angle + rotation for angle in major_angles ]
source = ColumnDataSource(dict(x=x, y=y, angle=angles))
glyph = Ray(x="x", y="y", length=data(length), angle="angle", line_color=color, line_width=2)
plot.add_glyph(source, glyph)
x, y = zip(*[ polar_to_cartesian(radius, angle) for angle in minor_angles ])
angles = [ angle + rotation for angle in minor_angles ]
source = ColumnDataSource(dict(x=x, y=y, angle=angles))
glyph = Ray(x="x", y="y", length=data(length/2), angle="angle", line_color=color, line_width=1)
plot.add_glyph(source, glyph)
x, y = zip(*[ polar_to_cartesian(radius+2*length*direction, angle) for angle in major_angles ])
text_angles = [ angle - pi/2 for angle in major_angles ]
source = ColumnDataSource(dict(x=x, y=y, angle=text_angles, text=major_labels))
glyph = Text(x="x", y="y", angle="angle", text="text", text_align="center", text_baseline="middle")
plot.add_glyph(source, glyph)
add_gauge(0.75, max_kmh, 0.05, +1, "red", major_step, minor_step)
add_gauge(0.70, max_mph, 0.05, -1, "blue", major_step, minor_step)
add_needle(55, "kmh")
doc = Document()
doc.add_root(plot)
if __name__ == "__main__":
doc.validate()
filename = "gauges.html"
with open(filename, "w") as f:
f.write(file_html(doc, INLINE, "Gauges"))
print("Wrote %s" % filename)
view(filename)
|
HybridF5/nova
|
refs/heads/master
|
nova/tests/unit/scheduler/weights/test_weights_ioopsweight.py
|
73
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler IoOpsWeigher weights
"""
from nova.scheduler import weights
from nova.scheduler.weights import io_ops
from nova import test
from nova.tests.unit.scheduler import fakes
class IoOpsWeigherTestCase(test.NoDBTestCase):
def setUp(self):
super(IoOpsWeigherTestCase, self).setUp()
self.weight_handler = weights.HostWeightHandler()
self.weighers = [io_ops.IoOpsWeigher()]
def _get_weighed_host(self, hosts, io_ops_weight_multiplier):
if io_ops_weight_multiplier is not None:
self.flags(io_ops_weight_multiplier=io_ops_weight_multiplier)
return self.weight_handler.get_weighed_objects(self.weighers,
hosts, {})[0]
def _get_all_hosts(self):
host_values = [
('host1', 'node1', {'num_io_ops': 1}),
('host2', 'node2', {'num_io_ops': 2}),
('host3', 'node3', {'num_io_ops': 0}),
('host4', 'node4', {'num_io_ops': 4})
]
return [fakes.FakeHostState(host, node, values)
for host, node, values in host_values]
def _do_test(self, io_ops_weight_multiplier, expected_weight,
expected_host):
hostinfo_list = self._get_all_hosts()
weighed_host = self._get_weighed_host(hostinfo_list,
io_ops_weight_multiplier)
self.assertEqual(weighed_host.weight, expected_weight)
if expected_host:
self.assertEqual(weighed_host.obj.host, expected_host)
def test_io_ops_weight_multiplier_by_default(self):
self._do_test(io_ops_weight_multiplier=None,
expected_weight=0.0,
expected_host='host3')
def test_io_ops_weight_multiplier_zero_value(self):
# We do not know the host; all have the same weight.
self._do_test(io_ops_weight_multiplier=0.0,
expected_weight=0.0,
expected_host=None)
def test_io_ops_weight_multiplier_positive_value(self):
self._do_test(io_ops_weight_multiplier=2.0,
expected_weight=2.0,
expected_host='host4')
|
dkliban/pulp_puppet
|
refs/heads/master
|
pulp_puppet_plugins/pulp_puppet/forge/urls.py
|
4
|
from django.conf.urls import patterns, url
from pulp_puppet.forge.views.releases import ReleasesView, ReleasesPost36View
from pulp.server.db import connection
connection.initialize()
urlpatterns = patterns('',
url(r'^pulp_puppet/forge/([^/]+)/([^/]+)/api/v1/releases.json', ReleasesView.as_view(),
name='post_33_releases'),
url(r'^api/v1/releases.json', ReleasesView.as_view(), name='pre_33_releases'),
url(r'^v3/releases', ReleasesPost36View.as_view(), name='post_36_releases')
)
|
vjFaLk/frappe
|
refs/heads/bloomstack-production
|
frappe/desk/page/backups/backups.py
|
1
|
from __future__ import unicode_literals
import os
import frappe
from frappe import _
from frappe.utils import get_site_path, cint, get_url
from frappe.utils.data import convert_utc_to_user_timezone
import datetime
def get_context(context):
def get_time(path):
dt = os.path.getmtime(path)
return convert_utc_to_user_timezone(datetime.datetime.utcfromtimestamp(dt)).strftime('%Y-%m-%d %H:%M')
def get_size(path):
size = os.path.getsize(path)
if size > 1048576:
return "{0:.1f}M".format(float(size) / 1048576)
else:
return "{0:.1f}K".format(float(size) / 1024)
path = get_site_path('private', 'backups')
files = [x for x in os.listdir(path) if os.path.isfile(os.path.join(path, x))]
backup_limit = get_scheduled_backup_limit()
if len(files) > backup_limit:
cleanup_old_backups(path, files, backup_limit)
files = [('/backups/' + _file,
get_time(os.path.join(path, _file)),
get_size(os.path.join(path, _file))) for _file in files if _file.endswith('sql.gz')]
files.sort(key=lambda x: x[1], reverse=True)
return {"files": files}
def get_scheduled_backup_limit():
backup_limit = frappe.db.get_singles_value('System Settings', 'backup_limit')
return cint(backup_limit)
def cleanup_old_backups(site_path, files, limit):
backup_paths = []
for f in files:
if f.endswith('sql.gz'):
_path = os.path.abspath(os.path.join(site_path, f))
backup_paths.append(_path)
backup_paths = sorted(backup_paths, key=os.path.getctime)
files_to_delete = len(backup_paths) - limit
for idx in range(0, files_to_delete):
f = os.path.basename(backup_paths[idx])
files.remove(f)
os.remove(backup_paths[idx])
def delete_downloadable_backups():
path = get_site_path('private', 'backups')
files = [x for x in os.listdir(path) if os.path.isfile(os.path.join(path, x))]
backup_limit = get_scheduled_backup_limit()
if len(files) > backup_limit:
cleanup_old_backups(path, files, backup_limit)
@frappe.whitelist()
def schedule_files_backup(user_email):
from frappe.utils.background_jobs import enqueue, get_jobs
queued_jobs = get_jobs(site=frappe.local.site, queue="long")
method = 'frappe.desk.page.backups.backups.backup_files_and_notify_user'
if method not in queued_jobs[frappe.local.site]:
enqueue("frappe.desk.page.backups.backups.backup_files_and_notify_user", queue='long', user_email=user_email)
frappe.msgprint(_("Queued for backup. You will receive an email with the download link"))
else:
frappe.msgprint(_("Backup job is already queued. You will receive an email with the download link"))
def backup_files_and_notify_user(user_email=None):
from frappe.utils.backups import backup
backup_files = backup(with_files=True)
get_downloadable_links(backup_files)
subject = _("File backup is ready")
frappe.sendmail(
recipients=[user_email],
subject=subject,
template="file_backup_notification",
args=backup_files,
header=[subject, 'green']
)
def get_downloadable_links(backup_files):
for key in ['backup_path_files', 'backup_path_private_files']:
path = backup_files[key]
backup_files[key] = get_url('/'.join(path.split('/')[-2:]))
|
anisku11/sublimeku
|
refs/heads/master
|
Packages/CodeComplice/libs/codeintel2/accessor.py
|
1
|
#!python
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
# License for the specific language governing rights and limitations
# under the License.
#
# The Original Code is Komodo code.
#
# The Initial Developer of the Original Code is ActiveState Software Inc.
# Portions created by ActiveState Software Inc are Copyright (C) 2000-2007
# ActiveState Software Inc. All Rights Reserved.
#
# Contributor(s):
# ActiveState Software Inc
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
"""The Accessor interface (and implementations) for accessing scintilla
lexer-based styled buffers.
"""
import bisect
import threading
from SilverCity import ScintillaConstants
from codeintel2.common import *
from codeintel2 import util
if _xpcom_:
from xpcom import components
from xpcom.client import WeakReference
from xpcom import COMException
class Accessor(object):
"""Virtual base class for a lexed text accessor. This defines an API
with which lexed text data (the text content, styling info, etc.) is
accessed by trigger/completion/etc. handling. Actual instances will
be one of the subclasses.
"""
def char_at_pos(self, pos):
raise VirtualMethodError()
def style_at_pos(self, pos):
raise VirtualMethodError()
def line_and_col_at_pos(self, pos):
raise VirtualMethodError()
def gen_char_and_style_back(self, start, stop):
"""Generate (char, style) tuples backward from start to stop
a la range(start, stop, -1) -- i.e. exclusive at 'stop' index.
For SciMozAccessor this can be implemented more efficiently than
the naive usage of char_at_pos()/style_at_pos().
"""
raise VirtualMethodError()
def gen_char_and_style(self, start, stop):
"""Generate (char, style) tuples forward from start to stop
a la range(start, stop) -- i.e. exclusive at 'stop' index.
For SciMozAccessor this can be implemented more efficiently than
the naive usage of char_at_pos()/style_at_pos().
"""
raise VirtualMethodError()
def match_at_pos(self, pos, s):
"""Return True if the given string matches the text at the given
position.
"""
raise VirtualMethodError()
def line_from_pos(self, pos):
"""Return the 0-based line number for the given position."""
raise VirtualMethodError()
def lines_from_positions(self, positions):
"""Yield the associate 0-based line for each of a number of
positions. This can be much faster than multiple calls to
`line_from_pos` for some accessors.
"""
for pos in positions:
yield self.line_from_pos(pos)
def line_start_pos_from_pos(self, pos):
"""Return the position of the start of the line of the given pos."""
raise VirtualMethodError()
def pos_from_line_and_col(self, line, col):
"""Return the position of the given line and column."""
raise VirtualMethodError()
@property
def text(self):
"""All buffer content (as a unicode string)."""
raise VirtualMethodError()
def text_range(self, start, end):
raise VirtualMethodError()
def length(self):
"""Return the length of the buffer.
Note that whether this returns a *character* pos or a *byte* pos is
left fuzzy so that SilverCity and SciMoz implementations can be
efficient. All that is guaranteed is that the *_at_pos() methods
work as expected.
"""
raise VirtualMethodError()
# def gen_pos_and_char_fwd(self, start_pos):
# """Generate (<pos>, <char>) tuples forward from the starting
# position until the end of the document.
#
# Note that whether <pos> is a *character* pos or a *byte* pos is
# left fuzzy so that SilverCity and SciMoz implementations can be
# efficient.
# """
# raise VirtualMethodError()
def gen_tokens(self):
"""Generator for all styled tokens in the buffer.
        Currently this should yield token dicts a la SilverCity's
tokenize_by_style().
"""
raise VirtualMethodError()
def contiguous_style_range_from_pos(self, pos):
"""Returns a 2-tuple (start, end) giving the span of the sequence of
characters with the style at position pos."""
raise VirtualMethodError()
class SilverCityAccessor(Accessor):
def __init__(self, lexer, content):
# Assume buffer encoding is always UTF-8
self.lexer = lexer
self.content = str(content)
def reset_content(self, content):
"""A backdoor specific to this accessor to allow the equivalent of
updating the buffer/file/content.
"""
self.content = str(content)
self.__tokens_cache = None
self.__position_data_cache = None
__tokens_cache = None
@property
def tokens(self):
if self.__tokens_cache is None:
self.__tokens_cache = self.lexer.tokenize_by_style(self.content)
return self.__tokens_cache
def _char_pos_from_byte_pos(self, pos):
line = self.line_from_pos(pos)
byte_offset, char_offset = self.__position_data[line][:2]
next_byte_offset = (byte_offset +
len(self.content[char_offset].encode("utf-8")))
try:
while next_byte_offset <= pos:
byte_offset = next_byte_offset
char_offset += 1
next_byte_offset += len(self.content[
char_offset].encode("utf-8"))
except IndexError:
pass # running past EOF
return char_offset
def char_at_pos(self, pos):
return self.content[self._char_pos_from_byte_pos(pos)]
def _token_at_pos(self, pos):
# XXX Locality of reference should offer an optimization here.
# Binary search for appropriate token.
lower, upper = 0, len(self.tokens) - 1 # [lower-limit, upper-limit)
sentinel = 20
while sentinel > 0:
idx = ((upper - lower) // 2) + lower
token = self.tokens[idx]
# print "_token_at_pos %d: token idx=%d text[%d:%d]=%r"\
# % (pos, idx, token["start_index"], token["end_index"],
# token["text"])
# start, end = token["start_index"], token["end_index"]
if lower == upper:
return token
elif pos < token["start_index"]:
upper = idx
elif pos > token["end_index"]:
lower = idx + 1
else:
return token
sentinel -= 1
else:
raise CodeIntelError("style_at_pos binary search sentinel hit: "
"there is likely a logic problem here!")
def style_at_pos(self, pos):
return self._token_at_pos(pos)["style"]
def line_and_col_at_pos(self, pos):
line = self.line_from_pos(pos)
byte_offset, char_offset = self.__position_data[line][:2]
line_char_offset = char_offset
try:
while byte_offset < pos:
byte_offset += len(self.content[char_offset].encode("utf-8"))
char_offset += 1
except IndexError:
char_offset += 1 # EOF
return line, char_offset - line_char_offset
# PERF: If perf is important for this accessor then could do much
# better with smarter use of _token_at_pos() for these two.
def gen_char_and_style_back(self, start, stop):
assert -1 <= stop <= start, "stop: %r, start: %r" % (stop, start)
for pos in range(start, stop, -1):
yield (self.char_at_pos(pos), self.style_at_pos(pos))
def gen_char_and_style(self, start, stop):
assert 0 <= start <= stop, "start: %r, stop: %r" % (start, stop)
for pos in range(start, stop):
yield (self.char_at_pos(pos), self.style_at_pos(pos))
def match_at_pos(self, pos, s):
char_pos = self._char_pos_from_byte_pos(pos)
return self.content[char_pos:char_pos+len(s)] == s
__position_data_cache = None
@property
def __position_data(self):
"""A list holding the cache of line position data. The index is the
line number; the value is a four-tuple of (start pos in bytes,
start pos in chars, line length in bytes, line length in chars).
"""
if self.__position_data_cache is None:
data = []
byte_offset = 0
char_offset = 0
for line_str in self.content.splitlines(True):
byte_length = len(line_str.encode("utf-8"))
char_length = len(line_str)
data.append((
byte_offset, char_offset, byte_length, char_length))
byte_offset += byte_length
char_offset += char_length
self.__position_data_cache = data
return self.__position_data_cache
def lines_from_char_positions(self, starts):
"""Yield the 0-based lines given the *character* positions."""
line_starts = [p[1] for p in self.__position_data] # in chars
for char_pos in starts:
# see line_from_pos for the adjustments
yield bisect.bisect_left(line_starts, char_pos + 1) - 1
def line_from_pos(self, byte_pos):
r"""
>>> sa = SilverCityAccessor(lexer,
... #0 1 2 3
... #01234567890 123456789 01234567890 12345
... 'import sys\nif True:\nprint "hi"\n# bye')
>>> sa.line_from_pos(0)
0
>>> sa.line_from_pos(9)
0
>>> sa.line_from_pos(10)
0
>>> sa.line_from_pos(11)
1
>>> sa.line_from_pos(22)
2
>>> sa.line_from_pos(34)
3
>>> sa.line_from_pos(35)
3
"""
        # Search for (byte_pos + 1,) in the position data: bisecting on that
        # one-tuple always lands just after the last line whose start offset is
        # at or before byte_pos, so subtracting one gives the index of that
        # line itself (otherwise positions past a line start would resolve to
        # the following line).
return bisect.bisect_left(self.__position_data, (byte_pos + 1,)) - 1
def line_start_pos_from_pos(self, pos):
return self.__position_data[self.line_from_pos(pos)][0]
def pos_from_line_and_col(self, line, col):
byte_offset, char_offset = self.__position_data[line][:2]
substring = self.content[char_offset:char_offset+col].encode("utf-8")
return byte_offset + len(substring)
@property
def text(self):
return self.content
def text_range(self, start, end):
return self.content[self._char_pos_from_byte_pos(start):
self._char_pos_from_byte_pos(end)]
def length(self):
byte_offset, byte_length = self.__position_data[-1][::2]
return byte_offset + byte_length
def gen_tokens(self):
for token in self.tokens:
yield token
def contiguous_style_range_from_pos(self, pos):
token = self._token_at_pos(pos)
return (token["start_index"], token["end_index"] + 1)
class SciMozAccessor(Accessor):
def __init__(self, scimoz, silvercity_lexer):
self.scimoz = WeakReference(scimoz)
self.silvercity_lexer = silvercity_lexer
def char_at_pos(self, pos):
return self.scimoz().getWCharAt(pos)
def style_at_pos(self, pos):
return self.scimoz().getStyleAt(pos)
def line_and_col_at_pos(self, pos):
scimoz = self.scimoz()
line = scimoz.lineFromPosition(pos)
col = pos - scimoz.positionFromLine(line)
return line, col
# These two are *much* faster than repeatedly calling char_at_pos()
# and style_at_pos().
def gen_char_and_style_back(self, start, stop):
if start > stop:
# For scimoz.getStyledText(), it's (inclusive, exclusive)
styled_text = self.scimoz().getStyledText(stop+1, start+1)
for i in range(len(styled_text)-2, -2, -2):
yield (styled_text[i], ord(styled_text[i+1]))
elif start == stop:
pass
else:
raise AssertionError("start (%r) < stop (%r)" % (start, stop))
def gen_char_and_style(self, start, stop):
if start < stop:
# For scimoz.getStyledText(), it's (inclusive, exclusive)
styled_text = self.scimoz().getStyledText(start, stop)
for i in range(0, len(styled_text), 2):
yield (styled_text[i], ord(styled_text[i+1]))
elif start == stop:
pass
else:
raise AssertionError("start (%r) > stop (%r)" % (start, stop))
# XXX def match_at_pos(self, pos, s):...
def line_from_pos(self, pos):
return self.scimoz().lineFromPosition(pos)
def lines_from_positions(self, positions):
# Note: for a large enough set of positions it might be faster
# to use the algorithm in SilverCityAccessor.
scimoz = self.scimoz()
for pos in positions:
yield scimoz.lineFromPosition(pos)
def line_start_pos_from_pos(self, pos):
scimoz = self.scimoz()
return scimoz.positionFromLine(scimoz.lineFromPosition(pos))
def pos_from_line_and_col(self, line, col):
return self.scimoz().positionFromLine(line) + col
@property
def text(self):
return self.scimoz().text
def text_range(self, start, end):
return self.scimoz().getTextRange(start, end)
def length(self):
return self.scimoz().length
# raise NotImplementedError(
# "Calculating the *character* length of a SciMoz buffer can "
# "be expensive. Are you sure you want to use this method? "
# "Try accessor.gen_pos_and_char_fwd() first.")
def gen_tokens(self):
if self.silvercity_lexer:
# PERF: This is not a great solution but see bug 54217.
acc = SilverCityAccessor(self.silvercity_lexer, self.text)
for token in acc.gen_tokens():
yield token
else:
# Silvercity lexer doesn't exist, use styles straight from SciMoz.
scimoz = self.scimoz()
styled_text = scimoz.getStyledText(0, scimoz.length)
text = styled_text[::2]
styles = styled_text[1::2]
start_index = 0
prev_style = -1
last_i = len(styles) - 1
for i in range(len(styles)):
style = styles[i]
if style != prev_style or i == last_i:
token_text = text[start_index:i]
if token_text:
token = {
'style': ord(prev_style),
'text': token_text,
'start_index': start_index,
'end_index': i-1,
'start_column': 0, # unset
'end_column': 0, # unset
'start_line': 0, # unset
'end_line': 0, # unset
}
yield token
start_index = i
prev_style = style
def contiguous_style_range_from_pos(self, pos):
curr_style = self.style_at_pos(pos)
i = pos - 1
while i >= 0 and self.style_at_pos(i) == curr_style:
i -= 1
start_pos = i + 1
last_pos = self.length()
i = pos + 1
while i < last_pos and self.style_at_pos(i) == curr_style:
i += 1
end_pos = i # Point one past the end
return (start_pos, end_pos)
@property
def udl_family_chunk_ranges(self):
"""Generate a list of continguous UDL-family ranges.
Generates 3-tuples:
(<udl-family>, <start-byte-offset>, <end-byte-offset>)
where
<udl-family> is one of "M", "CSS", "CSL", "SSL", "TPL"
<start-byte-offset> is inclusive
<end-byte-offset> is exclusive (like a Python range)
        Note: For non-UDL languages this will return one chunk that is the
whole document and <udl-family> will be None.
"""
# LexUDL will set indicator 18 on the start char (or set of chars)
# beginning a new UDL family section.
scimoz = self.scimoz()
# Note: value must match that in LexUDL.cxx and koILinter.idl.
DECORATOR_UDL_FAMILY_TRANSITION = 18
pos = 0
length = scimoz.length
while pos < length:
start = scimoz.indicatorStart(DECORATOR_UDL_FAMILY_TRANSITION, pos)
end = scimoz.indicatorEnd(DECORATOR_UDL_FAMILY_TRANSITION, start+1)
if start == end == 0: # No indicators.
yield (None, 0, length)
break
start = max(start-1, 0)
# print "range: %d (%r) - %d (%r): %s" % (
# start, scimoz.getWCharAt(start),
# end, scimoz.getWCharAt(end-1),
# self._udl_family_from_style(scimoz.getStyleAt(pos)))
# print util.indent(repr(scimoz.getTextRange(start, end)))
yield (self._udl_family_from_style(scimoz.getStyleAt(pos)),
start, end)
pos = end + 1
_udl_family_from_start_style = {
ScintillaConstants.SCE_UDL_M_DEFAULT: "M",
ScintillaConstants.SCE_UDL_CSS_DEFAULT: "CSS",
ScintillaConstants.SCE_UDL_CSL_DEFAULT: "CSL",
ScintillaConstants.SCE_UDL_SSL_DEFAULT: "SSL",
ScintillaConstants.SCE_UDL_TPL_DEFAULT: "TPL",
}
_udl_family_start_styles = list(sorted(
_udl_family_from_start_style.keys()))
@classmethod
def _udl_family_from_style(cls, style):
"""Determine which UDL family this style is in. Returns one
of M, CSS, CSL, SSL or TPL.
"""
idx = bisect.bisect_right(cls._udl_family_start_styles, style)
start_style = cls._udl_family_start_styles[idx-1]
fam = cls._udl_family_from_start_style[start_style]
return fam
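# Hedged illustration (not part of the original file): _udl_family_from_style()
# resolves a raw Scintilla style number to its UDL family by bisecting over the
# per-family "default" styles listed above, so a family's own default style
# (and any style up to the next family's default) maps to that family's name.
def _demo_udl_family_lookup():
    ssl_default = ScintillaConstants.SCE_UDL_SSL_DEFAULT
    return SciMozAccessor._udl_family_from_style(ssl_default)  # -> "SSL"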
class KoDocumentAccessor(SciMozAccessor):
"""An accessor that lazily defers to the first view attached to this
Komodo document object.
"""
def __init__(self, doc, silvercity_lexer):
self.doc = WeakReference(doc)
self.silvercity_lexer = silvercity_lexer
def _get_scimoz_ref(self):
try:
view = self.doc().getView()
except (COMException, AttributeError) as ex:
# Race conditions on file opening in Komodo can result
# in self.doc() being None or an error in .getView().
raise NoBufferAccessorError(str(ex))
return view.scimoz
if _xpcom_:
# The view is implemented in JavaScript, so we need to proxy the
# _get_scimoz_ref() call in order to get the scimoz (plugin)
# object, then we make a proxy for the scimoz object and return
# it.
#
# The ProxyToMainThread decorator is required, to ensure *all*
# the "koDoc" and "view" calls are run on the main thread.
# Without this Komodo can crash in garbage collection,
# complaining that JS objects were used/created on a thread.
@components.ProxyToMainThread
def _get_proxied_scimoz_ref(self):
scimoz = self._get_scimoz_ref()
class SciMozProxy:
def __init__(self, sm):
self.sm = sm
@property
@components.ProxyToMainThread
def length(self):
return self.sm.length
@property
@components.ProxyToMainThread
def text(self):
return self.sm.text
@components.ProxyToMainThread
def getTextRange(self, *args):
return self.sm.getTextRange(*args)
@components.ProxyToMainThread
def getStyledText(self, *args):
return self.sm.getStyledText(*args)
@components.ProxyToMainThread
def getWCharAt(self, *args):
return self.sm.getWCharAt(*args)
@components.ProxyToMainThread
def getStyleAt(self, *args):
return self.sm.getStyleAt(*args)
@components.ProxyToMainThread
def lineFromPosition(self, *args):
return self.sm.lineFromPosition(*args)
@components.ProxyToMainThread
def positionFromLine(self, *args):
return self.sm.positionFromLine(*args)
@components.ProxyToMainThread
def indicatorStart(self, *args):
return self.sm.indicatorStart(*args)
@components.ProxyToMainThread
def indicatorEnd(self, *args):
return self.sm.indicatorEnd(*args)
return SciMozProxy(scimoz)
def scimoz(self):
"""Re-get scimoz every time it's needed.
This ensures scimoz will be properly proxied when calling off
the main thread."""
if not _xpcom_:
return self._get_scimoz_ref()
else:
return self._get_proxied_scimoz_ref()
class AccessorCache:
"""Utility class used to cache buffer styling information"""
def __init__(self, accessor, position, fetchsize=20, debug=False):
"""Document accessor cache contructor. Will cache fetchsize style info
pieces starting from position - 1.
@param accessor {Accessor} a form of document accessor
@param position {int} where in the document to start caching from (exclusive)
        @param fetchsize {int} how much cache is stored/retrieved at a time
"""
self._accessor = accessor
self._cachefetchsize = fetchsize
self._debug = debug
# self._debug = True
self._reset(position)
# Private
def _reset(self, position):
self._pos = position
self._ch = None
self._style = None
# cachePos is used to store where self._pos is inside the _cache
self._cachePos = 0
self._chCache = []
self._styleCache = []
# cacheXXXBufPos is used to store where cache is relative to the buffer
# _cacheFirstBufPos is inclusive
self._cacheFirstBufPos = position
# _cacheLastBufPos is exclusive
self._cacheLastBufPos = position
def _extendCacheBackwards(self, byAmount=None):
if self._cacheFirstBufPos > 0:
if byAmount is None:
byAmount = self._cachefetchsize
# Generate another n tuples (pos, char, style)
start = max(0, (self._cacheFirstBufPos - byAmount))
# Add more to the start of the cache
extendCount = (self._cacheFirstBufPos - start)
ch_list = []
style_list = []
for ch, style in self._accessor.gen_char_and_style(start, self._cacheFirstBufPos):
ch_list.append(ch)
style_list.append(style)
self._chCache = ch_list + self._chCache
self._styleCache = style_list + self._styleCache
self._cachePos += extendCount
self._cacheFirstBufPos = start
if self._debug:
print("Extended cache by %d, _cachePos: %d, len now: %d" % (
extendCount, self._cachePos, len(self._chCache)))
print("Ch cache now: %r" % (self._chCache))
else:
raise IndexError("No buffer left to examine")
def _extendCacheForwards(self, byAmount=None):
buf_length = self._accessor.length()
if self._cacheLastBufPos < buf_length:
if byAmount is None:
byAmount = self._cachefetchsize
# Generate another n tuples (pos, char, style)
end = min(buf_length, (self._cacheLastBufPos + byAmount))
# Add more to the end of the cache
extendCount = end - self._cacheLastBufPos
for ch, style in self._accessor.gen_char_and_style(self._cacheLastBufPos, end):
self._chCache.append(ch)
self._styleCache.append(style)
self._cacheLastBufPos = end
if self._debug:
print("Extended cache by %d, _cachePos: %d, len now: %d" % (
extendCount, self._cachePos, len(self._chCache)))
print("Ch cache now: %r" % (self._chCache))
else:
raise IndexError("No buffer left to examine")
# Public
    def dump(self, limit=20):
        if len(self._chCache) > 0:
            print(" pos: %r, ch: %r, style: %r, cachePos: %r, cache len: %d\n cache: %r" % (
                self._cachePos + self._cacheFirstBufPos,
                self._chCache[self._cachePos],
                self._styleCache[self._cachePos],
                self._cachePos,
                len(self._chCache),
                self._chCache))
else:
print("New cache: %r" % (self._chCache[-limit:]))
def setCacheFetchSize(self, size):
self._cachefetchsize = size
def resetToPosition(self, position):
if self._debug:
print("resetToPosition: %d" % (position))
print("self._cacheFirstBufPos: %d" % (self._cacheFirstBufPos))
print("self._cacheLastBufPos: %d" % (self._cacheLastBufPos))
if position >= self._cacheLastBufPos:
if position >= self._cacheLastBufPos + self._cachefetchsize:
# Clear everything
self._reset(position)
return
else:
# Just extend forwards
if self._debug:
print("resetToPosition: extending cache forwards")
self._extendCacheForwards()
elif position < self._cacheFirstBufPos:
if position < self._cacheFirstBufPos - self._cachefetchsize:
# Clear everything
self._reset(position)
return
else:
# Just extend back
if self._debug:
print("resetToPosition: extending cache backwards")
self._extendCacheBackwards()
else:
# It's in the current cache area, we keep that then
pass
self._cachePos = position - self._cacheFirstBufPos
self._ch = self._chCache[self._cachePos]
self._style = self._styleCache[self._cachePos]
self._pos = position
if self._debug:
print("self._cachePos: %d, cacheLen: %d" % (self._cachePos, len(self._chCache)))
print("resetToPosition: p: %r, ch: %r, st: %r" % (self._pos, self._ch, self._style))
# def pushBack(self, numPushed=1):
# """Push back the items that were recetly popped off.
# @returns {int} Number of pushed items
# """
# pushItems = self._popped[-numPushed:]
# pushItems.reverse()
# self._cache += pushItems
# if len(self._popped) > 0:
# self._currentTuple = self._popped[-1]
# else:
# self._currentTuple = (self._currentTuple[0] + numPushed, None, None)
# return len(pushItems)
def getCurrentPosCharStyle(self):
"""Get the current buffer position information.
@returns {tuple} with values (pos, char, style)
"""
return (self._pos, self._ch, self._style)
def getPrevPosCharStyle(self, ignore_styles=None, max_look_back=100):
"""Get the previous buffer position information.
@param ignore_styles {tuple}
@returns {tuple} with values (pos, char, style), these values will
all be None if it exceeds the max_look_back.
        @raises IndexError when there is nothing left to consume.
"""
count = 0
while count < max_look_back:
count += 1
self._cachePos -= 1
if self._cachePos < 0:
self._extendCacheBackwards()
self._style = self._styleCache[self._cachePos]
if ignore_styles is None or self._style not in ignore_styles:
self._ch = self._chCache[self._cachePos]
break
else:
            # Went too far without finding what we were looking for
return (None, None, None)
self._pos = self._cachePos + self._cacheFirstBufPos
if self._debug:
print("getPrevPosCharStyle:: pos:%d ch:%r style:%d" % (self._pos, self._ch, self._style))
return (self._pos, self._ch, self._style)
def peekPrevPosCharStyle(self, ignore_styles=None, max_look_back=100):
"""Same as getPrevPosCharStyle, but does not move the buffer position.
@param ignore_styles {tuple}
@returns {tuple} with values (pos, char, style), these values will
all be None if it exceeds the max_look_back.
        @raises IndexError when there is nothing left to consume.
"""
# Store the old values.
old_pos = self._pos
old_ch = self._ch
old_style = self._style
old_cachePos = self._cachePos
old_cacheFirstBufPos = self._cacheFirstBufPos
try:
pos, ch, style = self.getPrevPosCharStyle(
ignore_styles, max_look_back)
finally:
# Restore old values.
self._pos = old_pos
self._ch = old_ch
self._style = old_style
# The cache may have gotten extended (which is fine), but in that
# case the old_cachePos is no longer correct, so update it.
cache_extended_by = old_cacheFirstBufPos - self._cacheFirstBufPos
self._cachePos = old_cachePos + cache_extended_by
if self._debug:
print("peekPrevPosCharStyle:: pos:%d ch:%r style:%d" % (pos, ch, style))
return (pos, ch, style)
def getPrecedingPosCharStyle(self, current_style=None, ignore_styles=None,
max_look_back=200):
"""Go back and get the preceding style.
@returns {tuple} with values (pos, char, style)
        Returns (None, None, None) when it runs out of characters to look at
        (or exceeds max_look_back) before a differently styled position is found.
"""
if current_style is None:
current_style = self._styleCache[self._cachePos]
try:
new_ignore_styles = [current_style]
if ignore_styles is not None:
new_ignore_styles += list(ignore_styles)
return self.getPrevPosCharStyle(new_ignore_styles, max_look_back)
except IndexError:
pass
# Did not find the necessary style
return None, None, None
def getTextBackWithStyle(self, current_style=None, ignore_styles=None,
max_text_len=200):
"""Go back and get the preceding text, which is of a different style.
@returns {tuple} with values (pos, text), pos is position of first text char
"""
old_p = self._pos
new_p, c, style = self.getPrecedingPosCharStyle(current_style,
ignore_styles,
max_look_back=max_text_len)
# print "Return %d:%d" % (new_p, old_p+1)
if style is None: # Ran out of text to look at
new_p = max(0, old_p - max_text_len)
return new_p, self.text_range(new_p, old_p+1)
else:
# We don't eat the new styling info
self._cachePos += 1
return new_p+1, self.text_range(new_p+1, old_p+1)
def getNextPosCharStyle(self, ignore_styles=None, max_look_ahead=100):
"""Get the next buffer position information.
@param ignore_styles {tuple}
@returns {tuple} with values (pos, char, style), these values will
all be None if it exceeds the max_look_ahead.
        @raises IndexError when there is nothing left to consume.
"""
max_pos = self._cachePos + max_look_ahead
while self._cachePos < max_pos:
self._cachePos += 1
if self._cachePos >= len(self._chCache):
self._extendCacheForwards()
self._style = self._styleCache[self._cachePos]
if ignore_styles is None or self._style not in ignore_styles:
self._ch = self._chCache[self._cachePos]
break
else:
            # Went too far without finding what we were looking for
return (None, None, None)
self._pos = self._cachePos + self._cacheFirstBufPos
if self._debug:
print("getNextPosCharStyle:: pos:%d ch:%r style:%d" % (self._pos, self._ch, self._style))
return (self._pos, self._ch, self._style)
def getSucceedingPosCharStyle(self, current_style=None, ignore_styles=None,
max_look_ahead=200):
"""Go forward and get the next different style.
@returns {tuple} with values (pos, char, style)
        Returns (None, None, None) when it runs out of characters to look at
        (or exceeds max_look_ahead) before a differently styled position is found.
"""
if current_style is None:
current_style = self._styleCache[self._cachePos]
try:
new_ignore_styles = [current_style]
if ignore_styles is not None:
new_ignore_styles += list(ignore_styles)
return self.getNextPosCharStyle(new_ignore_styles, max_look_ahead)
except IndexError:
pass
# Did not find the necessary style
return None, None, None
def getTextForwardWithStyle(self, current_style=None, ignore_styles=None,
max_text_len=200):
"""Go forward and get the succeeding text, which is of a different style.
@returns {tuple} with values (pos, text), pos is position of last text char.
"""
old_p = self._pos
new_p, c, style = self.getSucceedingPosCharStyle(current_style,
ignore_styles,
max_look_ahead=max_text_len)
if style is None: # Ran out of text to look at
new_p = min(self._accessor.length(), old_p + max_text_len)
return new_p, self.text_range(old_p, new_p)
else:
# We don't eat the new styling info
self._cachePos -= 1
return new_p-1, self.text_range(old_p, new_p)
def text_range(self, start, end):
"""Return text in range buf[start:end]
Note: Start position is inclusive, end position is exclusive.
"""
if start >= self._cacheFirstBufPos and end <= self._cacheLastBufPos:
cstart = start - self._cacheFirstBufPos
cend = end - self._cacheFirstBufPos
if self._debug:
print("text_range:: cstart: %d, cend: %d" % (cstart, cend))
print("text_range:: start: %d, end %d" % (start, end))
print("text_range:: _cacheFirstBufPos: %d, _cacheLastBufPos: %d" % (self._cacheFirstBufPos, self._cacheLastBufPos))
# It's all in the cache
return "".join(self._chCache[cstart:cend])
if self._debug:
print("text_range:: using parent text_range: %r - %r" % (start, end))
return self._accessor.text_range(start, end)
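# Hedged usage sketch (not part of the original file): trigger handling
# typically steps backwards from a trigger position with AccessorCache,
# skipping styles it does not care about, then grabs the whole same-styled run
# of text preceding that point. `acc` is assumed to be any concrete Accessor;
# `skip_styles` is a made-up tuple of style numbers the caller wants ignored.
def _demo_walk_back_from_trigger(acc, trg_pos, skip_styles=()):
    cache = AccessorCache(acc, trg_pos)
    pos, ch, style = cache.getPrevPosCharStyle(ignore_styles=skip_styles)
    if style is None:
        return None  # exceeded the look-back window without a usable style
    # Collect the contiguous run of text with that style, ending at `pos`.
    start, text = cache.getTextBackWithStyle(style)
    return start, text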
# Test function
def _test():
class _TestAccessor(Accessor):
def __init__(self, content, styles):
self.content = content
self.style = styles
def length(self):
return len(self.content)
def char_at_pos(self, pos):
return self.content[pos]
def style_at_pos(self, pos):
return self.style[pos]
def gen_char_and_style_back(self, start, stop):
assert -1 <= stop <= start, "stop: %r, start: %r" % (stop, start)
for pos in range(start, stop, -1):
yield (self.char_at_pos(pos), self.style_at_pos(pos))
def gen_char_and_style(self, start, stop):
assert 0 <= start <= stop, "start: %r, stop: %r" % (start, stop)
for pos in range(start, stop):
yield (self.char_at_pos(pos), self.style_at_pos(pos))
def text_range(self, start, end):
return self.content[start:end]
content = "This is my test buffer\r\nSecond line\r\nThird line\r\n"
styles = "1111011011011110111111 2 21111110001111 2 21111101111 2 2".replace(
" ", "")
ta = _TestAccessor(content, list(map(int, styles)))
pos = len(content) - 2
ac = AccessorCache(ta, pos)
# ac._debug = True
for i in range(2):
assert(ac.getPrevPosCharStyle() == (pos-1, "e", 1))
assert(ac.getPrecedingPosCharStyle(1) == (pos-5, " ", 0))
assert(ac.getPrecedingPosCharStyle(0) == (pos-6, "d", 1))
assert(ac.getPrecedingPosCharStyle(1) == (pos-11, "\n", 2))
assert(ac.getPrecedingPosCharStyle() == (pos-13, "e", 1))
assert(ac.getTextBackWithStyle(1) == (pos-16, "line"))
assert(ac.getPrevPosCharStyle() == (pos-17, " ", 0))
assert(ac.getPrecedingPosCharStyle(0) == (pos-20, "d", 1))
if i == 0:
ac.resetToPosition(pos)
assert(ac.getCurrentPosCharStyle() == (pos-20, "d", 1))
# print pos
# print ac.getSucceedingPosCharStyle()
assert(ac.getNextPosCharStyle() == (pos-19, " ", 0))
assert(ac.getSucceedingPosCharStyle() == (pos-16, "l", 1))
assert(ac.getTextForwardWithStyle(1) == (pos-13, "line"))
assert(ac.getNextPosCharStyle() == (pos-12, "\r", 2))
assert(ac.getNextPosCharStyle() == (pos-11, "\n", 2))
assert(ac.getSucceedingPosCharStyle(2) == (pos-10, "T", 1))
assert(ac.getSucceedingPosCharStyle() == (pos-5, " ", 0))
assert(ac.getSucceedingPosCharStyle() == (pos-4, "l", 1))
assert(ac.getSucceedingPosCharStyle() == (pos, "\r", 2))
assert(ac.getNextPosCharStyle() == (pos+1, "\n", 2))
# Bug: http://bugs.activestate.com/show_bug.cgi?id=64227
# Ensure text_range uses correct parameters in boundary situations
ac.resetToPosition(3)
assert(ac.getTextBackWithStyle(1)[1] == "This")
ac.resetToPosition(len(content) - 2)
assert(ac.getTextForwardWithStyle(2)[1] == "\r\n")
# When run from command line
if __name__ == '__main__':
_test()
|