repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
shinyChen/browserscope | categories/richtext2/tests/queryValue.py | 16 | 16735 |
# queryCommandValue() test definitions for the browserscope rich-text2
# suite.  'Proposed' holds one group per queried command ('qcvalue').
# In each test, 'pad' is the initial editable HTML ('[' and ']' delimit
# a selection, '^' a collapsed caret), 'expected' is the required
# queryCommandValue result, and 'accept' is an optional alternative
# result that is also tolerated.
#
# Fix: the first six tests of the italic group were describing
# themselves as querying the "bold" value — a copy-paste slip from the
# bold group above them.  Test ids are left untouched (they key stored
# results), but see the NOTE(review) markers on suspicious ids.
QUERYVALUE_TESTS = {
  'id': 'QV',
  'caption': 'queryCommandValue Tests',
  'checkAttrs': False,
  'checkStyle': False,
  'styleWithCSS': False,

  'Proposed': [
    # Empty placeholder group kept so new tests can be staged here.
    { 'desc': '',
      'tests': [
      ]
    },

    { 'desc': '[HTML5] query bold value',
      'qcvalue': 'bold',
      'tests': [
        { 'id': 'B_TEXT_SI',
          'desc': 'query the "bold" value',
          'pad': 'foo[bar]baz',
          'expected': 'false' },

        { 'id': 'B_B-1_SI',
          'desc': 'query the "bold" value',
          'pad': '<b>foo[bar]baz</b>',
          'expected': 'true' },

        { 'id': 'B_STRONG-1_SI',
          'desc': 'query the "bold" value',
          'pad': '<strong>foo[bar]baz</strong>',
          'expected': 'true' },

        { 'id': 'B_SPANs:fw:b-1_SI',
          'desc': 'query the "bold" value',
          'pad': '<span style="font-weight: bold">foo[bar]baz</span>',
          'expected': 'true' },

        { 'id': 'B_SPANs:fw:n-1_SI',
          'desc': 'query the "bold" value',
          'pad': '<span style="font-weight: normal">foo[bar]baz</span>',
          'expected': 'false' },

        { 'id': 'B_Bs:fw:n-1_SI',
          'desc': 'query the "bold" value',
          'pad': '<b><span style="font-weight: normal">foo[bar]baz</span></b>',
          'expected': 'false' },

        { 'id': 'B_SPAN.b-1_SI',
          'desc': 'query the "bold" value',
          'pad': '<span class="b">foo[bar]baz</span>',
          'expected': 'true' },

        # NOTE(review): id uses '-SI' where sibling groups use '_SI'.
        { 'id': 'B_MYB-1-SI',
          'desc': 'query the "bold" value',
          'pad': '<myb>foo[bar]baz</myb>',
          'expected': 'true' }
      ]
    },

    { 'desc': '[HTML5] query italic value',
      'qcvalue': 'italic',
      'tests': [
        { 'id': 'I_TEXT_SI',
          'desc': 'query the "italic" value',
          'pad': 'foo[bar]baz',
          'expected': 'false' },

        { 'id': 'I_I-1_SI',
          'desc': 'query the "italic" value',
          'pad': '<i>foo[bar]baz</i>',
          'expected': 'true' },

        { 'id': 'I_EM-1_SI',
          'desc': 'query the "italic" value',
          'pad': '<em>foo[bar]baz</em>',
          'expected': 'true' },

        { 'id': 'I_SPANs:fs:i-1_SI',
          'desc': 'query the "italic" value',
          'pad': '<span style="font-style: italic">foo[bar]baz</span>',
          'expected': 'true' },

        { 'id': 'I_SPANs:fs:n-1_SI',
          'desc': 'query the "italic" value',
          'pad': '<span style="font-style: normal">foo[bar]baz</span>',
          'expected': 'false' },

        { 'id': 'I_I-SPANs:fs:n-1_SI',
          'desc': 'query the "italic" value',
          'pad': '<i><span style="font-style: normal">foo[bar]baz</span></i>',
          'expected': 'false' },

        { 'id': 'I_SPAN.i-1_SI',
          'desc': 'query the "italic" value',
          'pad': '<span class="i">foo[bar]baz</span>',
          'expected': 'true' },

        { 'id': 'I_MYI-1-SI',
          'desc': 'query the "italic" value',
          'pad': '<myi>foo[bar]baz</myi>',
          'expected': 'true' }
      ]
    },

    { 'desc': '[HTML5] query block formatting value',
      'qcvalue': 'formatblock',
      'tests': [
        { 'id': 'FB_TEXT-1_SC',
          'desc': 'query the "formatBlock" value',
          'pad': 'foobar^baz',
          'expected': '',
          'accept': 'normal' },

        { 'id': 'FB_H1-1_SC',
          'desc': 'query the "formatBlock" value',
          'pad': '<h1>foobar^baz</h1>',
          'expected': 'h1' },

        { 'id': 'FB_PRE-1_SC',
          'desc': 'query the "formatBlock" value',
          'pad': '<pre>foobar^baz</pre>',
          'expected': 'pre' },

        { 'id': 'FB_BQ-1_SC',
          'desc': 'query the "formatBlock" value',
          'pad': '<blockquote>foobar^baz</blockquote>',
          'expected': 'blockquote' },

        { 'id': 'FB_ADDRESS-1_SC',
          'desc': 'query the "formatBlock" value',
          'pad': '<address>foobar^baz</address>',
          'expected': 'address' },

        { 'id': 'FB_H1-H2-1_SC',
          'desc': 'query the "formatBlock" value',
          'pad': '<h1>foo<h2>ba^r</h2>baz</h1>',
          'expected': 'h2' },

        { 'id': 'FB_H1-H2-1_SL',
          'desc': 'query the "formatBlock" value on oblique selection (outermost formatting expected)',
          'pad': '<h1>fo[o<h2>ba]r</h2>baz</h1>',
          'expected': 'h1' },

        { 'id': 'FB_H1-H2-1_SR',
          'desc': 'query the "formatBlock" value on oblique selection (outermost formatting expected)',
          'pad': '<h1>foo<h2>b[ar</h2>ba]z</h1>',
          'expected': 'h1' },

        { 'id': 'FB_TEXT-ADDRESS-1_SL',
          'desc': 'query the "formatBlock" value on oblique selection (outermost formatting expected)',
          'pad': 'fo[o<ADDRESS>ba]r</ADDRESS>baz',
          'expected': '',
          'accept': 'normal' },

        { 'id': 'FB_TEXT-ADDRESS-1_SR',
          'desc': 'query the "formatBlock" value on oblique selection (outermost formatting expected)',
          'pad': 'foo<ADDRESS>b[ar</ADDRESS>ba]z',
          'expected': '',
          'accept': 'normal' },

        { 'id': 'FB_H1-H2.TEXT.H2-1_SM',
          'desc': 'query the "formatBlock" value on oblique selection (outermost formatting expected)',
          'pad': '<h1><h2>fo[o</h2>bar<h2>b]az</h2></h1>',
          'expected': 'h1' }
      ]
    },

    { 'desc': '[MIDAS] query heading type',
      'qcvalue': 'heading',
      'tests': [
        { 'id': 'H_H1-1_SC',
          'desc': 'query the "heading" type',
          'pad': '<h1>foobar^baz</h1>',
          'expected': 'h1',
          'accept': '<h1>' },

        { 'id': 'H_H3-1_SC',
          'desc': 'query the "heading" type',
          'pad': '<h3>foobar^baz</h3>',
          'expected': 'h3',
          'accept': '<h3>' },

        { 'id': 'H_H1-H2-H3-H4-1_SC',
          'desc': 'query the "heading" type within nested heading tags',
          'pad': '<h1><h2><h3><h4>foobar^baz</h4></h3></h2></h1>',
          'expected': 'h4',
          'accept': '<h4>' },

        { 'id': 'H_P-1_SC',
          'desc': 'query the "heading" type outside of a heading',
          'pad': '<p>foobar^baz</p>',
          'expected': '' }
      ]
    },

    { 'desc': '[MIDAS] query font name',
      'qcvalue': 'fontname',
      'tests': [
        { 'id': 'FN_FONTf:a-1_SI',
          'rte1-id': 'q-fontname-0',
          'desc': 'query the "fontname" value',
          'pad': '<font face="arial">foo[bar]baz</font>',
          'expected': 'arial' },

        { 'id': 'FN_SPANs:ff:a-1_SI',
          'rte1-id': 'q-fontname-1',
          'desc': 'query the "fontname" value',
          'pad': '<span style="font-family: arial">foo[bar]baz</span>',
          'expected': 'arial' },

        { 'id': 'FN_FONTf:a.s:ff:c-1_SI',
          'rte1-id': 'q-fontname-2',
          'desc': 'query the "fontname" value',
          'pad': '<font face="arial" style="font-family: courier">foo[bar]baz</font>',
          'expected': 'courier' },

        { 'id': 'FN_FONTf:a-FONTf:c-1_SI',
          'rte1-id': 'q-fontname-3',
          'desc': 'query the "fontname" value',
          'pad': '<font face="arial"><font face="courier">foo[bar]baz</font></font>',
          'expected': 'courier' },

        { 'id': 'FN_SPANs:ff:c-FONTf:a-1_SI',
          'rte1-id': 'q-fontname-4',
          'desc': 'query the "fontname" value',
          'pad': '<span style="font-family: courier"><font face="arial">foo[bar]baz</font></span>',
          'expected': 'arial' },

        # NOTE(review): id says 'fs18px' but the pad uses class="courier";
        # the id looks copied from the fontsize group below.
        { 'id': 'FN_SPAN.fs18px-1_SI',
          'desc': 'query the "fontname" value',
          'pad': '<span class="courier">foo[bar]baz</span>',
          'expected': 'courier' },

        { 'id': 'FN_MYCOURIER-1-SI',
          'desc': 'query the "fontname" value',
          'pad': '<mycourier>foo[bar]baz</mycourier>',
          'expected': 'courier' }
      ]
    },

    { 'desc': '[MIDAS] query font size',
      'qcvalue': 'fontsize',
      'tests': [
        { 'id': 'FS_FONTsz:4-1_SI',
          'rte1-id': 'q-fontsize-0',
          'desc': 'query the "fontsize" value',
          'pad': '<font size=4>foo[bar]baz</font>',
          'expected': '18px' },

        { 'id': 'FS_FONTs:fs:l-1_SI',
          'desc': 'query the "fontsize" value',
          'pad': '<font style="font-size: large">foo[bar]baz</font>',
          'expected': '18px' },

        { 'id': 'FS_FONT.ass.s:fs:l-1_SI',
          'rte1-id': 'q-fontsize-1',
          'desc': 'query the "fontsize" value',
          'pad': '<font class="Apple-style-span" style="font-size: large">foo[bar]baz</font>',
          'expected': '18px' },

        { 'id': 'FS_FONTsz:1.s:fs:xl-1_SI',
          'rte1-id': 'q-fontsize-2',
          'desc': 'query the "fontsize" value',
          'pad': '<font size=1 style="font-size: x-large">foo[bar]baz</font>',
          'expected': '24px' },

        { 'id': 'FS_SPAN.large-1_SI',
          'desc': 'query the "fontsize" value',
          'pad': '<span class="large">foo[bar]baz</span>',
          'expected': 'large' },

        { 'id': 'FS_SPAN.fs18px-1_SI',
          'desc': 'query the "fontsize" value',
          'pad': '<span class="fs18px">foo[bar]baz</span>',
          'expected': '18px' },

        # NOTE(review): next two ids use prefix 'FA_' in the fontsize
        # group; 'FS_' was presumably intended.
        { 'id': 'FA_MYLARGE-1-SI',
          'desc': 'query the "fontsize" value',
          'pad': '<mylarge>foo[bar]baz</mylarge>',
          'expected': 'large' },

        { 'id': 'FA_MYFS18PX-1-SI',
          'desc': 'query the "fontsize" value',
          'pad': '<myfs18px>foo[bar]baz</myfs18px>',
          'expected': '18px' }
      ]
    },

    { 'desc': '[MIDAS] query background color',
      'qcvalue': 'backcolor',
      'tests': [
        { 'id': 'BC_FONTs:bc:fca-1_SI',
          'rte1-id': 'q-backcolor-0',
          'desc': 'query the "backcolor" value',
          'pad': '<font style="background-color: #ffccaa">foo[bar]baz</font>',
          'expected': '#ffccaa' },

        { 'id': 'BC_SPANs:bc:abc-1_SI',
          'rte1-id': 'q-backcolor-2',
          'desc': 'query the "backcolor" value',
          'pad': '<span style="background-color: #aabbcc">foo[bar]baz</span>',
          'expected': '#aabbcc' },

        { 'id': 'BC_FONTs:bc:084-SPAN-1_SI',
          'desc': 'query the "backcolor" value, where the color was set on an ancestor',
          'pad': '<font style="background-color: #008844"><span>foo[bar]baz</span></font>',
          'expected': '#008844' },

        { 'id': 'BC_SPANs:bc:cde-SPAN-1_SI',
          'desc': 'query the "backcolor" value, where the color was set on an ancestor',
          'pad': '<span style="background-color: #ccddee"><span>foo[bar]baz</span></span>',
          'expected': '#ccddee' },

        { 'id': 'BC_SPAN.ass.s:bc:rgb-1_SI',
          'rte1-id': 'q-backcolor-1',
          'desc': 'query the "backcolor" value',
          'pad': '<span class="Apple-style-span" style="background-color: rgb(255, 0, 0)">foo[bar]baz</span>',
          'expected': '#ff0000' },

        { 'id': 'BC_SPAN.bcred-1_SI',
          'desc': 'query the "backcolor" value',
          'pad': '<span class="bcred">foo[bar]baz</span>',
          'expected': 'red' },

        { 'id': 'BC_MYBCRED-1-SI',
          'desc': 'query the "backcolor" value',
          'pad': '<mybcred>foo[bar]baz</mybcred>',
          'expected': 'red' }
      ]
    },

    { 'desc': '[MIDAS] query text color',
      'qcvalue': 'forecolor',
      'tests': [
        { 'id': 'FC_FONTc:f00-1_SI',
          'rte1-id': 'q-forecolor-0',
          'desc': 'query the "forecolor" value',
          'pad': '<font color="#ff0000">foo[bar]baz</font>',
          'expected': '#ff0000' },

        { 'id': 'FC_SPANs:c:0f0-1_SI',
          'rte1-id': 'q-forecolor-1',
          'desc': 'query the "forecolor" value',
          'pad': '<span style="color: #00ff00">foo[bar]baz</span>',
          'expected': '#00ff00' },

        { 'id': 'FC_FONTc:333.s:c:999-1_SI',
          'rte1-id': 'q-forecolor-2',
          'desc': 'query the "forecolor" value',
          'pad': '<font color="#333333" style="color: #999999">foo[bar]baz</font>',
          'expected': '#999999' },

        { 'id': 'FC_FONTc:641-SPAN-1_SI',
          'desc': 'query the "forecolor" value, where the color was set on an ancestor',
          'pad': '<font color="#664411"><span>foo[bar]baz</span></font>',
          'expected': '#664411' },

        { 'id': 'FC_SPANs:c:d95-SPAN-1_SI',
          'desc': 'query the "forecolor" value, where the color was set on an ancestor',
          'pad': '<span style="color: #dd9955"><span>foo[bar]baz</span></span>',
          'expected': '#dd9955' },

        { 'id': 'FC_SPAN.red-1_SI',
          'desc': 'query the "forecolor" value',
          'pad': '<span class="red">foo[bar]baz</span>',
          'expected': 'red' },

        { 'id': 'FC_MYRED-1-SI',
          'desc': 'query the "forecolor" value',
          'pad': '<myred>foo[bar]baz</myred>',
          'expected': 'red' }
      ]
    },

    { 'desc': '[MIDAS] query hilight color (same as background color)',
      'qcvalue': 'hilitecolor',
      'tests': [
        { 'id': 'HC_FONTs:bc:fc0-1_SI',
          'rte1-id': 'q-hilitecolor-0',
          'desc': 'query the "hilitecolor" value',
          'pad': '<font style="background-color: #ffcc00">foo[bar]baz</font>',
          'expected': '#ffcc00' },

        { 'id': 'HC_SPANs:bc:a0c-1_SI',
          'rte1-id': 'q-hilitecolor-2',
          'desc': 'query the "hilitecolor" value',
          'pad': '<span style="background-color: #aa00cc">foo[bar]baz</span>',
          'expected': '#aa00cc' },

        { 'id': 'HC_SPAN.ass.s:bc:rgb-1_SI',
          'rte1-id': 'q-hilitecolor-1',
          'desc': 'query the "hilitecolor" value',
          'pad': '<span class="Apple-style-span" style="background-color: rgb(255, 0, 0)">foo[bar]baz</span>',
          'expected': '#ff0000' },

        { 'id': 'HC_FONTs:bc:83e-SPAN-1_SI',
          'desc': 'query the "hilitecolor" value, where the color was set on an ancestor',
          'pad': '<font style="background-color: #8833ee"><span>foo[bar]baz</span></font>',
          'expected': '#8833ee' },

        { 'id': 'HC_SPANs:bc:b12-SPAN-1_SI',
          'desc': 'query the "hilitecolor" value, where the color was set on an ancestor',
          'pad': '<span style="background-color: #bb1122"><span>foo[bar]baz</span></span>',
          'expected': '#bb1122' },

        { 'id': 'HC_SPAN.bcred-1_SI',
          'desc': 'query the "hilitecolor" value',
          'pad': '<span class="bcred">foo[bar]baz</span>',
          'expected': 'red' },

        { 'id': 'HC_MYBCRED-1-SI',
          'desc': 'query the "hilitecolor" value',
          'pad': '<mybcred>foo[bar]baz</mybcred>',
          'expected': 'red' }
      ]
    }
  ]
}
# The same queryCommandValue tests, re-run with styleWithCSS enabled so
# browsers are asked to express formatting as CSS rather than as
# presentational tags.  The test groups themselves are shared with
# QUERYVALUE_TESTS above.
QUERYVALUE_TESTS_CSS = dict(
    id='QVC',
    caption='queryCommandValue Tests, using styleWithCSS',
    checkAttrs=False,
    checkStyle=False,
    styleWithCSS=True,
    Proposed=QUERYVALUE_TESTS['Proposed'],
)
| apache-2.0 |
aristanetworks/neutron | neutron/tests/unit/plugins/ml2/test_extension_driver_api.py | 11 | 10642 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mock
from neutron import context
from neutron import manager
from neutron.plugins.ml2 import config
from neutron.tests.unit.plugins.ml2.drivers import ext_test
from neutron.tests.unit.plugins.ml2 import test_plugin
class ExtensionDriverTestCase(test_plugin.Ml2PluginV2TestCase):
    """Exercises the ml2 extension driver API with the 'test' driver.

    Verifies that the TestExtensionDriver's process_update_* and
    extend_*_dict hooks run for create, list and update of networks,
    subnets and ports.
    """

    _extension_drivers = ['test']

    def setUp(self):
        # The driver list must be configured before the base setUp()
        # instantiates the ml2 plugin.
        config.cfg.CONF.set_override('extension_drivers',
                                     self._extension_drivers,
                                     group='ml2')
        super(ExtensionDriverTestCase, self).setUp()
        self._plugin = manager.NeutronManager.get_plugin()
        self._ctxt = context.get_admin_context()

    def test_network_attr(self):
        with self.network() as network:
            # Test create network
            ent = network['network'].get('network_extension')
            self.assertIsNotNone(ent)

            # Test list networks
            res = self._list('networks')
            val = res['networks'][0].get('network_extension')
            self.assertEqual('Test_Network_Extension_extend', val)

            # Test network update
            data = {'network':
                    {'network_extension': 'Test_Network_Extension_Update'}}
            res = self._update('networks', network['network']['id'], data)
            val = res['network'].get('network_extension')
            self.assertEqual('Test_Network_Extension_Update_update', val)

    def test_subnet_attr(self):
        with self.subnet() as subnet:
            # Test create subnet
            ent = subnet['subnet'].get('subnet_extension')
            self.assertIsNotNone(ent)

            # Test list subnets
            res = self._list('subnets')
            val = res['subnets'][0].get('subnet_extension')
            self.assertEqual('Test_Subnet_Extension_extend', val)

            # Test subnet update
            data = {'subnet':
                    {'subnet_extension': 'Test_Subnet_Extension_Update'}}
            res = self._update('subnets', subnet['subnet']['id'], data)
            val = res['subnet'].get('subnet_extension')
            self.assertEqual('Test_Subnet_Extension_Update_update', val)

    def test_port_attr(self):
        with self.port() as port:
            # Test create port
            ent = port['port'].get('port_extension')
            self.assertIsNotNone(ent)

            # Test list ports
            res = self._list('ports')
            val = res['ports'][0].get('port_extension')
            self.assertEqual('Test_Port_Extension_extend', val)

            # Test port update
            data = {'port': {'port_extension': 'Test_Port_Extension_Update'}}
            res = self._update('ports', port['port']['id'], data)
            val = res['port'].get('port_extension')
            self.assertEqual('Test_Port_Extension_Update_update', val)

    # The following three tests originally used contextlib.nested(),
    # which has been deprecated since Python 2.7 and was removed in
    # Python 3; the multi-manager 'with' statement is the supported
    # equivalent and has identical semantics here.

    def test_extend_network_dict(self):
        with mock.patch.object(ext_test.TestExtensionDriver,
                               'process_update_network') as ext_update_net, \
                mock.patch.object(ext_test.TestExtensionDriver,
                                  'extend_network_dict') as ext_net_dict, \
                self.network() as network:
            net_id = network['network']['id']
            net_data = {'network': {'id': net_id}}
            self._plugin.update_network(self._ctxt, net_id, net_data)
            self.assertTrue(ext_update_net.called)
            self.assertTrue(ext_net_dict.called)

    def test_extend_subnet_dict(self):
        with mock.patch.object(ext_test.TestExtensionDriver,
                               'process_update_subnet') as ext_update_subnet, \
                mock.patch.object(ext_test.TestExtensionDriver,
                                  'extend_subnet_dict') as ext_subnet_dict, \
                self.subnet() as subnet:
            subnet_id = subnet['subnet']['id']
            subnet_data = {'subnet': {'id': subnet_id}}
            self._plugin.update_subnet(self._ctxt, subnet_id, subnet_data)
            self.assertTrue(ext_update_subnet.called)
            self.assertTrue(ext_subnet_dict.called)

    def test_extend_port_dict(self):
        with mock.patch.object(ext_test.TestExtensionDriver,
                               'process_update_port') as ext_update_port, \
                mock.patch.object(ext_test.TestExtensionDriver,
                                  'extend_port_dict') as ext_port_dict, \
                self.port() as port:
            port_id = port['port']['id']
            port_data = {'port': {'id': port_id}}
            self._plugin.update_port(self._ctxt, port_id, port_data)
            self.assertTrue(ext_update_port.called)
            self.assertTrue(ext_port_dict.called)
class DBExtensionDriverTestCase(test_plugin.Ml2PluginV2TestCase):
    """Exercises the 'testdb' extension driver, which persists the
    extension attribute, covering default, explicitly-set and updated
    values for networks, subnets and ports.
    """

    _extension_drivers = ['testdb']

    def setUp(self):
        # The driver list must be configured before the base setUp()
        # instantiates the ml2 plugin.
        config.cfg.CONF.set_override('extension_drivers',
                                     self._extension_drivers,
                                     group='ml2')
        super(DBExtensionDriverTestCase, self).setUp()
        self._plugin = manager.NeutronManager.get_plugin()
        self._ctxt = context.get_admin_context()

    def test_network_attr(self):
        with self.network() as network:
            # Test create with default value.
            net_id = network['network']['id']
            val = network['network']['network_extension']
            self.assertEqual("", val)
            res = self._show('networks', net_id)
            val = res['network']['network_extension']
            self.assertEqual("", val)

            # Test list.
            res = self._list('networks')
            val = res['networks'][0]['network_extension']
            self.assertEqual("", val)

        # Test create with explicit value.
        res = self._create_network(self.fmt,
                                   'test-network', True,
                                   arg_list=('network_extension', ),
                                   network_extension="abc")
        network = self.deserialize(self.fmt, res)
        net_id = network['network']['id']
        val = network['network']['network_extension']
        self.assertEqual("abc", val)
        res = self._show('networks', net_id)
        val = res['network']['network_extension']
        self.assertEqual("abc", val)

        # Test update.
        data = {'network': {'network_extension': "def"}}
        res = self._update('networks', net_id, data)
        val = res['network']['network_extension']
        self.assertEqual("def", val)
        res = self._show('networks', net_id)
        val = res['network']['network_extension']
        self.assertEqual("def", val)

    def test_subnet_attr(self):
        with self.subnet() as subnet:
            # Test create with default value.
            # NOTE(review): local is named net_id but holds the subnet id.
            net_id = subnet['subnet']['id']
            val = subnet['subnet']['subnet_extension']
            self.assertEqual("", val)
            res = self._show('subnets', net_id)
            val = res['subnet']['subnet_extension']
            self.assertEqual("", val)

            # Test list.
            res = self._list('subnets')
            val = res['subnets'][0]['subnet_extension']
            self.assertEqual("", val)

        with self.network() as network:
            # Test create with explicit value.
            data = {'subnet':
                    {'network_id': network['network']['id'],
                     'cidr': '10.1.0.0/24',
                     'ip_version': '4',
                     'tenant_id': self._tenant_id,
                     'subnet_extension': 'abc'}}
            req = self.new_create_request('subnets', data, self.fmt)
            res = req.get_response(self.api)
            subnet = self.deserialize(self.fmt, res)
            subnet_id = subnet['subnet']['id']
            val = subnet['subnet']['subnet_extension']
            self.assertEqual("abc", val)
            res = self._show('subnets', subnet_id)
            val = res['subnet']['subnet_extension']
            self.assertEqual("abc", val)

            # Test update.
            data = {'subnet': {'subnet_extension': "def"}}
            res = self._update('subnets', subnet_id, data)
            val = res['subnet']['subnet_extension']
            self.assertEqual("def", val)
            res = self._show('subnets', subnet_id)
            val = res['subnet']['subnet_extension']
            self.assertEqual("def", val)

    def test_port_attr(self):
        with self.port() as port:
            # Test create with default value.
            # NOTE(review): local is named net_id but holds the port id.
            net_id = port['port']['id']
            val = port['port']['port_extension']
            self.assertEqual("", val)
            res = self._show('ports', net_id)
            val = res['port']['port_extension']
            self.assertEqual("", val)

            # Test list.
            res = self._list('ports')
            val = res['ports'][0]['port_extension']
            self.assertEqual("", val)

        with self.network() as network:
            # Test create with explicit value.
            res = self._create_port(self.fmt,
                                    network['network']['id'],
                                    arg_list=('port_extension', ),
                                    port_extension="abc")
            port = self.deserialize(self.fmt, res)
            port_id = port['port']['id']
            val = port['port']['port_extension']
            self.assertEqual("abc", val)
            res = self._show('ports', port_id)
            val = res['port']['port_extension']
            self.assertEqual("abc", val)

            # Test update.
            data = {'port': {'port_extension': "def"}}
            res = self._update('ports', port_id, data)
            val = res['port']['port_extension']
            self.assertEqual("def", val)
            res = self._show('ports', port_id)
            val = res['port']['port_extension']
            self.assertEqual("def", val)
| apache-2.0 |
laurimyllari/xbmc | lib/gtest/test/gtest_throw_on_failure_test.py | 2917 | 5766 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests Google Test's throw-on-failure mode with exceptions disabled.
This script invokes gtest_throw_on_failure_test_ (a program written with
Google Test) with different environments and command line flags.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Constants.
# The command line flag for enabling/disabling the throw-on-failure mode.
THROW_ON_FAILURE = 'gtest_throw_on_failure'
# Path to the gtest_throw_on_failure_test_ program, compiled with
# exceptions disabled.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_throw_on_failure_test_')
# Utilities.
def SetEnvVar(env_var, value):
  """Sets or unsets an environment variable.

  The variable name is upper-cased first.  A value of None removes the
  variable from the environment (a no-op if it is not currently set).
  """

  name = env_var.upper()
  if value is None:
    # pop() with a default tolerates the variable being absent.
    os.environ.pop(name, None)
  else:
    os.environ[name] = value
def Run(command):
  """Runs a command; returns True/False if its exit code is/isn't 0."""

  # Parenthesized single-argument print behaves identically as a
  # Python 2 print statement and as the Python 3 built-in function;
  # the original bare print statement is a syntax error under Python 3.
  print('Running "%s". . .' % ' '.join(command))
  p = gtest_test_utils.Subprocess(command)
  return p.exited and p.exit_code == 0
# The tests. TODO(wan@google.com): refactor the class to share common
# logic with code in gtest_break_on_failure_unittest.py.
class ThrowOnFailureTest(gtest_test_utils.TestCase):
  """Tests the throw-on-failure mode."""

  def RunAndVerify(self, env_var_value, flag_value, should_fail):
    """Runs gtest_throw_on_failure_test_ and verifies that it does
    (or does not) exit with a non-zero code.

    Args:
      env_var_value:    value of the GTEST_BREAK_ON_FAILURE environment
                        variable; None if the variable should be unset.
      flag_value:       value of the --gtest_break_on_failure flag;
                        None if the flag should not be present.
      should_fail:      True iff the program is expected to fail.
    """

    SetEnvVar(THROW_ON_FAILURE, env_var_value)

    # Build a human-readable description of the environment setting for
    # the failure message below.
    if env_var_value is None:
      env_var_value_msg = ' is not set'
    else:
      env_var_value_msg = '=' + env_var_value

    # '0' disables the mode explicitly; any other non-None value turns
    # it on via the bare flag form.
    if flag_value is None:
      flag = ''
    elif flag_value == '0':
      flag = '--%s=0' % THROW_ON_FAILURE
    else:
      flag = '--%s' % THROW_ON_FAILURE

    command = [EXE_PATH]
    if flag:
      command.append(flag)
    if should_fail:
      should_or_not = 'should'
    else:
      should_or_not = 'should not'

    failed = not Run(command)

    # Restore the environment so later tests start from a clean slate.
    SetEnvVar(THROW_ON_FAILURE, None)

    msg = ('when %s%s, an assertion failure in "%s" %s cause a non-zero '
           'exit code.' %
           (THROW_ON_FAILURE, env_var_value_msg, ' '.join(command),
            should_or_not))
    self.assert_(failed == should_fail, msg)

  def testDefaultBehavior(self):
    """Tests the behavior of the default mode."""

    self.RunAndVerify(env_var_value=None, flag_value=None, should_fail=False)

  def testThrowOnFailureEnvVar(self):
    """Tests using the GTEST_THROW_ON_FAILURE environment variable."""

    self.RunAndVerify(env_var_value='0',
                      flag_value=None,
                      should_fail=False)
    self.RunAndVerify(env_var_value='1',
                      flag_value=None,
                      should_fail=True)

  def testThrowOnFailureFlag(self):
    """Tests using the --gtest_throw_on_failure flag."""

    self.RunAndVerify(env_var_value=None,
                      flag_value='0',
                      should_fail=False)
    self.RunAndVerify(env_var_value=None,
                      flag_value='1',
                      should_fail=True)

  def testThrowOnFailureFlagOverridesEnvVar(self):
    """Tests that --gtest_throw_on_failure overrides GTEST_THROW_ON_FAILURE."""

    self.RunAndVerify(env_var_value='0',
                      flag_value='0',
                      should_fail=False)
    self.RunAndVerify(env_var_value='0',
                      flag_value='1',
                      should_fail=True)
    self.RunAndVerify(env_var_value='1',
                      flag_value='0',
                      should_fail=False)
    self.RunAndVerify(env_var_value='1',
                      flag_value='1',
                      should_fail=True)
if __name__ == '__main__':
gtest_test_utils.Main()
| gpl-2.0 |
ryfeus/lambda-packs | Keras_tensorflow/source/numpy/fft/tests/test_fftpack.py | 134 | 6052 | from __future__ import division, absolute_import, print_function
import numpy as np
from numpy.random import random
from numpy.testing import TestCase, run_module_suite, assert_array_almost_equal
from numpy.testing import assert_array_equal
import threading
import sys
if sys.version_info[0] >= 3:
import queue
else:
import Queue as queue
def fft1(x):
    """Brute-force O(n^2) DFT used as a reference for np.fft.fft."""
    n = len(x)
    k = np.arange(n)
    # twiddle[j, m] = exp(-2*pi*i * j*m / n): the full DFT matrix.
    twiddle = np.exp(-2j * np.pi * np.outer(k, k) / float(n))
    return np.sum(x * twiddle, axis=1)
class TestFFTShift(TestCase):

    def test_fft_n(self):
        # n=0 output points is invalid and must raise ValueError.
        with self.assertRaises(ValueError):
            np.fft.fft([1, 2, 3], 0)
class TestFFT1D(TestCase):
    """Checks np.fft forward/inverse pairs and norm="ortho" scaling on
    random inputs, against the brute-force DFT fft1() as a reference.

    Fix: the inverse-hfft test was misspelled test_ihttf; renamed to
    test_ihfft (still matches the test_* discovery pattern).
    """

    def test_fft(self):
        x = random(30) + 1j*random(30)
        assert_array_almost_equal(fft1(x), np.fft.fft(x))
        # "ortho" normalization scales the forward transform by 1/sqrt(n).
        assert_array_almost_equal(fft1(x) / np.sqrt(30),
                                  np.fft.fft(x, norm="ortho"))

    def test_ifft(self):
        x = random(30) + 1j*random(30)
        assert_array_almost_equal(x, np.fft.ifft(np.fft.fft(x)))
        assert_array_almost_equal(
            x, np.fft.ifft(np.fft.fft(x, norm="ortho"), norm="ortho"))

    def test_fft2(self):
        x = random((30, 20)) + 1j*random((30, 20))
        # 2-D FFT equals 1-D FFT applied along each axis in turn.
        assert_array_almost_equal(np.fft.fft(np.fft.fft(x, axis=1), axis=0),
                                  np.fft.fft2(x))
        assert_array_almost_equal(np.fft.fft2(x) / np.sqrt(30 * 20),
                                  np.fft.fft2(x, norm="ortho"))

    def test_ifft2(self):
        x = random((30, 20)) + 1j*random((30, 20))
        assert_array_almost_equal(np.fft.ifft(np.fft.ifft(x, axis=1), axis=0),
                                  np.fft.ifft2(x))
        assert_array_almost_equal(np.fft.ifft2(x) * np.sqrt(30 * 20),
                                  np.fft.ifft2(x, norm="ortho"))

    def test_fftn(self):
        x = random((30, 20, 10)) + 1j*random((30, 20, 10))
        assert_array_almost_equal(
            np.fft.fft(np.fft.fft(np.fft.fft(x, axis=2), axis=1), axis=0),
            np.fft.fftn(x))
        assert_array_almost_equal(np.fft.fftn(x) / np.sqrt(30 * 20 * 10),
                                  np.fft.fftn(x, norm="ortho"))

    def test_ifftn(self):
        x = random((30, 20, 10)) + 1j*random((30, 20, 10))
        assert_array_almost_equal(
            np.fft.ifft(np.fft.ifft(np.fft.ifft(x, axis=2), axis=1), axis=0),
            np.fft.ifftn(x))
        assert_array_almost_equal(np.fft.ifftn(x) * np.sqrt(30 * 20 * 10),
                                  np.fft.ifftn(x, norm="ortho"))

    def test_rfft(self):
        x = random(30)
        # For real input, rfft keeps only the non-redundant half spectrum.
        assert_array_almost_equal(np.fft.fft(x)[:16], np.fft.rfft(x))
        assert_array_almost_equal(np.fft.rfft(x) / np.sqrt(30),
                                  np.fft.rfft(x, norm="ortho"))

    def test_irfft(self):
        x = random(30)
        assert_array_almost_equal(x, np.fft.irfft(np.fft.rfft(x)))
        assert_array_almost_equal(
            x, np.fft.irfft(np.fft.rfft(x, norm="ortho"), norm="ortho"))

    def test_rfft2(self):
        x = random((30, 20))
        assert_array_almost_equal(np.fft.fft2(x)[:, :11], np.fft.rfft2(x))
        assert_array_almost_equal(np.fft.rfft2(x) / np.sqrt(30 * 20),
                                  np.fft.rfft2(x, norm="ortho"))

    def test_irfft2(self):
        x = random((30, 20))
        assert_array_almost_equal(x, np.fft.irfft2(np.fft.rfft2(x)))
        assert_array_almost_equal(
            x, np.fft.irfft2(np.fft.rfft2(x, norm="ortho"), norm="ortho"))

    def test_rfftn(self):
        x = random((30, 20, 10))
        assert_array_almost_equal(np.fft.fftn(x)[:, :, :6], np.fft.rfftn(x))
        assert_array_almost_equal(np.fft.rfftn(x) / np.sqrt(30 * 20 * 10),
                                  np.fft.rfftn(x, norm="ortho"))

    def test_irfftn(self):
        x = random((30, 20, 10))
        assert_array_almost_equal(x, np.fft.irfftn(np.fft.rfftn(x)))
        assert_array_almost_equal(
            x, np.fft.irfftn(np.fft.rfftn(x, norm="ortho"), norm="ortho"))

    def test_hfft(self):
        # Build a Hermitian-symmetric signal so hfft's real-spectrum
        # assumption holds exactly.
        x = random(14) + 1j*random(14)
        x_herm = np.concatenate((random(1), x, random(1)))
        x = np.concatenate((x_herm, x[::-1].conj()))
        assert_array_almost_equal(np.fft.fft(x), np.fft.hfft(x_herm))
        assert_array_almost_equal(np.fft.hfft(x_herm) / np.sqrt(30),
                                  np.fft.hfft(x_herm, norm="ortho"))

    def test_ihfft(self):
        x = random(14) + 1j*random(14)
        x_herm = np.concatenate((random(1), x, random(1)))
        x = np.concatenate((x_herm, x[::-1].conj()))
        assert_array_almost_equal(x_herm, np.fft.ihfft(np.fft.hfft(x_herm)))
        assert_array_almost_equal(
            x_herm, np.fft.ihfft(np.fft.hfft(x_herm, norm="ortho"),
                                 norm="ortho"))
class TestFFTThreadSafe(TestCase):
    """Checks that np.fft transforms give correct results when the same
    function is invoked from many threads at once."""

    # Number of concurrent worker threads.
    threads = 16
    # Shape large enough that transforms overlap in time across threads.
    input_shape = (800, 200)

    def _test_mtsame(self, func, *args):
        def worker(args, q):
            q.put(func(*args))

        q = queue.Queue()
        # Reference result computed single-threaded.
        expected = func(*args)

        # Spin off a bunch of threads to call the same function simultaneously
        t = [threading.Thread(target=worker, args=(args, q))
             for i in range(self.threads)]
        [x.start() for x in t]

        [x.join() for x in t]

        # Make sure all threads returned the correct value
        for i in range(self.threads):
            assert_array_equal(q.get(timeout=5), expected,
                'Function returned wrong value in multithreaded context')

    def test_fft(self):
        # Note: binds as (ones * 1) + 0j, producing a complex array.
        a = np.ones(self.input_shape) * 1+0j
        self._test_mtsame(np.fft.fft, a)

    def test_ifft(self):
        a = np.ones(self.input_shape) * 1+0j
        self._test_mtsame(np.fft.ifft, a)

    def test_rfft(self):
        a = np.ones(self.input_shape)
        self._test_mtsame(np.fft.rfft, a)

    def test_irfft(self):
        a = np.ones(self.input_shape) * 1+0j
        self._test_mtsame(np.fft.irfft, a)
if __name__ == "__main__":
run_module_suite()
| mit |
Xeralux/tensorflow | tensorflow/contrib/learn/python/learn/ops/losses_ops.py | 3 | 3366 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TensorFlow Ops for loss computation (deprecated).
This module and all its submodules are deprecated. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for migration instructions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.framework import deprecated
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops as array_ops_
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops.losses import losses
@deprecated('2016-12-01', 'Use `tf.losses.mean_squared_error` '
            'and explicit logits computation.')
def mean_squared_error_regressor(tensor_in, labels, weights, biases, name=None):
  """Returns prediction and loss for mean squared error regression.

  Computes predictions = tensor_in * weights + biases, then the mean
  squared error between predictions and labels.

  Args:
    tensor_in: Input `Tensor` of features.
    labels: `Tensor` of regression targets.
    weights: `Tensor`, linear transformation matrix.
    biases: `Tensor`, biases to add after the matmul.
    name: Operation name.

  Returns:
    `tuple` of prediction and loss `Tensor`s.
  """
  with ops.name_scope(name, 'mean_squared_error_regressor',
                      [tensor_in, labels]):
    predictions = nn.xw_plus_b(tensor_in, weights, biases)
    # If labels are rank-1 but predictions came out rank-2 (a trailing
    # singleton from the matmul), drop that dimension so the loss
    # compares matching shapes instead of broadcasting.
    if len(labels.get_shape()) == 1 and len(predictions.get_shape()) == 2:
      predictions = array_ops_.squeeze(predictions, squeeze_dims=[1])
    return predictions, losses.mean_squared_error(labels, predictions)
@deprecated('2016-12-01', 'Use `tf.losses.softmax_cross_entropy` '
            'and explicit logits computation.')
def softmax_classifier(tensor_in,
                       labels,
                       weights,
                       biases,
                       class_weight=None,
                       name=None):
  """Returns prediction and loss for softmax classifier.

  This function returns "probabilities" and a cross entropy loss. To obtain
  predictions, use `tf.argmax` on the returned probabilities.

  Labels must be passed in one-hot encoding.

  Args:
    tensor_in: Input tensor, [batch_size, feature_size], features.
    labels: Tensor, [batch_size, n_classes], one-hot labels of the output
      classes.
    weights: Tensor, [batch_size, feature_size], linear transformation
      matrix.
    biases: Tensor, [batch_size], biases.
    class_weight: Tensor, optional, [n_classes], weight for each class.
      If not given, all classes are supposed to have weight one.
    name: Operation name.

  Returns:
    `tuple` of softmax predictions and loss `Tensor`s.
  """
  with ops.name_scope(name, 'softmax_classifier', [tensor_in, labels]):
    logits = nn.xw_plus_b(tensor_in, weights, biases)
    if class_weight is not None:
      # Rescale each class column before computing softmax / cross-entropy.
      logits = math_ops.multiply(logits, class_weight)
    probabilities = nn.softmax(logits)
    loss = losses.softmax_cross_entropy(labels, logits)
    return probabilities, loss
| apache-2.0 |
kaku289/paparazzi | sw/tools/airframe_editor/airframe_editor.py | 74 | 12033 | #!/usr/bin/env python
from __future__ import print_function
import pygtk
import gtk
pygtk.require('2.0')
from os import path
# Own Modules
import gui_dialogs
import xml_airframe
import paparazzi
# Airframe File
airframe_file = path.join(paparazzi.airframes_dir, "examples/quadrotor_lisa_m_2_pwm_spektrum.xml")
class AirframeEditor:
    """GTK2 GUI for browsing and editing Paparazzi airframe XML files.

    Layout: a menu/tool bar on top, an XML tree view (left) next to a
    defines data grid (right), a scrolling text pane for module details,
    and a search bar at the bottom.
    """
    # General Functions
    def load_airframe_xml(self):
        """(Re)load the global ``airframe_file`` into the tree view."""
        global airframe_file
        self.tvcolumn.set_title(airframe_file.replace(paparazzi.airframes_dir, ""))
        [e, self.xml, self.xml_header] = xml_airframe.load(airframe_file)
        if e:
            gui_dialogs.error_loading_xml(e.__str__())
            raise e
        xml_airframe.fill_tree(self.xml, self.treestore)
    def update_combo(self, combo, c_list):
        """Refill *combo* with the entries of *c_list*, selecting the first.

        The combo is desensitized while mutating so 'changed' callbacks do
        not fire against a half-filled model.
        """
        combo.set_sensitive(False)
        combo.get_model().clear()
        for i in c_list:
            combo.append_text(i)
        combo.set_active(0)
        combo.set_sensitive(True)
    # CallBack Functions
    def find_firmwares(self, widget):
        """Populate the firmware combo from the paparazzi source tree."""
        list_of_firmwares = paparazzi.get_list_of_firmwares()
        self.update_combo(self.firmwares_combo, list_of_firmwares)
    def find_modules(self, widget):
        """Populate the modules combo."""
        list_of_modules = paparazzi.get_list_of_modules()
        self.update_combo(self.modules_combo, list_of_modules)
    def find_subsystems(self, widget):
        """Refresh the subsystem combo for the currently selected firmware."""
        self.textbox.set_text(self.firmwares_combo.get_active_text())
        list_of_subsystems = paparazzi.get_list_of_subsystems(self.firmwares_combo.get_active_text())
        self.update_combo(self.subsystems_combo, list_of_subsystems)
    def find_boards(self, widget):
        """Populate the boards combo."""
        list_of_boards = paparazzi.get_list_of_boards()
        self.update_combo(self.boards_combo, list_of_boards)
    def find_module_defines(self, widget):
        """Show defines/configures of the selected module in the text pane and grid."""
        mod = paparazzi.get_module_information(self.modules_combo.get_active_text())
        print(mod.description)
        txt = mod.description + "\n"
        for d in mod.defines:
            txt += "define: " + d[0].__str__() + " = " + d[1].__str__() + "; [" + d[2].__str__() + "] // " + d[3].__str__() + "\n"
        for c in mod.configures:
            txt += "configure: " + c[0].__str__() + " = " + c[1].__str__() + "; [" + c[2].__str__() + "] // " + c[3].__str__() + "\n"
        self.text_box.set_text(txt)
        self.gridstore.clear()
        # NOTE(review): defines are iterated a second time here to fill the
        # grid; could be folded into the loop above.
        for d in mod.defines:
            self.gridstore.append(["define", d[0], d[1], d[2], d[3]])
    def reorganize_xml(self, widget):
        """Normalize the XML section order and redraw the tree."""
        self.xml = xml_airframe.reorganize_airframe_xml(self.xml)
        xml_airframe.fill_tree(self.xml, self.treestore)
    def about(self, widget):
        gui_dialogs.about(paparazzi.home_dir)
    def open(self, widget):
        """Let the user pick another airframe file and load it."""
        global airframe_file
        filename = gui_dialogs.filechooser(paparazzi.airframes_dir)
        if filename == "":
            print("No file selected")
            return
        airframe_file = filename
        self.load_airframe_xml()
    def search(self, widget):
        """Run a paparazzi-wide search for the text box contents."""
        ret = paparazzi.search(self.textbox.get_text())
        self.text_box.set_text(ret)
        print(ret)
    # Tree Callbacks
    def select_section(self, widget):
        """Copy the name of the highlighted grid row into the search box."""
        #get data from highlighted selection
        treeselection = self.datagrid.get_selection()
        (model, row_iter) = treeselection.get_selected()
        if row_iter is not None:
            name_of_data = self.gridstore.get_value(row_iter, 1)
            #print("Selected ",name_of_data)
            self.textbox.set_text(name_of_data)
            # xml_airframe.defines(self.treestore.get_value(row_iter, 1), self.gridstore)
    def select(self, widget):
        """Show the defines of the highlighted tree section in the data grid."""
        #get data from highlighted selection
        treeselection = self.treeview.get_selection()
        (model, row_iter) = treeselection.get_selected()
        if row_iter is not None:
            name_of_data = self.treestore.get_value(row_iter, 0)
            #print("Selected ",name_of_data)
            self.textbox.set_text(name_of_data)
            xml_airframe.defines(self.treestore.get_value(row_iter, 1), self.gridstore)
    # Constructor Functions
    def fill_tree_from_airframe(self):
        """Build the left-hand tree view showing the airframe structure."""
        # create a TreeStore with one string column to use as the model
        self.treestore = gtk.TreeStore(str, object)
        # create the TreeView using treestore
        self.treeview = gtk.TreeView(self.treestore)
        # create the TreeViewColumn to display the data
        self.tvcolumn = gtk.TreeViewColumn('')
        # add self.tvcolumn to treeview
        self.treeview.append_column(self.tvcolumn)
        self.treeview.connect("cursor-changed", self.select)
        self.cell = gtk.CellRendererText()
        self.tvcolumn.pack_start(self.cell, True)
        self.tvcolumn.add_attribute(self.cell, 'text', 0)
        self.treeview.set_reorderable(True)
    def fill_datagrid_from_section(self):
        """Build the right-hand grid listing the defines of a section."""
        # create a ListStore for the type/name/value/unit/description columns
        self.gridstore = gtk.ListStore(str, str, str, str, str)
        self.datagrid = gtk.TreeView(self.gridstore)
        self.type_column = gtk.TreeViewColumn('Type')
        self.name_column = gtk.TreeViewColumn('Name')
        self.value_column = gtk.TreeViewColumn('Value')
        self.unit_column = gtk.TreeViewColumn('Unit')
        self.desc_column = gtk.TreeViewColumn('Description')
        self.datagrid.append_column(self.type_column)
        self.datagrid.append_column(self.name_column)
        self.datagrid.append_column(self.value_column)
        self.datagrid.append_column(self.unit_column)
        self.datagrid.append_column(self.desc_column)
        self.datagrid.connect("cursor-changed", self.select_section)
        self.type_cell = gtk.CellRendererText()
        self.type_cell.Editable = False
        self.name_cell = gtk.CellRendererText()
        self.name_cell.Editable = False
        self.value_cell = gtk.CellRendererText()
        self.value_cell.Editable = True
        # Only the Value column is actually editable by the user.
        self.value_cell.set_property("editable", True)
        self.unit_cell = gtk.CellRendererText()
        self.unit_cell.Editable = False
        self.desc_cell = gtk.CellRendererText()
        self.desc_cell.Editable = False
        self.type_column.pack_start(self.type_cell, True)
        self.type_column.add_attribute(self.type_cell, 'text', 0)
        self.name_column.pack_start(self.name_cell, True)
        self.name_column.add_attribute(self.name_cell, 'text', 1)
        self.value_column.pack_start(self.value_cell, True)
        self.value_column.add_attribute(self.value_cell, 'text', 2)
        self.unit_column.pack_start(self.unit_cell, True)
        self.unit_column.add_attribute(self.unit_cell, 'text', 3)
        self.desc_column.pack_start(self.desc_cell, True)
        self.desc_column.add_attribute(self.desc_cell, 'text', 4)
        self.datagrid.set_search_column(1)
        self.name_column.set_sort_column_id(0)
        self.datagrid.set_reorderable(True)
    def destroy(self, widget, data=None):
        gtk.main_quit()
    def __init__(self):
        """Assemble the whole window: menu, toolbars, editor panes, search bar."""
        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.window.set_title("Paparazzi Airframe File Editor")
        self.my_vbox = gtk.VBox()
        # MenuBar
        mb = gtk.MenuBar()
        # File
        filemenu = gtk.Menu()
        # File Title
        filem = gtk.MenuItem("File")
        filem.set_submenu(filemenu)
        openm = gtk.MenuItem("Open")
        openm.connect("activate", self.open)
        filemenu.append(openm)
        exitm = gtk.MenuItem("Exit")
        exitm.connect("activate", gtk.main_quit)
        filemenu.append(exitm)
        mb.append(filem)
        # Help
        helpmenu = gtk.Menu()
        # Help Title
        helpm = gtk.MenuItem("Help")
        helpm.set_submenu(helpmenu)
        aboutm = gtk.MenuItem("About")
        aboutm.connect("activate", self.about)
        helpmenu.append(aboutm)
        mb.append(helpm)
        self.my_vbox.pack_start(mb, False)
        ##### Buttons
        self.btnExit = gtk.Button("Exit")
        self.btnExit.connect("clicked", self.destroy)
        self.btnExit.set_tooltip_text("Close application")
        self.btnOpen = gtk.Button("Open")
        self.btnOpen.connect("clicked", self.open)
        self.btnRun = gtk.Button("Reorganize XML")
        self.btnRun.connect("clicked", self.reorganize_xml)
        self.btnFirmwares = gtk.Button("Firmwares")
        self.btnFirmwares.connect("clicked", self.find_firmwares)
        self.btnSubSystem = gtk.Button("SubSystems")
        self.btnSubSystem.connect("clicked", self.find_subsystems)
        self.btnModules = gtk.Button("Add Modules")
        self.btnModules.connect("clicked", self.find_modules)
        self.btnModuleDefines = gtk.Button("Define")
        self.btnModuleDefines.connect("clicked", self.find_module_defines)
        self.btnAbout = gtk.Button("About")
        self.btnAbout.connect("clicked", self.about)
        self.toolbar = gtk.HBox()
        self.toolbar.pack_start(self.btnOpen)
        self.toolbar.pack_start(self.btnRun)
        self.toolbar.pack_start(self.btnAbout)
        self.toolbar.pack_start(self.btnExit)
        self.my_vbox.pack_start(self.toolbar, False)
        # Firmware / board / subsystem selection row
        self.firmwares_combo = gtk.combo_box_entry_new_text()
        self.find_firmwares(self.firmwares_combo)
        self.firmwares_combo.connect("changed", self.find_subsystems)
        self.subsystems_combo = gtk.combo_box_entry_new_text()
        self.boards_combo = gtk.combo_box_entry_new_text()
        self.find_boards(self.boards_combo)
        self.firmwarebar = gtk.HBox()
        self.firmwarebar.pack_start(self.btnFirmwares)
        self.firmwarebar.pack_start(self.btnSubSystem)
        self.firmwarebar.pack_start(self.firmwares_combo)
        self.firmwarebar.pack_start(self.boards_combo)
        self.firmwarebar.pack_start(self.subsystems_combo)
        self.modules_combo = gtk.combo_box_entry_new_text()
        self.find_modules(self.modules_combo)
        self.modules_combo.connect("changed", self.find_module_defines)
        #self.modulebar = gtk.HBox()
        self.firmwarebar.pack_start(self.btnModules)
        self.firmwarebar.pack_start(self.btnModuleDefines)
        self.firmwarebar.pack_start(self.modules_combo)
        #self.my_vbox.pack_start(self.modulebar)
        self.my_vbox.pack_start(self.firmwarebar, False)
        ##### Middle
        self.editor = gtk.HBox()
        self.fill_tree_from_airframe()
        self.scrolltree = gtk.ScrolledWindow()
        self.scrolltree.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        self.scrolltree.add(self.treeview)
        self.scrolltree.set_size_request(400,600)
        self.editor.pack_start(self.scrolltree)
        self.fill_datagrid_from_section()
        self.datagrid.set_size_request(900, 600)
        self.editor.pack_start(self.datagrid)
        self.my_vbox.pack_start(self.editor)
        self.text_box = gtk.Label("")
        self.text_box.set_size_request(600, 1000)
        self.scrolltext = gtk.ScrolledWindow()
        self.scrolltext.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        self.scrolltext.add_with_viewport(self.text_box)
        self.scrolltext.set_size_request(400, 100)
        self.my_vbox.pack_start(self.scrolltext)
        # Widgets must exist before the XML is loaded into them.
        self.load_airframe_xml()
        ##### Bottom
        self.searchbar = gtk.HBox()
        self.textbox = gtk.Entry()
        #self.textbox.connect("changed",self.textchanged)
        self.btnSearch = gtk.Button("Search...")
        self.btnSearch.connect("clicked", self.search)
        self.searchbar.pack_start(self.textbox)
        self.searchbar.pack_start(self.btnSearch)
        self.my_vbox.pack_start(self.searchbar, False)
        self.window.add(self.my_vbox)
        self.window.show_all()
        self.window.connect("destroy", self.destroy)
    def main(self):
        """Enter the GTK main loop."""
        gtk.main()
if __name__ == "__main__":
    # An airframe file may optionally be passed as the first CLI argument.
    import sys
    if len(sys.argv) > 1:
        airframe_file = sys.argv[1]
    gui = AirframeEditor()
    gui.main()
| gpl-2.0 |
rlr/standup | standup/apps/api/views.py | 3 | 6498 | from flask import Blueprint, current_app, request
from sqlalchemy import desc
from standup import csrf, OrderedDict
from standup.apps.api.decorators import api_key_required
from standup.apps.status.models import Project, Status
from standup.apps.users.models import User
from standup.database import get_session
from standup.utils import slugify, jsonify
blueprint = Blueprint('api_v1', __name__, url_prefix='/api/v1')
@blueprint.route('/feed/', methods=['GET'])
def get_statuses():
    """Get all status updates.

    Returns id, author name, content, project name and timestamp of
    statuses. The amount of items to return is determined by the limit
    argument (defaults to 20)::

        /api/v1/feed/?limit=20

    An example of the JSON::

        {
            "1": {
                "author": "r1cky",
                "content": "working on bug 123456",
                "project": "sumodev",
                "timestamp": "2013-01-11T21:13:30.806236"
            }
        }
    """
    db = get_session(current_app)
    # Coerce the query argument to int; previously a supplied value arrived
    # as a string and leaked into the SQL LIMIT clause. With type=int a
    # non-numeric value falls back to the default.
    limit = request.args.get('limit', 20, type=int)
    # Only top-level statuses (not replies), newest first.
    statuses = db.query(Status).filter_by(reply_to=None)\
        .order_by(desc(Status.created)).limit(limit)
    data = OrderedDict()
    for row in statuses:
        created = row.created.isoformat()
        project_name = row.project.name if row.project is not None else None
        data[row.id] = dict(author=row.user.name, content=row.content,
                            timestamp=created, project=project_name)
    return jsonify(data)
@blueprint.route('/status/', methods=['POST'])
@csrf.exempt
@api_key_required
def create_status():
    """Post a new status.

    The status should be posted as JSON using 'application/json' as
    the content type. The posted JSON needs to have 3 required fields:

    * user (the username)
    * content
    * api_key

    An example of the JSON::

        {
            "user": "r1cky",
            "project": "sumodev",
            "content": "working on bug 123456",
            "api_key": "qwertyuiopasdfghjklzxcvbnm1234567890"
        }
    """
    db = get_session(current_app)
    # The data we need
    username = request.json.get('user')
    project_slug = request.json.get('project')
    content = request.json.get('content')
    reply_to = request.json.get('reply_to')
    # Validate we have the required fields.
    if not (username and content):
        return jsonify(dict(error='Missing required fields.')), 400
    # If this is a reply make sure that the status being replied to
    # exists and is not itself a reply.
    if reply_to:
        replied = db.query(Status).filter_by(id=reply_to).first()
        if not replied:
            return jsonify(dict(error='Status does not exist.')), 400
        elif replied.reply_to:
            return jsonify(dict(error='Cannot reply to a reply.')), 400
    else:
        replied = None
    # Get the user
    user = db.query(User).filter_by(username=username).first()
    if not user:
        return jsonify(dict(error='User does not exist.')), 400
    # Get or create the project (but not if this is a reply).
    # Bug fix: 'project' was previously only bound inside the branch below,
    # so a reply that also carried a project slug crashed with NameError at
    # the 'project_slug and project' check further down.
    project = None
    if project_slug and not replied:
        # This forces the slug to be slug-like.
        project_slug = slugify(project_slug)
        project = db.query(Project).filter_by(slug=project_slug).first()
        if not project:
            project = Project(slug=project_slug, name=project_slug)
            db.add(project)
            db.commit()
    # Create the status
    status = Status(user_id=user.id, content=content, content_html=content)
    if project_slug and project:
        status.project_id = project.id
    if replied:
        status.reply_to_id = replied.id
    db.add(status)
    db.commit()
    return jsonify(dict(id=status.id, content=content))
@blueprint.route('/status/<id>/', methods=['DELETE'])
@csrf.exempt
@api_key_required
def delete_status(id):
    """Delete an existing status.

    The status to be deleted should be posted as JSON using
    'application/json' as the content type. The posted JSON needs to
    have 2 required fields:

    * user (the username)
    * api_key

    An example of the JSON::

        {
            "user": "r1cky",
            "api_key": "qwertyuiopasdfghjklzxcvbnm1234567890"
        }
    """
    db = get_session(current_app)
    # The data we need
    user = request.json.get('user')
    if not (id and user):
        return jsonify(dict(error='Missing required fields.')), 400
    # Fetch the row once; the previous count() + index pattern issued two
    # separate queries for the same status.
    status = db.query(Status).filter_by(id=id).first()
    if status is None:
        return jsonify(dict(error='Status does not exist.')), 400
    # Only the author may delete their own status.
    if status.user.username != user:
        return jsonify(dict(error='You cannot delete this status.')), 403
    db.delete(status)
    db.commit()
    return jsonify(dict(id=id))
@blueprint.route('/user/<username>/', methods=['POST'])
@csrf.exempt
@api_key_required
def update_user(username):
    """Update settings for an existing user.

    The settings should be posted as JSON using 'application/json' as
    the content type. The posted JSON needs to have 2 required fields:

    * user (the username of the IRC user)
    * api_key

    You may optionally supply the following settings to overwrite
    their values:

    * name
    * email
    * github_handle

    An example of the JSON::

        {
            "user": "r1cky",
            "email": "ricky@email.com"
            "api_key": "qwertyuiopasdfghjklzxcvbnm1234567890"
        }
    """
    db = get_session(current_app)
    payload = request.json
    # The requesting (authoring) user, plus the optional settings.
    authorname = payload.get('user')
    name = payload.get('name')
    email = payload.get('email')
    github_handle = payload.get('github_handle')
    if not (username and authorname and (name or email or github_handle)):
        return jsonify(dict(error='Missing required fields')), 400
    author = db.query(User).filter_by(username=authorname).first()
    user = db.query(User).filter_by(username=username).first()
    if user is None or author is None:
        return jsonify(dict(error='User does not exist.')), 400
    # Only the user themselves or an admin may change settings.
    if author.username != user.username and not author.is_admin:
        return jsonify(dict(error='You cannot modify this user.')), 403
    for field, value in (('name', name), ('email', email),
                         ('github_handle', github_handle)):
        if value:
            setattr(user, field, value)
    db.commit()
    return jsonify(dict(id=user.id))
| bsd-3-clause |
jasonseminara/OpenSourceFinal | venv/lib/python2.7/site-packages/pip/_vendor/html5lib/trie/py.py | 1323 | 1775 | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
from bisect import bisect_left
from ._base import Trie as ABCTrie
class Trie(ABCTrie):
    """Trie backed by a plain dict plus a sorted key list.

    Prefix queries binary-search the sorted key list; the most recent
    (prefix, range) pair is cached to speed up successive lookups with
    extending prefixes.
    """
    def __init__(self, data):
        if not all(isinstance(x, text_type) for x in data.keys()):
            raise TypeError("All keys must be strings")
        self._data = data
        self._keys = sorted(data.keys())
        # Cache of the last prefix searched and the (lo, hi) slice of
        # self._keys it matched, used to narrow future bisects.
        self._cachestr = ""
        self._cachepoints = (0, len(data))
    def __contains__(self, key):
        return key in self._data
    def __len__(self):
        return len(self._data)
    def __iter__(self):
        return iter(self._data)
    def __getitem__(self, key):
        return self._data[key]
    def keys(self, prefix=None):
        """Return the set of keys starting with *prefix* (all keys if None/'')."""
        if prefix is None or prefix == "" or not self._keys:
            return set(self._keys)
        if prefix.startswith(self._cachestr):
            lo, hi = self._cachepoints
            start = i = bisect_left(self._keys, prefix, lo, hi)
        else:
            start = i = bisect_left(self._keys, prefix)
        keys = set()
        if start == len(self._keys):
            return keys
        # Bug fix: bound the scan. Without the i < len(...) guard this
        # raised IndexError whenever every key from `start` onwards matched
        # the prefix (e.g. Trie({"ab": 1}).keys("a")).
        while i < len(self._keys) and self._keys[i].startswith(prefix):
            keys.add(self._keys[i])
            i += 1
        self._cachestr = prefix
        self._cachepoints = (start, i)
        return keys
    def has_keys_with_prefix(self, prefix):
        """Return True if any key starts with *prefix*."""
        if prefix in self._data:
            return True
        if prefix.startswith(self._cachestr):
            lo, hi = self._cachepoints
            i = bisect_left(self._keys, prefix, lo, hi)
        else:
            i = bisect_left(self._keys, prefix)
        if i == len(self._keys):
            return False
        return self._keys[i].startswith(prefix)
| mit |
scylladb/scylla-artifact-tests | scylla_private_repo.py | 1 | 5238 | #!/usr/bin/env python
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright (c) 2017 ScyllaDB
import logging
import tempfile
import json
import re
from avocado import Test
from avocado.utils import process
from avocado import main
from check_version import CheckVersionDB
class PrivateRepo(object):
    """A ScyllaDB private package repository.

    Holds the repo file URL, the package-info URL and the expected
    redirect target, and extracts the customer UUID embedded in the
    repo URL.
    """
    def __init__(self, sw_repo, pkginfo_url, redirect_url):
        self.sw_repo = sw_repo
        self.pkginfo_url = pkginfo_url
        self.redirect_url = redirect_url
        self.uuid = self._get_uuid()
    def _get_uuid(self):
        """Return the UUID path component of sw_repo, or None if absent.

        Raw string fixes the invalid '\\w' escape sequence of the original
        pattern (a DeprecationWarning on modern Pythons).
        """
        match = re.findall(r'https://repositories.scylladb.com/scylla/repo/([\w_-]*)/', self.sw_repo)
        if len(match) == 1:
            return match[0]
        return None
class RHELPrivateRepo(PrivateRepo):
    """Private repo flavor for yum-based distros (CentOS/RHEL).

    body_prefix lists every line prefix that may legally appear in the
    downloaded .repo file; anything else is flagged by the sanity test.
    """
    def __init__(self, sw_repo, pkginfo_url, redirect_url):
        super(RHELPrivateRepo, self).__init__(sw_repo, pkginfo_url, redirect_url)
        self.body_prefix = ['[scylla', 'name=', 'baseurl=', 'enabled=', 'gpgcheck=', 'type=',
                            'skip_if_unavailable=', 'gpgkey=', 'repo_gpgcheck=', 'enabled_metadata=']
class DebianPrivateRepo(PrivateRepo):
    """Private repo flavor for apt-based distros (Debian/Ubuntu).

    Every meaningful line of a sources.list file starts with 'deb'.
    """
    def __init__(self, sw_repo, pkginfo_url, redirect_url):
        super(DebianPrivateRepo, self).__init__(sw_repo, pkginfo_url, redirect_url)
        self.body_prefix = ['deb']
class ScyllaPrivateRepoSanity(Test):
    """
    Useful repo can be got from private link.
    Verify redirection works.
    Verify download info can be collected to housekeeping db.
    :avocado: enable
    """
    def __init__(self, *args, **kwargs):
        super(ScyllaPrivateRepoSanity, self).__init__(*args, **kwargs)
        self.log = logging.getLogger('scylla_private_repo')
    def setUp(self):
        """Build the distro-specific PrivateRepo and open the housekeeping DB."""
        sw_repo = self.params.get('sw_repo')
        pkginfo_url = self.params.get('pkginfo_url')
        redirect_url = self.params.get('redirect_url')
        name = self.params.get('name', default='centos7')
        # NOTE(review): if 'name' matches neither family, self.private_repo
        # stays unset and the tests fail with AttributeError.
        if 'centos' in name or 'rhel' in name:
            self.private_repo = RHELPrivateRepo(sw_repo, pkginfo_url, redirect_url)
        elif 'ubuntu' in name or 'debian' in name:
            self.private_repo = DebianPrivateRepo(sw_repo, pkginfo_url, redirect_url)
        self.cvdb = CheckVersionDB(self.params.get('host'),
                                   self.params.get('user'),
                                   self.params.get('passwd'))
        self.log.debug(self.cvdb.execute('show tables'))
    def tearDown(self):
        self.cvdb.close()
    def check_collect_info(self):
        # NOTE(review): stub, not implemented.
        pass
    def test_generate_repo(self):
        """Download the repo file, validate its lines, verify DB bookkeeping."""
        # get last id of test uuid
        last_id = 0
        ret = self.cvdb.execute('select * from housekeeping.repo where uuid="{}" order by -dt limit 1'.format(self.private_repo.uuid))
        if len(ret) > 0:
            last_id = ret[0][0]
        tmp = tempfile.mktemp(prefix='scylla_private_repo')
        process.run('curl {} -o {} -L'.format(self.private_repo.sw_repo, tmp), verbose=True)
        with open(tmp, 'r') as f:
            repo_body = f.read()
        # Every non-blank line must start with one of the allowed prefixes.
        for line in repo_body.split('\n'):
            valid_prefix = False
            for prefix in self.private_repo.body_prefix:
                if line.startswith(prefix) or len(line.strip()) == 0:
                    valid_prefix = True
                    break
            self.log.debug(line)
            assert valid_prefix, 'repo content has invalid line: {}'.format(line)
        # verify download repo of test uuid is collected to repo table
        self.cvdb.commit()
        ret = self.cvdb.execute('select * from housekeeping.repo where uuid="{}" and id > {}'.format(self.private_repo.uuid, last_id))
        assert len(ret) > 0
    def test_redirect(self):
        """Fetch the pkginfo URL and confirm it answers with the expected redirect."""
        # get last id of test uuid
        last_id = 0
        ret = self.cvdb.execute('select * from housekeeping.repodownload where uuid="{}" order by -dt limit 1'.format(self.private_repo.uuid))
        if len(ret) > 0:
            last_id = ret[0][0]
        tmp = tempfile.mktemp(prefix='scylla_private_repo')
        result = process.run('curl {} -o {}'.format(self.private_repo.pkginfo_url, tmp), verbose=True)
        print result
        with open(tmp, 'r') as f:
            tmp_content = f.read()
        response = json.loads(tmp_content)
        self.log.debug(response)
        assert response['errorMessage'] == u'HandlerDemo.ResponseFound Redirection: Resource found elsewhere'
        assert response['errorType'] == self.private_repo.redirect_url
        # verify download info of test uuid is collected to repodownload table
        self.cvdb.commit()
        ret = self.cvdb.execute('select * from housekeeping.repodownload where uuid="{}" and id > {}'.format(self.private_repo.uuid, last_id))
        assert len(ret) > 0
if __name__ == '__main__':
    # Delegate to the avocado test runner entry point.
    main()
| apache-2.0 |
Motwani/firefox-ui-tests | firefox_ui_tests/functional/locationbar/test_escape_autocomplete.py | 3 | 2269 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from marionette_driver import Wait
from firefox_ui_harness.decorators import skip_under_xvfb
from firefox_puppeteer.testcases import FirefoxTestCase
class TestEscapeAutocomplete(FirefoxTestCase):
    """Check that ESC in the location bar first closes the autocomplete
    popup and a second ESC restores the current page URL."""
    def setUp(self):
        FirefoxTestCase.setUp(self)
        # Clear complete history so there's no interference from previous entries.
        self.places.remove_all_history()
        self.test_urls = [
            'layout/mozilla.html',
            'layout/mozilla_community.html',
        ]
        self.test_urls = [self.marionette.absolute_url(t)
                          for t in self.test_urls]
        self.test_string = 'mozilla'
        self.locationbar = self.browser.navbar.locationbar
        self.autocomplete_results = self.locationbar.autocomplete_results
    def tearDown(self):
        # Ensure the popup never leaks into the next test.
        self.autocomplete_results.close(force=True)
        FirefoxTestCase.tearDown(self)
    @skip_under_xvfb
    def test_escape_autocomplete(self):
        # Open some local pages
        def load_urls():
            with self.marionette.using_context('content'):
                for url in self.test_urls:
                    self.marionette.navigate(url)
        self.places.wait_for_visited(self.test_urls, load_urls)
        # Clear the location bar, type the test string, check that autocomplete list opens
        self.locationbar.clear()
        self.locationbar.urlbar.send_keys(self.test_string)
        self.assertEqual(self.locationbar.value, self.test_string)
        Wait(self.marionette).until(lambda _: self.autocomplete_results.is_open)
        # Press escape, check location bar value, check autocomplete list closed
        self.locationbar.urlbar.send_keys(self.keys.ESCAPE)
        self.assertEqual(self.locationbar.value, self.test_string)
        Wait(self.marionette).until(lambda _: not self.autocomplete_results.is_open)
        # Press escape again and check that locationbar returns to the page url
        self.locationbar.urlbar.send_keys(self.keys.ESCAPE)
        self.assertEqual(self.locationbar.value, self.test_urls[-1])
| mpl-2.0 |
vveerava/Openstack | neutron/extensions/multiprovidernet.py | 18 | 4327 | # Copyright (c) 2013 OpenStack Foundation.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from neutron.api import extensions
from neutron.api.v2 import attributes as attr
from neutron.common import exceptions as qexception
from neutron.extensions import providernet as pnet
SEGMENTS = 'segments'
class SegmentsSetInConjunctionWithProviders(qexception.InvalidInput):
    # Raised when a request mixes the multi-segment 'segments' attribute
    # with the single-segment provider:* attributes.
    message = _("Segments and provider values cannot both be set.")
class SegmentsContainDuplicateEntry(qexception.InvalidInput):
    # Raised when the same segment appears more than once in one request.
    message = _("Duplicate segment entry in request.")
def _convert_and_validate_segments(segments, valid_values=None):
    """Normalize provider segment dicts in place.

    Fills missing network_type / physical_network / segmentation_id keys
    with ATTR_NOT_SPECIFIED, converts a supplied segmentation_id to int,
    and rejects (HTTP 400) any segment carrying unknown keys.
    """
    for segment in segments:
        segment.setdefault(pnet.NETWORK_TYPE, attr.ATTR_NOT_SPECIFIED)
        segment.setdefault(pnet.PHYSICAL_NETWORK, attr.ATTR_NOT_SPECIFIED)
        segmentation_id = segment.get(pnet.SEGMENTATION_ID)
        if segmentation_id:
            segment[pnet.SEGMENTATION_ID] = attr.convert_to_int(
                segmentation_id)
        else:
            segment[pnet.SEGMENTATION_ID] = attr.ATTR_NOT_SPECIFIED
        # After the setdefaults above a valid segment has exactly the three
        # provider keys; anything extra is unrecognized.
        if len(segment.keys()) != 3:
            msg = (_("Unrecognized attribute(s) '%s'") %
                   ', '.join(set(segment.keys()) -
                             set([pnet.NETWORK_TYPE, pnet.PHYSICAL_NETWORK,
                                  pnet.SEGMENTATION_ID])))
            raise webob.exc.HTTPBadRequest(msg)
def check_duplicate_segments(segments, is_partial_func=None):
    """Raise SegmentsContainDuplicateEntry on duplicated segment dicts.

    When ``is_partial_func`` is given, partially specified segments
    (``is_partial_func(segment) == True``) are excluded from the duplicate
    check; otherwise every segment takes part in it.
    """
    candidates = segments
    if is_partial_func is not None:
        candidates = [seg for seg in candidates if not is_partial_func(seg)]
    seen = set()
    for segment in candidates:
        # A sorted item tuple is a hashable, order-independent fingerprint.
        normalized = tuple(sorted(segment.items()))
        if normalized in seen:
            raise SegmentsContainDuplicateEntry()
        seen.add(normalized)
# Register the converter/validator so the attribute map below can reference
# it by the 'type:convert_segments' name.
attr.validators['type:convert_segments'] = (
    _convert_and_validate_segments)
# Extension of the v2.0 'networks' resource with a 'segments' attribute.
EXTENDED_ATTRIBUTES_2_0 = {
    'networks': {
        SEGMENTS: {'allow_post': True, 'allow_put': True,
                   'validate': {'type:convert_segments': None},
                   'convert_list_to': attr.convert_kvp_list_to_dict,
                   'default': attr.ATTR_NOT_SPECIFIED,
                   'enforce_policy': True,
                   'is_visible': True},
    }
}
class Multiprovidernet(extensions.ExtensionDescriptor):
    """Extension class supporting multiple provider networks.

    This class is used by neutron's extension framework to make
    metadata about the multiple provider network extension available to
    clients. No new resources are defined by this extension. Instead,
    the existing network resource's request and response messages are
    extended with 'segments' attribute.

    With admin rights, network dictionaries returned will also include
    'segments' attribute.
    """
    @classmethod
    def get_name(cls):
        return "Multi Provider Network"
    @classmethod
    def get_alias(cls):
        return "multi-provider"
    @classmethod
    def get_description(cls):
        return ("Expose mapping of virtual networks to multiple physical "
                "networks")
    @classmethod
    def get_namespace(cls):
        return "http://docs.openstack.org/ext/multi-provider/api/v1.0"
    @classmethod
    def get_updated(cls):
        return "2013-06-27T10:00:00-00:00"
    def get_extended_resources(self, version):
        # Only API v2.0 receives the 'segments' network attribute.
        if version == "2.0":
            return EXTENDED_ATTRIBUTES_2_0
        else:
            return {}
| apache-2.0 |
kangkot/arangodb | 3rdParty/V8-4.3.61/third_party/binutils/download.py | 25 | 2889 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# vim: set ts=2 sw=2 et sts=2 ai:
"""Minimal tool to download binutils from Google storage.
TODO(mithro): Replace with generic download_and_extract tool.
"""
import os
import platform
import re
import shutil
import subprocess
import sys
# Location of this script and the layout of the binutils bundle it manages.
BINUTILS_DIR = os.path.abspath(os.path.dirname(__file__))
BINUTILS_FILE = 'binutils.tar.bz2'
BINUTILS_TOOLS = ['bin/ld.gold', 'bin/objcopy', 'bin/objdump']
BINUTILS_OUT = 'Release'
# Helper script used to detect the host architecture when GYP_DEFINES does
# not specify one.
DETECT_HOST_ARCH = os.path.abspath(os.path.join(
    BINUTILS_DIR, '../../build/detect_v8_host_arch.py'))
def ReadFile(filename):
  """Return the contents of *filename* with surrounding whitespace stripped."""
  # open() instead of the file() builtin: file() was removed in Python 3 and
  # open() has been the preferred spelling on Python 2 as well.
  with open(filename, 'r') as f:
    return f.read().strip()
def WriteFile(filename, content):
  """Write *content* plus a trailing newline to *filename*.

  The file must not already exist (asserted) -- it is used as a stamp.
  """
  assert not os.path.exists(filename)
  # open() instead of the file() builtin (removed in Python 3).
  with open(filename, 'w') as f:
    f.write(content)
    f.write('\n')
def GetArch():
  """Return the host architecture, e.g. 'x64', 'ia32' or 'arm'.

  Honors host_arch from the GYP_DEFINES environment variable; otherwise
  falls back to running the detect_v8_host_arch.py helper.
  """
  # Raw string fixes the invalid '\S' escape (DeprecationWarning on modern
  # Pythons) of the original pattern.
  gyp_host_arch = re.search(
      r'host_arch=(\S*)', os.environ.get('GYP_DEFINES', ''))
  if gyp_host_arch:
    arch = gyp_host_arch.group(1)
    # This matches detect_host_arch.py.
    if arch == 'x86_64':
      return 'x64'
    return arch
  return subprocess.check_output(['python', DETECT_HOST_ARCH]).strip()
def FetchAndExtract(arch):
  """Download (if needed) and unpack the binutils tarball for *arch*.

  Returns 0 on success or when nothing had to be done.
  """
  archdir = os.path.join(BINUTILS_DIR, 'Linux_' + arch)
  tarball = os.path.join(archdir, BINUTILS_FILE)
  outdir = os.path.join(archdir, BINUTILS_OUT)
  sha1file = tarball + '.sha1'
  if not os.path.exists(sha1file):
    print "WARNING: No binutils found for your architecture (%s)!" % arch
    return 0
  checksum = ReadFile(sha1file)
  stampfile = tarball + '.stamp'
  # The stamp file records the checksum of the last successful extract;
  # when it still matches (and both artifacts exist), skip all work.
  if os.path.exists(stampfile):
    if (os.path.exists(tarball) and
        os.path.exists(outdir) and
        checksum == ReadFile(stampfile)):
      return 0
    else:
      os.unlink(stampfile)
  print "Downloading", tarball
  subprocess.check_call([
      'download_from_google_storage',
      '--no_resume',
      '--no_auth',
      '--bucket', 'chromium-binutils',
      '-s', sha1file])
  assert os.path.exists(tarball)
  # Extract into a fresh output directory.
  if os.path.exists(outdir):
    shutil.rmtree(outdir)
  assert not os.path.exists(outdir)
  os.makedirs(outdir)
  assert os.path.exists(outdir)
  print "Extracting", tarball
  subprocess.check_call(['tar', 'axf', tarball], cwd=outdir)
  # Sanity-check that the expected tools made it out of the tarball.
  for tool in BINUTILS_TOOLS:
    assert os.path.exists(os.path.join(outdir, tool))
  WriteFile(stampfile, checksum)
  return 0
def main(args):
  """Fetch binutils for the host; no-op on non-Linux platforms."""
  if not sys.platform.startswith('linux'):
    return 0
  host_arch = GetArch()
  if host_arch == 'x64':
    return FetchAndExtract('x64')
  if host_arch == 'ia32':
    status = FetchAndExtract('ia32')
    if status:
      return status
    # Fetch the x64 toolchain as well for official bots with 64-bit kernels.
    return FetchAndExtract('x64')
  return 0
if __name__ == '__main__':
  # Propagate the fetch status as the process exit code.
  sys.exit(main(sys.argv))
| apache-2.0 |
alexlo03/ansible | lib/ansible/module_utils/network/netconf/netconf.py | 36 | 3910 | #
# (c) 2018 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import json
from copy import deepcopy
from contextlib import contextmanager
try:
from lxml.etree import fromstring, tostring
except ImportError:
from xml.etree.ElementTree import fromstring, tostring
from ansible.module_utils._text import to_text, to_bytes
from ansible.module_utils.connection import Connection, ConnectionError
from ansible.module_utils.network.common.netconf import NetconfConnection
IGNORE_XML_ATTRIBUTE = ()
def get_connection(module):
    """Return (and memoize on the module) a NetconfConnection.

    Fails the module if the persistent connection is not netconf-based.
    """
    # Reuse a previously established connection when one is cached.
    if hasattr(module, '_netconf_connection'):
        return module._netconf_connection

    network_api = get_capabilities(module).get('network_api')
    if network_api == 'netconf':
        module._netconf_connection = NetconfConnection(module._socket_path)
    else:
        module.fail_json(msg='Invalid connection type %s' % network_api)

    return module._netconf_connection
def get_capabilities(module):
    """Fetch, cache on the module and return device capabilities as a dict."""
    if not hasattr(module, '_netconf_capabilities'):
        raw = Connection(module._socket_path).get_capabilities()
        module._netconf_capabilities = json.loads(raw)
    return module._netconf_capabilities
def lock_configuration(module, target=None):
    """Lock the named datastore (device default if None); return the RPC reply."""
    return get_connection(module).lock(target=target)
def unlock_configuration(module, target=None):
    """Release the lock on the named datastore; return the RPC reply."""
    return get_connection(module).unlock(target=target)
@contextmanager
def locked_config(module, target=None):
    """Hold a datastore lock for the duration of the `with` block.

    The unlock always runs, even when the body (or the lock itself) raises.
    """
    try:
        lock_configuration(module, target=target)
        yield
    finally:
        unlock_configuration(module, target=target)
def get_config(module, source, filter, lock=False):
    """Run <get-config> against `source`, optionally under a datastore lock.

    `filter` is forwarded verbatim to the RPC. On ConnectionError the module
    is failed with the stripped error text.
    """
    conn = get_connection(module)
    locked = False
    try:
        if lock:
            conn.lock(target=source)
            locked = True
        response = conn.get_config(source=source, filter=filter)
    except ConnectionError as exc:
        module.fail_json(msg=to_text(exc, errors='surrogate_then_replace').strip())
    finally:
        # Only undo a lock this call actually acquired.
        if locked:
            conn.unlock(target=source)

    return response
def get(module, filter, lock=False):
    """Run a <get> RPC, optionally locking the running datastore first.

    On ConnectionError the module is failed with the stripped error text.
    """
    conn = get_connection(module)
    locked = False
    try:
        if lock:
            conn.lock(target='running')
            locked = True
        response = conn.get(filter=filter)
    except ConnectionError as exc:
        module.fail_json(msg=to_text(exc, errors='surrogate_then_replace').strip())
    finally:
        # Only undo a lock this call actually acquired.
        if locked:
            conn.unlock(target='running')

    return response
def dispatch(module, request):
    """Send an arbitrary RPC to the device and return its reply.

    Fails the module with the stripped error text on ConnectionError.
    """
    conn = get_connection(module)
    try:
        response = conn.dispatch(request)
    except ConnectionError as exc:
        module.fail_json(msg=to_text(exc, errors='surrogate_then_replace').strip())
    return response
def sanitize_xml(data):
    """Strip non-whitelisted attributes from an XML document.

    :param data: XML document as text (or bytes); it is parsed, every element
        attribute whose name is not in IGNORE_XML_ATTRIBUTE is removed, and
        the tree is serialised back to text.
    :returns: the sanitised XML as a stripped text string.
    """
    tree = fromstring(to_bytes(deepcopy(data), errors='surrogate_then_replace'))
    # BUGFIX: Element.getiterator() was deprecated and finally removed in
    # Python 3.9; Element.iter() is the supported spelling and exists on both
    # xml.etree and lxml trees (whichever import succeeded above).
    for element in tree.iter():
        # remove attributes (iterating over a list() copy so we can pop)
        attribute = element.attrib
        if attribute:
            for key in list(attribute):
                if key not in IGNORE_XML_ATTRIBUTE:
                    attribute.pop(key)
    return to_text(tostring(tree), errors='surrogate_then_replace').strip()
| gpl-3.0 |
ferno/greenery | greenery/lego.py | 1 | 55607 | # -*- coding: utf-8 -*-
'''
LEGO:
Classes and methods for the creation and manipulation of regular expression
objects and components.
* A regular expression is a "pattern" object.
* Each pattern alternates (with a pipe, "|") between zero or more "conc"
(concatenation) objects.
* Each conc is a concatenation of zero or more "mult" (multiplication)
objects.
* Each mult consists of a multiplicand and a multiplier. A multiplier consists
of a minimum and a maximum, e.g. min = 0, max = 1 indicates the "?"
multiplier. The multiplicand is either a nested pattern object, or a
charclass object.
* A charclass is a set of chars, such as "a", "[a-z]", "\\d", ".", with a
possible "negated" flag as in "[^a]".
* Since these can be combined together freely they are, in the absence of a
better metaphor, collectively referred to as lego pieces.
We also include methods for parsing a string into a pattern object,
serialising a pattern object out as a string (or "regular expression", if you
will), and for concatenating or alternating between arbitrary "pieces of
lego", using overloaded operators.
If the FSM module is available, call lego.to_fsm() on any lego piece to return
a finite state machine capable of accepting strings described by that piece.
Most important are the reduce() methods present in charclass, mult, conc and
pattern. While there is no such thing as a canonical form for a given regex
pattern, these procedures can drastically simplify a regex structure for
readability. They're also pretty extensible.
'''
from greenery import fsm
class nomatch(Exception):
    '''Raised when a parse attempt fails. Routinely caught; almost never fatal.'''
def reduce_after(method):
    '''
    Decorator: reduce() the result of this method call, unless the call
    returned `self` (which is assumed to be reduced already — reducing it
    again could loop forever).
    '''
    from functools import wraps

    # BUGFIX: preserve __name__/__doc__ of the wrapped method. Without this,
    # introspection (and anything keying off method.__name__, like call_fsm)
    # would see "new_method" instead of the real name.
    @wraps(method)
    def new_method(self, *args, **kwargs):
        result = method(self, *args, **kwargs)
        if result == self:
            # No progress was made; returning as-is avoids infinite recursion.
            return result
        return result.reduce()
    return new_method
def call_fsm(method):
    '''
    Wrap a function over regular expression objects so that it operates on
    their FSM equivalents instead: every operand is converted with to_fsm()
    over a shared alphabet, the identically-named method of fsm.fsm is
    applied, and the resulting machine is turned back into a lego piece.
    We do this for several of the more annoying operations.
    '''
    fsm_method = getattr(fsm.fsm, method.__name__)

    def new_method(*legos):
        # All operands must share one alphabet so their FSMs are compatible.
        alphabet = set()
        for lego in legos:
            alphabet |= lego.alphabet()
        machines = [lego.to_fsm(alphabet) for lego in legos]
        return from_fsm(fsm_method(*machines))
    return new_method
def parse(string):
    '''
    Convenience wrapper: parse `string` in its entirety into a lego piece.
    Fails (raises) if any trailing characters are left unconsumed.
    '''
    return pattern.parse(string)
def from_fsm(f):
    '''
    Turn the supplied finite state machine into a `lego` object. This is
    accomplished using the Brzozowski algebraic method.

    NOTE: relies on the module-level constants `nothing`, `emptystring` and
    `star` (presumably defined further down this file) — verify against the
    full module.
    '''
    # Make sure the supplied alphabet is kosher. It must contain only single-
    # character strings or `fsm.anything_else`.
    for symbol in f.alphabet:
        if symbol == fsm.anything_else:
            continue
        if isinstance(symbol, str) and len(symbol) == 1:
            continue
        raise Exception("Symbol " + repr(symbol) + " cannot be used in a regular expression")

    # We need a new state not already used
    outside = object()

    # The set of strings that would be accepted by this FSM if you started
    # at state i is represented by the regex R_i.
    # If state i has a sole transition "a" to state j, then we know R_i = a R_j.
    # If state i is final, then the empty string is also accepted by this regex.
    # And so on...

    # From this we can build a set of simultaneous equations in len(f.states)
    # variables. This system is easily solved for all variables, but we only
    # need one: R_a, where a is the starting state.

    # The first thing we need to do is organise the states into order of depth,
    # so that when we perform our back-substitutions, we can start with the
    # last (deepest) state and therefore finish with R_a.
    states = [f.initial]
    i = 0
    while i < len(states):
        current = states[i]
        if current in f.map:
            # Deterministic ordering via fsm.key keeps the output stable.
            for symbol in sorted(f.map[current], key=fsm.key):
                next = f.map[current][symbol]
                if next not in states:
                    states.append(next)
        i += 1

    # Our system of equations is represented like so:
    brz = {}
    for a in f.states:
        brz[a] = {}
        for b in f.states | {outside}:
            brz[a][b] = nothing

    # Populate it with some initial data.
    for a in f.map:
        for symbol in f.map[a]:
            b = f.map[a][symbol]
            if symbol == fsm.anything_else:
                # The wildcard transition matches anything NOT explicitly
                # listed in the alphabet, hence the negated charclass.
                brz[a][b] |= ~charclass(f.alphabet - {fsm.anything_else})
            else:
                brz[a][b] |= charclass({symbol})
        if a in f.finals:
            # Final states also "transition" to the outside on the empty string.
            brz[a][outside] |= emptystring

    # Now perform our back-substitution
    for i in reversed(range(len(states))):
        a = states[i]

        # Before the equation for R_a can be substituted into the other
        # equations, we need to resolve the self-transition (if any).
        # e.g.    R_a = 0 R_a |   1 R_b |   2 R_c
        # becomes R_a =         0*1 R_b | 0*2 R_c
        loop = brz[a][a] * star  # i.e. "0*"
        del brz[a][a]

        for right in brz[a]:
            brz[a][right] = loop + brz[a][right]

        # Note: even if we're down to our final equation, the above step still
        # needs to be performed before anything is returned.

        # Now we can substitute this equation into all of the previous ones.
        for j in range(i):
            b = states[j]

            # e.g. substituting R_a =  0*1 R_b |      0*2 R_c
            # into              R_b =    3 R_a |        4 R_c | 5 R_d
            # yields            R_b = 30*1 R_b | (30*2|4) R_c | 5 R_d
            univ = brz[b][a]  # i.e. "3"
            del brz[b][a]

            for right in brz[a]:
                brz[b][right] |= univ + brz[a][right]

    return brz[f.initial][outside].reduce()
def static(string, i, static):
    '''Match the literal text `static` at index `i` of `string`.

    Returns the index just past the match; raises `nomatch` otherwise.
    (The third parameter shadows the function name, but every call site is
    positional, so the public interface is unchanged.)
    '''
    end = i + len(static)
    if string[i:end] != static:
        raise nomatch
    return end
def select_static(string, i, *statics):
    '''Try each literal in `statics` at index `i` of `string`, in order.

    Returns (index just past the match, the literal that matched) for the
    first hit; raises `nomatch` when none of them match.
    '''
    for candidate in statics:
        end = i + len(candidate)
        if string[i:end] == candidate:
            return end, candidate
    raise nomatch
def read_until(string, i, stop_char):
    '''Scan forward from index `i` until `stop_char` is found.

    Returns (index just past the stop character, the text before it).
    Raises `nomatch` if the string ends before `stop_char` appears.
    '''
    start = i
    while i < len(string):
        if string[i] == stop_char:
            return i + 1, string[start:i]
        i += 1
    raise nomatch
class lego:
    '''
    Parent class for all lego pieces.
    All lego pieces have some things in common. This parent class mainly
    hosts documentation though.

    NOTE: many methods below are decorated with @call_fsm, which discards the
    method body entirely and substitutes an FSM-based implementation keyed on
    the method's name; the `pass`/`raise` bodies are never executed.
    '''

    def __setattr__(self, name, value):
        '''
        Lego pieces are immutable. It caused some pretty serious problems when
        I didn't have this.
        '''
        raise Exception("This object is immutable.")

    def to_fsm(self, alphabet):
        '''
        Return the present lego piece in the form of a finite state machine,
        as imported from the fsm module.
        If no alphabet is explicitly supplied, which seems quite probable,
        we use the lego.alphabet() method (later) to list all the characters
        mentioned in self. However, if we intend to connect this FSM to another
        one which uses different characters, we may need to supply an alphabet
        which is a superset of both sets.
        '''
        raise Exception("Not implemented")

    def __repr__(self):
        '''
        Return a string approximating the instantiation line
        for the present lego piece.
        '''
        raise Exception("Not implemented")

    def __str__(self):
        '''
        Render the present lego piece in the form of a regular expression.
        Some lego pieces may be created which cannot be rendered in this way.
        In particular: a pattern containing no concs; a multiplier of zero.
        '''
        raise Exception("Not implemented")

    @classmethod
    def match(cls, string, i = 0):
        '''
        Start at index i in the supplied string and try to match one of the
        present class. Elementary recursive descent parsing with very little
        need for flair. The opposite of __str__(), above. (In most cases.)
        Throws a nomatch in the event of failure.
        '''
        raise Exception("Not implemented")

    @classmethod
    def parse(cls, string):
        '''
        Parse the entire supplied string as an instance of the present class.
        Mainly for internal use in unit tests because it drops through to match()
        in a convenient way.
        '''
        obj, i = cls.match(string, 0)
        if i != len(string):
            raise Exception("Could not parse '" + string + "' beyond index " + str(i))
        return obj

    @reduce_after
    def reduce(self):
        '''
        The most important and algorithmically complex method. Takes the current
        lego piece and simplifies it in every way possible, returning a simpler
        lego piece which is quite probably not of the same class as the original.
        Approaches vary by the class of the present lego piece.
        It is critically important to (1) always call reduce() on whatever you're
        returning before you return it and therefore (2) always return something
        STRICTLY SIMPLER than the current object. Otherwise, infinite loops become
        possible in reduce() calls.
        '''
        raise Exception("Not implemented")

    @call_fsm
    def concatenate(*legos):
        '''
        Concatenate a sequence of lego pieces, regardless of differing classes.
        Call using "a = b + c"
        '''
        pass

    def __add__(self, other):
        return self.concatenate(other)

    @call_fsm
    def times(self, multiplier):
        '''
        Equivalent to repeated concatenation. Multiplier consists of a minimum
        and a maximum; maximum may be infinite (for Kleene star closure).
        Call using "a = b * qm"
        '''
        raise Exception("Not implemented")

    def __mul__(self, multiplier):
        return self.times(multiplier)

    @call_fsm
    def union(*legos):
        '''
        Alternate between any two lego pieces, regardless of differing classes.
        Call using "a = b | c".
        This method MUST NOT call the to_fsm() method, because this method is used
        in turn when converting an FSM back to a regex.
        '''
        pass

    def __or__(self, other):
        return self.union(other)

    @call_fsm
    def intersection(self, other):
        '''
        Intersection function. Return a lego piece that can match any string
        that both self and other can match. Fairly elementary results relating
        to regular languages and finite state machines show that this is
        possible, but implementation is a BEAST in many cases. Here, we convert
        both lego pieces to FSMs (see to_fsm(), above) for the intersection, then
        back to lego afterwards.
        Call using "a = b & c"
        '''
        pass

    def __and__(self, other):
        return self.intersection(other)

    @call_fsm
    def difference(*legos):
        '''
        Return a regular expression which matches any string which `self` matches
        but none of the strings which `other` matches.
        '''
        pass

    def __sub__(self, other):
        return self.difference(other)

    @call_fsm
    def symmetric_difference(*legos):
        '''
        Return a regular expression matching only the strings recognised by
        `self` or `other` but not both.
        '''
        pass

    def __xor__(self, other):
        return self.symmetric_difference(other)

    def equivalent(self, other):
        '''
        Two lego objects are equivalent if they recognise the same strings. Note
        that in the general case this is actually quite an intensive calculation,
        but far from unsolvable, as we demonstrate here:
        '''
        return self.to_fsm().equivalent(other.to_fsm())

    def alphabet(self):
        '''
        Return a set of all unique characters used in this lego piece.
        In theory this could be a static property, self.alphabet, not
        a function, self.alphabet(), but in the vast majority of cases
        this will never be queried so it's a waste of computation to
        calculate it every time a lego piece is instantiated.
        By convention, fsm.anything_else is always included in this result.
        '''
        raise Exception("Not implemented")

    @call_fsm
    def everythingbut(self):
        '''
        Return a lego object which will match any string not matched by self,
        and which will not match any string matched by self.
        Another task which is very difficult in general (and typically returns
        utter garbage when actually printed), but becomes trivial to code
        thanks to FSM routines.
        '''
        pass

    def reversed(self):
        '''
        Return a lego object which will match any string which, when reversed,
        self would match. E.g. if self matches "beer" then reversed(self) will
        match "reeb".
        '''
        raise Exception("Not implemented")

    def __reversed__(self):
        return self.reversed()

    def empty(self):
        '''
        Return False if there exists a string which the present lego piece
        can match. Return True if no such string exists. Examples of empty
        lego pieces are charclass() and pattern()
        '''
        raise Exception("Not implemented")

    def matches(self, string):
        return self.to_fsm().accepts(string)

    def __contains__(self, string):
        '''
        This lets you use the syntax `"a" in pattern1` to see whether the string
        "a" is in the set of strings matched by `pattern1`.
        '''
        return self.matches(string)

    def strings(self, otherchar=None):
        '''
        Each time next() is called on this iterator, a new string is returned
        which will the present lego piece can match. StopIteration is raised once
        all such strings have been returned, although a regex with a * in may
        match infinitely many strings.
        '''
        # In the case of a regex like "[^abc]", there are infinitely many (well, a
        # very large finite number of) single characters which will match. It's not
        # productive to iterate over all of these giving every single example.
        # You must supply your own "otherchar" to stand in for all of these
        # possibilities.
        for string in self.to_fsm().strings():

            # Have to represent `fsm.anything_else` somehow.
            if fsm.anything_else in string:
                if otherchar == None:
                    raise Exception("Please choose an 'otherchar'")
                string = [
                    otherchar if char == fsm.anything_else else char
                    for char in string
                ]

            yield "".join(string)

    def __iter__(self):
        '''
        This allows you to do `for string in pattern1` as a list comprehension!
        '''
        return self.strings()

    def cardinality(self):
        '''
        Consider the regular expression as a set of strings and return the
        cardinality of that set, or raise an OverflowError if there are infinitely
        many.
        '''
        # There is no way to do this other than converting to an FSM, because the
        # pattern may allow duplicate routes, such as "a|a".
        return self.to_fsm().cardinality()

    def __len__(self):
        return self.cardinality()

    @call_fsm
    def isdisjoint(self, other):
        '''
        Treat `self` and `other` as sets of strings and see if they are disjoint
        '''
        pass

    def copy(self):
        '''
        For completeness only, since `set.copy()` also exists. Regular expression
        objects are immutable, so I can see only very odd reasons to need this.
        '''
        raise Exception("Not implemented")

    def __hash__(self):
        '''For dictionaries'''
        raise Exception("Not implemented")

    def derive(self, string):
        # Brzozowski derivative, computed via the FSM equivalent.
        return from_fsm(self.to_fsm().derive(string))
class charclass(lego):
    '''
    A charclass is basically a frozenset of symbols. The reason for the
    charclass object instead of using frozenset directly is to allow us to
    set a "negated" flag. A charclass with the negation flag set is assumed
    to contain every symbol that is in the alphabet of all symbols but not
    explicitly listed inside the frozenset. e.g. [^a]. This is very handy
    if the full alphabet is extremely large, but also requires dedicated
    combination functions.

    NOTE(review): methods below reference bare module-level names
    (`shorthand`, `escapes`, `one`) which are presumably defined later in
    this file — distinct from the string-keyed class attribute
    `charclass.shorthand` below. Confirm against the full module.
    '''

    def __init__(self, chars=set(), negateMe=False):
        # `chars` is frozen immediately, so the mutable default is never
        # actually shared or mutated.
        chars = frozenset(chars)
        # chars should consist only of chars
        if fsm.anything_else in chars:
            raise Exception("Can't put " + repr(fsm.anything_else) + " in a charclass")
        # Write through __dict__ because lego.__setattr__ forbids assignment.
        self.__dict__["chars"] = chars
        self.__dict__["negated"] = negateMe

    def __eq__(self, other):
        try:
            return self.chars == other.chars and self.negated == other.negated
        except AttributeError:
            # `other` is not a charclass.
            return False

    def __hash__(self):
        return hash((self.chars, self.negated))

    def times(self, multiplier):
        # e.g. "a" * {0,1} = "a?"
        if multiplier == one:
            return self
        return mult(self, multiplier)

    # These are the characters carrying special meanings when they appear "outdoors"
    # within a regular expression. To be interpreted literally, they must be
    # escaped with a backslash.
    allSpecial = set("\\[]|().?*+{}")

    # These are the characters carrying special meanings when they appear INSIDE a
    # character class (delimited by square brackets) within a regular expression.
    # To be interpreted literally, they must be escaped with a backslash.
    # Notice how much smaller this class is than the one above; note also that the
    # hyphen and caret do NOT appear above.
    classSpecial = set("\\[]^-")

    # Shorthand codes for use inside charclasses e.g. [abc\d]
    w = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz"
    d = "0123456789"
    s = "\t\n\v\f\r "
    shorthand = {
        w : "\\w",
        d : "\\d",
        s : "\\s",
    }

    def __str__(self):
        # e.g. \w  (module-level `shorthand`, keyed by charclass instances)
        if self in shorthand.keys():
            return shorthand[self]

        # e.g. [^a]
        if self.negated:
            return "[^" + self.escape() + "]"

        # single character, not contained inside square brackets.
        if len(self.chars) == 1:
            # Python lacks the Axiom of Choice
            char = "".join(self.chars)

            # e.g. if char is "\t", return "\\t"
            if char in escapes.keys():
                return escapes[char]

            if char in charclass.allSpecial:
                return "\\" + char

            # If char is an ASCII control character, don't print it directly,
            # return a hex escape sequence e.g. "\\x00". Note that this includes
            # tab and other characters already handled above
            if 0 <= ord(char) <= 0x1F or ord(char) == 0x7f:
                return "\\x" + "{0:02x}".format(ord(char))

            return char

        # multiple characters (or possibly 0 characters)
        return "[" + self.escape() + "]"

    def escape(self):
        # Serialise the interior of a character class, e.g. "a-d\\t".

        def escapeChar(char):
            # Escape a single char for use inside square brackets.
            if char in charclass.classSpecial:
                return "\\" + char
            if char in escapes.keys():
                return escapes[char]
            # If char is an ASCII control character, don't print it directly,
            # return a hex escape sequence e.g. "\\x00". Note that this includes
            # tab and other characters already handled above
            if 0 <= ord(char) <= 0x1F or ord(char) == 0x7f:
                return "\\x" + "{0:02x}".format(ord(char))
            return char

        def recordRange():
            # there's no point in putting a range when the whole thing is
            # 3 characters or fewer. "abc" -> "abc" but "abcd" -> "a-d"
            strs = [
                # "ab" or "abc" or "abcd"
                "".join(escapeChar(char) for char in currentRange),
                # "a-b" or "a-c" or "a-d"
                escapeChar(currentRange[0]) + "-" + escapeChar(currentRange[-1]),
            ]
            # Pick whichever rendering is shorter.
            return sorted(strs, key=lambda str: len(str))[0]

        output = ""

        # use shorthand for known character ranges
        # note the nested processing order. DO NOT process \d before processing
        # \w. if more character class constants arise which do not nest nicely,
        # a problem will arise because there is no clear ordering to use...

        # look for ranges
        currentRange = ""
        for char in sorted(self.chars, key=ord):

            # range is not empty: new char must fit after previous one
            if len(currentRange) > 0:

                i = ord(char)

                # char doesn't fit old range: restart
                if i != ord(currentRange[-1]) + 1:
                    output += recordRange()
                    currentRange = ""

            currentRange += char

        if len(currentRange) > 0:
            output += recordRange()

        return output

    def to_fsm(self, alphabet=None):
        if alphabet is None:
            alphabet = self.alphabet()

        # 0 is initial, 1 is final
        # If negated, make a singular FSM accepting any other characters
        if self.negated:
            map = {
                0: dict([(symbol, 1) for symbol in alphabet - self.chars]),
            }

        # If normal, make a singular FSM accepting only these characters
        else:
            map = {
                0: dict([(symbol, 1) for symbol in self.chars]),
            }

        return fsm.fsm(
            alphabet = alphabet,
            states = {0, 1},
            initial = 0,
            finals = {1},
            map = map,
        )

    def __repr__(self):
        string = ""
        if self.negated is True:
            string += "~"
        string += "charclass("
        if len(self.chars) > 0:
            string += repr("".join(str(char) for char in sorted(self.chars, key=str)))
        string += ")"
        return string

    @reduce_after
    def reduce(self):
        # Charclasses cannot be reduced().
        return self

    def concatenate(self, other):
        return mult(self, one) + other

    def alphabet(self):
        return {fsm.anything_else} | self.chars

    def empty(self):
        # A non-negated, empty charclass matches no strings at all.
        return len(self.chars) == 0 and self.negated == False

    @classmethod
    def match(cls, string, i = 0):
        if i >= len(string):
            raise nomatch

        # Turn e.g. "\\x40" into "@". Exactly two hex digits
        def unescapeHex(string, i):
            hex_digits = "0123456789AaBbCcDdEeFf"

            j = static(string, i, "\\x")

            hex1 = string[j] # e.g. "4"
            if not hex1 in hex_digits:
                raise nomatch
            j += len(hex1)

            hex2 = string[j] # e.g. "0"
            if not hex2 in hex_digits:
                raise nomatch
            j += len(hex2)

            codepoint = int(hex1 + hex2, 16) # e.g. 64
            char = chr(codepoint) # "@"
            return char, j

        def matchInternalChar(string, i):
            # Match one literal character inside square brackets.

            # e.g. if we see "\\t", return "\t"
            for key in escapes.keys():
                try:
                    return key, static(string, i, escapes[key])
                except nomatch:
                    pass

            # special chars e.g. "\\-" returns "-"
            for char in charclass.classSpecial:
                try:
                    return char, static(string, i, "\\" + char)
                except nomatch:
                    pass

            # hex escape e.g. "\\x40" returns "@"
            try:
                return unescapeHex(string, i)
            except nomatch:
                pass

            # single non-special character, not contained
            # inside square brackets
            char, j = string[i], i+1
            if char in charclass.classSpecial:
                raise nomatch
            return char, j

        def matchClassInterior1(string, i):
            # Match one unit of a class interior: shorthand, range or char.

            # Attempt 1: shorthand e.g. "\w"
            for key in charclass.shorthand:
                try:
                    return key, static(string, i, charclass.shorthand[key])
                except nomatch:
                    pass

            # Attempt 2: a range e.g. "d-h"
            try:
                first, j = matchInternalChar(string, i) # `first` is "d"
                k = static(string, j, "-")
                last, k = matchInternalChar(string, k) # `last` is "h"

                firstIndex = ord(first) # 100
                lastIndex = ord(last) # 104

                # Be strict here, "d-d" is not allowed
                if firstIndex >= lastIndex:
                    raise nomatch("Range '" + first + "' to '" + last + "' not allowed")

                chars = "".join([
                    chr(i) for i in range(firstIndex, lastIndex + 1)
                ])
                return chars, k
            except nomatch:
                pass

            # Attempt 3: just a character on its own
            return matchInternalChar(string, i)

        def matchClassInterior(string, i):
            # Greedily consume class-interior units until one fails.
            internals = ""
            try:
                while True:
                    internal, i = matchClassInterior1(string, i)
                    internals += internal
            except nomatch:
                pass
            return internals, i

        # wildcard ".", "\\w", "\\d", etc.
        # (module-level `shorthand`: keys are charclass instances)
        for key in shorthand.keys():
            try:
                return key, static(string, i, shorthand[key])
            except nomatch:
                pass

        # "[^dsgsdg]"
        try:
            j = static(string, i, "[^")
            chars, j = matchClassInterior(string, j)
            j = static(string, j, "]")
            return ~charclass(chars), j
        except nomatch:
            pass

        # "[sdfsf]"
        try:
            j = static(string, i, "[")
            chars, j = matchClassInterior(string, j)
            j = static(string, j, "]")
            return charclass(chars), j
        except nomatch:
            pass

        # e.g. if seeing "\\t", return "\t"
        for key in escapes.keys():
            try:
                return charclass(key), static(string, i, escapes[key])
            except nomatch:
                pass

        # e.g. if seeing "\\{", return "{"
        for char in charclass.allSpecial:
            try:
                return charclass(char), static(string, i, "\\" + char)
            except nomatch:
                pass

        # e.g. if seeing "\\x40", return "@"
        try:
            char, j = unescapeHex(string, i)
            return charclass(char), j
        except nomatch:
            pass

        # single non-special character, not contained inside square brackets
        char, i = string[i], i+1
        if char in charclass.allSpecial:
            raise nomatch
        return charclass(char), i

    # set operations
    def negate(self):
        '''
        Negate the current charclass. e.g. [ab] becomes [^ab]. Call
        using "charclass2 = ~charclass1"
        '''
        return charclass(self.chars, negateMe=not self.negated)

    def __invert__(self):
        return self.negate()

    def union(self, other):
        try:

            # ¬A OR ¬B = ¬(A AND B)
            # ¬A OR B = ¬(A - B)
            # A OR ¬B = ¬(B - A)
            # A OR B
            if self.negated:
                if other.negated:
                    return ~charclass(self.chars & other.chars)
                return ~charclass(self.chars - other.chars)
            if other.negated:
                return ~charclass(other.chars - self.chars)
            return charclass(self.chars | other.chars)

        # "other" lacks attribute "negated" or "chars"
        # "other" is not a charclass
        # Never mind!
        except AttributeError:
            return mult(self, one) | other

    def intersection(self, other):
        try:

            # ¬A AND ¬B = ¬(A OR B)
            # ¬A AND B = B - A
            # A AND ¬B = A - B
            # A AND B
            if self.negated:
                if other.negated:
                    return ~charclass(self.chars | other.chars)
                return charclass(other.chars - self.chars)
            if other.negated:
                return charclass(self.chars - other.chars)
            return charclass(self.chars & other.chars)

        # "other" lacks attribute "negated" or "chars"
        # "other" is not a charclass
        # Never mind!
        except AttributeError:
            return mult(self, one) & other

    def reversed(self):
        # A single-character match reads the same in either direction.
        return self

    def copy(self):
        return charclass(self.chars.copy(), negateMe=self.negated)
class bound:
    '''A non-negative integer, or infinity (represented by v = None).'''

    def __init__(self, v):
        # Reject negative values; None stands for "unbounded".
        if v is not None and v < 0:
            raise Exception("Invalid bound: " + repr(v))
        # Written via __dict__ for symmetry with the immutable lego classes.
        self.__dict__['v'] = v

    def __repr__(self):
        return "bound(%r)" % (self.v,)

    def __str__(self):
        # An unlimited upper bound serialises to nothing, as in "{4,}".
        return "" if self == inf else str(self.v)

    @classmethod
    def match(cls, string, i = 0):
        '''Parse a decimal integer (or nothing, meaning infinity) at index i.'''
        # A lone "0" is a complete bound; leading zeros are not consumed.
        try:
            return bound(0), static(string, i, "0")
        except nomatch:
            pass

        # A nonzero integer: one digit from 1-9, then any number of 0-9.
        try:
            j, digit = select_static(string, i, *"123456789")
            value = int(digit)
            while True:
                try:
                    j, digit = select_static(string, j, *"0123456789")
                except nomatch:
                    return bound(value), j
                value = value * 10 + int(digit)
        except nomatch:
            pass

        # The empty string denotes an unlimited bound, as in "{4,}".
        return inf, i

    def __eq__(self, other):
        try:
            return self.v == other.v
        except AttributeError:
            return False

    def __hash__(self):
        return hash(self.v)

    def __lt__(self, other):
        # Nothing exceeds infinity, including infinity itself.
        if self == inf:
            return False
        return other == inf or self.v < other.v

    def __ge__(self, other):
        return not self < other

    def __mul__(self, other):
        '''Multiply this bound by another'''
        # Zero absorbs everything, even infinity.
        if bound(0) in (self, other):
            return bound(0)
        if inf in (self, other):
            return inf
        return bound(self.v * other.v)

    def __add__(self, other):
        '''Add this bound to another'''
        if inf in (self, other):
            return inf
        return bound(self.v + other.v)

    def __sub__(self, other):
        '''
        Subtract another bound from this one.
        Caution: this operation is not meaningful for all bounds.
        '''
        if other == inf:
            if self != inf:
                raise Exception("Can't subtract " + repr(other) + " from " + repr(self))
            # Infinity minus infinity is zero by convention, so that e.g.
            # multiplier(bound(0), inf) can be subtracted from
            # multiplier(bound(1), inf) to give multiplier(bound(1), bound(1)).
            return bound(0)
        if self == inf:
            return self
        return bound(self.v - other.v)

    def copy(self):
        return bound(self.v)
class multiplier:
'''
A min and a max. The vast majority of characters in regular
expressions occur without a specific multiplier, which is implicitly
equivalent to a min of 1 and a max of 1, but many more have explicit
multipliers like "*" (min = 0, max = inf) and so on.
Although it seems odd and can lead to some confusing edge cases, we do
also permit a max of 0 (iff min is 0 too). This allows the multiplier
"zero" to exist, which actually are quite useful in their own special way.
'''
def __init__(self, min, max):
if min == inf:
raise Exception("Minimum bound of a multiplier can't be " + repr(inf))
if min > max:
raise Exception("Invalid multiplier bounds: " + repr(min) + " and " + repr(max))
# More useful than "min" and "max" in many situations
# are "mandatory" and "optional".
mandatory = min
optional = max - min
self.__dict__['min'] = min
self.__dict__['max'] = max
self.__dict__['mandatory'] = mandatory
self.__dict__['optional'] = optional
def __eq__(self, other):
try:
return self.min == other.min and self.max == other.max
except AttributeError:
return False
def __hash__(self):
return hash((self.min, self.max))
def __repr__(self):
return "multiplier(" + repr(self.min) + ", " + repr(self.max) + ")"
def __str__(self):
if self.max == bound(0):
raise Exception("Can't serialise a multiplier with bound " + repr(self.max))
if self in symbolic.keys():
return symbolic[self]
if self.min == self.max:
return "{" + str(self.min) + "}"
return "{" + str(self.min) + "," + str(self.max) + "}"
@classmethod
def match(cls, string, i = 0):
# {2,3} or {2,}
try:
j = static(string, i, "{")
min, j = bound.match(string, j)
j = static(string, j, ",")
max, j = bound.match(string, j)
j = static(string, j, "}")
return multiplier(min, max), j
except nomatch:
pass
# {2}
try:
j = static(string, i, "{")
min, j = bound.match(string, j)
j = static(string, j, "}")
return multiplier(min, min), j
except nomatch:
pass
# "?"/"*"/"+"/""
# we do these in reverse order of symbol length, because
# that forces "" to be done last
for key in sorted(symbolic, key=lambda key: -len(symbolic[key])):
try:
return key, static(string, i, symbolic[key])
except nomatch:
pass
raise nomatch
@classmethod
def parse(cls, string):
'''
Parse the entire supplied string as an instance of the present class.
Mainly for internal use in unit tests because it drops through to match()
in a convenient way.
'''
obj, i = cls.match(string, 0)
if i != len(string):
raise Exception("Could not parse '" + string + "' beyond index " + str(i))
return obj
def canmultiplyby(self, other):
'''
Multiplication is not well-defined for all pairs of multipliers because
the resulting possibilities do not necessarily form a continuous range.
For example:
{0,x} * {0,y} = {0,x*y}
{2} * {3} = {6}
{2} * {1,2} = ERROR
The proof isn't simple but suffice it to say that {p,p+q} * {r,r+s} is
equal to {pr, (p+q)(r+s)} only if s=0 or qr+1 >= p. If not, then at least
one gap appears in the range. The first inaccessible number is (p+q)r + 1.
'''
return other.optional == bound(0) or \
self.optional * other.mandatory + bound(1) >= self.mandatory
def __mul__(self, other):
'''Multiply this multiplier by another'''
if not self.canmultiplyby(other):
raise Exception("Can't multiply " + repr(self) + " by " + repr(other))
return multiplier(self.min * other.min, self.max * other.max)
def __add__(self, other):
'''Add two multipliers together'''
return multiplier(self.min + other.min, self.max + other.max)
def __sub__(self, other):
'''
Subtract another multiplier from this one.
Caution: multipliers are not totally ordered.
This operation is not meaningful for all pairs of multipliers.
'''
mandatory = self.mandatory - other.mandatory
optional = self.optional - other.optional
return multiplier(mandatory, mandatory + optional)
def canintersect(self, other):
'''
Intersection is not well-defined for all pairs of multipliers.
For example:
{2,3} & {3,4} = {3}
{2,} & {1,7} = {2,7}
{2} & {5} = ERROR
'''
return not (self.max < other.min or other.max < self.min)
def __and__(self, other):
    '''
    Compute the intersection of two multipliers: a third multiplier
    covering exactly the range shared by both originals. Not defined
    for every pair, since the ranges may not overlap.
    '''
    if not self.canintersect(other):
        raise Exception("Can't compute intersection of " + repr(self) + " and " + repr(other))
    # Overlap is [max of mins, min of maxes].
    return multiplier(max(self.min, other.min), min(self.max, other.max))
def canunion(self, other):
    '''
    Report whether the union of two multipliers is well-defined.
    It isn't when a gap would appear, e.g. {0,1} | {3,4}.
    Adjacent ranges (touching via +1) are allowed.
    '''
    if self.max + bound(1) < other.min:
        return False
    if other.max + bound(1) < self.min:
        return False
    return True
def __or__(self, other):
    '''
    Compute the union of two multipliers: a third multiplier expressing
    the range covered by either of the originals. Not defined for every
    pair, since the combined range may contain a gap.
    '''
    if not self.canunion(other):
        raise Exception("Can't compute the union of " + repr(self) + " and " + repr(other))
    # Union is [min of mins, max of maxes].
    return multiplier(min(self.min, other.min), max(self.max, other.max))
def common(self, other):
    '''
    Find the shared part of two multipliers: the largest multiplier which
    can be safely subtracted from both originals. May be the "zero"
    multiplier.
    '''
    # Take the overlap in (mandatory, optional) space and rebuild
    # the (min, max) form expected by the constructor.
    sharedmandatory = min(self.mandatory, other.mandatory)
    sharedoptional = min(self.optional, other.optional)
    return multiplier(sharedmandatory, sharedmandatory + sharedoptional)
def copy(self):
    # Deep copy: both bounds are themselves copied, so the clone
    # shares no mutable state with the original.
    return multiplier(self.min.copy(), self.max.copy())
class mult(lego):
    '''
    A mult is a combination of a multiplicand with
    a multiplier (a min and a max). The vast majority of characters in regular
    expressions occur without a specific multiplier, which is implicitly
    equivalent to a min of 1 and a max of 1, but many more have explicit
    multipliers like "*" (min = 0, max = inf) and so on.
    e.g. a, b{2}, c?, d*, [efg]{2,5}, f{2,}, (anysubpattern)+, .*, and so on
    '''

    def __init__(self, multiplicand, multiplier):
        # Written straight into __dict__ — presumably the lego base class
        # restricts ordinary attribute assignment (immutability); confirm in
        # the base class definition.
        self.__dict__["multiplicand"] = multiplicand
        self.__dict__["multiplier"] = multiplier

    def __eq__(self, other):
        # Equal iff both components agree; an AttributeError means `other`
        # is not mult-shaped and therefore unequal.
        try:
            return self.multiplicand == other.multiplicand \
                and self.multiplier == other.multiplier
        except AttributeError:
            return False

    def __hash__(self):
        # Consistent with __eq__: derived from both components.
        return hash((self.multiplicand, self.multiplier))

    def __repr__(self):
        string = "mult("
        string += repr(self.multiplicand)
        string += ", " + repr(self.multiplier)
        string += ")"
        return string

    def times(self, multiplier):
        '''Multiply this mult by a multiplier, e.g. a{2} * {3} = a{6}.'''
        if multiplier == one:
            return self
        if self.multiplier.canmultiplyby(multiplier):
            return mult(self.multiplicand, self.multiplier * multiplier)
        # Multipliers can't be merged safely; wrap self in a subpattern
        # instead, e.g. a{2} * {1,2} = (a{2}){1,2}.
        return mult(pattern(conc(self)), multiplier)

    def concatenate(self, other):
        # Concatenation is delegated to conc.
        return conc(self) + other

    def union(self, other):
        # Alternation is delegated to conc (and thence to pattern).
        return conc(self) | other

    def dock(self, other):
        '''
        "Dock" another mult from this one (i.e. remove part of the tail) and
        return the result. The reverse of concatenation. This is a lot trickier.
        e.g. a{4,5} - a{3} = a{1,2}
        '''
        if other.multiplicand != self.multiplicand:
            raise Exception("Can't subtract " + repr(other) + " from " + repr(self))
        return mult(self.multiplicand, self.multiplier - other.multiplier)

    def common(self, other):
        '''
        Return the common part of these two mults. This is the largest mult
        which can be safely subtracted from both the originals. The multiplier
        on this mult could be zero: this is the case if, for example, the
        multiplicands disagree.
        '''
        if self.multiplicand == other.multiplicand:
            return mult(self.multiplicand, self.multiplier.common(other.multiplier))
        # Multiplicands disagree, no common part at all.
        return mult(nothing, zero)

    def intersection(self, other):
        # A bare charclass (has "chars") is promoted to a mult of itself.
        if hasattr(other, "chars"):
            other = mult(other, one)
        # If two mults are given which have a common multiplicand, the shortcut
        # is just to take the intersection of the two multiplicands.
        # NOTE(review): `mult` does not define canintersect() in this chunk;
        # unless the lego base class provides it, this lookup raises
        # AttributeError (swallowed below) and the shortcut never fires —
        # confirm against the base class.
        try:
            if self.multiplicand == other.multiplicand \
                and self.canintersect(other):
                return mult(self.multiplicand, self.multiplier & other.multiplier)
        except AttributeError:
            # "other" isn't a mult; lacks either a multiplicand or a multiplier.
            # Never mind!
            pass
        # This situation is substantially more complicated if the multiplicand is,
        # for example, a pattern. It's difficult to reason sensibly about this
        # kind of thing.
        return conc(self) & other

    def alphabet(self):
        # fsm.anything_else stands in for every character not listed explicitly.
        return {fsm.anything_else} | self.multiplicand.alphabet()

    def empty(self):
        # Matches no strings at all iff the multiplicand is unmatchable and
        # at least one copy of it is mandatory.
        return self.multiplicand.empty() and self.multiplier.min > bound(0)

    @reduce_after
    def reduce(self):
        # (reduce_after presumably re-applies reduce() until a fixed point —
        # see the decorator's definition earlier in the file.)
        # Can't match anything: reduce to nothing
        if self.empty():
            return nothing
        # If our multiplicand is a pattern containing an empty conc()
        # we can pull that "optional" bit out into our own multiplier
        # instead.
        # e.g. (A|B|C|)D -> (A|B|C)?D
        # e.g. (A|B|C|){2} -> (A|B|C){0,2}
        try:
            if emptystring in self.multiplicand.concs \
                and self.multiplier.canmultiplyby(qm):
                return mult(
                    pattern(
                        *self.multiplicand.concs.difference({emptystring})
                    ),
                    self.multiplier * qm,
                )
        except AttributeError:
            # self.multiplicand has no attribute "concs"; isn't a pattern; never mind
            pass
        # If we have an empty multiplicand, we can only match it
        # zero times
        if self.multiplicand.empty() \
            and self.multiplier.min == bound(0):
            return emptystring
        # Failing that, we have a positive multiplicand which we
        # intend to match zero times. In this case the only possible
        # match is the empty string.
        if self.multiplier == zero:
            return emptystring
        # no point multiplying in the singular
        if self.multiplier == one:
            return self.multiplicand
        # Try recursively reducing our internal.
        reduced = self.multiplicand.reduce()
        # "bulk up" smaller lego pieces to pattern if need be
        if hasattr(reduced, "multiplicand"):
            reduced = conc(reduced)
        if hasattr(reduced, "mults"):
            reduced = pattern(reduced)
        if reduced != self.multiplicand:
            return mult(reduced, self.multiplier)
        # If our multiplicand is a pattern containing a single conc
        # containing a single mult, we can separate that out a lot
        # e.g. ([ab])* -> [ab]*
        try:
            if len(self.multiplicand.concs) == 1:
                (singleton,) = self.multiplicand.concs
                if len(singleton.mults) == 1:
                    singlemult = singleton.mults[0]
                    if singlemult.multiplier.canmultiplyby(self.multiplier):
                        return mult(
                            singlemult.multiplicand,
                            singlemult.multiplier * self.multiplier
                        )
        except AttributeError:
            # self.multiplicand has no attribute "concs"; isn't a pattern; never mind
            pass
        return self

    def __str__(self):
        '''Serialise back to regex syntax, e.g. "(ab|c)*".'''
        # recurse into subpattern
        if hasattr(self.multiplicand, "concs"):
            output = "(" + str(self.multiplicand) + ")"
        else:
            output = str(self.multiplicand)
        suffix = str(self.multiplier)
        return output + suffix

    def to_fsm(self, alphabet=None):
        '''Convert this mult to an equivalent finite state machine.'''
        if alphabet is None:
            alphabet = self.alphabet()
        # worked example: (min, max) = (5, 7) or (5, inf)
        # (mandatory, optional) = (5, 2) or (5, inf)
        unit = self.multiplicand.to_fsm(alphabet)
        # accepts e.g. "ab"
        # accepts "ababababab"
        mandatory = unit * self.multiplier.mandatory.v
        # unlimited additional copies
        if self.multiplier.optional == inf:
            optional = unit.star()
            # accepts "(ab)*"
        else:
            optional = fsm.epsilon(alphabet) | unit
            # accepts "(ab)?"
            optional *= self.multiplier.optional.v
            # accepts "(ab)?(ab)?"
        return mandatory + optional

    @classmethod
    def match(cls, string, i = 0):
        '''Parse one mult from `string` starting at index `i`.'''
        def matchMultiplicand(string, i):
            # explicitly non-capturing "(?:...)" syntax. No special significance
            try:
                j = static(string, i, "(?")
                j, st = select_static(string, j, ':', 'P<')
                if st == 'P<':
                    # Named group: the name is parsed but then discarded.
                    j, group_name = read_until(string, j, '>')
                multiplicand, j = pattern.match(string, j)
                j = static(string, j, ")")
                return multiplicand, j
            except nomatch:
                pass
            # normal "(...)" syntax
            try:
                j = static(string, i, "(")
                multiplicand, j = pattern.match(string, j)
                j = static(string, j, ")")
                return multiplicand, j
            except nomatch:
                pass
            # Just a charclass on its own
            return charclass.match(string, i)
        multiplicand, j = matchMultiplicand(string, i)
        multiplier_, j = multiplier.match(string, j)
        return mult(multiplicand, multiplier_), j

    def reversed(self):
        # The multiplier is unaffected by reversal; only the multiplicand flips.
        return mult(reversed(self.multiplicand), self.multiplier)

    def copy(self):
        # Deep copy of both components.
        return mult(self.multiplicand.copy(), self.multiplier.copy())
class conc(lego):
    '''
    A conc (short for "concatenation") is a tuple of mults i.e. an unbroken
    string of mults occurring one after the other.
    e.g. abcde[^fg]*h{4}[a-z]+(subpattern)(subpattern2)
    To express the empty string, use an empty conc, conc().
    '''

    def __init__(self, *mults):
        # Written straight into __dict__ — presumably the lego base class
        # restricts ordinary attribute assignment (immutability); confirm in
        # the base class definition.
        self.__dict__["mults"] = tuple(mults)

    def __eq__(self, other):
        # Equal iff the mult tuples agree; AttributeError means `other`
        # is not conc-shaped.
        try:
            return self.mults == other.mults
        except AttributeError:
            return False

    def __hash__(self):
        # Consistent with __eq__.
        return hash(self.mults)

    def __repr__(self):
        string = "conc("
        string += ", ".join(repr(m) for m in self.mults)
        string += ")"
        return string

    def times(self, multiplier):
        '''Multiply this conc by a multiplier.'''
        if multiplier == one:
            return self
        # Have to replace self with a pattern unfortunately
        return pattern(self) * multiplier

    def concatenate(self, other):
        '''Append `other` (promoted to a conc if needed) to this conc.'''
        # other must be a conc too
        if hasattr(other, "chars") or hasattr(other, "concs"):
            other = mult(other, one)
        if hasattr(other, "multiplicand"):
            other = conc(other)
        return conc(*(self.mults + other.mults))

    def union(self, other):
        # Alternation is delegated to pattern.
        return pattern(self) | other

    def intersection(self, other):
        # Intersection is delegated to pattern.
        return pattern(self) & other

    @reduce_after
    def reduce(self):
        # (reduce_after presumably re-applies reduce() until a fixed point.)
        # Can't match anything
        if self.empty():
            return nothing
        # no point concatenating one thing (note: concatenating 0 things is
        # entirely valid)
        if len(self.mults) == 1:
            return self.mults[0]
        # Try recursively reducing our internals
        reduced = [m.reduce() for m in self.mults]
        # "bulk up" smaller lego pieces to concs if need be
        reduced = [
            pattern(x) if hasattr(x, "mults") else x
            for x in reduced
        ]
        reduced = [
            mult(x, one) if hasattr(x, "chars") or hasattr(x, "concs") else x
            for x in reduced
        ]
        reduced = tuple(reduced)
        if reduced != self.mults:
            return conc(*reduced)
        # Conc contains "()" (i.e. a mult containing only a pattern containing the
        # empty string)? That can be removed e.g. "a()b" -> "ab"
        for i in range(len(self.mults)):
            if self.mults[i].multiplicand == pattern(emptystring):
                new = self.mults[:i] + self.mults[i+1:]
                return conc(*new)
        # multiple mults with identical multiplicands in a row?
        # squish those together
        # e.g. ab?b?c -> ab{0,2}c
        if len(self.mults) > 1:
            for i in range(len(self.mults) - 1):
                if self.mults[i].multiplicand == self.mults[i + 1].multiplicand:
                    squished = mult(
                        self.mults[i].multiplicand,
                        self.mults[i].multiplier + self.mults[i + 1].multiplier
                    )
                    new = self.mults[:i] + (squished,) + self.mults[i + 2:]
                    return conc(*new)
        # Conc contains (among other things) a *singleton* mult containing a pattern
        # with only one internal conc? Flatten out.
        # e.g. "a(d(ab|a*c))" -> "ad(ab|a*c)"
        # BUT NOT "a(d(ab|a*c)){2,}"
        # AND NOT "a(d(ab|a*c)|y)"
        for i in range(len(self.mults)):
            m = self.mults[i]
            try:
                if m.multiplier == one and len(m.multiplicand.concs) == 1:
                    (single,) = m.multiplicand.concs
                    new = self.mults[:i] + single.mults + self.mults[i+1:]
                    return conc(*new)
            except AttributeError:
                # m.multiplicand has no attribute "concs"; isn't a pattern; never mind
                pass
        return self

    def to_fsm(self, alphabet=None):
        '''Convert to an FSM by concatenating the FSMs of the parts.'''
        if alphabet is None:
            alphabet = self.alphabet()
        # start with a component accepting only the empty string
        fsm1 = fsm.epsilon(alphabet)
        for m in self.mults:
            fsm1 += m.to_fsm(alphabet)
        return fsm1

    def alphabet(self):
        return {fsm.anything_else}.union(*[m.alphabet() for m in self.mults])

    def empty(self):
        # A concatenation is unmatchable if any single part is unmatchable.
        for m in self.mults:
            if m.empty():
                return True
        return False

    def __str__(self):
        return "".join(str(m) for m in self.mults)

    @classmethod
    def match(cls, string, i = 0):
        '''Greedily parse as many consecutive mults as possible from index i.'''
        mults = list()
        try:
            while True:
                m, i = mult.match(string, i)
                mults.append(m)
        except nomatch:
            pass
        return conc(*mults), i

    def common(self, other, suffix=False):
        '''
        Return the common prefix of these two concs; that is, the largest conc
        which can be safely beheaded() from the front of both.
        The result could be emptystring.
        "ZYAA, ZYBB" -> "ZY"
        "CZ, CZ" -> "CZ"
        "YC, ZC" -> ""
        With the "suffix" flag set, works from the end. E.g.:
        "AAZY, BBZY" -> "ZY"
        "CZ, CZ" -> "CZ"
        "CY, CZ" -> ""
        '''
        mults = []
        indices = range(min(len(self.mults), len(other.mults))) # e.g. [0, 1, 2, 3]
        # Work backwards from the end of both concs instead.
        if suffix:
            indices = [-i - 1 for i in indices] # e.g. [-1, -2, -3, -4]
        for i in indices:
            common = self.mults[i].common(other.mults[i])
            # Happens when multiplicands disagree (e.g. "A.common(B)") or if
            # the multiplicand is shared but the common multiplier is zero
            # (e.g. "ABZ*.common(CZ)".)
            if common.multiplier == zero:
                break
            mults.append(common)
            # If we did not remove the entirety of both mults, we cannot continue.
            if common != self.mults[i] or common != other.mults[i]:
                break
        if suffix:
            mults = reversed(mults)
        return conc(*mults)

    def dock(self, other):
        '''
        Subtract another conc from this one.
        This is the opposite of concatenation. For example, if ABC + DEF = ABCDEF,
        then logically ABCDEF - DEF = ABC.
        '''
        # e.g. self has mults at indices [0, 1, 2, 3, 4, 5, 6] len=7
        # e.g. other has mults at indices [0, 1, 2] len=3
        new = list(self.mults)
        for i in reversed(range(len(other.mults))): # [2, 1, 0]
            # e.g. i = 1, j = 7 - 3 + 1 = 5
            j = len(self.mults) - len(other.mults) + i
            new[j] = new[j].dock(other.mults[i])
            if new[j].multiplier == zero:
                # omit that mult entirely since it has been factored out
                del new[j]
            # If the subtraction is incomplete but there is more to
            # other.mults, then we have a problem. For example, "ABC{2} - BC"
            # subtracts the C successfully but leaves something behind,
            # then tries to subtract the B too, which isn't possible
            else:
                if i != 0:
                    raise Exception("Can't subtract " + repr(other) + " from " + repr(self))
        return conc(*new)

    def behead(self, other):
        '''
        As with dock() but the other way around. For example, if
        ABC + DEF = ABCDEF, then ABCDEF.behead(AB) = CDEF.
        '''
        # Observe that FEDCBA - BA = FEDC.
        return reversed(reversed(self).dock(reversed(other)))

    def reversed(self):
        # Reverse the order of the parts AND each part itself.
        return conc(*reversed([reversed(m) for m in self.mults]))

    def copy(self):
        return conc(*[m.copy() for m in self.mults])
class pattern(lego):
    '''
    A pattern (also known as an "alt", short for "alternation") is a
    set of concs. A pattern expresses multiple alternate possibilities.
    When written out as a regex, these would be separated by pipes. A pattern
    containing no possibilities is possible and represents a regular expression
    matching no strings whatsoever (there is no conventional string form for
    this).
    e.g. "abc|def(ghi|jkl)" is an alt containing two concs: "abc" and
    "def(ghi|jkl)". The latter is a conc containing four mults: "d", "e", "f"
    and "(ghi|jkl)". The latter in turn is a mult consisting of an upper bound
    1, a lower bound 1, and a multiplicand which is a new subpattern, "ghi|jkl".
    This new subpattern again consists of two concs: "ghi" and "jkl".
    '''

    def __init__(self, *concs):
        # Written straight into __dict__ — presumably the lego base class
        # restricts ordinary attribute assignment (immutability); confirm in
        # the base class definition. frozenset: order is irrelevant and
        # duplicates collapse.
        self.__dict__["concs"] = frozenset(concs)

    def __eq__(self, other):
        try:
            return self.concs == other.concs
        except AttributeError:
            return False

    def __hash__(self):
        return hash(self.concs)

    def __repr__(self):
        string = "pattern("
        string += ", ".join(repr(c) for c in self.concs)
        string += ")"
        return string

    def times(self, multiplier):
        '''Multiply this pattern by a multiplier, producing a mult.'''
        if multiplier == one:
            return self
        return mult(self, multiplier)

    def concatenate(self, other):
        # Concatenation is delegated via a singleton mult.
        return mult(self, one) + other

    def alphabet(self):
        return {fsm.anything_else}.union(*[c.alphabet() for c in self.concs])

    def empty(self):
        # A pattern is unmatchable only if every alternative is unmatchable
        # (vacuously true for the empty pattern).
        for c in self.concs:
            if not c.empty():
                return False
        return True

    def intersection(self, other):
        # A deceptively simple method for an astoundingly difficult operation
        alphabet = self.alphabet() | other.alphabet()
        # Which means that we can build finite state machines sharing that alphabet
        combined = self.to_fsm(alphabet) & other.to_fsm(alphabet)
        return from_fsm(combined)

    def union(self, other):
        '''Union of alternatives; `other` is promoted to a pattern if needed.'''
        # other must be a pattern too
        if hasattr(other, "chars"):
            other = mult(other, one)
        if hasattr(other, "multiplicand"):
            other = conc(other)
        if hasattr(other, "mults"):
            other = pattern(other)
        return pattern(*(self.concs | other.concs))

    def __str__(self):
        if len(self.concs) == 0:
            raise Exception("Can't serialise " + repr(self))
        # take the alternation of the input collection of regular expressions.
        # i.e. jam "|" between each element
        # 1+ elements.
        # sorted() so serialisation is deterministic despite the frozenset.
        return "|".join(sorted(str(c) for c in self.concs))

    @reduce_after
    def reduce(self):
        # (reduce_after presumably re-applies reduce() until a fixed point.)
        # emptiness
        if self.empty():
            return nothing
        # If one of our internal concs is empty, remove it
        for c in self.concs:
            if c.empty():
                new = self.concs - {c}
                return pattern(*new)
        # no point alternating among one possibility
        if len(self.concs) == 1:
            return list(self.concs)[0]
        # Try recursively reducing our internals first.
        reduced = [c.reduce() for c in self.concs]
        # "bulk up" smaller lego pieces to concs if need be
        reduced = [
            mult(x, one) if hasattr(x, "chars") or hasattr(x, "concs") else x
            for x in reduced
        ]
        reduced = [
            conc(x) if hasattr(x, "multiplicand") else x
            for x in reduced
        ]
        reduced = frozenset(reduced)
        if reduced != self.concs:
            return pattern(*reduced)
        # If this pattern contains several concs each containing just 1 mult and
        # their multiplicands agree, we may be able to merge the multipliers
        # e.g. "a{1,2}|a{3,4}|bc" -> "a{1,4}|bc"
        oldconcs = list(self.concs) # so we can index the things
        for i in range(len(oldconcs)):
            conc1 = oldconcs[i]
            if len(conc1.mults) != 1:
                continue
            multiplicand1 = conc1.mults[0].multiplicand
            for j in range(i + 1, len(oldconcs)):
                conc2 = oldconcs[j]
                if len(conc2.mults) != 1:
                    continue
                multiplicand2 = conc2.mults[0].multiplicand
                if multiplicand2 != multiplicand1:
                    continue
                multiplicand = multiplicand1
                multiplier1 = conc1.mults[0].multiplier
                multiplier2 = conc2.mults[0].multiplier
                if not multiplier1.canunion(multiplier2):
                    continue
                multiplier = multiplier1 | multiplier2
                newconcs = \
                    oldconcs[:i] + \
                    oldconcs[i + 1:j] + \
                    oldconcs[j + 1:] + \
                    [conc(mult(multiplicand, multiplier))]
                return pattern(*newconcs)
        # If this pattern contains several concs each containing just 1 mult
        # each containing just a charclass, with a multiplier of 1,
        # then we can merge those branches together.
        # e.g. "0|[1-9]|ab" -> "[0-9]|ab"
        changed = False
        merger = None
        rest = []
        for c in self.concs:
            if len(c.mults) == 1 \
                and c.mults[0].multiplier == one \
                and hasattr(c.mults[0].multiplicand, "chars"):
                if merger is None:
                    merger = c.mults[0].multiplicand
                else:
                    merger |= c.mults[0].multiplicand
                    # Only a real merge (2+ charclasses) counts as a change.
                    changed = True
            else:
                rest.append(c)
        if changed:
            rest.append(conc(mult(merger, one)))
            return pattern(*rest)
        # If one of the present pattern's concs is the empty string, and
        # there is another conc with a single mult whose lower bound is 0, we
        # can omit the empty string.
        # E.g. "|(ab)*|def" => "(ab)*|def".
        # If there is another conc with a single mult whose lower bound is 1,
        # we can merge the empty string into that.
        # E.g. "|(ab)+|def" => "(ab)*|def".
        if conc() in self.concs:
            for c in self.concs:
                if len(c.mults) != 1:
                    continue
                m = c.mults[0]
                if m.multiplier.min == bound(0):
                    rest = self.concs - {conc()}
                    return pattern(*rest)
                if m.multiplier.min == bound(1):
                    # NOTE(review): m * qm yields a mult, not a conc, so a
                    # mult lands in the concs set here; presumably a later
                    # reduce pass "bulks" it back up — verify.
                    rest = self.concs - {conc(), c} | {m * qm}
                    return pattern(*rest)
        # If the present pattern's concs all have a common prefix, split
        # that out. This increases the depth of the object
        # but it is still arguably simpler/ripe for further reduction
        # e.g. "abc|ade" -> a(bc|de)"
        prefix = self._commonconc()
        if prefix != emptystring:
            leftovers = self.behead(prefix)
            mults = prefix.mults + (mult(leftovers, one),)
            return conc(*mults)
        # Same but for suffixes.
        # e.g. "xyz|stz -> (xy|st)z"
        suffix = self._commonconc(suffix=True)
        if suffix != emptystring:
            leftovers = self.dock(suffix)
            mults = (mult(leftovers, one),) + suffix.mults
            return conc(*mults)
        return self

    @classmethod
    def match(cls, string, i = 0):
        '''Parse a pattern: one conc, then any number of "|"-separated concs.'''
        concs = list()
        # first one
        c, i = conc.match(string, i)
        concs.append(c)
        # the rest
        while True:
            try:
                i = static(string, i, "|")
                c, i = conc.match(string, i)
                concs.append(c)
            except nomatch:
                return pattern(*concs), i

    def dock(self, other):
        '''
        The opposite of concatenation. Remove a common suffix from the present
        pattern; that is, from each of its constituent concs.
        AYZ|BYZ|CYZ - YZ = A|B|C.
        '''
        return pattern(*[c.dock(other) for c in self.concs])

    def behead(self, other):
        '''
        Like dock() but the other way around. Remove a common prefix from the
        present pattern; that is, from each of its constituent concs.
        ZA|ZB|ZC.behead(Z) = A|B|C
        '''
        return pattern(*[c.behead(other) for c in self.concs])

    def _commonconc(self, suffix=False):
        '''
        Find the longest conc which acts as prefix to every conc in this
        pattern, and return it. This could be the empty string.
        "ZA|ZB|ZC" -> "Z"
        "CZ|CZ" -> "CZ"
        If "suffix" is True, the same result but for suffixes.
        '''
        if len(self.concs) == 0:
            raise Exception("Can't call _commonconc on " + repr(self))
        # Local import so the builtin-shadowing names in this module stay
        # untouched at module scope.
        from functools import reduce
        return reduce(
            lambda x, y: x.common(y, suffix=suffix),
            self.concs
        )

    def to_fsm(self, alphabet=None):
        '''Convert to an FSM by unioning the FSMs of the alternatives.'''
        if alphabet is None:
            alphabet = self.alphabet()
        fsm1 = fsm.null(alphabet)
        for c in self.concs:
            fsm1 |= c.to_fsm(alphabet)
        return fsm1

    def reversed(self):
        # Reversal distributes over alternation.
        return pattern(*(reversed(c) for c in self.concs))

    def copy(self):
        return pattern(*(c.copy() for c in self.concs))
# Special and useful values go here.

# Standard character classes
w = charclass("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz")
d = charclass("0123456789")
s = charclass("\t\n\v\f\r ")
W = ~w
D = ~d
S = ~s
dot = ~charclass()

# This charclass expresses "no possibilities at all"
# and can never match anything.
nothing = charclass()

# Textual representations of standard character classes
shorthand = {
    w : "\\w", d : "\\d", s : "\\s",
    W : "\\W", D : "\\D", S : "\\S",
    dot : ".",
}

# Characters which users may escape in a regex instead of inserting them
# literally. In ASCII order:
escapes = {
    "\t" : "\\t", # tab
    "\n" : "\\n", # line feed
    "\v" : "\\v", # vertical tab
    "\f" : "\\f", # form feed
    "\r" : "\\r", # carriage return
}

# Use this for cases where no upper bound is needed
inf = bound(None)

# Preset multipliers. These get used ALL THE TIME in unit tests
zero = multiplier(bound(0), bound(0)) # has some occasional uses
qm   = multiplier(bound(0), bound(1))
one  = multiplier(bound(1), bound(1))
star = multiplier(bound(0), inf)
plus = multiplier(bound(1), inf)

# Symbol lookup table for preset multipliers.
symbolic = {
    qm   : "?",
    one  : "" ,
    star : "*",
    plus : "+",
}

# A very special conc expressing the empty string, ""
emptystring = conc()
| mit |
3manuek/scikit-learn | examples/manifold/plot_mds.py | 261 | 2616 | """
=========================
Multi-dimensional scaling
=========================
An illustration of the metric and non-metric MDS on generated noisy data.
The reconstructed points using the metric MDS and non metric MDS are slightly
shifted to avoid overlapping.
"""
# Author: Nelle Varoquaux <nelle.varoquaux@gmail.com>
# Licence: BSD

print(__doc__)

import numpy as np

from matplotlib import pyplot as plt
from matplotlib.collections import LineCollection

from sklearn import manifold
from sklearn.metrics import euclidean_distances
from sklearn.decomposition import PCA

n_samples = 20
seed = np.random.RandomState(seed=3)
# `np.float` was deprecated in NumPy 1.20 and removed in 1.24; the builtin
# `float` (np.float64 here) is the documented drop-in replacement.
X_true = seed.randint(0, 20, 2 * n_samples).astype(float)
X_true = X_true.reshape((n_samples, 2))
# Center the data
X_true -= X_true.mean()

similarities = euclidean_distances(X_true)

# Add noise to the similarities
# NOTE(review): this uses the global np.random rather than the seeded
# RandomState above, so the noise is not reproducible across runs.
noise = np.random.rand(n_samples, n_samples)
noise = noise + noise.T
noise[np.arange(noise.shape[0]), np.arange(noise.shape[0])] = 0
similarities += noise

# Metric MDS on the precomputed (noisy) dissimilarity matrix.
mds = manifold.MDS(n_components=2, max_iter=3000, eps=1e-9, random_state=seed,
                   dissimilarity="precomputed", n_jobs=1)
pos = mds.fit(similarities).embedding_

# Non-metric MDS, warm-started from the metric solution.
nmds = manifold.MDS(n_components=2, metric=False, max_iter=3000, eps=1e-12,
                    dissimilarity="precomputed", random_state=seed, n_jobs=1,
                    n_init=1)
npos = nmds.fit_transform(similarities, init=pos)

# Rescale the data
pos *= np.sqrt((X_true ** 2).sum()) / np.sqrt((pos ** 2).sum())
npos *= np.sqrt((X_true ** 2).sum()) / np.sqrt((npos ** 2).sum())

# Rotate the data
clf = PCA(n_components=2)
X_true = clf.fit_transform(X_true)

pos = clf.fit_transform(pos)

npos = clf.fit_transform(npos)

fig = plt.figure(1)
ax = plt.axes([0., 0., 1., 1.])

plt.scatter(X_true[:, 0], X_true[:, 1], c='r', s=20)
plt.scatter(pos[:, 0], pos[:, 1], s=20, c='g')
plt.scatter(npos[:, 0], npos[:, 1], s=20, c='b')
plt.legend(('True position', 'MDS', 'NMDS'), loc='best')

similarities = similarities.max() / similarities * 100
similarities[np.isinf(similarities)] = 0

# Plot the edges
start_idx, end_idx = np.where(pos)
#a sequence of (*line0*, *line1*, *line2*), where::
#            linen = (x0, y0), (x1, y1), ... (xm, ym)
segments = [[X_true[i, :], X_true[j, :]]
            for i in range(len(pos)) for j in range(len(pos))]
values = np.abs(similarities)
lc = LineCollection(segments,
                    zorder=0, cmap=plt.cm.hot_r,
                    norm=plt.Normalize(0, values.max()))
lc.set_array(similarities.flatten())
lc.set_linewidths(0.5 * np.ones(len(segments)))
ax.add_collection(lc)

plt.show()
| bsd-3-clause |
mnooner256/pyqrcode | pyqrcode/tables.py | 2 | 31446 | # -*- coding: utf-8 -*-
# Copyright (c) 2013, Michael Nooner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This module lists out all of the tables needed to create a QR code.
If you are viewing this in the HTML documentation, I recommend reading the
actual file instead. The formating for the tables is much more readable.
"""
from __future__ import division, unicode_literals
#: This defines the QR Code's 'mode' which sets what
#: type of code it is along with its size.
# Mode indicator numbers defined by the QR standard; the key is the
# human-readable mode name used throughout this package.
modes = dict(
    numeric=1,
    alphanumeric=2,
    binary=4,
    kanji=8,
)
#: This defines the amount of error correction. The dictionary
#: allows the user to specify this in several ways.
# Map every accepted spelling of an error-correction level (letter in
# either case, percentage string, or fraction) to its canonical letter.
error_level = {
    spelling: letter
    for letter, spellings in (
        ('L', ('L', 'l', '7%', .7)),
        ('M', ('M', 'm', '15%', .15)),
        ('Q', ('Q', 'q', '25%', .25)),
        ('H', ('H', 'h', '30%', .30)),
    )
    for spelling in spellings
}
#: This is a dictionary holds how long the "data length" field is for
#: each version and mode of the QR Code.
# Width (in bits) of the "data length" field, keyed first by the largest
# version in the size class (1-9, 10-26, 27-40) and then by mode number.
data_length_field = {
    max_version: dict(zip((1, 2, 4, 8), widths))
    for max_version, widths in (
        (9, (10, 9, 8, 8)),
        (26, (12, 11, 16, 10)),
        (40, (14, 13, 16, 12)),
    )
}
#: QR Codes uses a unique ASCII-like table for the 'alphanumeric' mode.
#: This is a dictionary representing that unique table, where the
#: keys are the possible characters in the data and the values
#: are the character's numeric representation.
# Alphanumeric-mode character table: the value of each character is its
# index in the 45-character alphabet defined by the QR standard
# (digits, upper-case letters, then nine punctuation characters).
ascii_codes = {
    char: value
    for value, char in enumerate(
        '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ $%*+-./:')
}
#: This array specifies the size of a QR Code in pixels. These numbers are
#: defined in the standard. The indexes correspond to the QR Code's
#: version number. This array was taken from:
#:
#: http://www.denso-wave.com/qrcode/vertable1-e.html
# A version-v symbol is (17 + 4*v) modules on a side, so the table can
# be generated instead of spelled out; index == version, no version 0.
version_size = [None] + [17 + 4 * version for version in range(1, 41)]
#: This dictionary lists the data capacity for all possible QR Codes.
#: This dictionary is organized where the first key corresponds to the
#: QR Code version number. The next key corresponds to the error
#: correction level, see error. The final key corresponds to
#: the mode number, see modes. The zero mode number represents the
#: possible "data bits." This table was taken from:
#:
#: http://www.denso-wave.com/qrcode/vertable1-e.html
data_capacity = {
    # Outer key: version (1-40). Middle key: error-correction level.
    # Inner key: mode number (see `modes`), with 0 meaning the raw
    # number of data bits available at that version/level.
    1: {
        "L": {0: 152, 1: 41, 2: 25, 4: 17, 8: 10, },
        "M": {0: 128, 1: 34, 2: 20, 4: 14, 8: 8, },
        "Q": {0: 104, 1: 27, 2: 16, 4: 11, 8: 7, },
        "H": {0: 72, 1: 17, 2: 10, 4: 7, 8: 4, }},
    2: {
        "L": {0: 272, 1: 77, 2: 47, 4: 32, 8: 20, },
        "M": {0: 224, 1: 63, 2: 38, 4: 26, 8: 16, },
        "Q": {0: 176, 1: 48, 2: 29, 4: 20, 8: 12, },
        "H": {0: 128, 1: 34, 2: 20, 4: 14, 8: 8, }},
    3: {
        "L": {0: 440, 1: 127, 2: 77, 4: 53, 8: 32, },
        "M": {0: 352, 1: 101, 2: 61, 4: 42, 8: 26, },
        "Q": {0: 272, 1: 77, 2: 47, 4: 32, 8: 20, },
        "H": {0: 208, 1: 58, 2: 35, 4: 24, 8: 15, }},
    4: {
        "L": {0: 640, 1: 187, 2: 114, 4: 78, 8: 48, },
        "M": {0: 512, 1: 149, 2: 90, 4: 62, 8: 38, },
        "Q": {0: 384, 1: 111, 2: 67, 4: 46, 8: 28, },
        "H": {0: 288, 1: 82, 2: 50, 4: 34, 8: 21, }},
    5: {
        "L": {0: 864, 1: 255, 2: 154, 4: 106, 8: 65, },
        "M": {0: 688, 1: 202, 2: 122, 4: 84, 8: 52, },
        "Q": {0: 496, 1: 144, 2: 87, 4: 60, 8: 37, },
        "H": {0: 368, 1: 106, 2: 64, 4: 44, 8: 27, }},
    6: {
        "L": {0: 1088, 1: 322, 2: 195, 4: 134, 8: 82, },
        "M": {0: 864, 1: 255, 2: 154, 4: 106, 8: 65, },
        "Q": {0: 608, 1: 178, 2: 108, 4: 74, 8: 45, },
        "H": {0: 480, 1: 139, 2: 84, 4: 58, 8: 36, }},
    7: {
        "L": {0: 1248, 1: 370, 2: 224, 4: 154, 8: 95, },
        "M": {0: 992, 1: 293, 2: 178, 4: 122, 8: 75, },
        "Q": {0: 704, 1: 207, 2: 125, 4: 86, 8: 53, },
        "H": {0: 528, 1: 154, 2: 93, 4: 64, 8: 39, }},
    8: {
        "L": {0: 1552, 1: 461, 2: 279, 4: 192, 8: 118, },
        "M": {0: 1232, 1: 365, 2: 221, 4: 152, 8: 93, },
        "Q": {0: 880, 1: 259, 2: 157, 4: 108, 8: 66, },
        "H": {0: 688, 1: 202, 2: 122, 4: 84, 8: 52, }},
    9: {
        "L": {0: 1856, 1: 552, 2: 335, 4: 230, 8: 141, },
        "M": {0: 1456, 1: 432, 2: 262, 4: 180, 8: 111, },
        "Q": {0: 1056, 1: 312, 2: 189, 4: 130, 8: 80, },
        "H": {0: 800, 1: 235, 2: 143, 4: 98, 8: 60, }},
    10: {
        "L": {0: 2192, 1: 652, 2: 395, 4: 271, 8: 167, },
        "M": {0: 1728, 1: 513, 2: 311, 4: 213, 8: 131, },
        "Q": {0: 1232, 1: 364, 2: 221, 4: 151, 8: 93, },
        "H": {0: 976, 1: 288, 2: 174, 4: 119, 8: 74, }},
    11: {
        "L": {0: 2592, 1: 772, 2: 468, 4: 321, 8: 198, },
        "M": {0: 2032, 1: 604, 2: 366, 4: 251, 8: 155, },
        "Q": {0: 1440, 1: 427, 2: 259, 4: 177, 8: 109, },
        "H": {0: 1120, 1: 331, 2: 200, 4: 137, 8: 85, }},
    12: {
        "L": {0: 2960, 1: 883, 2: 535, 4: 367, 8: 226, },
        "M": {0: 2320, 1: 691, 2: 419, 4: 287, 8: 177, },
        "Q": {0: 1648, 1: 489, 2: 296, 4: 203, 8: 125, },
        "H": {0: 1264, 1: 374, 2: 227, 4: 155, 8: 96, }},
    13: {
        "L": {0: 3424, 1: 1022, 2: 619, 4: 425, 8: 262, },
        "M": {0: 2672, 1: 796, 2: 483, 4: 331, 8: 204, },
        "Q": {0: 1952, 1: 580, 2: 352, 4: 241, 8: 149, },
        "H": {0: 1440, 1: 427, 2: 259, 4: 177, 8: 109, }},
    14: {
        "L": {0: 3688, 1: 1101, 2: 667, 4: 458, 8: 282, },
        "M": {0: 2920, 1: 871, 2: 528, 4: 362, 8: 223, },
        "Q": {0: 2088, 1: 621, 2: 376, 4: 258, 8: 159, },
        "H": {0: 1576, 1: 468, 2: 283, 4: 194, 8: 120, }},
    15: {
        "L": {0: 4184, 1: 1250, 2: 758, 4: 520, 8: 320, },
        "M": {0: 3320, 1: 991, 2: 600, 4: 412, 8: 254, },
        "Q": {0: 2360, 1: 703, 2: 426, 4: 292, 8: 180, },
        "H": {0: 1784, 1: 530, 2: 321, 4: 220, 8: 136, }},
    16: {
        "L": {0: 4712, 1: 1408, 2: 854, 4: 586, 8: 361, },
        "M": {0: 3624, 1: 1082, 2: 656, 4: 450, 8: 277, },
        "Q": {0: 2600, 1: 775, 2: 470, 4: 322, 8: 198, },
        "H": {0: 2024, 1: 602, 2: 365, 4: 250, 8: 154, }},
    17: {
        "L": {0: 5176, 1: 1548, 2: 938, 4: 644, 8: 397, },
        "M": {0: 4056, 1: 1212, 2: 734, 4: 504, 8: 310, },
        "Q": {0: 2936, 1: 876, 2: 531, 4: 364, 8: 224, },
        "H": {0: 2264, 1: 674, 2: 408, 4: 280, 8: 173, }},
    18: {
        "L": {0: 5768, 1: 1725, 2: 1046, 4: 718, 8: 442, },
        "M": {0: 4504, 1: 1346, 2: 816, 4: 560, 8: 345, },
        "Q": {0: 3176, 1: 948, 2: 574, 4: 394, 8: 243, },
        "H": {0: 2504, 1: 746, 2: 452, 4: 310, 8: 191, }},
    19: {
        "L": {0: 6360, 1: 1903, 2: 1153, 4: 792, 8: 488, },
        "M": {0: 5016, 1: 1500, 2: 909, 4: 624, 8: 384, },
        "Q": {0: 3560, 1: 1063, 2: 644, 4: 442, 8: 272, },
        "H": {0: 2728, 1: 813, 2: 493, 4: 338, 8: 208, }},
    20: {
        "L": {0: 6888, 1: 2061, 2: 1249, 4: 858, 8: 528, },
        "M": {0: 5352, 1: 1600, 2: 970, 4: 666, 8: 410, },
        "Q": {0: 3880, 1: 1159, 2: 702, 4: 482, 8: 297, },
        "H": {0: 3080, 1: 919, 2: 557, 4: 382, 8: 235, }},
    21: {
        "L": {0: 7456, 1: 2232, 2: 1352, 4: 929, 8: 572, },
        "M": {0: 5712, 1: 1708, 2: 1035, 4: 711, 8: 438, },
        "Q": {0: 4096, 1: 1224, 2: 742, 4: 509, 8: 314, },
        "H": {0: 3248, 1: 969, 2: 587, 4: 403, 8: 248, }},
    22: {
        "L": {0: 8048, 1: 2409, 2: 1460, 4: 1003, 8: 618, },
        "M": {0: 6256, 1: 1872, 2: 1134, 4: 779, 8: 480, },
        "Q": {0: 4544, 1: 1358, 2: 823, 4: 565, 8: 348, },
        "H": {0: 3536, 1: 1056, 2: 640, 4: 439, 8: 270, }},
    23: {
        "L": {0: 8752, 1: 2620, 2: 1588, 4: 1091, 8: 672, },
        "M": {0: 6880, 1: 2059, 2: 1248, 4: 857, 8: 528, },
        "Q": {0: 4912, 1: 1468, 2: 890, 4: 611, 8: 376, },
        "H": {0: 3712, 1: 1108, 2: 672, 4: 461, 8: 284, }},
    24: {
        "L": {0: 9392, 1: 2812, 2: 1704, 4: 1171, 8: 721, },
        "M": {0: 7312, 1: 2188, 2: 1326, 4: 911, 8: 561, },
        "Q": {0: 5312, 1: 1588, 2: 963, 4: 661, 8: 407, },
        "H": {0: 4112, 1: 1228, 2: 744, 4: 511, 8: 315, }},
    25: {
        "L": {0: 10208, 1: 3057, 2: 1853, 4: 1273, 8: 784, },
        "M": {0: 8000, 1: 2395, 2: 1451, 4: 997, 8: 614, },
        "Q": {0: 5744, 1: 1718, 2: 1041, 4: 715, 8: 440, },
        "H": {0: 4304, 1: 1286, 2: 779, 4: 535, 8: 330, }},
    26: {
        "L": {0: 10960, 1: 3283, 2: 1990, 4: 1367, 8: 842, },
        "M": {0: 8496, 1: 2544, 2: 1542, 4: 1059, 8: 652, },
        "Q": {0: 6032, 1: 1804, 2: 1094, 4: 751, 8: 462, },
        "H": {0: 4768, 1: 1425, 2: 864, 4: 593, 8: 365, }},
    27: {
        "L": {0: 11744, 1: 3514, 2: 2132, 4: 1465, 8: 902, },
        "M": {0: 9024, 1: 2701, 2: 1637, 4: 1125, 8: 692, },
        "Q": {0: 6464, 1: 1933, 2: 1172, 4: 805, 8: 496, },
        "H": {0: 5024, 1: 1501, 2: 910, 4: 625, 8: 385, }},
    28: {
        "L": {0: 12248, 1: 3669, 2: 2223, 4: 1528, 8: 940, },
        "M": {0: 9544, 1: 2857, 2: 1732, 4: 1190, 8: 732, },
        "Q": {0: 6968, 1: 2085, 2: 1263, 4: 868, 8: 534, },
        "H": {0: 5288, 1: 1581, 2: 958, 4: 658, 8: 405, }},
    29: {
        "L": {0: 13048, 1: 3909, 2: 2369, 4: 1628, 8: 1002, },
        "M": {0: 10136, 1: 3035, 2: 1839, 4: 1264, 8: 778, },
        "Q": {0: 7288, 1: 2181, 2: 1322, 4: 908, 8: 559, },
        "H": {0: 5608, 1: 1677, 2: 1016, 4: 698, 8: 430, }},
    30: {
        "L": {0: 13880, 1: 4158, 2: 2520, 4: 1732, 8: 1066, },
        "M": {0: 10984, 1: 3289, 2: 1994, 4: 1370, 8: 843, },
        "Q": {0: 7880, 1: 2358, 2: 1429, 4: 982, 8: 604, },
        "H": {0: 5960, 1: 1782, 2: 1080, 4: 742, 8: 457, }},
    31: {
        "L": {0: 14744, 1: 4417, 2: 2677, 4: 1840, 8: 1132, },
        "M": {0: 11640, 1: 3486, 2: 2113, 4: 1452, 8: 894, },
        "Q": {0: 8264, 1: 2473, 2: 1499, 4: 1030, 8: 634, },
        "H": {0: 6344, 1: 1897, 2: 1150, 4: 790, 8: 486, }},
    32: {
        "L": {0: 15640, 1: 4686, 2: 2840, 4: 1952, 8: 1201, },
        "M": {0: 12328, 1: 3693, 2: 2238, 4: 1538, 8: 947, },
        "Q": {0: 8920, 1: 2670, 2: 1618, 4: 1112, 8: 684, },
        "H": {0: 6760, 1: 2022, 2: 1226, 4: 842, 8: 518, }},
    33: {
        "L": {0: 16568, 1: 4965, 2: 3009, 4: 2068, 8: 1273, },
        "M": {0: 13048, 1: 3909, 2: 2369, 4: 1628, 8: 1002, },
        "Q": {0: 9368, 1: 2805, 2: 1700, 4: 1168, 8: 719, },
        "H": {0: 7208, 1: 2157, 2: 1307, 4: 898, 8: 553, }},
    34: {
        "L": {0: 17528, 1: 5253, 2: 3183, 4: 2188, 8: 1347, },
        "M": {0: 13800, 1: 4134, 2: 2506, 4: 1722, 8: 1060, },
        "Q": {0: 9848, 1: 2949, 2: 1787, 4: 1228, 8: 756, },
        "H": {0: 7688, 1: 2301, 2: 1394, 4: 958, 8: 590, }},
    35: {
        "L": {0: 18448, 1: 5529, 2: 3351, 4: 2303, 8: 1417, },
        "M": {0: 14496, 1: 4343, 2: 2632, 4: 1809, 8: 1113, },
        "Q": {0: 10288, 1: 3081, 2: 1867, 4: 1283, 8: 790, },
        "H": {0: 7888, 1: 2361, 2: 1431, 4: 983, 8: 605, }},
    36: {
        "L": {0: 19472, 1: 5836, 2: 3537, 4: 2431, 8: 1496, },
        "M": {0: 15312, 1: 4588, 2: 2780, 4: 1911, 8: 1176, },
        "Q": {0: 10832, 1: 3244, 2: 1966, 4: 1351, 8: 832, },
        "H": {0: 8432, 1: 2524, 2: 1530, 4: 1051, 8: 647, }},
    37: {
        "L": {0: 20528, 1: 6153, 2: 3729, 4: 2563, 8: 1577, },
        "M": {0: 15936, 1: 4775, 2: 2894, 4: 1989, 8: 1224, },
        "Q": {0: 11408, 1: 3417, 2: 2071, 4: 1423, 8: 876, },
        "H": {0: 8768, 1: 2625, 2: 1591, 4: 1093, 8: 673, }},
    38: {
        "L": {0: 21616, 1: 6479, 2: 3927, 4: 2699, 8: 1661, },
        "M": {0: 16816, 1: 5039, 2: 3054, 4: 2099, 8: 1292, },
        "Q": {0: 12016, 1: 3599, 2: 2181, 4: 1499, 8: 923, },
        "H": {0: 9136, 1: 2735, 2: 1658, 4: 1139, 8: 701, }},
    39: {
        "L": {0: 22496, 1: 6743, 2: 4087, 4: 2809, 8: 1729, },
        "M": {0: 17728, 1: 5313, 2: 3220, 4: 2213, 8: 1362, },
        "Q": {0: 12656, 1: 3791, 2: 2298, 4: 1579, 8: 972, },
        "H": {0: 9776, 1: 2927, 2: 1774, 4: 1219, 8: 750, }},
    40: {
        "L": {0: 23648, 1: 7089, 2: 4296, 4: 2953, 8: 1817, },
        "M": {0: 18672, 1: 5596, 2: 3391, 4: 2331, 8: 1435, },
        "Q": {0: 13328, 1: 3993, 2: 2420, 4: 1663, 8: 1024, },
        "H": {0: 10208, 1: 3057, 2: 1852, 4: 1273, 8: 784, }}
}
#: This table defines the "Error Correction Code Words and Block Information."
#: The table lists the number of error correction words that are required
#: to be generated for each version and error correction level. The table
#: is accessed by first using the version number as a key and then the
#: error level. The array values correspond to these columns from the source
#: table:
#:
#: +----------------------------+
#: |0 | EC Code Words Per Block |
#: +----------------------------+
#: |1 | Block 1 Count |
#: +----------------------------+
#: |2 | Block 1 Data Code Words |
#: +----------------------------+
#: |3 | Block 2 Count |
#: +----------------------------+
#: |4 | Block 2 Data Code Words |
#: +----------------------------+
#:
#: This table was taken from:
#:
#: http://www.thonky.com/qr-code-tutorial/error-correction-table/
eccwbi = {
    # Each 5-element list is:
    # [EC code words per block,
    #  block-group-1 count, data code words per group-1 block,
    #  block-group-2 count, data code words per group-2 block]
    # A zero count means that error level has no second block group.
    1: {
        'L': [7, 1, 19, 0, 0, ],
        'M': [10, 1, 16, 0, 0, ],
        'Q': [13, 1, 13, 0, 0, ],
        'H': [17, 1, 9, 0, 0, ],
    },
    2: {
        'L': [10, 1, 34, 0, 0, ],
        'M': [16, 1, 28, 0, 0, ],
        'Q': [22, 1, 22, 0, 0, ],
        'H': [28, 1, 16, 0, 0, ],
    },
    3: {
        'L': [15, 1, 55, 0, 0, ],
        'M': [26, 1, 44, 0, 0, ],
        'Q': [18, 2, 17, 0, 0, ],
        'H': [22, 2, 13, 0, 0, ],
    },
    4: {
        'L': [20, 1, 80, 0, 0, ],
        'M': [18, 2, 32, 0, 0, ],
        'Q': [26, 2, 24, 0, 0, ],
        'H': [16, 4, 9, 0, 0, ],
    },
    5: {
        'L': [26, 1, 108, 0, 0, ],
        'M': [24, 2, 43, 0, 0, ],
        'Q': [18, 2, 15, 2, 16, ],
        'H': [22, 2, 11, 2, 12, ],
    },
    6: {
        'L': [18, 2, 68, 0, 0, ],
        'M': [16, 4, 27, 0, 0, ],
        'Q': [24, 4, 19, 0, 0, ],
        'H': [28, 4, 15, 0, 0, ],
    },
    7: {
        'L': [20, 2, 78, 0, 0, ],
        'M': [18, 4, 31, 0, 0, ],
        'Q': [18, 2, 14, 4, 15, ],
        'H': [26, 4, 13, 1, 14, ],
    },
    8: {
        'L': [24, 2, 97, 0, 0, ],
        'M': [22, 2, 38, 2, 39, ],
        'Q': [22, 4, 18, 2, 19, ],
        'H': [26, 4, 14, 2, 15, ],
    },
    9: {
        'L': [30, 2, 116, 0, 0, ],
        'M': [22, 3, 36, 2, 37, ],
        'Q': [20, 4, 16, 4, 17, ],
        'H': [24, 4, 12, 4, 13, ],
    },
    10: {
        'L': [18, 2, 68, 2, 69, ],
        'M': [26, 4, 43, 1, 44, ],
        'Q': [24, 6, 19, 2, 20, ],
        'H': [28, 6, 15, 2, 16, ],
    },
    11: {
        'L': [20, 4, 81, 0, 0, ],
        'M': [30, 1, 50, 4, 51, ],
        'Q': [28, 4, 22, 4, 23, ],
        'H': [24, 3, 12, 8, 13, ],
    },
    12: {
        'L': [24, 2, 92, 2, 93, ],
        'M': [22, 6, 36, 2, 37, ],
        'Q': [26, 4, 20, 6, 21, ],
        'H': [28, 7, 14, 4, 15, ],
    },
    13: {
        'L': [26, 4, 107, 0, 0, ],
        'M': [22, 8, 37, 1, 38, ],
        'Q': [24, 8, 20, 4, 21, ],
        'H': [22, 12, 11, 4, 12, ],
    },
    14: {
        'L': [30, 3, 115, 1, 116, ],
        'M': [24, 4, 40, 5, 41, ],
        'Q': [20, 11, 16, 5, 17, ],
        'H': [24, 11, 12, 5, 13, ],
    },
    15: {
        'L': [22, 5, 87, 1, 88, ],
        'M': [24, 5, 41, 5, 42, ],
        'Q': [30, 5, 24, 7, 25, ],
        'H': [24, 11, 12, 7, 13, ],
    },
    16: {
        'L': [24, 5, 98, 1, 99, ],
        'M': [28, 7, 45, 3, 46, ],
        'Q': [24, 15, 19, 2, 20, ],
        'H': [30, 3, 15, 13, 16, ],
    },
    17: {
        'L': [28, 1, 107, 5, 108, ],
        'M': [28, 10, 46, 1, 47, ],
        'Q': [28, 1, 22, 15, 23, ],
        'H': [28, 2, 14, 17, 15, ],
    },
    18: {
        'L': [30, 5, 120, 1, 121, ],
        'M': [26, 9, 43, 4, 44, ],
        'Q': [28, 17, 22, 1, 23, ],
        'H': [28, 2, 14, 19, 15, ],
    },
    19: {
        'L': [28, 3, 113, 4, 114, ],
        'M': [26, 3, 44, 11, 45, ],
        'Q': [26, 17, 21, 4, 22, ],
        'H': [26, 9, 13, 16, 14, ],
    },
    20: {
        'L': [28, 3, 107, 5, 108, ],
        'M': [26, 3, 41, 13, 42, ],
        'Q': [30, 15, 24, 5, 25, ],
        'H': [28, 15, 15, 10, 16, ],
    },
    21: {
        'L': [28, 4, 116, 4, 117, ],
        'M': [26, 17, 42, 0, 0, ],
        'Q': [28, 17, 22, 6, 23, ],
        'H': [30, 19, 16, 6, 17, ],
    },
    22: {
        'L': [28, 2, 111, 7, 112, ],
        'M': [28, 17, 46, 0, 0, ],
        'Q': [30, 7, 24, 16, 25, ],
        'H': [24, 34, 13, 0, 0, ],
    },
    23: {
        'L': [30, 4, 121, 5, 122, ],
        'M': [28, 4, 47, 14, 48, ],
        'Q': [30, 11, 24, 14, 25, ],
        'H': [30, 16, 15, 14, 16, ],
    },
    24: {
        'L': [30, 6, 117, 4, 118, ],
        'M': [28, 6, 45, 14, 46, ],
        'Q': [30, 11, 24, 16, 25, ],
        'H': [30, 30, 16, 2, 17, ],
    },
    25: {
        'L': [26, 8, 106, 4, 107, ],
        'M': [28, 8, 47, 13, 48, ],
        'Q': [30, 7, 24, 22, 25, ],
        'H': [30, 22, 15, 13, 16, ],
    },
    26: {
        'L': [28, 10, 114, 2, 115, ],
        'M': [28, 19, 46, 4, 47, ],
        'Q': [28, 28, 22, 6, 23, ],
        'H': [30, 33, 16, 4, 17, ],
    },
    27: {
        'L': [30, 8, 122, 4, 123, ],
        'M': [28, 22, 45, 3, 46, ],
        'Q': [30, 8, 23, 26, 24, ],
        'H': [30, 12, 15, 28, 16, ],
    },
    28: {
        'L': [30, 3, 117, 10, 118, ],
        'M': [28, 3, 45, 23, 46, ],
        'Q': [30, 4, 24, 31, 25, ],
        'H': [30, 11, 15, 31, 16, ],
    },
    29: {
        'L': [30, 7, 116, 7, 117, ],
        'M': [28, 21, 45, 7, 46, ],
        'Q': [30, 1, 23, 37, 24, ],
        'H': [30, 19, 15, 26, 16, ],
    },
    30: {
        'L': [30, 5, 115, 10, 116, ],
        'M': [28, 19, 47, 10, 48, ],
        'Q': [30, 15, 24, 25, 25, ],
        'H': [30, 23, 15, 25, 16, ],
    },
    31: {
        'L': [30, 13, 115, 3, 116, ],
        'M': [28, 2, 46, 29, 47, ],
        'Q': [30, 42, 24, 1, 25, ],
        'H': [30, 23, 15, 28, 16, ],
    },
    32: {
        'L': [30, 17, 115, 0, 0, ],
        'M': [28, 10, 46, 23, 47, ],
        'Q': [30, 10, 24, 35, 25, ],
        'H': [30, 19, 15, 35, 16, ],
    },
    33: {
        'L': [30, 17, 115, 1, 116, ],
        'M': [28, 14, 46, 21, 47, ],
        'Q': [30, 29, 24, 19, 25, ],
        'H': [30, 11, 15, 46, 16, ],
    },
    34: {
        'L': [30, 13, 115, 6, 116, ],
        'M': [28, 14, 46, 23, 47, ],
        'Q': [30, 44, 24, 7, 25, ],
        'H': [30, 59, 16, 1, 17, ],
    },
    35: {
        'L': [30, 12, 121, 7, 122, ],
        'M': [28, 12, 47, 26, 48, ],
        'Q': [30, 39, 24, 14, 25, ],
        'H': [30, 22, 15, 41, 16, ],
    },
    36: {
        'L': [30, 6, 121, 14, 122, ],
        'M': [28, 6, 47, 34, 48, ],
        'Q': [30, 46, 24, 10, 25, ],
        'H': [30, 2, 15, 64, 16, ],
    },
    37: {
        'L': [30, 17, 122, 4, 123, ],
        'M': [28, 29, 46, 14, 47, ],
        'Q': [30, 49, 24, 10, 25, ],
        'H': [30, 24, 15, 46, 16, ],
    },
    38: {
        'L': [30, 4, 122, 18, 123, ],
        'M': [28, 13, 46, 32, 47, ],
        'Q': [30, 48, 24, 14, 25, ],
        'H': [30, 42, 15, 32, 16, ],
    },
    39: {
        'L': [30, 20, 117, 4, 118, ],
        'M': [28, 40, 47, 7, 48, ],
        'Q': [30, 43, 24, 22, 25, ],
        'H': [30, 10, 15, 67, 16, ],
    },
    40: {
        'L': [30, 19, 118, 6, 119, ],
        'M': [28, 18, 47, 31, 48, ],
        'Q': [30, 34, 24, 34, 25, ],
        'H': [30, 20, 15, 61, 16, ],
    },
}
#: This table lists all of the generator polynomials used by QR Codes.
#: They are indexed by the number of "ECC Code Words" (see table above).
#: This table is taken from:
#:
#: http://www.matchadesign.com/blog/qr-code-demystified-part-4/
generator_polynomials = {
    # Key: number of EC code words per block (column 0 of eccwbi).
    # Value: the exponents of the Reed-Solomon generator polynomial's
    # coefficients in GF(256), highest-order term first.
    7: [87, 229, 146, 149, 238, 102, 21],
    10: [251, 67, 46, 61, 118, 70, 64, 94, 32, 45],
    13: [74, 152, 176, 100, 86, 100, 106, 104, 130, 218, 206, 140, 78],
    15: [8, 183, 61, 91, 202, 37, 51, 58, 58, 237, 140, 124, 5, 99, 105],
    16: [120, 104, 107, 109, 102, 161, 76, 3, 91, 191, 147, 169, 182, 194,
         225, 120],
    17: [43, 139, 206, 78, 43, 239, 123, 206, 214, 147, 24, 99, 150, 39,
         243, 163, 136],
    18: [215, 234, 158, 94, 184, 97, 118, 170, 79, 187, 152, 148, 252, 179,
         5, 98, 96, 153],
    20: [17, 60, 79, 50, 61, 163, 26, 187, 202, 180, 221, 225, 83, 239, 156,
         164, 212, 212, 188, 190],
    22: [210, 171, 247, 242, 93, 230, 14, 109, 221, 53, 200, 74, 8, 172, 98,
         80, 219, 134, 160, 105, 165, 231],
    24: [229, 121, 135, 48, 211, 117, 251, 126, 159, 180, 169, 152, 192, 226,
         228, 218, 111, 0, 117, 232, 87, 96, 227, 21],
    26: [173, 125, 158, 2, 103, 182, 118, 17, 145, 201, 111, 28, 165, 53, 161,
         21, 245, 142, 13, 102, 48, 227, 153, 145, 218, 70],
    28: [168, 223, 200, 104, 224, 234, 108, 180, 110, 190, 195, 147, 205, 27,
         232, 201, 21, 43, 245, 87, 42, 195, 212, 119, 242, 37, 9, 123],
    30: [41, 173, 145, 152, 216, 31, 179, 182, 50, 48, 110, 86, 239, 96, 222,
         125, 42, 173, 226, 193, 224, 130, 156, 37, 251, 216, 238, 40, 192,
         180]
}
#: This table contains the log and values used in GF(256) arithmetic.
#: They are used to generate error correction codes for QR Codes.
#: This table is taken from:
#:
#: http://www.thonky.com/qr-code-tutorial/log-antilog-table/
# NOTE(review): the values are consecutive powers of two reduced in
# GF(256) (1, 2, 4, 8, ..., 29, ...), i.e. this maps exponent i -> 2**i;
# conventionally that is called the *antilog* table, while
# `galois_antilog` below holds the logs. The names are kept as-is
# because other modules reference them.
galois_log = [
    1, 2, 4, 8, 16, 32, 64, 128, 29, 58, 116, 232, 205, 135, 19, 38, 76, 152,
    45, 90, 180, 117, 234, 201, 143, 3, 6, 12, 24, 48, 96, 192, 157, 39, 78,
    156, 37, 74, 148, 53, 106, 212, 181, 119, 238, 193, 159, 35, 70, 140, 5,
    10, 20, 40, 80, 160, 93, 186, 105, 210, 185, 111, 222, 161, 95, 190, 97,
    194, 153, 47, 94, 188, 101, 202, 137, 15, 30, 60, 120, 240, 253, 231, 211,
    187, 107, 214, 177, 127, 254, 225, 223, 163, 91, 182, 113, 226, 217, 175,
    67, 134, 17, 34, 68, 136, 13, 26, 52, 104, 208, 189, 103, 206, 129, 31,
    62, 124, 248, 237, 199, 147, 59, 118, 236, 197, 151, 51, 102, 204, 133,
    23, 46, 92, 184, 109, 218, 169, 79, 158, 33, 66, 132, 21, 42, 84, 168, 77,
    154, 41, 82, 164, 85, 170, 73, 146, 57, 114, 228, 213, 183, 115, 230, 209,
    191, 99, 198, 145, 63, 126, 252, 229, 215, 179, 123, 246, 241, 255, 227,
    219, 171, 75, 150, 49, 98, 196, 149, 55, 110, 220, 165, 87, 174, 65, 130,
    25, 50, 100, 200, 141, 7, 14, 28, 56, 112, 224, 221, 167, 83, 166, 81,
    162, 89, 178, 121, 242, 249, 239, 195, 155, 43, 86, 172, 69, 138, 9, 18,
    36, 72, 144, 61, 122, 244, 245, 247, 243, 251, 235, 203, 139, 11, 22, 44,
    88, 176, 125, 250, 233, 207, 131, 27, 54, 108, 216, 173, 71, 142, 1,]
#: This table contains the antilog and values used in GF(256) arithmetic.
#: They are used to generate error correction codes for QR Codes.
#: This table is taken from:
#:
#: http://www.thonky.com/qr-code-tutorial/log-antilog-table/
# Inverse of galois_log: maps a GF(256) value to its exponent
# (value 0 has no logarithm, hence the leading None).
galois_antilog = [
    None, 0, 1, 25, 2, 50, 26, 198, 3, 223, 51, 238, 27, 104, 199, 75, 4, 100,
    224, 14, 52, 141, 239, 129, 28, 193, 105, 248, 200, 8, 76, 113, 5, 138,
    101, 47, 225, 36, 15, 33, 53, 147, 142, 218, 240, 18, 130, 69, 29, 181,
    194, 125, 106, 39, 249, 185, 201, 154, 9, 120, 77, 228, 114, 166, 6, 191,
    139, 98, 102, 221, 48, 253, 226, 152, 37, 179, 16, 145, 34, 136, 54, 208,
    148, 206, 143, 150, 219, 189, 241, 210, 19, 92, 131, 56, 70, 64, 30, 66,
    182, 163, 195, 72, 126, 110, 107, 58, 40, 84, 250, 133, 186, 61, 202, 94,
    155, 159, 10, 21, 121, 43, 78, 212, 229, 172, 115, 243, 167, 87, 7, 112,
    192, 247, 140, 128, 99, 13, 103, 74, 222, 237, 49, 197, 254, 24, 227, 165,
    153, 119, 38, 184, 180, 124, 17, 68, 146, 217, 35, 32, 137, 46, 55, 63,
    209, 91, 149, 188, 207, 205, 144, 135, 151, 178, 220, 252, 190, 97, 242,
    86, 211, 171, 20, 42, 93, 158, 132, 60, 57, 83, 71, 109, 65, 162, 31, 45,
    67, 216, 183, 123, 164, 118, 196, 23, 73, 236, 127, 12, 111, 246, 108,
    161, 59, 82, 41, 157, 85, 170, 251, 96, 134, 177, 187, 204, 62, 90, 203,
    89, 95, 176, 156, 169, 160, 81, 11, 245, 22, 235, 122, 117, 44, 215, 79,
    174, 213, 233, 230, 231, 173, 232, 116, 214, 244, 234, 168, 80, 88, 175,]
#: This table contains the coordinates for the position adjustment patterns.
#: The index of the table corresponds to the QR Code's version number.
#: This table is taken from:
#:
#: http://www.thonky.com/qr-code-tutorial/part-3-mask-pattern/
position_adjustment = [
    # Each entry lists the row/column center coordinates of the
    # alignment (position adjustment) patterns for that version.
    None,                               # There is no version 0
    None,                               # Version 1 does not need adjustment
    [6, 18, ],                          # version 2
    [6, 22, ],                          # version 3
    [6, 26, ],                          # version 4
    [6, 30, ],                          # version 5
    [6, 34, ],                          # version 6
    [6, 22, 38, ],                      # version 7
    [6, 24, 42, ],                      # version 8
    [6, 26, 46, ],                      # version 9
    [6, 28, 50, ],                      # version 10
    [6, 30, 54, ],                      # version 11
    [6, 32, 58, ],                      # version 12
    [6, 34, 62, ],                      # version 13
    [6, 26, 46, 66, ],                  # version 14
    [6, 26, 48, 70, ],                  # version 15
    [6, 26, 50, 74, ],                  # version 16
    [6, 30, 54, 78, ],                  # version 17
    [6, 30, 56, 82, ],                  # version 18
    [6, 30, 58, 86, ],                  # version 19
    [6, 34, 62, 90, ],                  # version 20
    [6, 28, 50, 72, 94, ],              # version 21
    [6, 26, 50, 74, 98, ],              # version 22
    [6, 30, 54, 78, 102, ],             # version 23
    [6, 28, 54, 80, 106, ],             # version 24
    [6, 32, 58, 84, 110, ],             # version 25
    [6, 30, 58, 86, 114, ],             # version 26
    [6, 34, 62, 90, 118, ],             # version 27
    [6, 26, 50, 74, 98, 122, ],         # version 28
    [6, 30, 54, 78, 102, 126, ],        # version 29
    [6, 26, 52, 78, 104, 130, ],        # version 30
    [6, 30, 56, 82, 108, 134, ],        # version 31
    [6, 34, 60, 86, 112, 138, ],        # version 32
    [6, 30, 58, 86, 114, 142, ],        # version 33
    [6, 34, 62, 90, 118, 146, ],        # version 34
    [6, 30, 54, 78, 102, 126, 150, ],   # version 35
    [6, 24, 50, 76, 102, 128, 154, ],   # version 36
    [6, 28, 54, 80, 106, 132, 158, ],   # version 37
    [6, 32, 58, 84, 110, 136, 162, ],   # version 38
    [6, 26, 54, 82, 110, 138, 166, ],   # version 39
    [6, 30, 58, 86, 114, 142, 170, ],   # version 40
]
#: This table specifies the bit pattern to be added to a QR Code's
#: image to specify what version the code is. Note, this pattern
#: is not used for versions 1-6. This table is taken from:
#:
#: http://www.thonky.com/qr-code-tutorial/part-3-mask-pattern/
# 18-bit version information strings, indexed by version number.
# Versions 1-6 do not carry a version pattern, hence the None entries.
version_pattern = [None, None, None, None, None, None, None, #0-6
    '000111110010010100', '001000010110111100', '001001101010011001',
    '001010010011010011', '001011101111110110', '001100011101100010',
    '001101100001000111', '001110011000001101', '001111100100101000',
    '010000101101111000', '010001010001011101', '010010101000010111',
    '010011010100110010', '010100100110100110', '010101011010000011',
    '010110100011001001', '010111011111101100', '011000111011000100',
    '011001000111100001', '011010111110101011', '011011000010001110',
    '011100110000011010', '011101001100111111', '011110110101110101',
    '011111001001010000', '100000100111010101', '100001011011110000',
    '100010100010111010', '100011011110011111', '100100101100001011',
    '100101010000101110', '100110101001100100', '100111010101000001',
    '101000110001101001'
]
#: This table contains the bit fields needed to specify the error code level and
#: mask pattern used by a QR Code. This table is taken from:
#:
#: http://www.thonky.com/qr-code-tutorial/part-3-mask-pattern/
type_bits = {
    # Outer key: error-correction level; inner key: mask pattern number
    # (0-7). Each value is the full 15-bit format information string
    # (5 data bits plus 10 BCH error-correction bits, already XORed
    # with the format mask).
    'L': {
        0: '111011111000100',
        1: '111001011110011',
        2: '111110110101010',
        3: '111100010011101',
        4: '110011000101111',
        5: '110001100011000',
        6: '110110001000001',
        7: '110100101110110',
    },
    'M': {
        0: '101010000010010',
        1: '101000100100101',
        2: '101111001111100',
        3: '101101101001011',
        4: '100010111111001',
        5: '100000011001110',
        6: '100111110010111',
        7: '100101010100000',
    },
    'Q': {
        0: '011010101011111',
        1: '011000001101000',
        2: '011111100110001',
        3: '011101000000110',
        4: '010010010110100',
        5: '010000110000011',
        6: '010111011011010',
        7: '010101111101101',
    },
    'H': {
        0: '001011010001001',
        1: '001001110111110',
        2: '001110011100111',
        3: '001100111010000',
        4: '000011101100010',
        5: '000001001010101',
        6: '000110100001100',
        7: '000100000111011',
    },
}
#: This table contains *functions* to compute whether to change current bit when
#: creating the masks. All of the functions in the table return a boolean value.
#: A True result means you should add the bit to the QR Code exactly as is. A
#: False result means you should add the opposite bit. This table was taken
#: from:
#:
#: http://www.thonky.com/qr-code-tutorial/mask-patterns/
# The eight data-mask conditions, indexed by mask pattern number.
# Each predicate takes a module's (row, column) position; a True result
# means the bit is placed as-is, False means it is inverted.
mask_patterns = [
    lambda r, c: (r + c) % 2 == 0,
    lambda r, c: r % 2 == 0,
    lambda r, c: c % 3 == 0,
    lambda r, c: (r + c) % 3 == 0,
    lambda r, c: (r // 2 + c // 3) % 2 == 0,
    lambda r, c: (r * c) % 2 + (r * c) % 3 == 0,
    lambda r, c: ((r * c) % 2 + (r * c) % 3) % 2 == 0,
    lambda r, c: ((r + c) % 2 + (r * c) % 3) % 2 == 0,
]
#: This is a table of ANSI escape codes for terminal colors. QR codes
#: are drawn using a space with a colored background. Hence, only
#: codes affecting background colors have been added.
#: http://misc.flogisoft.com/bash/tip_colors_and_formatting
term_colors = {
'default': 49,
'background': 49,
'reverse': 7,
'reversed': 7,
'inverse': 7,
'inverted': 7,
'black': 40,
'red': 41,
'green': 42,
'yellow': 43,
'blue': 44,
'magenta': 45,
'cyan': 46,
'light gray': 47,
'light grey': 47,
'dark gray': 100,
'dark grey': 100,
'light red': 101,
'light green': 102,
'light blue': 103,
'light yellow': 104,
'light magenta': 105,
'light cyan': 106,
'white': 107
}
| bsd-3-clause |
vitan/hue | desktop/core/ext-py/guppy-0.1.10/guppy/heapy/Monitor.py | 37 | 12734 | #._cv_part guppy.heapy.Monitor
import os, pprint, signal, socket, SocketServer, sys, threading, time, traceback
import cPickle as pickle
try:
import readline # Imported to _enable_ command line editing
except ImportError:
pass
import select, Queue
from guppy.heapy.RemoteConstants import *
from guppy.heapy.Console import Console
from guppy.sets import mutnodeset
from guppy.etc.etc import ptable
from guppy.etc import cmd
class Server(SocketServer.ThreadingTCPServer):
    """TCP server that handles each remote-control connection in its own thread."""
    pass
def ioready(fd, wait):
    """Return 1 if *fd* is readable within *wait* seconds, else 0.

    Thin wrapper over select(); only the read set is populated, so the
    result is the length of the ready-to-read list.
    """
    readable, _, _ = select.select([fd], [], [], wait)
    return len(readable)
def queue_get_interruptible(q, noblock=0):
    """Get the next item from queue *q*, polling in 0.2 second slices.

    Polling (rather than a single blocking get) keeps the calling
    thread responsive to interrupts between slices. If *noblock* is
    true and the queue is still empty after one slice, return None;
    otherwise keep waiting until an item arrives.
    """
    while True:
        try:
            return q.get(timeout=0.2)
        except Queue.Empty:
            if noblock:
                return None
# Special value signals that connection has been closed.
# Consumers compare it by identity ("p is CONN_CLOSED"), so wrapping
# the marker in a fresh tuple guarantees it can never collide with
# real data placed on the queue.
CONN_CLOSED = ('CLOSED',)
class Handler(SocketServer.StreamRequestHandler):
    """Services one remote connection.

    handle() runs in the server thread and feeds incoming lines into
    self.dataq; exec_cmd()/browser_cmd() are called from other threads
    and consume that queue.
    """
    allow_reuse_address = 1

    def close(self):
        """Close the connection; safe to call more than once."""
        # tas(0) is a test-and-set on the shared flag set, so the
        # shutdown sequence below runs exactly once.
        if not self.isclosed.tas(0):
            self.server.monitor.remove_connection(self)
            # Wake up any thread blocked on the queue.
            self.dataq.put(CONN_CLOSED)
            self.request.shutdown(2)
            self.request.close()

    def send_cmd(self, cmd):
        """Send one command line to the remote end, ensuring a trailing newline."""
        if not cmd.endswith('\n'):
            cmd += '\n'
        self.request.send(cmd)

    def browser_cmd(self, cmd):
        """Execute *cmd* after backing out of any sub-prompt the remote
        side is sitting at, and return the command's output.

        NOTE(review): '>>> ' and '<Annex> ' appear to be remote
        sub-prompts exited via 'q' / 'cont' -- confirm against the
        Annex implementation.
        """
        if self.prompt == '>>> ':
            self.exec_cmd('q', retdata=1)
        if self.prompt == '<Annex> ':
            self.exec_cmd('cont', retdata=1)
        return self.exec_cmd(cmd, retdata=1)

    def exec_cmd(self, cmd, retdata=0, noblock=0):
        """Send *cmd* (if not None) and consume queued output until the
        next prompt. If *retdata* is true, return the collected output
        as a string; otherwise echo it to stdout.
        """
        if cmd is not None:
            self.send_cmd(cmd)
        self.promptstate = False
        datas = []
        while 1:
            p = queue_get_interruptible(self.dataq, noblock)
            if p is None:
                # Queue was empty: done if we already saw a prompt,
                # otherwise wait a little and poll again.
                if self.promptstate:
                    break
                else:
                    time.sleep(1)
                    continue
            if p is CONN_CLOSED:
                raise EOFError
            if p[0] == 'DATA':
                self.promptstate = False
                if retdata:
                    datas.append(p[1])
                else:
                    sys.stdout.write(p[1])
            elif p[0] == 'PROMPT':
                self.prompt = p[1]
                # Only treat the prompt as final when no output is
                # still pending behind it.
                if self.dataq.empty():
                    self.promptstate = True
                    break
                else:
                    self.promptstate = False
            else:
                assert 0
        if retdata:
            return ''.join(datas)

    def get_ps(self, name):
        """Look up *name* in the first data block received from the
        remote side; return the value after '=' (stripped), the whole
        matching line if it has no '=', or '' if not found.
        """
        for line in self.firstdata.split('\n'):
            if line.startswith(name):
                if '=' in line:
                    ix = line.index('=')
                    line = line[ix+1:].strip()
                return line
        return ''

    def get_val(self, expr):
        """Evaluate *expr* remotely via the 'dump' command and unpickle the result."""
        data = self.browser_cmd('dump %s'%expr)
        return pickle.loads(data)

    def handle(self):
        """Main per-connection loop: read lines from the socket and
        route them onto the data queue until EOF or a DONE marker.
        """
        self.prompt = None
        self.promptstate = False
        self.isclosed = mutnodeset()
        self.dataq = Queue.Queue()
        self.server.monitor.add_connection(self)
        while 1:
            try:
                data = self.rfile.readline()
                if not data:
                    raise EOFError,'End of file'
                if data.endswith(DONE):
                    raise EOFError,'DONE'
            except (EOFError, socket.error):
                break
            if data.endswith(READLINE):
                # A READLINE-terminated line is a prompt announcement;
                # everything before the marker is the prompt text.
                prompt = data[:-len(READLINE)]
                self.dataq.put(('PROMPT',prompt))
                if self.prompt is None:
                    # First prompt: capture the initial status block
                    # already queued, for use by get_ps().
                    self.firstdata = self.exec_cmd(cmd=None,retdata=1)
            else:
                self.dataq.put(('DATA',data))
        self.close()
class MonitorConnection(cmd.Cmd):
use_raw_input = 1
def __init__(self, monitor):
self.aliases = {}
cmd.Cmd.__init__(self)
self.hno = 0
self.isclosed = 0
self.forceexit = 0
self.prompt = '<Monitor> '
self.monitor = monitor
self.server = s = Server((LOCALHOST, HEAPYPORT), Handler)
self.server.monitor = monitor
self.st = threading.Thread(target = self.run_server,
args = ())
self.st.start()
def close(self):
self.isclosed = 1
self.server.socket.shutdown(2)
self.server.server_close()
self.server.verify_request = lambda x, y: 0
def default(self, line):
cmd.Cmd.default(self, line)
cmd.Cmd.do_help(self, '')
def run_server(self):
s = self.server
while not self.isclosed:
s.handle_request()
s.server_close()
def exec_cmd(self, cmd):
if not cmd:
# I don't want the repeat of previous command when giving
# empty command that is provided by cmd.py.
# It turned out to be confusing sometimes.
return
line = cmd
try:
line = self.precmd(line)
stop = self.onecmd(line)
stop = self.postcmd(stop, line)
return stop
except:
self.handle_error(line)
def handle_error(self, cmdline):
"""Handle an error gracefully. May be overridden.
The default is to print a traceback and continue.
"""
print >>sys.stderr,'-'*40
print >>sys.stderr,'Exception happened during processing the command',
print >>sys.stderr,repr(cmdline)
import traceback
traceback.print_exc()
print >>sys.stderr, '-'*40
# Alias handling etc copied from pdb.py in Python dist
def precmd(self, line):
"""Handle alias expansion and ';;' separator."""
self.curline = line
if not line:
return line
args = line.split()
while self.aliases.has_key(args[0]):
line = self.aliases[args[0]]
if '%' in line:
ii = 1
for tmpArg in args[1:]:
line = line.replace("%" + str(ii),
tmpArg)
line = line.replace('%>=' + str(ii),
' '.join(args[ii:]))
ii = ii + 1
line = line.replace("%*", ' '.join(args[1:]))
else:
line = line + ' ' + ' '.join(args[1:])
args = line.split()
# split into ';;' separated commands
# unless it's an alias command
if args[0] != 'alias':
marker = line.find(';;')
if marker >= 0:
# queue up everything after marker
next = line[marker+2:].lstrip()
self.cmdqueue.append(next)
line = line[:marker].rstrip()
return line
def do_exit(self, arg):
self.forceexit = 1
return 1
def help_exit(self):
print """exit
-----
Exit from the monitor and from the Python process that started it.
This makes sure to exit without waiting for the server thread to terminate.
See also the q command."""
do_h = cmd.Cmd.do_help
def help_h(self):
print """h(elp)
-----
Without argument, print the list of available commands.
With a command name as argument, print help about that command."""
def help_help(self):
self.help_h()
def do_int(self, arg):
try:
con = Console(stdin=self.stdin,stdout=self.stdout,
locals=self.__dict__)
con.interact(
"Local interactive console. To return to Monitor, type %r."%
con.EOF_key_sequence)
finally:
pass
def help_int(self):
print """int
-----
Local interactive console.
This will bring up a Python console locally in
the same interpreter process that the Monitor itself."""
def do_ki(self, arg):
if not arg:
arg = self.conid
arg = int(arg)
c = self.monitor.connections[arg]
if c.get_ps('noninterruptible'):
print '''\
Error: Can not interrupt this remote connection (uses Python < 2.4)'''
else:
print 'Sending KeyboardInterrupt to connection %s.'%arg
c.send_cmd(KEYBOARDINTERRUPT)
def help_ki(self):
print """ki <connection ID>
-----
Keyboard Interrupt
Send a command to interrupt the remote thread on the specified
connection (default is the last one connected to).
Notes:
It currently only works with Python >= 2.4. The remote thread will
not always be awakened, for example if it is waiting in
time.sleep(). Sometimes using several ki commands helps."""
def do_lc(self, arg):
table = [['CID', 'PID', 'ARGV']]
for cid, con in self.monitor.connections.items():
table.append([cid,
con.get_ps('target.pid'),
con.get_ps('target.sys.argv')])
ptable(table, self.stdout)
def help_lc(self):
    # Help text for the 'lc' command.
    print """lc
-----
List Connections.
List the currently open connections.
The table printed has one line per connection in this form:
CID PID ARGV
1 17999 ['/home/nilsson/bin/solitaire.py']
CID is the connection ID, which may be used as argument to the sc
command.
PID is the process ID of the target interpreter process. In Linux,
this is the parent of the remote control interpreter thread that runs
the Annex that the connection is talking to.
ARGV is the argument vector in the target Python interpereter."""
def do_sc(self, arg):
    # Switch the Monitor's current connection.  With no argument, reuse
    # the previously selected connection id (self.conid).
    if arg:
        self.conid = int(arg)
    print 'Remote connection %d. To return to Monitor, type <Ctrl-C> or .<RETURN>'%self.conid
    self.monitor.set_connection(self.monitor.connections[self.conid])
def help_sc(self):
    # Help text for the 'sc' command.
    print """sc <connection ID>
-----
Set connection to communicate with a remote thread.
With an argument, set current connection to the number specified.
Without argument, use the same connection as last time. You will then
be talking to a remote process via that connection. You can return to
Monitor at any time by <Ctrl-C>. You may also use the '.' command
(followed by <Return>), if the remote process is waiting for input.
The '.' character may be followed by a monitor command, to execute it
directly instead of returning to the monitor. For example, when
talking to a connection, '.sc 1' will directly change to connection 1."""
def do_q(self, arg):
    # Quit the monitor loop (a true return value stops cmd.Cmd's loop)
    # without forcing the whole process to exit -- contrast do_exit.
    return 1
def help_q(self):
    # Help text for the 'q' command.
    print """q
-----
Quit from the monitor.
This will not exit from Python itself if called from an interactive
interpreter. To make sure to exit from Python, use the exit command."""
class Monitor:
    """Interactive monitor multiplexing a local command console and any
    number of connections to remote Python processes."""

    # Read commands with raw_input (flag consumed by the framework).
    use_raw_input = 1

    def __init__(self):
        # The monitor's own command interpreter doubles as the initial
        # "connection"; remote ones are registered in self.connections.
        self.connection = self.monitor_connection = MonitorConnection(self)
        self.connections = {}  # connection id -> connection object
        self.ids = 0           # highest connection id handed out so far
        self.prompt = None     # current prompt, or None while not reading

    def newid(self):
        # Allocate the next connection id; numbering restarts from 1
        # whenever there are no open connections.
        if not self.connections:
            self.ids = 1
            self.monitor_connection.conid = self.ids
        else:
            self.ids = max([1]+[c for c in self.connections.keys()])+1
        return self.ids

    def add_connection(self, connection):
        # Register a newly opened remote connection under a fresh id.
        hid = self.newid()
        self.connections[hid] = connection
        connection.monitor_id = hid
        self.print_async( '*** Connection %s opened ***'%hid)

    def print_async(self, text):
        """ Print text only if we are waiting for input,
        and then restore the prompt. """
        if self.prompt is not None:
            print '\n'+text
            sys.stdout.write(self.prompt)
            sys.stdout.flush()

    def remove_connection(self, connection):
        # Drop a closed connection; if it was current, fall back to the
        # local monitor connection.
        del self.connections[connection.monitor_id]
        if connection is self.connection:
            self.set_connection(self.monitor_connection)
        self.print_async( '*** Connection %s closed ***'%connection.monitor_id)

    def run(self):
        # Main read-eval loop.  Commands beginning with '.' are routed to
        # the local monitor even while a remote connection is current.
        try:
            stop = 0
            while not stop:
                try:
                    while not stop:
                        conn = self.connection
                        self.prompt = conn.prompt
                        if conn is not self.monitor_connection:
                            # Let the remote side flush pending output
                            # before we block on input.
                            conn.exec_cmd(cmd=None,noblock=1)
                        cmd = raw_input(conn.prompt)
                        self.prompt = None
                        conn = None
                        if cmd.startswith('.'):
                            # '.' alone returns to the monitor;
                            # '.<cmd>' runs <cmd> in the monitor while
                            # the remote connection stays current.
                            if cmd == '.':
                                self.connection = self.monitor_connection
                            else:
                                cmd = cmd[1:]
                                conn = self.monitor_connection
                        #elif cmd or self.connection is self.monitor_connection:
                        else:
                            conn = self.connection
                        if conn:
                            try:
                                r = conn.exec_cmd(cmd)
                            except EOFError:
                                r = 1
                            # A true result from the monitor connection
                            # means a quit command was executed.
                            if conn is self.monitor_connection and r:
                                stop = 1
                        #print 'to stop'
                    #print 'end of loop'
                except EOFError:
                    'We better exit in case the input is from a file'
                    #print 'EOFError'
                    #print 'Use the monitor q command to quit.'
                    print '*** End Of File - Exiting Monitor ***'
                    self.connection = self.monitor_connection
                    stop = 1
                except KeyboardInterrupt:
                    # Ctrl-C while talking to a remote process: return to
                    # the local monitor instead of killing this process.
                    print 'KeyboardInterrupt'
                    print 'Use the ki command to interrupt a remote process.'
                    self.connection = self.monitor_connection
                    continue
        finally:
            self.prompt=None # Avoid closing messages
            #print 'to close'
            self.close()

    def close(self):
        # Close all remote connections and then the monitor's own
        # connection; socket errors during shutdown are ignored.
        for c in self.connections.values():
            try:
                #print 'to close:', c
                c.close()
            except socket.error:
                pass
        try:
            #print 'to close: self'
            self.monitor_connection.close()
        except socket.error:
            pass
        if self.monitor_connection.forceexit:
            # The 'exit' command was used: terminate immediately without
            # waiting for other threads or running normal shutdown.
            os._exit(0)

    def set_connection(self, connection):
        # Make `connection` current and adopt its prompt.
        self.connection = connection
        self.prompt = connection.prompt
def monitor():
    """monitor() [0]
Start an interactive remote monitor.
This can be used to get information about the state, in
particular the memory usage, of separately running Python
processes.
References
    [0] heapy_Use.html#heapykinds.Use.monitor"""
    from guppy.heapy import Remote
    # Make sure this process is not itself being remotely monitored
    # while it acts as the monitoring end.
    Remote.off()
    m = Monitor()
    m.run()
# Allow running this module as a stand-alone monitor script.
if __name__ == '__main__':
    monitor()
| apache-2.0 |
bywbilly/tornado | tornado/wsgi.py | 127 | 13436 | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""WSGI support for the Tornado web framework.
WSGI is the Python standard for web servers, and allows for interoperability
between Tornado and other Python web frameworks and servers. This module
provides WSGI support in two ways:
* `WSGIAdapter` converts a `tornado.web.Application` to the WSGI application
interface. This is useful for running a Tornado app on another
HTTP server, such as Google App Engine. See the `WSGIAdapter` class
documentation for limitations that apply.
* `WSGIContainer` lets you run other WSGI applications and frameworks on the
Tornado HTTP server. For example, with this class you can mix Django
and Tornado handlers in a single server.
"""
from __future__ import absolute_import, division, print_function, with_statement
import sys
from io import BytesIO
import tornado
from tornado.concurrent import Future
from tornado import escape
from tornado import httputil
from tornado.log import access_log
from tornado import web
from tornado.escape import native_str
from tornado.util import unicode_type
try:
import urllib.parse as urllib_parse # py3
except ImportError:
import urllib as urllib_parse
# PEP 3333 specifies that WSGI on python 3 generally deals with byte strings
# that are smuggled inside objects of type unicode (via the latin1 encoding).
# These functions are like those in the tornado.escape module, but defined
# here to minimize the temptation to use them in non-wsgi contexts.
if str is unicode_type:
    # Python 3: WSGI "native" strings are str objects carrying bytes
    # smuggled via the latin1 encoding (PEP 3333).
    def to_wsgi_str(s):
        """Convert a byte string to a WSGI native string."""
        assert isinstance(s, bytes)
        return s.decode('latin1')

    def from_wsgi_str(s):
        """Convert a WSGI native string back to a byte string."""
        assert isinstance(s, str)
        return s.encode('latin1')
else:
    # Python 2: native strings already are byte strings; pass through.
    def to_wsgi_str(s):
        """Convert a byte string to a WSGI native string (identity here)."""
        assert isinstance(s, bytes)
        return s

    def from_wsgi_str(s):
        """Convert a WSGI native string back to a byte string (identity here)."""
        assert isinstance(s, str)
        return s
class WSGIApplication(web.Application):
    """A WSGI equivalent of `tornado.web.Application`.

    .. deprecated:: 4.0
       Use a regular `.Application` and wrap it in `WSGIAdapter` instead.
    """
    def __call__(self, environ, start_response):
        # Wrap ourselves in an adapter and let it serve the WSGI request.
        adapter = WSGIAdapter(self)
        return adapter(environ, start_response)
# WSGI has no facilities for flow control, so just return an already-done
# Future when the interface requires it.  A single shared instance is
# enough because its result never changes.
_dummy_future = Future()
_dummy_future.set_result(None)
class _WSGIConnection(httputil.HTTPConnection):
    """Adapts Tornado's HTTPConnection interface to a WSGI start_response
    callback, buffering the whole response body in memory."""

    def __init__(self, method, start_response, context):
        self.method = method                  # HTTP method, e.g. 'GET'
        self.start_response = start_response  # WSGI start_response callable
        self.context = context                # _WSGIRequestContext
        self._write_buffer = []               # accumulated body chunks
        self._finished = False
        # Bytes still owed per Content-Length, or None when unknown.
        self._expected_content_remaining = None
        self._error = None                    # error deferred to the adapter

    def set_close_callback(self, callback):
        # WSGI has no facility for detecting a closed connection mid-request,
        # so we can simply ignore the callback.
        pass

    def write_headers(self, start_line, headers, chunk=None, callback=None):
        """Call start_response and optionally write the first body chunk."""
        if self.method == 'HEAD':
            # HEAD responses must not carry a body.
            self._expected_content_remaining = 0
        elif 'Content-Length' in headers:
            self._expected_content_remaining = int(headers['Content-Length'])
        else:
            self._expected_content_remaining = None
        self.start_response(
            '%s %s' % (start_line.code, start_line.reason),
            [(native_str(k), native_str(v)) for (k, v) in headers.get_all()])
        if chunk is not None:
            self.write(chunk, callback)
        elif callback is not None:
            callback()
        # WSGI cannot apply backpressure, so return an already-done Future.
        return _dummy_future

    def write(self, chunk, callback=None):
        """Buffer a body chunk, enforcing the declared Content-Length."""
        if self._expected_content_remaining is not None:
            self._expected_content_remaining -= len(chunk)
            if self._expected_content_remaining < 0:
                # Remember the error so WSGIAdapter can re-raise it.
                self._error = httputil.HTTPOutputError(
                    "Tried to write more data than Content-Length")
                raise self._error
        self._write_buffer.append(chunk)
        if callback is not None:
            callback()
        return _dummy_future

    def finish(self):
        """Mark the response complete; verify Content-Length was honored."""
        if (self._expected_content_remaining is not None and
                self._expected_content_remaining != 0):
            self._error = httputil.HTTPOutputError(
                "Tried to write %d bytes less than Content-Length" %
                self._expected_content_remaining)
            raise self._error
        self._finished = True
class _WSGIRequestContext(object):
def __init__(self, remote_ip, protocol):
self.remote_ip = remote_ip
self.protocol = protocol
def __str__(self):
return self.remote_ip
class WSGIAdapter(object):
    """Converts a `tornado.web.Application` instance into a WSGI application.

    Example usage::

        import tornado.web
        import tornado.wsgi
        import wsgiref.simple_server

        class MainHandler(tornado.web.RequestHandler):
            def get(self):
                self.write("Hello, world")

        if __name__ == "__main__":
            application = tornado.web.Application([
                (r"/", MainHandler),
            ])
            wsgi_app = tornado.wsgi.WSGIAdapter(application)
            server = wsgiref.simple_server.make_server('', 8888, wsgi_app)
            server.serve_forever()

    See the `appengine demo
    <https://github.com/tornadoweb/tornado/tree/stable/demos/appengine>`_
    for an example of using this module to run a Tornado app on Google
    App Engine.

    In WSGI mode asynchronous methods are not supported.  This means
    that it is not possible to use `.AsyncHTTPClient`, or the
    `tornado.auth` or `tornado.websocket` modules.

    .. versionadded:: 4.0
    """
    def __init__(self, application):
        if isinstance(application, WSGIApplication):
            # Bypass WSGIApplication.__call__ (which would construct
            # another adapter) and invoke the base class directly.
            self.application = lambda request: web.Application.__call__(
                application, request)
        else:
            self.application = application

    def __call__(self, environ, start_response):
        """Handle one WSGI request synchronously."""
        method = environ["REQUEST_METHOD"]
        # Rebuild the request URI from SCRIPT_NAME + PATH_INFO, re-quoting
        # the WSGI-decoded path segments.
        uri = urllib_parse.quote(from_wsgi_str(environ.get("SCRIPT_NAME", "")))
        uri += urllib_parse.quote(from_wsgi_str(environ.get("PATH_INFO", "")))
        if environ.get("QUERY_STRING"):
            uri += "?" + environ["QUERY_STRING"]
        headers = httputil.HTTPHeaders()
        if environ.get("CONTENT_TYPE"):
            headers["Content-Type"] = environ["CONTENT_TYPE"]
        if environ.get("CONTENT_LENGTH"):
            headers["Content-Length"] = environ["CONTENT_LENGTH"]
        for key in environ:
            if key.startswith("HTTP_"):
                # Reverse the CGI-style mangling: HTTP_FOO_BAR -> Foo-Bar.
                headers[key[5:].replace("_", "-")] = environ[key]
        if headers.get("Content-Length"):
            body = environ["wsgi.input"].read(
                int(headers["Content-Length"]))
        else:
            body = b""
        protocol = environ["wsgi.url_scheme"]
        remote_ip = environ.get("REMOTE_ADDR", "")
        if environ.get("HTTP_HOST"):
            host = environ["HTTP_HOST"]
        else:
            host = environ["SERVER_NAME"]
        connection = _WSGIConnection(method, start_response,
                                     _WSGIRequestContext(remote_ip, protocol))
        request = httputil.HTTPServerRequest(
            method, uri, "HTTP/1.1", headers=headers, body=body,
            host=host, connection=connection)
        request._parse_body()
        self.application(request)
        # Surface any Content-Length violation recorded by the connection.
        if connection._error:
            raise connection._error
        if not connection._finished:
            raise Exception("request did not finish synchronously")
        return connection._write_buffer
class WSGIContainer(object):
    r"""Makes a WSGI-compatible function runnable on Tornado's HTTP server.

    .. warning::

       WSGI is a *synchronous* interface, while Tornado's concurrency model
       is based on single-threaded asynchronous execution.  This means that
       running a WSGI app with Tornado's `WSGIContainer` is *less scalable*
       than running the same app in a multi-threaded WSGI server like
       ``gunicorn`` or ``uwsgi``.  Use `WSGIContainer` only when there are
       benefits to combining Tornado and WSGI in the same process that
       outweigh the reduced scalability.

    Wrap a WSGI function in a `WSGIContainer` and pass it to `.HTTPServer` to
    run it. For example::

        def simple_app(environ, start_response):
            status = "200 OK"
            response_headers = [("Content-type", "text/plain")]
            start_response(status, response_headers)
            return ["Hello world!\n"]

        container = tornado.wsgi.WSGIContainer(simple_app)
        http_server = tornado.httpserver.HTTPServer(container)
        http_server.listen(8888)
        tornado.ioloop.IOLoop.current().start()

    This class is intended to let other frameworks (Django, web.py, etc)
    run on the Tornado HTTP server and I/O loop.

    The `tornado.web.FallbackHandler` class is often useful for mixing
    Tornado and WSGI apps in the same server.  See
    https://github.com/bdarnell/django-tornado-demo for a complete example.
    """
    def __init__(self, wsgi_application):
        self.wsgi_application = wsgi_application

    def __call__(self, request):
        """Run the wrapped WSGI app for one Tornado request."""
        data = {}       # filled in by start_response below
        response = []   # body chunks (also the WSGI write() target)

        def start_response(status, response_headers, exc_info=None):
            data["status"] = status
            data["headers"] = response_headers
            # Per PEP 3333, start_response returns a write(chunk) callable.
            return response.append
        app_response = self.wsgi_application(
            WSGIContainer.environ(request), start_response)
        try:
            response.extend(app_response)
            body = b"".join(response)
        finally:
            # Generators/iterables with cleanup must be closed (PEP 3333).
            if hasattr(app_response, "close"):
                app_response.close()
        if not data:
            raise Exception("WSGI app did not call start_response")

        status_code, reason = data["status"].split(' ', 1)
        status_code = int(status_code)
        headers = data["headers"]
        header_set = set(k.lower() for (k, v) in headers)
        body = escape.utf8(body)
        # Fill in headers the app omitted (304 must not carry a body).
        if status_code != 304:
            if "content-length" not in header_set:
                headers.append(("Content-Length", str(len(body))))
            if "content-type" not in header_set:
                headers.append(("Content-Type", "text/html; charset=UTF-8"))
        if "server" not in header_set:
            headers.append(("Server", "TornadoServer/%s" % tornado.version))

        start_line = httputil.ResponseStartLine("HTTP/1.1", status_code, reason)
        header_obj = httputil.HTTPHeaders()
        for key, value in headers:
            header_obj.add(key, value)
        request.connection.write_headers(start_line, header_obj, chunk=body)
        request.connection.finish()
        self._log(status_code, request)

    @staticmethod
    def environ(request):
        """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.
        """
        hostport = request.host.split(":")
        if len(hostport) == 2:
            host = hostport[0]
            port = int(hostport[1])
        else:
            host = request.host
            port = 443 if request.protocol == "https" else 80
        environ = {
            "REQUEST_METHOD": request.method,
            "SCRIPT_NAME": "",
            "PATH_INFO": to_wsgi_str(escape.url_unescape(
                request.path, encoding=None, plus=False)),
            "QUERY_STRING": request.query,
            "REMOTE_ADDR": request.remote_ip,
            "SERVER_NAME": host,
            "SERVER_PORT": str(port),
            "SERVER_PROTOCOL": request.version,
            "wsgi.version": (1, 0),
            "wsgi.url_scheme": request.protocol,
            "wsgi.input": BytesIO(escape.utf8(request.body)),
            "wsgi.errors": sys.stderr,
            "wsgi.multithread": False,
            "wsgi.multiprocess": True,
            "wsgi.run_once": False,
        }
        # Content-Type/Content-Length get dedicated CGI variables; all other
        # headers are exported with the HTTP_ prefix.
        if "Content-Type" in request.headers:
            environ["CONTENT_TYPE"] = request.headers.pop("Content-Type")
        if "Content-Length" in request.headers:
            environ["CONTENT_LENGTH"] = request.headers.pop("Content-Length")
        for key, value in request.headers.items():
            environ["HTTP_" + key.replace("-", "_").upper()] = value
        return environ

    def _log(self, status_code, request):
        # Choose log severity by response class: 2xx/3xx info,
        # 4xx warning, 5xx error.
        if status_code < 400:
            log_method = access_log.info
        elif status_code < 500:
            log_method = access_log.warning
        else:
            log_method = access_log.error
        request_time = 1000.0 * request.request_time()
        summary = request.method + " " + request.uri + " (" + \
            request.remote_ip + ")"
        log_method("%d %s %.2fms", status_code, summary, request_time)
HTTPRequest = httputil.HTTPServerRequest
| apache-2.0 |
thiagopena/PySIGNFe | pysignfe/nfe/manual_401/conssitnfe_200.py | 1 | 3143 | # -*- coding: utf-8 -*-
from pysignfe.xml_sped import *
from pysignfe.nfe.manual_401 import ESQUEMA_ATUAL
from pysignfe.nfe.manual_300 import conssitnfe_107
from pysignfe.nfe.manual_401 import ProtNFe_200, RetCancNFe_200
import os
DIRNAME = os.path.dirname(__file__)
class ConsSitNFe(conssitnfe_107.ConsSitNFe):
    """NF-e status query request (consSitNFe), layout version 2.01.

    Inherits the 1.07 structure and overrides only the root tag version
    and the XSD schema used for validation.
    """
    def __init__(self):
        super(ConsSitNFe, self).__init__()
        # Root element now declares versao="2.01".
        self.versao = TagDecimal(nome=u'consSitNFe', codigo=u'EP01', propriedade=u'versao', namespace=NAMESPACE_NFE, valor=u'2.01', raiz=u'/')
        # Validate against the 2.01 schema shipped next to this module.
        self.caminho_esquema = os.path.join(DIRNAME, u'schema', ESQUEMA_ATUAL + u'/')
        self.arquivo_esquema = u'consSitNFe_v2.01.xsd'
class RetConsSitNFe(conssitnfe_107.RetConsSitNFe):
    """NF-e status query response (retConsSitNFe), layout version 2.01.

    Redefines the response tags for the 2.01 layout and may embed a
    ProtNFe_200 (authorization protocol) and/or RetCancNFe_200
    (cancellation result) sub-document.
    """
    def __init__(self):
        super(RetConsSitNFe, self).__init__()
        self.versao = TagDecimal(nome=u'retConsSitNFe', codigo=u'ER01', propriedade=u'versao', namespace=NAMESPACE_NFE, valor=u'2.01', raiz=u'/')
        self.tpAmb = TagInteiro(nome=u'tpAmb', codigo=u'ER03', tamanho=[1, 1, 1], raiz=u'//retConsSitNFe')
        self.verAplic = TagCaracter(nome=u'verAplic', codigo=u'ER04', tamanho=[1, 20], raiz=u'//retConsSitNFe')
        self.cStat = TagCaracter(nome=u'cStat', codigo=u'ER05', tamanho=[1, 3], raiz=u'//retConsSitNFe')
        self.xMotivo = TagCaracter(nome=u'xMotivo', codigo=u'ER06', tamanho=[1, 2000], raiz=u'//retConsSitNFe')
        self.cUF = TagInteiro(nome=u'cUF', codigo=u'ER07', tamanho=[2, 2, 2], raiz=u'//retConsSitNFe')
        self.chNFe = TagCaracter(nome=u'chNFe', codigo=u'ER07b', tamanho=[44, 44], raiz=u'//retConsSitNFe', obrigatorio=False)
        # Optional sub-documents, filled in by set_xml when present.
        self.protNFe = None
        self.retCancNFe = None
        self.caminho_esquema = os.path.join(DIRNAME, u'schema', ESQUEMA_ATUAL + u'/')
        self.arquivo_esquema = u'retConsSitNFe_v2.01.xsd'

    def get_xml(self):
        """Serialize this response to its XML string form."""
        xml = XMLNFe.get_xml(self)
        xml += ABERTURA
        xml += self.versao.xml
        xml += self.tpAmb.xml
        xml += self.verAplic.xml
        xml += self.cStat.xml
        xml += self.xMotivo.xml
        xml += self.cUF.xml
        xml += self.chNFe.xml
        if self.protNFe is not None:
            xml += self.protNFe.xml
        if self.retCancNFe is not None:
            # Strip the XML declaration of the embedded document.
            xml += tira_abertura(self.retCancNFe.xml)
        xml += u'</retConsSitNFe>'
        return xml

    def set_xml(self, arquivo):
        """Parse an XML document (string or file) into this object."""
        if self._le_xml(arquivo):
            self.versao.xml = arquivo
            self.tpAmb.xml = arquivo
            self.verAplic.xml = arquivo
            self.cStat.xml = arquivo
            self.xMotivo.xml = arquivo
            self.cUF.xml = arquivo
            self.chNFe.xml = arquivo
            # Instantiate sub-documents only when their nodes exist.
            if self._le_noh(u'//retConsSitNFe/protNFe') is not None:
                self.protNFe = ProtNFe_200()
                self.protNFe.xml = arquivo
            if self._le_noh(u'//retConsSitNFe/retCancNFe') is not None:
                self.retCancNFe = RetCancNFe_200()
                self.retCancNFe.xml = arquivo

    # Expose serialization as a read/write 'xml' property.
    xml = property(get_xml, set_xml)
| lgpl-2.1 |
kidburglar/youtube-dl | youtube_dl/extractor/skylinewebcams.py | 20 | 1452 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
class SkylineWebcamsIE(InfoExtractor):
    """Extractor for live webcam streams hosted on skylinewebcams.com."""

    _VALID_URL = r'https?://(?:www\.)?skylinewebcams\.com/[^/]+/webcam/(?:[^/]+/)+(?P<id>[^/]+)\.html'
    _TEST = {
        'url': 'https://www.skylinewebcams.com/it/webcam/italia/lazio/roma/scalinata-piazza-di-spagna-barcaccia.html',
        'info_dict': {
            'id': 'scalinata-piazza-di-spagna-barcaccia',
            'ext': 'mp4',
            'title': 're:^Live Webcam Scalinata di Piazza di Spagna - La Barcaccia [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
            'description': 'Roma, veduta sulla Scalinata di Piazza di Spagna e sulla Barcaccia',
            'is_live': True,
        },
        'params': {
            # Live HLS stream; download is skipped in the test suite.
            'skip_download': True,
        }
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The player embeds the HLS (m3u8) url in an inline JS config
        # under either a 'url' or 'source' key.
        stream_url = self._search_regex(
            r'(?:url|source)\s*:\s*(["\'])(?P<url>(?:https?:)?//.+?\.m3u8.*?)\1', webpage,
            'stream url', group='url')
        title = self._og_search_title(webpage)
        description = self._og_search_description(webpage)
        return {
            'id': video_id,
            'url': stream_url,
            'ext': 'mp4',
            # Live streams get a timestamped title.
            'title': self._live_title(title),
            'description': description,
            'is_live': True,
        }
| unlicense |
xianjunzhengbackup/Cloud-Native-Python | env/lib/python3.6/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py | 2360 | 3778 | """The match_hostname() function from Python 3.3.3, essential when using SSL."""
# Note: This file is under the PSF license as the code comes from the python
# stdlib. http://docs.python.org/3/license.html
import re
__version__ = '3.4.0.2'
class CertificateError(ValueError):
    """Raised when a certificate does not match the expected hostname."""
    pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError("empty or no certificate")
    dnsnames = []
    # Check dNSName entries of subjectAltName first; they take
    # precedence over the subject's commonName.
    san = cert.get('subjectAltName', ())
    for key, value in san:
        if key == 'DNS':
            if _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
    if not dnsnames:
        # The subject is only checked when there is no dNSName entry
        # in subjectAltName
        for sub in cert.get('subject', ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == 'commonName':
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)
    # Nothing matched: report all candidate names that were tried.
    if len(dnsnames) > 1:
        raise CertificateError("hostname %r "
            "doesn't match either of %s"
            % (hostname, ', '.join(map(repr, dnsnames))))
    elif len(dnsnames) == 1:
        raise CertificateError("hostname %r "
            "doesn't match %r"
            % (hostname, dnsnames[0]))
    else:
        raise CertificateError("no appropriate commonName or "
            "subjectAltName fields were found")
firstblade/xbmc | lib/gtest/scripts/gen_gtest_pred_impl.py | 2538 | 21986 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""gen_gtest_pred_impl.py v0.1
Generates the implementation of Google Test predicate assertions and
accompanying tests.
Usage:
gen_gtest_pred_impl.py MAX_ARITY
where MAX_ARITY is a positive integer.
The command generates the implementation of up-to MAX_ARITY-ary
predicate assertions, and writes it to file gtest_pred_impl.h in the
directory where the script is. It also generates the accompanying
unit test in file gtest_pred_impl_unittest.cc.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import sys
import time
# Where this script is.
SCRIPT_DIR = os.path.dirname(sys.argv[0])
# Where to store the generated header.
HEADER = os.path.join(SCRIPT_DIR, '../include/gtest/gtest_pred_impl.h')
# Where to store the generated unit test.
UNIT_TEST = os.path.join(SCRIPT_DIR, '../test/gtest_pred_impl_unittest.cc')
def HeaderPreamble(n):
  """Returns the preamble for the header file.

  Args:
    n: the maximum arity of the predicate macros to be generated.
  """

  # A map that defines the values used in the preamble template.
  DEFS = {
      'today' : time.strftime('%m/%d/%Y'),
      'year' : time.strftime('%Y'),
      'command' : '%s %s' % (os.path.basename(sys.argv[0]), n),
      'n' : n
      }

  # The template below is the generated header's license banner,
  # documentation comment, and the shared GTEST_ASSERT_ macro.
  return (
"""// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'. DO NOT EDIT BY HAND!
//
// Implements a family of generic predicate assertion macros.
#ifndef GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
#define GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
// Makes sure this header is not included before gtest.h.
#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
# error Do not include gtest_pred_impl.h directly. Include gtest.h instead.
#endif // GTEST_INCLUDE_GTEST_GTEST_H_
// This header implements a family of generic predicate assertion
// macros:
//
// ASSERT_PRED_FORMAT1(pred_format, v1)
// ASSERT_PRED_FORMAT2(pred_format, v1, v2)
// ...
//
// where pred_format is a function or functor that takes n (in the
// case of ASSERT_PRED_FORMATn) values and their source expression
// text, and returns a testing::AssertionResult. See the definition
// of ASSERT_EQ in gtest.h for an example.
//
// If you don't care about formatting, you can use the more
// restrictive version:
//
// ASSERT_PRED1(pred, v1)
// ASSERT_PRED2(pred, v1, v2)
// ...
//
// where pred is an n-ary function or functor that returns bool,
// and the values v1, v2, ..., must support the << operator for
// streaming to std::ostream.
//
// We also define the EXPECT_* variations.
//
// For now we only support predicates whose arity is at most %(n)s.
// Please email googletestframework@googlegroups.com if you need
// support for higher arities.
// GTEST_ASSERT_ is the basic statement to which all of the assertions
// in this file reduce. Don't use this in your code.
#define GTEST_ASSERT_(expression, on_failure) \\
GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\
if (const ::testing::AssertionResult gtest_ar = (expression)) \\
; \\
else \\
on_failure(gtest_ar.failure_message())
""" % DEFS)
def Arity(n):
  """Returns the English name of the given arity."""

  if n < 0:
    return None
  # Arities 0..3 have dedicated English names; beyond that use 'N-ary'.
  names = ('nullary', 'unary', 'binary', 'ternary')
  if n < len(names):
    return names[n]
  return '%s-ary' % n
def Title(word):
  """Returns the given word in title case.  The difference between
  this and string's title() method is that Title('4-ary') is '4-ary'
  while '4-ary'.title() is '4-Ary'."""

  # Only the first character is upper-cased; the rest is untouched.
  return '%s%s' % (word[0].upper(), word[1:])
def OneTo(n):
  """Returns the list [1, 2, 3, ..., n]."""
  # NOTE(review): this script targets Python 2, where range() returns a
  # list; under Python 3 it would be a range object, which the callers
  # here (iteration and joins) also accept.
  return range(1, n + 1)
def Iter(n, format, sep=''):
  """Given a positive integer n, a format string that contains 0 or
  more '%s' format specs, and optionally a separator string, returns
  the join of n strings, each formatted with the format string on an
  iterator ranged from 1 to n.

  Example:
  Iter(3, 'v%s', sep=', ') returns 'v1, v2, v3'.
  """

  # Every '%s' in the format is filled with the same iteration index.
  spec_count = format.count('%s')
  pieces = [format % ((i,) * spec_count) for i in range(1, n + 1)]
  return sep.join(pieces)
def ImplementationForArity(n):
  """Returns the implementation of n-ary predicate assertions."""

  # A map the defines the values used in the implementation template.
  DEFS = {
      'n' : str(n),
      'vs' : Iter(n, 'v%s', sep=', '),
      'vts' : Iter(n, '#v%s', sep=', '),
      'arity' : Arity(n),
      'Arity' : Title(Arity(n))
      }

  # Helper function template: AssertPredNHelper<Pred, T1..TN>(...).
  impl = """
// Helper function for implementing {EXPECT|ASSERT}_PRED%(n)s. Don't use
// this in your code.
template <typename Pred""" % DEFS

  impl += Iter(n, """,
typename T%s""")

  impl += """>
AssertionResult AssertPred%(n)sHelper(const char* pred_text""" % DEFS

  impl += Iter(n, """,
const char* e%s""")

  impl += """,
Pred pred"""

  impl += Iter(n, """,
const T%s& v%s""")

  impl += """) {
if (pred(%(vs)s)) return AssertionSuccess();
""" % DEFS

  # Failure message: names each argument expression and its value.
  impl += ' return AssertionFailure() << pred_text << "("'

  impl += Iter(n, """
<< e%s""", sep=' << ", "')

  impl += ' << ") evaluates to false, where"'

  impl += Iter(n, """
<< "\\n" << e%s << " evaluates to " << v%s""")

  # The internal GTEST_PRED_FORMATn_/GTEST_PREDn_ macros plus the
  # public EXPECT_/ASSERT_ wrappers.
  impl += """;
}
// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
// Don't use this in your code.
#define GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, on_failure)\\
GTEST_ASSERT_(pred_format(%(vts)s, %(vs)s), \\
on_failure)
// Internal macro for implementing {EXPECT|ASSERT}_PRED%(n)s. Don't use
// this in your code.
#define GTEST_PRED%(n)s_(pred, %(vs)s, on_failure)\\
GTEST_ASSERT_(::testing::AssertPred%(n)sHelper(#pred""" % DEFS

  impl += Iter(n, """, \\
#v%s""")

  impl += """, \\
pred"""

  impl += Iter(n, """, \\
v%s""")

  impl += """), on_failure)
// %(Arity)s predicate assertion macros.
#define EXPECT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define EXPECT_PRED%(n)s(pred, %(vs)s) \\
GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define ASSERT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_FATAL_FAILURE_)
#define ASSERT_PRED%(n)s(pred, %(vs)s) \\
GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_FATAL_FAILURE_)
""" % DEFS

  return impl
def HeaderPostamble():
  """Returns the postamble for the header file."""

  # Closes the include guard opened by HeaderPreamble().
  return """
#endif // GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
"""
def GenerateFile(path, content):
  """Given a file path and a content string, overwrites it with the
  given content."""
  print 'Updating file %s . . .' % path
  # Python 2 idioms: file() as a constructor and print-chevron redirection.
  # The trailing comma suppresses print's newline, so exactly `content`
  # (which already ends with a newline) is written to the file.
  f = file(path, 'w+')
  print >>f, content,
  f.close()
  print 'File %s has been updated.' % path
def GenerateHeader(n):
  """Given the maximum arity n, updates the header file that implements
  the predicate assertions."""
  # Assemble the header as preamble + one implementation section per
  # arity (1..n) + postamble, then write it out in a single pass.
  sections = [HeaderPreamble(n)]
  sections.extend(ImplementationForArity(i) for i in OneTo(n))
  sections.append(HeaderPostamble())
  GenerateFile(HEADER, ''.join(sections))
def UnitTestPreamble():
  """Returns the preamble for the unit test file."""
  # A map that defines the values used in the preamble template.
  # 'today'/'command' are embedded in the generated header so readers know
  # when and how the file was produced (and to regenerate, not hand-edit).
  DEFS = {
    'today' : time.strftime('%m/%d/%Y'),
    'year' : time.strftime('%Y'),
    'command' : '%s %s' % (os.path.basename(sys.argv[0]), sys.argv[1]),
  }
  return (
"""// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'.  DO NOT EDIT BY HAND!
// Regression test for gtest_pred_impl.h
//
// This file is generated by a script and quite long.  If you intend to
// learn how Google Test works by reading its unit tests, read
// gtest_unittest.cc instead.
//
// This is intended as a regression test for the Google Test predicate
// assertions.  We compile it as part of the gtest_unittest target
// only to keep the implementation tidy and compact, as it is quite
// involved to set up the stage for testing Google Test using Google
// Test itself.
//
// Currently, gtest_unittest takes ~11 seconds to run in the testing
// daemon.  In the future, if it grows too large and needs much more
// time to finish, we should consider separating this file into a
// stand-alone regression test.
#include <iostream>
#include "gtest/gtest.h"
#include "gtest/gtest-spi.h"
// A user-defined data type.
struct Bool {
  explicit Bool(int val) : value(val != 0) {}
  bool operator>(int n) const { return value > Bool(n).value; }
  Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); }
  bool operator==(const Bool& rhs) const { return value == rhs.value; }
  bool value;
};
// Enables Bool to be used in assertions.
std::ostream& operator<<(std::ostream& os, const Bool& x) {
  return os << (x.value ? "true" : "false");
}
""" % DEFS)
def TestsForArity(n):
  """Returns the tests for n-ary predicate assertions.

  The returned string contains the C++ sample predicates and predicate
  formatters, the shared test fixture, and all generated TEST_F cases for
  arity n; the caller concatenates one such section per arity into the
  generated unit test file.
  """
  # A map that defines the values used in the template for the tests.
  DEFS = {
    'n' : n,
    'es' : Iter(n, 'e%s', sep=', '),
    'vs' : Iter(n, 'v%s', sep=', '),
    'vts' : Iter(n, '#v%s', sep=', '),
    'tvs' : Iter(n, 'T%s v%s', sep=', '),
    'int_vs' : Iter(n, 'int v%s', sep=', '),
    'Bool_vs' : Iter(n, 'Bool v%s', sep=', '),
    'types' : Iter(n, 'typename T%s', sep=', '),
    'v_sum' : Iter(n, 'v%s', sep=' + '),
    'arity' : Arity(n),
    'Arity' : Title(Arity(n)),
  }
  tests = (
"""// Sample functions/functors for testing %(arity)s predicate assertions.
// A %(arity)s predicate function.
template <%(types)s>
bool PredFunction%(n)s(%(tvs)s) {
  return %(v_sum)s > 0;
}
// The following two functions are needed to circumvent a bug in
// gcc 2.95.3, which sometimes has problem with the above template
// function.
bool PredFunction%(n)sInt(%(int_vs)s) {
  return %(v_sum)s > 0;
}
bool PredFunction%(n)sBool(%(Bool_vs)s) {
  return %(v_sum)s > 0;
}
""" % DEFS)
  tests += """
// A %(arity)s predicate functor.
struct PredFunctor%(n)s {
  template <%(types)s>
  bool operator()(""" % DEFS
  tests += Iter(n, 'const T%s& v%s', sep=""",
                  """)
  tests += """) {
    return %(v_sum)s > 0;
  }
};
""" % DEFS
  tests += """
// A %(arity)s predicate-formatter function.
template <%(types)s>
testing::AssertionResult PredFormatFunction%(n)s(""" % DEFS
  tests += Iter(n, 'const char* e%s', sep=""",
                                             """)
  tests += Iter(n, """,
                                             const T%s& v%s""")
  tests += """) {
  if (PredFunction%(n)s(%(vs)s))
    return testing::AssertionSuccess();
  return testing::AssertionFailure()
         << """ % DEFS
  tests += Iter(n, 'e%s', sep=' << " + " << ')
  tests += """
         << " is expected to be positive, but evaluates to "
         << %(v_sum)s << ".";
}
""" % DEFS
  tests += """
// A %(arity)s predicate-formatter functor.
struct PredFormatFunctor%(n)s {
  template <%(types)s>
  testing::AssertionResult operator()(""" % DEFS
  tests += Iter(n, 'const char* e%s', sep=""",
                                      """)
  tests += Iter(n, """,
                                      const T%s& v%s""")
  tests += """) const {
    return PredFormatFunction%(n)s(%(es)s, %(vs)s);
  }
};
""" % DEFS
  tests += """
// Tests for {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
class Predicate%(n)sTest : public testing::Test {
 protected:
  virtual void SetUp() {
    expected_to_finish_ = true;
    finished_ = false;""" % DEFS
  tests += """
    """ + Iter(n, 'n%s_ = ') + """0;
  }
"""
  tests += """
  virtual void TearDown() {
    // Verifies that each of the predicate's arguments was evaluated
    // exactly once."""
  tests += ''.join(["""
    EXPECT_EQ(1, n%s_) <<
        "The predicate assertion didn't evaluate argument %s "
        "exactly once.";""" % (i, i + 1) for i in OneTo(n)])
  # NOTE: fixed typo "unexpactedly" -> "unexpectedly" in the generated
  # failure message below.
  tests += """
    // Verifies that the control flow in the test function is expected.
    if (expected_to_finish_ && !finished_) {
      FAIL() << "The predicate assertion unexpectedly aborted the test.";
    } else if (!expected_to_finish_ && finished_) {
      FAIL() << "The failed predicate assertion didn't abort the test "
                "as expected.";
    }
  }
  // true iff the test function is expected to run to finish.
  static bool expected_to_finish_;
  // true iff the test function did run to finish.
  static bool finished_;
""" % DEFS
  tests += Iter(n, """
  static int n%s_;""")
  tests += """
};
bool Predicate%(n)sTest::expected_to_finish_;
bool Predicate%(n)sTest::finished_;
""" % DEFS
  tests += Iter(n, """int Predicate%%(n)sTest::n%s_;
""") % DEFS
  tests += """
typedef Predicate%(n)sTest EXPECT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest EXPECT_PRED%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED%(n)sTest;
""" % DEFS
  def GenTest(use_format, use_assert, expect_failure,
              use_functor, use_user_type):
    """Returns the test for a predicate assertion macro.

    Args:
      use_format:     true iff the assertion is a *_PRED_FORMAT*.
      use_assert:     true iff the assertion is a ASSERT_*.
      expect_failure: true iff the assertion is expected to fail.
      use_functor:    true iff the first argument of the assertion is
                      a functor (as opposed to a function)
      use_user_type:  true iff the predicate functor/function takes
                      argument(s) of a user-defined type.

    Example:
      GenTest(1, 0, 0, 1, 0) returns a test that tests the behavior
      of a successful EXPECT_PRED_FORMATn() that takes a functor
      whose arguments have built-in types."""
    if use_assert:
      assrt = 'ASSERT'  # 'assert' is reserved, so we cannot use
                        # that identifier here.
    else:
      assrt = 'EXPECT'
    assertion = assrt + '_PRED'
    if use_format:
      pred_format = 'PredFormat'
      assertion += '_FORMAT'
    else:
      pred_format = 'Pred'
    assertion += '%(n)s' % DEFS
    if use_functor:
      pred_format_type = 'functor'
      pred_format += 'Functor%(n)s()'
    else:
      pred_format_type = 'function'
      pred_format += 'Function%(n)s'
      # Only the plain (non-formatter) functions have the Int/Bool
      # overloads that work around the gcc 2.95.3 template bug.
      if not use_format:
        if use_user_type:
          pred_format += 'Bool'
        else:
          pred_format += 'Int'
    test_name = pred_format_type.title()
    if use_user_type:
      arg_type = 'user-defined type (Bool)'
      test_name += 'OnUserType'
      if expect_failure:
        arg = 'Bool(n%s_++)'
      else:
        arg = 'Bool(++n%s_)'
    else:
      arg_type = 'built-in type (int)'
      test_name += 'OnBuiltInType'
      # Post-increment yields 0 (a failing predicate value) while still
      # bumping the evaluation counter; pre-increment yields 1 (success).
      if expect_failure:
        arg = 'n%s_++'
      else:
        arg = '++n%s_'
    if expect_failure:
      successful_or_failed = 'failed'
      expected_or_not = 'expected.'
      test_name += 'Failure'
    else:
      successful_or_failed = 'successful'
      expected_or_not = 'UNEXPECTED!'
      test_name += 'Success'
    # A map that defines the values used in the test template.
    defs = DEFS.copy()
    defs.update({
      'assert' : assrt,
      'assertion' : assertion,
      'test_name' : test_name,
      'pf_type' : pred_format_type,
      'pf' : pred_format,
      'arg_type' : arg_type,
      'arg' : arg,
      'successful' : successful_or_failed,
      'expected' : expected_or_not,
    })
    test = """
// Tests a %(successful)s %(assertion)s where the
// predicate-formatter is a %(pf_type)s on a %(arg_type)s.
TEST_F(%(assertion)sTest, %(test_name)s) {""" % defs
    indent = (len(assertion) + 3)*' '
    extra_indent = ''
    # A failing assertion is wrapped in EXPECT_{,NON}FATAL_FAILURE, which
    # adds one level of indentation to the generated body.
    if expect_failure:
      extra_indent = '  '
      if use_assert:
        test += """
  expected_to_finish_ = false;
  EXPECT_FATAL_FAILURE({  // NOLINT"""
      else:
        test += """
  EXPECT_NONFATAL_FAILURE({  // NOLINT"""
    test += '\n' + extra_indent + """  %(assertion)s(%(pf)s""" % defs
    test = test % defs
    test += Iter(n, ',\n' + indent + extra_indent + '%(arg)s' % defs)
    test += ');\n' + extra_indent + '  finished_ = true;\n'
    if expect_failure:
      test += '  }, "");\n'
    test += '}\n'
    return test
  # Generates tests for all 2**5 = 32 combinations.
  tests += ''.join([GenTest(use_format, use_assert, expect_failure,
                            use_functor, use_user_type)
                    for use_format in [0, 1]
                    for use_assert in [0, 1]
                    for expect_failure in [0, 1]
                    for use_functor in [0, 1]
                    for use_user_type in [0, 1]
                    ])
  return tests
def UnitTestPostamble():
  """Returns the postamble for the tests."""
  # Nothing currently needs to follow the generated tests; this exists
  # for symmetry with HeaderPostamble().
  postamble = ''
  return postamble
def GenerateUnitTest(n):
  """Returns the tests for up-to n-ary predicate assertions."""
  # Same shape as GenerateHeader(): preamble + one test section per
  # arity (1..n) + postamble, written out in a single pass.
  parts = [UnitTestPreamble()]
  parts.extend(TestsForArity(i) for i in OneTo(n))
  parts.append(UnitTestPostamble())
  GenerateFile(UNIT_TEST, ''.join(parts))
def _Main():
  """The entry point of the script. Generates the header file and its
  unit test."""
  # Require exactly one command-line argument: the maximum predicate
  # arity n to generate code for.
  if len(sys.argv) != 2:
    print __doc__
    print 'Author: ' + __author__
    sys.exit(1)
  n = int(sys.argv[1])
  GenerateHeader(n)
  GenerateUnitTest(n)
# Only run the generator when executed as a script (not on import).
if __name__ == '__main__':
  _Main()
| gpl-2.0 |
GOFAI/glasstone | examples/wseg10.py | 1 | 1738 | import matplotlib.pyplot as plt
# Example: plot WSEG-10 fallout H+1 dose-rate contours for a small surface
# burst on a grid of statute miles, with a discrete colorbar legend.
# NOTE(review): `cm` is imported but unused here.
from matplotlib import cm, colors, colorbar
import numpy as np
from glasstone.fallout import WSEG10
# ground zero x & y locations (st. mi)
gzx = 1
gzy = 1
# yield in megatons (0.01 MT = 10 kT, matching the plot title below)
yld = 0.01
# fission fraction
ff = 1.0
# wind speed (mph)
wind_speed = 1.151515 * 2.0
# wind direction (in degrees with wind from north = 0)
wind_direction = 225
# wind shear (change in mph per kilofoot change in altitude)
wind_shear = 0.23
# Evaluation grid covering the plume area, in statute miles.
x = np.arange(-1, 10, 0.1)
y = np.arange(-1, 10, 0.1)
X, Y = np.meshgrid(x, y)
# use WSEG10's native units
w = WSEG10(gzx, gzy, yld, ff, wind_speed, wind_direction, wind_shear, dunits='mi', wunits='mph', yunits='MT', shearunits='mph/kilofoot')
# D_Hplus1 is a scalar function; vectorize it so it can be applied to the
# whole meshgrid at once.
dose = np.vectorize(w.D_Hplus1)
Z = dose(X, Y, dunits='mi', doseunits='Roentgen')
fig = plt.figure()
ax1 = fig.add_axes([0.1, 0.1, 0.7, 0.8])
ax2 = fig.add_axes([0.85, 0.1, 0.05, 0.75])
# Contour levels (R/hr) and matching colors; red is reused as the 'over'
# color in the colorbar below.
CS = ax1.contour(X, Y, Z, [100, 300, 500, 1000, 3000], colors=('b', 'g', 'c', 'y', 'r'), linewidths=2)
cmap = colors.ListedColormap(['b', 'g', 'c', 'y'])
cmap.set_over('r')
cmap.set_under('w')
norm = colors.BoundaryNorm([100, 300, 500, 1000, 3000], cmap.N)
cb = colorbar.ColorbarBase(ax2, cmap=cmap,
                           norm=norm,
                           boundaries=[0] + [100, 300, 500, 1000, 3000] + [5000],
                           extend='both',
                           extendfrac='auto')
cb.set_label(r'$H+1$ dose rate $(R/hr)$')
ax1.grid(True)
ax1.set_title('WSEG-10 $H+1$ dose rate contours for 10kT burst')
ax1.text(-0.5, 7.5, '$Wind: SW, 2.30303 mi/hr$\n$Shear: 0.23 mi/hr-kilofeet$\n$Yield: 10kT$\n$GZ:1,1$\n$FF: 1.0$\n$HOB: 0$')
ax1.set_ylim([-0.5, 11])
ax1.set_ylabel('$st.$ $miles$')
ax1.set_xlabel('$st.$ $miles$')
plt.show()
| mit |
xyzz/vcmi-build | project/jni/python/src/Demo/tkinter/matt/menu-all-types-of-entries.py | 35 | 9110 | from Tkinter import *
# some vocabulary to keep from getting confused. This terminology
# is something I cooked up for this file, but follows the man pages
# pretty closely
#
#
#
# This is a MENUBUTTON
# V
# +-------------+
# | |
#
# +------------++------------++------------+
# | || || |
# | File || Edit || Options | <-------- the MENUBAR
# | || || |
# +------------++------------++------------+
# | New... |
# | Open... |
# | Print |
# | | <-------- This is a MENU. The lines of text in the menu are
# | | MENU ENTRIES
# | +---------------+
# | Open Files > | file1 |
# | | file2 |
# | | another file | <------ this cascading part is also a MENU
# +----------------| |
# | |
# | |
# | |
# +---------------+
# some miscellaneous callbacks
def new_file():
    # Stub handler for the 'New...' menu entry (Python 2 print statement).
    print "opening new file"
def open_file():
    # Stub handler for the 'Open...' menu entry.
    print "opening OLD file"
def print_something():
    # Generic handler shared by several demo menu entries.
    print "picked a menu item"
anchovies = 0
def print_anchovies():
    # Toggle the module-level `anchovies` flag; bound to the 'Anchovy'
    # checkbutton entry, so the state lives in Python rather than in Tk.
    global anchovies
    anchovies = not anchovies
    print "anchovies?", anchovies
def makeCommandMenu():
    """Build the 'Simple Button Commands' menubutton (packed into the
    global mBar) demonstrating plain command entries, a disabled entry,
    a custom font, a bitmap entry, a separator and colored entries."""
    # make menu button
    Command_button = Menubutton(mBar, text='Simple Button Commands',
                                underline=0)
    Command_button.pack(side=LEFT, padx="2m")
    # make the pulldown part of the File menu. The parameter passed is the master.
    # we attach it to the button as a python attribute called "menu" by convention.
    # hopefully this isn't too confusing...
    Command_button.menu = Menu(Command_button)
    # just to be cute, let's disable the undo option:
    Command_button.menu.add_command(label="Undo")
    # undo is the 0th entry...
    Command_button.menu.entryconfig(0, state=DISABLED)
    Command_button.menu.add_command(label='New...', underline=0,
                                    command=new_file)
    Command_button.menu.add_command(label='Open...', underline=0,
                                    command=open_file)
    Command_button.menu.add_command(label='Different Font', underline=0,
                                    font='-*-helvetica-*-r-*-*-*-180-*-*-*-*-*-*',
                                    command=print_something)
    # we can make bitmaps be menu entries too. File format is X11 bitmap.
    # if you use XV, save it under X11 bitmap format.
    Command_button.menu.add_command(
        bitmap="info")
        #bitmap='@/home/mjc4y/dilbert/project.status.is.doomed.last.panel.bm')
    # this is just a line
    Command_button.menu.add('separator')
    # change the color
    Command_button.menu.add_command(label='Quit', underline=0,
                                    background='red',
                                    activebackground='green',
                                    command=Command_button.quit)
    # set up a pointer from the file menubutton back to the file menu
    Command_button['menu'] = Command_button.menu
    return Command_button
def makeCascadeMenu():
    """Build the 'Cascading Menus' menubutton, demonstrating two levels
    of cascading submenus; menus are defined deepest-level first."""
    # make menu button
    Cascade_button = Menubutton(mBar, text='Cascading Menus', underline=0)
    Cascade_button.pack(side=LEFT, padx="2m")
    # the primary pulldown
    Cascade_button.menu = Menu(Cascade_button)
    # this is the menu that cascades from the primary pulldown....
    Cascade_button.menu.choices = Menu(Cascade_button.menu)
    # ...and this is a menu that cascades from that.
    Cascade_button.menu.choices.wierdones = Menu(Cascade_button.menu.choices)
    # then you define the menus from the deepest level on up.
    Cascade_button.menu.choices.wierdones.add_command(label='avacado')
    Cascade_button.menu.choices.wierdones.add_command(label='belgian endive')
    Cascade_button.menu.choices.wierdones.add_command(label='beefaroni')
    # definition of the menu one level up...
    Cascade_button.menu.choices.add_command(label='Chocolate')
    Cascade_button.menu.choices.add_command(label='Vanilla')
    Cascade_button.menu.choices.add_command(label='TuttiFruiti')
    Cascade_button.menu.choices.add_command(label='WopBopaLoopBapABopBamBoom')
    Cascade_button.menu.choices.add_command(label='Rocky Road')
    Cascade_button.menu.choices.add_command(label='BubbleGum')
    Cascade_button.menu.choices.add_cascade(
        label='Wierd Flavors',
        menu=Cascade_button.menu.choices.wierdones)
    # and finally, the definition for the top level
    Cascade_button.menu.add_cascade(label='more choices',
                                    menu=Cascade_button.menu.choices)
    Cascade_button['menu'] = Cascade_button.menu
    return Cascade_button
def makeCheckbuttonMenu():
    """Build the 'Checkbutton Menus' menubutton of toggleable entries;
    the 'Anchovy' entry is wired to a callback and toggled on at start."""
    # NOTE(review): 'fred' is never used anywhere; this global declaration
    # looks like a leftover.
    global fred
    # make menu button
    Checkbutton_button = Menubutton(mBar, text='Checkbutton Menus',
                                    underline=0)
    Checkbutton_button.pack(side=LEFT, padx='2m')
    # the primary pulldown
    Checkbutton_button.menu = Menu(Checkbutton_button)
    # and all the check buttons. Note that the "variable" "onvalue" and "offvalue" options
    # are not supported correctly at present. You have to do all your application
    # work through the callback.
    Checkbutton_button.menu.add_checkbutton(label='Pepperoni')
    Checkbutton_button.menu.add_checkbutton(label='Sausage')
    Checkbutton_button.menu.add_checkbutton(label='Extra Cheese')
    # so here's a callback
    Checkbutton_button.menu.add_checkbutton(label='Anchovy',
                                            command=print_anchovies)
    # and start with anchovies selected to be on. Do this by
    # calling invoke on this menu option. To refer to the "anchovy" menu
    # entry we need to know its index. To do this, we use the index method
    # which takes arguments of several forms:
    #
    # argument what it does
    # -----------------------------------
    # a number -- this is useless.
    # "last" -- last option in the menu
    # "none" -- used with the activate command. see the man page on menus
    # "active" -- the currently active menu option. A menu option is made active
    # with the 'activate' method
    # "@number" -- where 'number' is an integer and is treated like a y coordinate in pixels
    # string pattern -- this is the option used below, and attempts to match "labels" using the
    # rules of Tcl_StringMatch
    Checkbutton_button.menu.invoke(Checkbutton_button.menu.index('Anchovy'))
    # set up a pointer from the file menubutton back to the file menu
    Checkbutton_button['menu'] = Checkbutton_button.menu
    return Checkbutton_button
def makeRadiobuttonMenu():
    """Build the 'Radiobutton Menus' menubutton of mutually-exclusive
    entries (no callbacks attached in this demo)."""
    # make menu button
    Radiobutton_button = Menubutton(mBar, text='Radiobutton Menus',
                                    underline=0)
    Radiobutton_button.pack(side=LEFT, padx='2m')
    # the primary pulldown
    Radiobutton_button.menu = Menu(Radiobutton_button)
    # and all the Radio buttons. Note that the "variable" "onvalue" and "offvalue" options
    # are not supported correctly at present. You have to do all your application
    # work through the callback.
    Radiobutton_button.menu.add_radiobutton(label='Republican')
    Radiobutton_button.menu.add_radiobutton(label='Democrat')
    Radiobutton_button.menu.add_radiobutton(label='Libertarian')
    Radiobutton_button.menu.add_radiobutton(label='Commie')
    Radiobutton_button.menu.add_radiobutton(label='Facist')
    Radiobutton_button.menu.add_radiobutton(label='Labor Party')
    Radiobutton_button.menu.add_radiobutton(label='Torie')
    Radiobutton_button.menu.add_radiobutton(label='Independent')
    Radiobutton_button.menu.add_radiobutton(label='Anarchist')
    Radiobutton_button.menu.add_radiobutton(label='No Opinion')
    # set up a pointer from the file menubutton back to the file menu
    Radiobutton_button['menu'] = Radiobutton_button.menu
    return Radiobutton_button
def makeDisabledMenu():
    """Build a menubutton that is permanently greyed out, showing the
    standard way to disable an entire menu."""
    dead_button = Menubutton(mBar, text='Dead Menu', underline=0)
    dead_button.pack(side=LEFT, padx='2m')
    # this is the standard way of turning off a whole menu
    dead_button["state"] = DISABLED
    return dead_button
#################################################
#### Main starts here ...
root = Tk()
# make a menu bar
mBar = Frame(root, relief=RAISED, borderwidth=2)
mBar.pack(fill=X)
# Build one menubutton per demo flavour; each helper packs itself into mBar.
Command_button = makeCommandMenu()
Cascade_button = makeCascadeMenu()
Checkbutton_button = makeCheckbuttonMenu()
Radiobutton_button = makeRadiobuttonMenu()
NoMenu = makeDisabledMenu()
# finally, install the buttons in the menu bar.
# This allows for scanning from one menubutton to the next.
mBar.tk_menuBar(Command_button, Cascade_button, Checkbutton_button, Radiobutton_button, NoMenu)
root.title('menu demo')
root.iconname('menu demo')
root.mainloop()
| lgpl-2.1 |
synergeticsedx/deployment-wipro | common/lib/xmodule/xmodule/mako_module.py | 27 | 2004 | """
Code to handle mako templating for XModules and XBlocks.
"""
from xblock.fragment import Fragment
from .x_module import XModuleDescriptor, DescriptorSystem, shim_xmodule_js
class MakoDescriptorSystem(DescriptorSystem):
    """Descriptor runtime that carries a mako ``render_template`` callable.

    ``render_template(template_name, context)`` is supplied by the hosting
    runtime and is used by mako-based blocks to produce their HTML.
    """
    def __init__(self, render_template, **kwargs):
        super(MakoDescriptorSystem, self).__init__(**kwargs)
        # Stored on the system so mako-based blocks can look it up.
        self.render_template = render_template
class MakoTemplateBlockBase(object):
    """
    XBlock intended as a mixin that uses a mako template
    to specify the module html.
    Expects the descriptor to have the `mako_template` attribute set
    with the name of the template to render, and it will pass
    the descriptor as the `module` parameter to that template
    """
    # pylint: disable=no-member
    def __init__(self, *args, **kwargs):
        super(MakoTemplateBlockBase, self).__init__(*args, **kwargs)
        # Fail fast at construction if the runtime cannot render mako
        # templates, rather than erroring later inside studio_view().
        if getattr(self.runtime, 'render_template', None) is None:
            raise TypeError(
                '{runtime} must have a render_template function'
                ' in order to use a MakoDescriptor'.format(
                    runtime=self.runtime,
                )
            )
    def get_context(self):
        """
        Return the context to render the mako template with
        """
        return {
            'module': self,
            'editable_metadata_fields': self.editable_metadata_fields
        }
    def studio_view(self, context):  # pylint: disable=unused-argument
        """
        View used in Studio.

        Renders `self.mako_template` with `get_context()`, wraps the HTML
        in a Fragment and attaches the XModule JS shim.
        """
        # pylint: disable=no-member
        fragment = Fragment(
            self.system.render_template(self.mako_template, self.get_context())
        )
        shim_xmodule_js(self, fragment)
        return fragment
class MakoModuleDescriptor(MakoTemplateBlockBase, XModuleDescriptor):  # pylint: disable=abstract-method
    """
    Mixin to use for XModule descriptors.
    """
    # No static resources directory for mako-based descriptors.
    resources_dir = None
    def get_html(self):
        # Render the studio (editing) view and return just its HTML content.
        return self.studio_view(None).content
| agpl-3.0 |
qedi-r/home-assistant | homeassistant/components/remember_the_milk/__init__.py | 3 | 12661 | """Support to interact with Remember The Milk."""
import json
import logging
import os
from rtmapi import Rtm, RtmRequestFailedException
import voluptuous as vol
from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_NAME, CONF_TOKEN, STATE_OK
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
# httplib2 is a transitive dependency from RtmAPI. If this dependency is not
# set explicitly, the library does not work.
_LOGGER = logging.getLogger(__name__)
DOMAIN = "remember_the_milk"
DEFAULT_NAME = DOMAIN
GROUP_NAME_RTM = "remember the milk accounts"
CONF_SHARED_SECRET = "shared_secret"
CONF_ID_MAP = "id_map"
CONF_LIST_ID = "list_id"
CONF_TIMESERIES_ID = "timeseries_id"
CONF_TASK_ID = "task_id"
RTM_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_SHARED_SECRET): cv.string,
}
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.All(cv.ensure_list, [RTM_SCHEMA])}, extra=vol.ALLOW_EXTRA
)
CONFIG_FILE_NAME = ".remember_the_milk.conf"
SERVICE_CREATE_TASK = "create_task"
SERVICE_COMPLETE_TASK = "complete_task"
SERVICE_SCHEMA_CREATE_TASK = vol.Schema(
{vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_ID): cv.string}
)
SERVICE_SCHEMA_COMPLETE_TASK = vol.Schema({vol.Required(CONF_ID): cv.string})
def setup(hass, config):
    """Set up the Remember the milk component."""
    component = EntityComponent(_LOGGER, DOMAIN, hass, group_name=GROUP_NAME_RTM)
    stored_rtm_config = RememberTheMilkConfiguration(hass)

    for rtm_config in config[DOMAIN]:
        account_name = rtm_config[CONF_NAME]
        _LOGGER.info("Adding Remember the milk account %s", account_name)
        api_key = rtm_config[CONF_API_KEY]
        shared_secret = rtm_config[CONF_SHARED_SECRET]

        token = stored_rtm_config.get_token(account_name)
        if not token:
            # No cached token yet: run the interactive configurator flow.
            _register_new_account(
                hass, account_name, api_key, shared_secret, stored_rtm_config, component
            )
            continue

        # A token from a previous registration exists; use it directly.
        _LOGGER.debug("found token for account %s", account_name)
        _create_instance(
            hass,
            account_name,
            api_key,
            shared_secret,
            token,
            stored_rtm_config,
            component,
        )

    _LOGGER.debug("Finished adding all Remember the milk accounts")
    return True
def _create_instance(
    hass, account_name, api_key, shared_secret, token, stored_rtm_config, component
):
    """Create the RTM entity for one account and register its services."""
    entity = RememberTheMilk(
        account_name, api_key, shared_secret, token, stored_rtm_config
    )
    component.add_entities([entity])

    # Each account gets its own pair of services, e.g.
    # remember_the_milk.<account>_create_task.
    for suffix, handler, schema in (
        ("create_task", entity.create_task, SERVICE_SCHEMA_CREATE_TASK),
        ("complete_task", entity.complete_task, SERVICE_SCHEMA_COMPLETE_TASK),
    ):
        hass.services.register(
            DOMAIN, f"{account_name}_{suffix}", handler, schema=schema
        )
def _register_new_account(
    hass, account_name, api_key, shared_secret, stored_rtm_config, component
):
    """Drive RTM's one-time desktop authentication flow via the configurator.

    Requests an auth URL + frob from RTM, shows the URL to the user through
    the configurator, and exchanges the frob for a permanent token once the
    user confirms login.
    """
    request_id = None
    configurator = hass.components.configurator
    api = Rtm(api_key, shared_secret, "write", None)
    url, frob = api.authenticate_desktop()
    _LOGGER.debug("Sent authentication request to server")

    def register_account_callback(_):
        """Exchange the frob for a token after the user confirmed login."""
        api.retrieve_token(frob)
        token = api.token
        if api.token is None:
            _LOGGER.error("Failed to register, please try again")
            configurator.notify_errors(
                request_id, "Failed to register, please try again."
            )
            return
        # Persist the token so later startups skip this flow entirely.
        stored_rtm_config.set_token(account_name, token)
        _LOGGER.debug("Retrieved new token from server")
        _create_instance(
            hass,
            account_name,
            api_key,
            shared_secret,
            token,
            stored_rtm_config,
            component,
        )
        configurator.request_done(request_id)

    request_id = configurator.async_request_config(
        f"{DOMAIN} - {account_name}",
        callback=register_account_callback,
        # BUG FIX: the first fragment previously ended in "to" with no
        # trailing space, so the user saw "...Remember The Milk toconnect
        # your account."
        description="You need to log in to Remember The Milk to "
        + "connect your account. \n\n"
        + 'Step 1: Click on the link "Remember The Milk login"\n\n'
        + 'Step 2: Click on "login completed"',
        link_name="Remember The Milk login",
        link_url=url,
        submit_caption="login completed",
    )
class RememberTheMilkConfiguration:
    """Internal configuration data for RememberTheMilk class.

    This class stores the authentication token it gets from the backend,
    plus the mapping from Home Assistant task ids to RTM id triples, in a
    JSON file inside the Home Assistant config directory.
    """
    def __init__(self, hass):
        """Create new instance of configuration."""
        self._config_file_path = hass.config.path(CONFIG_FILE_NAME)
        if not os.path.isfile(self._config_file_path):
            # First run: start empty; the file is only written once there
            # is something to store.
            self._config = dict()
            return
        try:
            _LOGGER.debug("Loading configuration from file: %s", self._config_file_path)
            with open(self._config_file_path, "r") as config_file:
                self._config = json.load(config_file)
        except ValueError:
            # Corrupt JSON: fall back to an empty configuration rather than
            # failing the whole component setup.
            _LOGGER.error(
                "Failed to load configuration file, creating a " "new one: %s",
                self._config_file_path,
            )
            self._config = dict()
    def save_config(self):
        """Write the configuration to a file."""
        with open(self._config_file_path, "w") as config_file:
            json.dump(self._config, config_file)
    def get_token(self, profile_name):
        """Get the server token for a profile, or None if not registered."""
        if profile_name in self._config:
            return self._config[profile_name][CONF_TOKEN]
        return None
    def set_token(self, profile_name, token):
        """Store a new server token for a profile."""
        self._initialize_profile(profile_name)
        self._config[profile_name][CONF_TOKEN] = token
        self.save_config()
    def delete_token(self, profile_name):
        """Delete a token for a profile.

        Usually called when the token has expired.
        """
        # Removes the whole profile (token and id map) and persists.
        self._config.pop(profile_name, None)
        self.save_config()
    def _initialize_profile(self, profile_name):
        """Initialize the data structures for a profile."""
        if profile_name not in self._config:
            self._config[profile_name] = dict()
        if CONF_ID_MAP not in self._config[profile_name]:
            self._config[profile_name][CONF_ID_MAP] = dict()
    def get_rtm_id(self, profile_name, hass_id):
        """Get the RTM ids for a Home Assistant task ID.

        The id of a RTM tasks consists of the tuple:
        list id, timeseries id and the task id.
        Returns None when no mapping is stored for hass_id.
        """
        self._initialize_profile(profile_name)
        ids = self._config[profile_name][CONF_ID_MAP].get(hass_id)
        if ids is None:
            return None
        return ids[CONF_LIST_ID], ids[CONF_TIMESERIES_ID], ids[CONF_TASK_ID]
    def set_rtm_id(self, profile_name, hass_id, list_id, time_series_id, rtm_task_id):
        """Add/Update the RTM task ID for a Home Assistant task ID."""
        self._initialize_profile(profile_name)
        id_tuple = {
            CONF_LIST_ID: list_id,
            CONF_TIMESERIES_ID: time_series_id,
            CONF_TASK_ID: rtm_task_id,
        }
        self._config[profile_name][CONF_ID_MAP][hass_id] = id_tuple
        self.save_config()
    def delete_rtm_id(self, profile_name, hass_id):
        """Delete a key mapping."""
        self._initialize_profile(profile_name)
        if hass_id in self._config[profile_name][CONF_ID_MAP]:
            del self._config[profile_name][CONF_ID_MAP][hass_id]
        self.save_config()
class RememberTheMilk(Entity):
    """Representation of an interface to Remember The Milk."""

    def __init__(self, name, api_key, shared_secret, token, rtm_config):
        """Create new instance of Remember The Milk component."""
        self._name = name
        self._api_key = api_key
        self._shared_secret = shared_secret
        self._token = token
        self._rtm_config = rtm_config
        self._rtm_api = Rtm(api_key, shared_secret, "delete", token)
        self._token_valid = None
        # Validate the stored token right away so an expired registration is
        # detected (and cleared) at startup rather than on first service call.
        self._check_token()
        _LOGGER.debug("Instance created for account %s", self._name)

    def _check_token(self):
        """Check if the API token is still valid.

        If it is not valid any more, delete it from the configuration. This
        will trigger a new authentication process.
        """
        valid = self._rtm_api.token_valid()
        if not valid:
            _LOGGER.error(
                "Token for account %s is invalid. You need to " "register again!",
                self.name,
            )
            self._rtm_config.delete_token(self._name)
            self._token_valid = False
        else:
            self._token_valid = True
        return self._token_valid

    def create_task(self, call):
        """Create a new task on Remember The Milk.

        You can use the smart syntax to define the attributes of a new task,
        e.g. "my task #some_tag ^today" will add tag "some_tag" and set the
        due date to today.

        Returns True on success, False when the RTM API call failed.
        """
        try:
            task_name = call.data.get(CONF_NAME)
            hass_id = call.data.get(CONF_ID)
            rtm_id = None
            if hass_id is not None:
                rtm_id = self._rtm_config.get_rtm_id(self._name, hass_id)
            result = self._rtm_api.rtm.timelines.create()
            timeline = result.timeline.value
            if hass_id is None or rtm_id is None:
                # Unknown task: create it and remember the RTM id triple so
                # a later call with the same hass_id can update it.
                result = self._rtm_api.rtm.tasks.add(
                    timeline=timeline, name=task_name, parse="1"
                )
                _LOGGER.debug(
                    "Created new task '%s' in account %s", task_name, self.name
                )
                self._rtm_config.set_rtm_id(
                    self._name,
                    hass_id,
                    result.list.id,
                    result.list.taskseries.id,
                    result.list.taskseries.task.id,
                )
            else:
                # Known task: rename the existing RTM task instead.
                self._rtm_api.rtm.tasks.setName(
                    name=task_name,
                    list_id=rtm_id[0],
                    taskseries_id=rtm_id[1],
                    task_id=rtm_id[2],
                    timeline=timeline,
                )
                _LOGGER.debug(
                    "Updated task with id '%s' in account " "%s to name %s",
                    hass_id,
                    self.name,
                    task_name,
                )
        except RtmRequestFailedException as rtm_exception:
            _LOGGER.error(
                "Error creating new Remember The Milk task for " "account %s: %s",
                self._name,
                rtm_exception,
            )
            return False
        return True

    def complete_task(self, call):
        """Complete a task that was previously created by this component.

        Returns True on success, False when the task mapping is unknown or
        the RTM API call failed.
        """
        hass_id = call.data.get(CONF_ID)
        rtm_id = self._rtm_config.get_rtm_id(self._name, hass_id)
        if rtm_id is None:
            _LOGGER.error(
                "Could not find task with ID %s in account %s. "
                "So task could not be closed",
                hass_id,
                self._name,
            )
            return False
        try:
            result = self._rtm_api.rtm.timelines.create()
            timeline = result.timeline.value
            self._rtm_api.rtm.tasks.complete(
                list_id=rtm_id[0],
                taskseries_id=rtm_id[1],
                task_id=rtm_id[2],
                timeline=timeline,
            )
            # Forget the mapping so the hass_id can be reused for a new task.
            self._rtm_config.delete_rtm_id(self._name, hass_id)
            _LOGGER.debug(
                "Completed task with id %s in account %s", hass_id, self._name
            )
        except RtmRequestFailedException as rtm_exception:
            # BUG FIX: this handler previously logged the create_task message
            # (copy-paste) and still fell through to `return True`; report
            # the completion failure and return False like the other paths.
            _LOGGER.error(
                "Error completing Remember The Milk task for " "account %s: %s",
                self._name,
                rtm_exception,
            )
            return False
        return True

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def state(self):
        """Return the state of the device."""
        if not self._token_valid:
            return "API token invalid"
        return STATE_OK
| apache-2.0 |
alex1770/footballpredictions | getresults.py | 1 | 10181 | #!/usr/bin/python
import urllib2,csv,sys,datetime,time,re,bs4,string
from subprocess import PIPE,Popen
def err(s):
    # Log a timestamped message to stderr (Python 2 print-to-file syntax).
    print >>sys.stderr,datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),s
err("Info: starting getresults.py "+' '.join(sys.argv[1:]))

# Command line: <league> [year]; league is an index 0..4.
if len(sys.argv)<=1:
    print >>sys.stderr,"Usage:",sys.argv[0],"<league> [year]"
    print >>sys.stderr," league = 0, 1, 2, 3 or 4"
    sys.exit(1)
ln=int(sys.argv[1])# league number

# Current season's starting year: before July we are still in last year's season.
t=datetime.datetime.now();now=t.year-(t.month<7)
# Seasons are specified by the starting year
if len(sys.argv)>2: year=int(sys.argv[2])
else: year=now

# Load the team-name alias table: each non-comment line lists equivalent
# names (underscored); every alias maps to the first name on its line.
f=open('equivnames','r')
eq={}
for x in f:
    if x[:1]=='#': continue
    y=x.strip().split(' ')
    for z in y: eq[z]=y[0]
f.close()
def cap(x):
    """Capitalise the first letter of each whitespace-separated word.

    e.g. 'AFC telford utd' -> 'AFC Telford Utd'.  Characters after the
    first letter of each word are left untouched.
    """
    return ' '.join(w[0].upper() + w[1:] for w in x.split())
def getnums(x):
    """Extract the runs of digits in x, via map(int, ...).

    Every non-digit character acts as a separator, so 'ab12cd3' yields
    12 and 3.
    """
    masked = ''.join(ch if ch.isdigit() else ' ' for ch in x)
    return map(int, masked.split())
def text(x):
    # Keep only characters plausible in a team name: letters, apostrophe,
    # hyphen, ampersand and space.  Python 2 only (string.lowercase).
    ok=string.lowercase+string.uppercase+"'-& "
    return ''.join(y for y in x if y in ok)
# Matches an ordinal suffix directly after a digit ("1st", "22nd", ...).
ordn=re.compile(r'(?<=\d)(st|nd|rd|th)\b')
def deord(x): return ordn.sub('',x)# Remove ordinal suffixes
# 'eq' is the alias table loaded from 'equivnames'; raises KeyError for
# unknown team names.
def std(x): return eq[x.strip().replace(' ','_')]# Convert a team name into 'standard form'
# Parsers should return a list containing tuples like
# ('2012-09-15', 'Arsenal', 'Southampton', 6, 1)
# Date, Hometeam, Awayteam, Homegoals, Awaygoals
# Ideally they should fail silently: errors will be picked up by the comparison stage
# Uses http://www.football-data.co.uk/
# Very nice and clean csv and has many past years, but can be a few days out of date
def getfromfootballdata(year,ln):
    """Fetch a season's results from football-data.co.uk's CSV archive.

    Returns a list of (date, hometeam, awayteam, homegoals, awaygoals)
    tuples, dates as 'YYYY-MM-DD'.
    """
    league=['E0','E1','E2','E3','EC'][ln]
    u=urllib2.urlopen('http://www.football-data.co.uk/mmz4281/%02d%02d/'%
                      (year%100,(year+1)%100)+league+'.csv')
    # NOTE: the local name 're' shadows the re module inside this function.
    re=csv.reader(u)
    l=[]
    for r in re:
        if r[0]==league:
            dt=r[1]
            # Two-digit years: <'50' means 20xx, otherwise 19xx.
            yr=['19','20'][dt[-2:]<'50']+dt[-2:]
            mon=dt[3:5]
            day=dt[:2]
            if r[5]!='' and r[6]!='':# Future fixtures can appear as entries with empty scorelines
                l.append((yr+'-'+mon+'-'+day,std(r[3]),std(r[4]),int(r[5]),int(r[6])))
    return l
# This parser currently kaput since they changed everything for 2014-15
def getfromsoccernet(year,ln):
    """Scrape results from espnfc.com (BROKEN since the 2014-15 redesign).

    Walks table rows: 'stathead' rows carry the date, odd/even rows carry
    one match; the td sequence after an 'FT' cell is counted to pick out
    home team, score and away team.
    """
    suffix=['barclays-premier-league/23/scores',
            'english-league-championship/24/scores',
            'english-league-one/25/scores',
            'english-league-two/26/scores',
            'english-conference/27/scores'][ln]
    url='http://www.espnfc.com/'+suffix
    # The string after eng.x/ appears to be irrelevant
    # It's possible the site could throw up a popup which messes stuff up
    u=urllib2.urlopen(url)
    soup=bs4.BeautifulSoup(u,"html5lib")
    l=[];dt=None;count=10
    for x in soup.find_all(True):
        if x.name=='tr' and 'stathead' in x['class']:
            y=x.td
            if y:
                y=y.text# Expecting a date like "Wednesday, January 2, 2013"
                try:
                    t=time.strptime(y.strip(),'%A, %B %d, %Y')
                except ValueError:
                    continue
                dt=time.strftime('%Y-%m-%d',t)
        if x.name=='tr' and (x['class'][0][:6]=='oddrow' or x['class'][0][:7]=='evenrow'):
            for y in x.find_all('td'):
                z=y.text
                # count is reset to 0 on the 'FT' cell, so cells 1..3 after
                # it are home team, score, away team.
                if z=='FT': count=0;hteam=ateam=hgoals=agoals=None
                elif count==1: hteam=z
                elif count==2:
                    f=z.find('-')
                    if f>=0: hgoals=int(z[:f]);agoals=int(z[f+1:])
                elif count==3:
                    ateam=z
                    if dt and hteam and ateam and hgoals!=None and agoals!=None:
                        l.append((dt,std(hteam),std(ateam),hgoals,agoals))
                count+=1
    return l
def getfromscorespro(year,ln):
    """Scrape a season's results from scorespro.com.

    Each table row accumulates a 5-element record m = [date, home, away,
    homegoals, awaygoals]; the row is kept only when all five are found.
    """
    suffix=['premier-league','championship','league-one','league-two','national-league'][ln]
    name=suffix.replace('-',' ')
    suffix+='/%d-%d/results/'%(year,year+1)
    url='http://www.scorespro.com/soccer/england/'+suffix
    u=urllib2.urlopen(url)
    soup=bs4.BeautifulSoup(u,"html5lib")
    l=[]
    for x in soup.find_all('tr'):
        m=[None]*5
        for y in x.find_all('td'):
            if 'class' in y.attrs:
                c=y['class']
                if 'kick_t' in c:
                    for z in y.find_all('span',attrs={'class':'kick_t_dt'}):
                        # Dates come as dd.mm.yy with a two-digit year.
                        (dd,mm,yy)=map(int,z.text.split('.'))
                        m[0]="%4d-%02d-%02d"%(yy+1900+100*(yy<60),mm,dd)
                if 'home' in c: m[1]=std(text(y.text))
                if 'away' in c: m[2]=std(text(y.text))
                if 'score' in c:
                    sc=getnums(y.text)
                    if len(sc)>=2: m[3]=sc[0];m[4]=sc[1]
        if all(z!=None for z in m): l.append(tuple(m))
    return l
def getfromflashscores(year,ln):
    """Scrape results from flashscores.co.uk via headless Chrome.

    The page is JavaScript-rendered, so the DOM is dumped with
    google-chrome --headless and then parsed with BeautifulSoup.
    """
    suffix=['premier-league','championship','league-one','league-two','national-league'][ln]
    name=suffix.replace('-',' ')
    suffix+='/results/'
    url='http://www.flashscores.co.uk/football/england/'+suffix
    #print "URL",url
    p=Popen('google-chrome --headless --disable-gpu --disable-features=NetworkService --dump-dom '+url,shell=True,close_fds=True,stdout=PIPE)
    u=p.stdout.read()
    p.stdout.close()
    if p.wait()!=0: raise Exception("Error with google-chrome subprocess")
    soup=bs4.BeautifulSoup(u,"html5lib")
    l=[]
    for x in soup.find_all('div'):
        if 'class' in x.attrs:
            y=x['class']
            # 'event__check' starts a new record; NOTE(review): if the page
            # ever emits an 'event__time' div before the first
            # 'event__check', m would be unbound here — assumed not to
            # happen with the real page structure.
            if 'event__check' in y: m=[None]*5
            if 'event__time' in y:
                tt=getnums(x.text)
                # Month < 7 means the calendar year after the season start.
                if len(tt)>=2: dd=tt[0];mm=tt[1];m[0]="%4d-%02d-%02d"%(year+(mm<7),mm,dd)
            if 'event__participant--home' in y: m[1]=std(text(x.text))# Filter out s used for red cards
            if 'event__participant--away' in y: m[2]=std(text(x.text))#
            if 'event__scores' in y:
                sc=getnums(x.text)
                if len(sc)>=2: m[3]=sc[0];m[4]=sc[1]
            if 'event__part' in y:
                if all(z!=None for z in m): l.append(tuple(m))
    return l
def getfrombbc(year,ln):
    """Scrape results from the BBC results pages (broken since ~2017).

    Skips play-off competitions; a 'Full time' cell flushes the match
    accumulated from the preceding team-home/score/team-away spans.
    """
    suffix=['118996114','118996115','118996116','118996117','118996118'][ln]
    url='http://www.bbc.co.uk/sport/football/results?filter=competition-'+suffix
    u=urllib2.urlopen(url)
    soup=bs4.BeautifulSoup(u,"html5lib")
    l=[];dt=None;playoff=None
    for x in soup.find_all(True):
        if 'class' not in x.attrs: continue
        cl=x['class'][0]
        if cl=='table-header':
            z=x.text.strip()
            try:
                t=time.strptime(deord(z),'%A %d %B %Y')
                dt=time.strftime('%Y-%m-%d',t)
            except ValueError:
                continue
        if cl=='competition-title':
            z=x.text.lower()
            playoff='play-off' in z or 'play off' in z
        if playoff: continue
        # NOTE(review): hteam/ateam/hgoals/agoals are only bound once a
        # team-home span has been seen; assumed 'Full time' never precedes
        # the first team-home on a real page.
        if x.name=='td' and cl=='time' and x.text.strip()=='Full time':
            if dt and hteam and ateam and hgoals!=None and agoals!=None:
                l.append((dt,std(hteam),std(ateam),hgoals,agoals))
        if x.name!='span': continue
        if cl[:9]=='team-home': hteam=x.text.strip();ateam=hgoals=agoals=None
        elif cl=='score':
            z=x.text.strip();f=z.find('-')
            if f>=0:
                try: hgoals=int(z[:f]);agoals=int(z[f+1:])
                except: pass
        elif cl[:9]=='team-away': ateam=x.text.strip()
    return l
def oldmerge(l1,l2):
    # Merge two result lists keyed on (hometeam, awayteam); on conflict the
    # entry from the later list wins (with a warning).  Superseded by
    # getgroundtruth() below.
    d={}
    for x in l1+l2:
        k=x[1:3]
        if k not in d: d[k]=x
        elif d[k]!=x:
            err("Warning: incompatible results (taking last to be correct)")
            err("Warning: %s %-19s %-19s %2d %2d\n%s %-19s %-19s %2d %2d\n"%(d[k]+x))
            d[k]=x
    kk=list(d);kk.sort()
    return [d[x] for x in kk]
def getgroundtruth(pp):
    # Could have slightly better error checking if assumed that each home-away pairing can
    # only occur once, but it's convenient to index by date-home-away. This has the merit
    # of working for leagues (e.g., SPL) where home-away pairings can occur multiple times
    # per season.
    #
    # pp maps parser name -> list of result tuples.  A result is accepted
    # when the largest group of parsers agreeing on the scoreline is at
    # least as big as the number of "antivotes": parsers whose date range
    # covers the match but which did not report it at all.
    d={};mind={};maxd={}
    for n in pp:
        for x in pp[n]:
            d.setdefault(x[:3],[]).append((n,x))
            if n not in mind or x[0]<mind[n]: mind[n]=x[0]
            if n not in maxd or x[0]>maxd[n]: maxd[n]=x[0]
    #for x in d: print d[x]
    #for n in mind: print n,mind[n],maxd[n]
    l=[]
    for x in d:
        cl=d[x];e={}
        for (n,y) in cl: e[n]=1
        av=0
        for n in mind:
            if n not in e and mind[n]<x[0] and x[0]<maxd[n]: av+=1# Count antivotes
        #if av>0: print cl
        # Decompose cl into equiv classes and accept largest class if size(class)>=av
        # If equally large then prioritise some feeds over others (todo)
        # NOTE(review): the loops below rebind the outer loop variable 'x';
        # harmless here because the key is not used again afterwards.
        f={}
        for x in cl: f.setdefault(x[1],[]).append(x[0])
        m=0;tr=None
        for x in f:
            if len(f[x])>m: m=len(f[x]);tr=x
        if tr and m>=av: l.append(tr)
        #print cl;print f;print tr;print m;print
    l.sort()
    return l
def check(pp,gtr):
    # Compare each parser's output against the ground-truth result list and
    # log omissions, spurious entries and wrong scorelines to stderr.
    d={}
    for x in gtr: d[x[:3]]=x
    for n in pp:
        mind='9999-12-31';maxd='0000-01-01'
        for x in pp[n]: mind=min(mind,x[0]);maxd=max(maxd,x[0])
        e={}
        for x in pp[n]: e[x[:3]]=x
        for x in d:
            if x not in e and x[0]>mind:
                # The x[0]>mind condition doesn't count omissions that occurred outside the
                # date range of returned results, because some results are "rolling" and
                # only intend to give the last month, say.
                if x[0]<maxd: err("Error: %s"%n+" omission "+str(d[x]))
                else: err("Warning: %s"%n+" slow update "+str(d[x]))
        for x in e:
            if x not in d: err("Error: %-20s"%n+"Spurious "+str(e[x]))
        for x in e:
            if x in d and d[x]!=e[x]: err("Error: %s"%n+" wrong "+str(e[x])+" cf GTR "+str(d[x]))
# Third parameter is whether the site in question provides past years data
# Arrange in increasing reliability (order not currently used)
# Each entry: (name, parser function, has-historic-data flag).  Parsers with
# flag 0 only serve the current season.
parsers=[
    #("soccernet",getfromsoccernet,0),# Quite error prone in 2012; can't distinguish play-offs from league games; broken in 2014-15 - gone all fancy schmancy
    #("BBC",getfrombbc,0),# Occasional errors in years < 2012; broken since 2017?
    ("footballdata",getfromfootballdata,1),# Occasional errors in years < 2012; one error in 2012, one minor (date) error in 2018
    ("scorespro",getfromscorespro,0),# One problem in 2018-19 due to misnaming Mansfield Town
    ("flashscores",getfromflashscores,0)]# Introduced 2019-02-25

# Run every applicable parser, vote on a ground truth, print it, and report
# each parser's disagreements.
pp={}
for (n,g,p) in parsers:
    if year==now or p:
        try:
            pp[n]=g(year,ln)
            err("Info: parser %s returned %d results"%(n,len(pp[n])))
        except Exception as x:
            err("Error: parser %s failed with exception %s: %s"%(n,type(x).__name__,str(x)))
gtr=getgroundtruth(pp)
for x in gtr: print "%s %-19s %-19s %2d %2d"%x
check(pp,gtr)
| mit |
rosmo/ansible | test/units/compat/mock.py | 156 | 4556 | # (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
Compat module for Python3.x's unittest.mock module
'''
import sys
# Python 2.7
# Note: Could use the pypi mock library on python3.x as well as python2.x. It
# is the same as the python3 stdlib mock library
try:
# Allow wildcard import because we really do want to import all of mock's
# symbols into this compat shim
# pylint: disable=wildcard-import,unused-wildcard-import
from unittest.mock import *
except ImportError:
# Python 2
# pylint: disable=wildcard-import,unused-wildcard-import
try:
from mock import *
except ImportError:
print('You need the mock library installed on python2.x to run tests')
# Prior to 3.4.4, mock_open cannot handle binary read_data
if sys.version_info >= (3,) and sys.version_info < (3, 4, 4):
file_spec = None
def _iterate_read_data(read_data):
# Helper for mock_open:
# Retrieve lines from read_data via a generator so that separate calls to
# readline, read, and readlines are properly interleaved
sep = b'\n' if isinstance(read_data, bytes) else '\n'
data_as_list = [l + sep for l in read_data.split(sep)]
if data_as_list[-1] == sep:
# If the last line ended in a newline, the list comprehension will have an
# extra entry that's just a newline. Remove this.
data_as_list = data_as_list[:-1]
else:
# If there wasn't an extra newline by itself, then the file being
# emulated doesn't have a newline to end the last line remove the
# newline that our naive format() added
data_as_list[-1] = data_as_list[-1][:-1]
for line in data_as_list:
yield line
def mock_open(mock=None, read_data=''):
    """
    A helper function to create a mock to replace the use of `open`. It works
    for `open` called directly or used as a context manager.

    The `mock` argument is the mock object to configure. If `None` (the
    default) then a `MagicMock` will be created for you, with the API limited
    to methods or attributes available on standard file handles.

    `read_data` is a string for the `read`, `readline`, and `readlines`
    methods of the file handle to return.  This is an empty string by
    default.
    """
    def _readlines_side_effect(*args, **kwargs):
        # An explicitly configured return_value wins over read_data.
        if handle.readlines.return_value is not None:
            return handle.readlines.return_value
        return list(_data)

    def _read_side_effect(*args, **kwargs):
        if handle.read.return_value is not None:
            return handle.read.return_value
        # Join with an empty str/bytes matching read_data's type.
        return type(read_data)().join(_data)

    def _readline_side_effect():
        if handle.readline.return_value is not None:
            while True:
                yield handle.readline.return_value
        for line in _data:
            yield line

    # file_spec is computed lazily (module-level cache) from the union of
    # text and binary IO attributes.
    global file_spec
    if file_spec is None:
        import _io
        file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))

    if mock is None:
        mock = MagicMock(name='open', spec=open)

    handle = MagicMock(spec=file_spec)
    handle.__enter__.return_value = handle

    # All three read methods share one generator so their output interleaves.
    _data = _iterate_read_data(read_data)

    handle.write.return_value = None
    handle.read.return_value = None
    handle.readline.return_value = None
    handle.readlines.return_value = None

    handle.read.side_effect = _read_side_effect
    handle.readline.side_effect = _readline_side_effect()
    handle.readlines.side_effect = _readlines_side_effect

    mock.return_value = handle
    return mock
| gpl-3.0 |
xapi-project/sm | tests/test_lvutil.py | 1 | 7779 | import unittest
import testlib
import lvmlib
import mock
import os
import lvutil
import util
ONE_MEGABYTE = 1 * 1024 * 1024
def with_lvm_subsystem(func):
    # Decorator: build a fake LVM subsystem (lvmlib.LVSubsystem) from the
    # testlib context and pass it as the test's first argument after self.
    @testlib.with_context
    def decorated(self, context, *args, **kwargs):
        lvsystem = lvmlib.LVSubsystem(context.log, context.add_executable)
        return func(self, lvsystem, * args, ** kwargs)
    # Preserve the original test name so unittest discovery/reporting works.
    decorated.__name__ = func.__name__
    return decorated
class TestCreate(unittest.TestCase):
    """Tests for lvutil.create()."""

    def setUp(self):
        # Patch out the storage lock so no real locking is attempted.
        lock_patcher = mock.patch('lvutil.lock', autospec=True)
        self.addCleanup(lock_patcher.stop)
        self.mock_lock = lock_patcher.start()

    @with_lvm_subsystem
    def test_create_volume_size(self, lvsystem):
        lvsystem.add_volume_group('VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7')
        lvutil.create('volume', 100 * ONE_MEGABYTE, 'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7')
        created_lv, = lvsystem.get_logical_volumes_with_name('volume')
        self.assertEquals(100, created_lv.size_mb)

    @with_lvm_subsystem
    def test_create_volume_is_in_the_right_volume_group(self, lvsystem):
        lvsystem.add_volume_group('VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7')
        lvutil.create('volume', 100 * ONE_MEGABYTE, 'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7')
        created_lv, = lvsystem.get_logical_volumes_with_name('volume')
        self.assertEquals(100, created_lv.size_mb)
        self.assertEquals('VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7', created_lv.volume_group.name)
        self.assertTrue(created_lv.active)
        self.assertTrue(created_lv.zeroed)

    @with_lvm_subsystem
    def test_create_volume_is_active(self, lvsystem):
        lvsystem.add_volume_group('VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7')
        lvutil.create('volume', 100 * ONE_MEGABYTE, 'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7')
        created_lv, = lvsystem.get_logical_volumes_with_name('volume')
        self.assertEquals(100, created_lv.size_mb)
        self.assertTrue(created_lv.active)
        self.assertTrue(created_lv.zeroed)

    @with_lvm_subsystem
    def test_create_volume_is_zeroed(self, lvsystem):
        lvsystem.add_volume_group('VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7')
        lvutil.create('volume', 100 * ONE_MEGABYTE, 'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7')
        created_lv, = lvsystem.get_logical_volumes_with_name('volume')
        self.assertEquals(100, created_lv.size_mb)
        self.assertTrue(created_lv.zeroed)

    @with_lvm_subsystem
    def test_create_creates_logical_volume_with_tags(self, lvsystem):
        lvsystem.add_volume_group('VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7')
        lvutil.create('volume', ONE_MEGABYTE, 'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7', tag='hello')
        created_lv, = lvsystem.get_logical_volumes_with_name('volume')
        self.assertEquals('hello', created_lv.tag)

    @mock.patch('util.pread', autospec=True)
    def test_create_percentage_has_precedence_over_size(self, mock_pread):
        # When a percentage is given, lvcreate gets the %-form, not the bytes.
        lvutil.create('volume', ONE_MEGABYTE, 'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7',
                      size_in_percentage="10%F")
        self.assertEqual(1, mock_pread.call_count)
        self.assertIn("10%F", mock_pread.call_args[0][0])
class TestRemove(unittest.TestCase):
    """Tests for lvutil.remove()."""

    def setUp(self):
        # Patch out the storage lock so no real locking is attempted.
        lock_patcher = mock.patch('lvutil.lock', autospec=True)
        self.addCleanup(lock_patcher.stop)
        self.mock_lock = lock_patcher.start()

    @with_lvm_subsystem
    def test_remove_removes_volume(self, lvsystem):
        lvsystem.add_volume_group('VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7')
        lvsystem.get_volume_group('VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7').add_volume('volume', 100)
        lvutil.remove('VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7/volume')
        self.assertEquals([], lvsystem.get_logical_volumes_with_name('volume'))

    @mock.patch('lvutil._lvmBugCleanup', autospec=True)
    @mock.patch('util.pread', autospec=True)
    def test_remove_additional_config_param(self, mock_pread, _bugCleanup):
        # config_param must be forwarded to lvremove as --config devices{...}.
        lvutil.remove('VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7/volume', config_param="blah")
        mock_pread.assert_called_once_with(
            [os.path.join(lvutil.LVM_BIN, lvutil.CMD_LVREMOVE)]
            + "-f VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7/volume --config devices{blah}".split(),
            quiet=False)
class TestDeactivate(unittest.TestCase):
    """Tests for lvutil.deactivateNoRefcount, covering the LVM bug-cleanup
    retry paths."""

    def setUp(self):
        # Patch the lock and the filesystem probes used by the cleanup code.
        lock_patcher = mock.patch('lvutil.lock', autospec=True)
        pathexists_patcher = mock.patch('lvutil.util.pathexists', autospec=True)
        lexists_patcher = mock.patch('lvutil.os.path.lexists', autospec=True)
        unlink_patcher = mock.patch('lvutil.os.unlink', autospec=True)
        self.addCleanup(mock.patch.stopall)
        self.mock_lock = lock_patcher.start()
        self.mock_exists = pathexists_patcher.start()
        self.mock_lexists = lexists_patcher.start()
        self.mock_unlink = unlink_patcher.start()

    def __create_test_volume(self, lvsystem):
        # A single 100 MB volume in the standard test VG.
        lvsystem.add_volume_group(
            'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7')
        lvsystem.get_volume_group(
            'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7'
        ).add_volume('volume', 100)

    @with_lvm_subsystem
    def test_deactivate_noref_withbugcleanup(self, lvsystem):
        # Arrange
        self.__create_test_volume(lvsystem)
        self.mock_exists.return_value = True
        self.mock_lexists.return_value = True
        # Act
        lvutil.deactivateNoRefcount(
            'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7/volume')

    @mock.patch('lvutil.util.pread')
    @with_lvm_subsystem
    def test_deactivate_noref_withnobugcleanup(
            self, lvsystem, mock_pread):
        # Arrange
        self.__create_test_volume(lvsystem)
        self.mock_exists.return_value = False
        mock_pread.side_effect = [0, 0]
        # Act
        lvutil.deactivateNoRefcount(
            'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7/volume')

    @mock.patch('lvutil.util.pread')
    @with_lvm_subsystem
    def test_deactivate_noref_withbugcleanup_retry(
            self, lvsystem, mock_pread):
        # Arrange
        self.__create_test_volume(lvsystem)
        self.mock_exists.return_value = True
        self.mock_lexists.return_value = True
        mock_pread.side_effect = [0, util.CommandException(0),
                                  util.CommandException(1), 0]
        # Act
        lvutil.deactivateNoRefcount(
            'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7/volume')

    # FIX: the symlink patch said `autotspec=True` (typo) — mock.patch treats
    # unknown keyword arguments as attributes to set on the mock, so the
    # patch was silently NOT autospec'd.
    @mock.patch('lvutil.os.symlink', autospec=True)
    @mock.patch('lvutil.time.sleep', autospec=True)
    @mock.patch('lvutil.util.pread')
    @with_lvm_subsystem
    def test_deactivate_noref_withbugcleanup_retry_fail(
            self, lvsystem, mock_pread, mock_sleep, mock_symlink):
        # Arrange
        self.__create_test_volume(lvsystem)
        self.mock_exists.return_value = True
        self.mock_lexists.return_value = False
        # Every cleanup retry fails, so the final CommandException escapes.
        side_effect = [0, util.CommandException(0)]
        side_effect += 11 * [util.CommandException(1),
                             util.CommandException(0)]
        mock_pread.side_effect = side_effect
        # Act
        with self.assertRaises(util.CommandException):
            lvutil.deactivateNoRefcount(
                'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7/volume')
        # Assert
        mock_symlink.assert_called_once_with(
            mock.ANY, 'VG_XenStorage-b3b18d06-b2ba-5b67-f098-3cdd5087a2a7/volume')
| lgpl-2.1 |
vivekanand1101/bodhi | bodhi/util.py | 3 | 18057 | # This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Random functions that don't fit elsewhere
"""
import os
import arrow
import socket
import urllib
import tempfile
import markdown
import requests
import subprocess
import libravatar
import hashlib
import collections
import pkg_resources
import functools
import transaction
from os.path import join, dirname, basename, isfile
from datetime import datetime
from collections import defaultdict
from contextlib import contextmanager
from sqlalchemy import create_engine
from pyramid.i18n import TranslationStringFactory
from pyramid.settings import asbool
from kitchen.text.converters import to_bytes
from . import log, buildsys
from .exceptions import RepodataException
from .config import config
try:
import rpm
except ImportError:
log.warning("Could not import 'rpm'")
_ = TranslationStringFactory('bodhi')
## Display a given message as a heading
header = lambda x: u"%s\n %s\n%s\n" % ('=' * 80, x, '=' * 80)
pluralize = lambda val, name: val == 1 and name or "%ss" % name
def get_rpm_header(nvr, tries=0):
    """ Get the rpm header for a given build.

    Retries up to three times on koji errors by recursing with the
    incremented attempt counter; re-raises the last failure, and raises
    ValueError when koji returns no header data.
    """
    tries += 1
    headers = [
        'name', 'summary', 'version', 'release', 'url', 'description',
        'changelogtime', 'changelogname', 'changelogtext',
    ]
    rpmID = nvr + '.src'
    koji_session = buildsys.get_session()
    try:
        result = koji_session.getRPMHeaders(rpmID=rpmID, headers=headers)
    except Exception as e:
        msg = "Failed %i times to get rpm header data from koji for %s: %s"
        log.warning(msg % (tries, nvr, str(e)))
        if tries < 3:
            # Try again...
            return get_rpm_header(nvr, tries=tries)
        else:
            # Give up for good and re-raise the failure...
            raise
    if result:
        return result
    raise ValueError("No rpm headers found in koji for %r" % nvr)
def get_nvr(nvr):
    """ Return the [ name, version, release ] of a given name-ver-rel.

    Hyphens inside the package name are preserved: everything before the
    last two '-'-separated fields is the name.
    """
    parts = nvr.split('-')
    name = '-'.join(parts[:-2])
    return [name, parts[-2], parts[-1]]
def mkmetadatadir(path):
    """
    Generate package metadata for a given directory; if it doesn't exist, then
    create it.
    """
    if not os.path.isdir(path):
        os.makedirs(path)
    subprocess.check_call(['createrepo_c', '--xz', '--database', '--quiet', path])


def get_age(date):
    # Human-readable age of a UTC datetime, e.g. "5 minutes", "2 hours",
    # "3 days" (uses the module-level pluralize helper).
    age = datetime.utcnow() - date
    if age.days == 0:
        if age.seconds < 60:
            return "%d %s" % (age.seconds, pluralize(age.seconds, "second"))
        minutes = int(age.seconds / 60)
        if minutes >= 60:
            hours = int(minutes / 60)
            return "%d %s" % (hours, pluralize(hours, "hour"))
        return "%d %s" % (minutes, pluralize(minutes, "minute"))
    return "%d %s" % (age.days, pluralize(age.days, "day"))
def get_age_in_days(date):
    """Return whole days elapsed since *date* (UTC); 0 when date is falsy."""
    if not date:
        return 0
    return (datetime.utcnow() - date).days
def flash_log(msg):
    """ Flash and log a given message """
    # FIXME: request.session.flash()
    #flash(msg)
    log.debug(msg)


def get_repo_tag(repo):
    """ Pull the koji tag from the given mash repo.

    Python 2 only: relies on the file() builtin and a list-returning
    filter().  Returns None (after logging) when the mash config is missing.
    """
    mashconfig = join(dirname(config.get('mash_conf')),
                      basename(repo) + '.mash')
    if isfile(mashconfig):
        mashconfig = file(mashconfig, 'r')
        lines = mashconfig.readlines()
        mashconfig.close()
        return filter(lambda x: x.startswith('tag ='), lines)[0].split()[-1]
    else:
        log.error("Cannot find mash configuration for %s: %s" % (repo,
                  mashconfig))
def build_evr(build):
    """Return the (epoch, version, release) triple for a koji build dict.

    A missing or falsy epoch is normalised to 0 (mutating *build* in
    place, as callers rely on); the epoch is returned as a string.
    """
    epoch = build['epoch'] or 0
    build['epoch'] = epoch
    return (str(epoch), build['version'], build['release'])
def link(href, text):
    """Return an HTML anchor pointing at *href* with label *text*."""
    return '<a href="{0}">{1}</a>'.format(href, text)
class memoized(object):
    '''Decorator. Caches a function's return value each time it is called. If
    called later with the same arguments, the cached value is returned (not
    reevaluated).

    http://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
    '''
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        # NOTE: collections.Hashable is Python 2 / <3.10; on modern Python
        # this alias lives in collections.abc.
        if not isinstance(args, collections.Hashable):
            # uncacheable. a list, for instance.
            # better to not cache than blow up.
            return self.func(*args)
        if args in self.cache:
            return self.cache[args]
        else:
            value = self.func(*args)
            self.cache[args] = value
            return value

    def __repr__(self):
        '''Return the function's docstring.'''
        return self.func.__doc__

    def __get__(self, obj, objtype):
        '''Support instance methods.'''
        return functools.partial(self.__call__, obj)
def get_db_from_config(dev=False):
    """Return a configured SQLAlchemy session.

    When *dev* is True an in-memory SQLite database is used instead of
    the configured sqlalchemy.url; all tables are created either way.
    """
    from .models import DBSession, Base
    if dev:
        db_url = 'sqlite:///:memory:'
    else:
        db_url = config['sqlalchemy.url']
    engine = create_engine(db_url)
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine
    Base.metadata.create_all(engine)
    return DBSession()


@memoized
def get_critpath_pkgs(collection='master'):
    """Return a list of critical path packages for a given collection"""
    critpath_pkgs = []
    critpath_type = config.get('critpath.type')
    if critpath_type == 'pkgdb':
        from pkgdb2client import PkgDB
        pkgdb = PkgDB(config.get('pkgdb_url'))
        results = pkgdb.get_critpath_packages(branches=collection)
        if collection in results['pkgs']:
            critpath_pkgs = results['pkgs'][collection]
    else:
        # Fall back to a space-separated list in the config file.
        critpath_pkgs = config.get('critpath_pkgs', '').split()
    return critpath_pkgs
class Singleton(object):
    """Classic singleton base class: __new__ hands back one shared instance.

    The instance is looked up via cls.__dict__, so each subclass gets its
    own singleton rather than inheriting the parent's.
    """
    def __new__(cls, *args, **kw):
        if '_instance' not in cls.__dict__:
            cls._instance = object.__new__(cls)
        return cls._instance
def sanity_check_repodata(myurl):
    """
    Sanity check the repodata for a given repository.

    Verifies the repo with librepo (checksums, local layout) and rejects
    an updateinfo.xml.gz containing empty <id/> tags.  Raises
    RepodataException on any problem.
    """
    import librepo
    h = librepo.Handle()
    h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
    h.setopt(librepo.LRO_DESTDIR, tempfile.mkdtemp())
    # Normalise: trailing slash required, and strip a 'repodata/' suffix.
    if myurl[-1] != '/':
        myurl += '/'
    if myurl.endswith('repodata/'):
        myurl = myurl.replace('repodata/', '')
    h.setopt(librepo.LRO_URLS, [myurl])
    h.setopt(librepo.LRO_LOCAL, True)
    h.setopt(librepo.LRO_CHECKSUM, True)
    try:
        h.perform()
    except librepo.LibrepoException as e:
        # Python 2 style: the exception unpacks to (rc, msg, general_msg).
        rc, msg, general_msg = e
        raise RepodataException(msg)

    updateinfo = os.path.join(myurl, 'updateinfo.xml.gz')
    if os.path.exists(updateinfo):
        # zgrep exits 0 when it finds a match, i.e. when empty ids exist.
        ret = subprocess.call(['zgrep', '<id/>', updateinfo])
        if not ret:
            raise RepodataException('updateinfo.xml.gz contains empty ID tags')


def age(context, date, nuke_ago=False):
    # Humanize a date with arrow, e.g. "3 days ago" ("3 days" if nuke_ago).
    humanized = arrow.get(date).humanize()
    if nuke_ago:
        return humanized.replace(' ago', '')
    else:
        return humanized
hardcoded_avatars = {
'bodhi': 'https://apps.fedoraproject.org/img/icons/bodhi-{size}.png',
# Taskotron may have a logo at some point. Check this out:
# https://mashaleonova.wordpress.com/2015/08/18/a-logo-for-taskotron/
# Ask tflink before actually putting this in place though. we need
# a nice small square version. It'll look great!
#'taskotron': 'something-fancy.png',
}
def avatar(context, username, size):
    """Return an avatar URL for *username* at the given pixel *size*.

    System users get hardcoded icons; everyone else is resolved through
    libravatar (cached per-argument on the request cache).
    """
    # Handle some system users
    # https://github.com/fedora-infra/bodhi/issues/308
    if username in hardcoded_avatars:
        return hardcoded_avatars[username].format(size=size)

    # context is a mako context object
    request = context['request']
    # BUG FIX: the original line ended with a stray comma, making `https`
    # a 1-tuple — always truthy regardless of the 'prefer_ssl' setting.
    https = request.registry.settings.get('prefer_ssl')

    @request.cache.cache_on_arguments()
    def work(username, size):
        openid = "http://%s.id.fedoraproject.org/" % username
        if asbool(config.get('libravatar_enabled', True)):
            if asbool(config.get('libravatar_dns', False)):
                # Full federated lookup (does DNS queries per avatar).
                return libravatar.libravatar_url(
                    openid=openid,
                    https=https,
                    size=size,
                    default='retro',
                )
            else:
                # Cheap path: hash the openid ourselves and hit the CDN.
                query = urllib.urlencode({'s': size, 'd': 'retro'})
                hash = hashlib.sha256(openid).hexdigest()
                template = "https://seccdn.libravatar.org/avatar/%s?%s"
                return template % (hash, query)
        return 'libravatar.org'

    return work(username, size)
def version(context=None):
    """Return the installed bodhi package version."""
    return pkg_resources.get_distribution('bodhi').version


def hostname(context=None):
    """Return this machine's hostname."""
    return socket.gethostname()


def markup(context, text):
    # Render markdown to HTML; raw HTML in the input is replaced wholesale.
    return markdown.markdown(text, safe_mode="replace",
                             html_replacement_text="--RAW HTML NOT ALLOWED--")
def status2html(context, status):
    # Bootstrap label for an update status (Python 2: unicode()).
    # Raises KeyError for unknown statuses.
    status = unicode(status)
    cls = {
        'pending': 'primary',
        'testing': 'warning',
        'stable': 'success',
        'unpushed': 'danger',
        'obsolete': 'default',
        'processing': 'info',
    }[status]
    return "<span class='label label-%s'>%s</span>" % (cls, status)


def state2class(context, state):
    # CSS class for a release state; unknown states fall back to 'default'.
    state = unicode(state)
    cls = {
        'disabled': 'default active',
        'pending': 'warning',
        'current': 'success',
        'archived': 'danger'
    }
    return cls[state] if state in cls.keys() else 'default'


def type2color(context, t):
    # rgba() colour for an update type; unknown types get the default grey.
    t = unicode(t)
    cls = {
        'bugfix': 'rgba(150,180,205,0.5)',
        'security': 'rgba(205,150,180,0.5)',
        'newpackage': 'rgba(150,205,180,0.5)',
        'default': 'rgba(200,200,200,0.5)'
    }
    return cls[t] if t in cls.keys() else cls['default']


def state2html(context, state):
    # Bootstrap label for a release state, via state2class.
    state_class = state2class(context, state)
    return "<span class='label label-%s'>%s</span>" % (state_class, state)
def karma2class(context, karma, default='default'):
    """Map a karma value in [-2, 2] to a Bootstrap context class.

    Values outside the range (and None) return *default*.

    BUG FIX: the original guard was `if karma and ...`, so karma == 0
    (falsy) fell through to *default* even though the mapping explicitly
    defines 0 -> 'info'.
    """
    if karma is not None and -2 <= karma <= 2:
        return {
            -2: 'danger',
            -1: 'danger',
            0: 'info',
            1: 'success',
            2: 'success',
        }.get(karma)
    return default
def karma2html(context, karma):
    """Render a karma value (or tuple of values) as Bootstrap label HTML."""
    # Recurse if we are handle multiple karma values
    if isinstance(karma, tuple):
        return '</td><td>'.join([karma2html(context, item) for item in karma])

    cls = karma2class(context, karma, None)
    if not cls:
        # Out-of-range karma: very negative is danger, everything else success.
        if karma < -2:
            cls = 'danger'
        else:
            cls = 'success'

    # Positive karma gets an explicit '+' sign.
    if karma > 0:
        karma = "+%i" % karma
    else:
        karma = "%i" % karma

    return "<span class='label label-%s'>%s</span>" % (cls, karma)
def type2html(context, kind):
    # Bootstrap label for an update type (Python 2: unicode()); unknown
    # kinds yield cls None -> "label-None".
    kind = unicode(kind)
    cls = {
        'security': 'danger',
        'bugfix': 'warning',
        'newpackage': 'primary',
        'enhancement': 'success',
    }.get(kind)
    return "<span class='label label-%s'>%s</span>" % (cls, kind)


def severity2html(context, severity):
    # Bootstrap label for an update severity.
    severity = unicode(severity)
    cls = {
        'urgent': 'danger',
        'high': 'warning',
        'medium': 'primary',
        'low': 'success',
    }.get(severity)
    return "<span class='label label-%s'>%s</span>" % (cls, severity)


def suggestion2html(context, suggestion):
    # Bootstrap label for a post-update suggestion (reboot/logout).
    suggestion = unicode(suggestion)
    cls = {
        'reboot': 'danger',
        'logout': 'warning',
    }.get(suggestion)
    return "<span class='label label-%s'>%s</span>" % (cls, suggestion)


def request2html(context, request):
    # Bootstrap label for an update request state.
    request = unicode(request)
    cls = {
        'unpush': 'danger',
        'obsolete': 'warning',
        'testing': 'primary',
        'stable': 'success',
    }.get(request)
    return "<span class='label label-%s'>%s</span>" % (cls, request)
def update2html(context, update):
    """Render an HTML link to an update, truncating long titles for the UI.

    *update* may be a model object (attribute access) or a dict.
    """
    request = context.get('request')

    if hasattr(update, 'title'):
        title = update.title
    else:
        title = update['title']

    if hasattr(update, 'alias'):
        alias = update.alias
    else:
        alias = update['alias']

    # Prefer the alias as the stable identifier; fall back to the title.
    url = request.route_url('update', id=alias or title)
    settings = request.registry.settings
    max_length = int(settings.get('max_update_length_for_ui', 30))
    if len(title) > max_length:
        title = title[:max_length] + "..."
    return link(url, title)
def pages_list(context, page, pages):
    """Return the page numbers to display in a pagination widget.

    Produces a window of up to 2*margin+1 consecutive pages containing
    *page*, clamped to the valid range [1, pages].
    """
    margin = 4
    window = 2 * margin + 1

    if page <= margin + 1:
        # Current page is near the beginning: anchor the window at 1.
        lo, hi = 1, min(pages, window)
    elif pages - page >= margin:
        # Plenty of room on both sides: centre the window on the page.
        lo, hi = max(page - margin, 1), min(page + margin, pages)
    else:
        # Near the end: anchor the window at the last page.
        hi = min(pages, page + margin)
        lo = max(hi - (window - 1), 1)

    return range(lo, hi + 1)
def page_url(context, page):
    # Current URL with the ?page= query parameter replaced
    # (Python 2: urllib.urlencode).
    request = context.get('request')
    params = dict(request.params)
    params['page'] = page
    return request.path_url + "?" + urllib.urlencode(params)


def bug_link(context, bug, short=False):
    """HTML link to a Red Hat bugzilla bug.

    The long form appends the bug title, or a spinner image while the
    backend is still fetching it asynchronously.
    """
    url = "https://bugzilla.redhat.com/show_bug.cgi?id=" + str(bug.bug_id)
    display = "#%i" % bug.bug_id
    link = "<a target='_blank' href='%s'>%s</a>" % (url, display)
    if not short:
        if bug.title:
            # We're good...
            link = link + " " + to_bytes(bug.title)
        else:
            # Otherwise, the backend is async grabbing the title from rhbz, so
            link = link + " <img class='spinner' src='static/img/spinner.gif'>"
    return link
def testcase_link(context, test, short=False):
settings = context['request'].registry
default = 'https://fedoraproject.org/wiki/'
url = settings.get('test_case_base_url', default) + test.name
display = test.name.replace('QA:Testcase ', '')
link = "<a target='_blank' href='%s'>%s</a>" % (url, display)
if not short:
link = "Test Case " + link
return link
def sorted_builds(builds):
    """Return build NVRs ordered by RPM version comparison, newest first."""
    def compare(x, y):
        return rpm.labelCompare(get_nvr(x), get_nvr(y))
    return sorted(builds, cmp=compare, reverse=True)
def sorted_updates(updates):
    """
    Order our updates so that the highest version gets tagged last so that
    it appears as the 'latest' in koji.
    """
    nvrs_by_package = defaultdict(set)
    update_for_nvr = {}
    ordered = []
    # Index every build NVR by package name and remember its owning update.
    for update in updates:
        for build in update.builds:
            name, version, release = get_nvr(build.nvr)
            nvrs_by_package[name].add(build.nvr)
            update_for_nvr[build.nvr] = update
    for package in nvrs_by_package:
        nvrs = nvrs_by_package[package]
        if len(nvrs) > 1:
            log.info('Found multiple %s packages' % package)
            log.debug(nvrs)
            # Walk newest-to-oldest so higher versions land earlier in
            # `ordered` (and therefore later after the final reversal).
            for nvr in sorted_builds(nvrs):
                update = update_for_nvr[nvr]
                if update not in ordered:
                    ordered.append(update)
        else:
            update = update_for_nvr[nvrs.pop()]
            if update not in ordered:
                ordered.append(update)
    log.debug('ordered_updates = %s' % ordered)
    # Reverse so the highest version is tagged last.
    return ordered[::-1]
def cmd(cmd, cwd=None):
    """Run a command and return a (stdout, stderr, returncode) tuple.

    A plain string command is split on whitespace; no shell is involved.
    """
    log.info('Running %r', cmd)
    if isinstance(cmd, basestring):
        cmd = cmd.split()
    proc = subprocess.Popen(cmd, cwd=cwd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if out:
        log.debug(out)
    if err:
        log.error(err)
    if proc.returncode != 0:
        log.error('return code %s', proc.returncode)
    return out, err, proc.returncode
def tokenize(string):
    """Lazily yield tokens split on commas and whitespace.

    Given something like "a b, c d" yield 'a', 'b', 'c', 'd'.

    Simplification: ``str.split()`` with no argument already discards
    leading/trailing whitespace and never produces empty strings, so the
    original per-substring/per-token strip-and-truthiness checks were
    redundant and have been removed. Behavior is unchanged.
    """
    for substring in string.split(','):
        for token in substring.split():
            yield token
def taskotron_results(settings, entity='results', **kwargs):
    """ Given an update object, yield resultsdb results. """
    url = settings['resultsdb_api_url'] + "/api/v1.0/" + entity
    if kwargs:
        url += "?" + urllib.urlencode(kwargs)
    data = True
    try:
        # Follow the paginated 'next' links until the server returns no data.
        while data:
            log.debug("Grabbing %r" % url)
            response = requests.get(url)
            if response.status_code != 200:
                raise IOError("status code was %r" % response.status_code)
            payload = response.json()
            url, data = payload['next'], payload['data']
            for datum in data:
                # Skip ABORTED results
                # https://github.com/fedora-infra/bodhi/issues/167
                if entity == 'results' and datum.get('outcome') == 'ABORTED':
                    continue
                yield datum
    except Exception:
        log.exception("Problem talking to %r" % url)
@contextmanager
def transactional_session_maker():
    """Provide a transactional scope around a series of operations.

    Yields a new DBSession; commits the surrounding transaction when the
    with-block completes normally, aborts it (and re-raises) on any error,
    and always closes the session afterwards.
    """
    # Imported lazily — presumably to avoid a circular import at module
    # load time; TODO confirm against the package layout.
    from .models import DBSession
    session = DBSession()
    transaction.begin()
    try:
        yield session
        # Only reached if the with-block body did not raise.
        transaction.commit()
    except:
        # Bare except is deliberate: abort on *anything* (including
        # BaseException subclasses like KeyboardInterrupt), then re-raise
        # the original exception unchanged.
        transaction.abort()
        raise
    finally:
        session.close()
| gpl-2.0 |
teamfx/openjfx-10-dev-rt | modules/javafx.web/src/main/native/Tools/Scripts/webkitpy/layout_tests/controllers/manager.py | 2 | 29529 | # Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
The Manager runs a series of tests (TestType interface) against a set
of test files. If a test file fails a TestType, it returns a list of TestFailure
objects to the Manager. The Manager then aggregates the TestFailures to
create a final report.
"""
import json
import logging
import random
import sys
import time
from collections import defaultdict
from webkitpy.common.checkout.scm.detection import SCMDetector
from webkitpy.common.net.file_uploader import FileUploader
from webkitpy.layout_tests.controllers.layout_test_finder import LayoutTestFinder
from webkitpy.layout_tests.controllers.layout_test_runner import LayoutTestRunner
from webkitpy.layout_tests.controllers.test_result_writer import TestResultWriter
from webkitpy.layout_tests.layout_package import json_layout_results_generator
from webkitpy.layout_tests.layout_package import json_results_generator
from webkitpy.layout_tests.models import test_expectations
from webkitpy.layout_tests.models import test_failures
from webkitpy.layout_tests.models import test_results
from webkitpy.layout_tests.models import test_run_results
from webkitpy.layout_tests.models.test_input import TestInput
from webkitpy.layout_tests.models.test_run_results import INTERRUPTED_EXIT_STATUS
from webkitpy.tool.grammar import pluralize
# Module-level logger for this controller.
_log = logging.getLogger(__name__)
# Convenience alias so callers can refer to manager.TestExpectations.
TestExpectations = test_expectations.TestExpectations
class Manager(object):
    """A class for managing running a series of tests on a series of layout
    test files.

    The Manager discovers tests, parses expectations, sets up the port
    (build check, helper process, servers), delegates execution to a
    LayoutTestRunner, optionally retries unexpected failures, and finally
    summarizes and uploads the results.
    """
    def __init__(self, port, options, printer):
        """Initialize test runner data structures.

        Args:
            port: an object implementing port-specific hooks and behavior
            options: a dictionary of command line options
            printer: a Printer object to record updates to.
        """
        self._port = port
        self._filesystem = port.host.filesystem
        self._options = options
        self._printer = printer
        self._expectations = None
        self.HTTP_SUBDIR = 'http' + port.TEST_PATH_SEPARATOR
        self.WEBSOCKET_SUBDIR = 'websocket' + port.TEST_PATH_SEPARATOR
        self.web_platform_test_subdir = self._port.web_platform_test_server_doc_root()
        self.LAYOUT_TESTS_DIRECTORY = 'LayoutTests'
        self._results_directory = self._port.results_directory()
        self._finder = LayoutTestFinder(self._port, self._options)
        self._runner = LayoutTestRunner(self._options, self._port, self._printer, self._results_directory, self._test_is_slow)
        # Optional per-test options (e.g. "slow") from tests-options.json.
        test_options_json_path = self._port.path_from_webkit_base(self.LAYOUT_TESTS_DIRECTORY, "tests-options.json")
        self._tests_options = json.loads(self._filesystem.read_text_file(test_options_json_path)) if self._filesystem.exists(test_options_json_path) else {}
    def _collect_tests(self, args):
        """Return the (paths, test_names) discovered for the given args."""
        return self._finder.find_tests(self._options, args)
    def _is_http_test(self, test):
        # Websocket and web-platform tests also require the HTTP servers.
        return self.HTTP_SUBDIR in test or self._is_websocket_test(test) or self._is_web_platform_test(test)
    def _is_websocket_test(self, test):
        return self.WEBSOCKET_SUBDIR in test
    def _is_web_platform_test(self, test):
        return self.web_platform_test_subdir in test
    def _custom_device_for_test(self, test):
        """Return the custom device class a test requires, or None.

        Tests living under a device-named directory (one of the port's
        CUSTOM_DEVICE_CLASSES) must run on that device.
        """
        for device_class in self._port.CUSTOM_DEVICE_CLASSES:
            directory_suffix = device_class + self._port.TEST_PATH_SEPARATOR
            if directory_suffix in test:
                return device_class
        return None
    def _http_tests(self, test_names):
        """Return the subset of test_names that need an HTTP server."""
        return set(test for test in test_names if self._is_http_test(test))
    def _prepare_lists(self, paths, test_names):
        """Split test_names into (tests_to_run, tests_to_skip).

        Applies expectation-based skipping, the requested ordering
        (natural or random), and chunking for sharded runs.
        """
        tests_to_skip = self._finder.skip_tests(paths, test_names, self._expectations, self._http_tests(test_names))
        tests_to_run = [test for test in test_names if test not in tests_to_skip]
        # Create a sorted list of test files so the subset chunk,
        # if used, contains alphabetically consecutive tests.
        if self._options.order == 'natural':
            tests_to_run.sort(key=self._port.test_key)
        elif self._options.order == 'random':
            random.shuffle(tests_to_run)
        tests_to_run, tests_in_other_chunks = self._finder.split_into_chunks(tests_to_run)
        self._expectations.add_skipped_tests(tests_in_other_chunks)
        tests_to_skip.update(tests_in_other_chunks)
        return tests_to_run, tests_to_skip
    def _test_input_for_file(self, test_file):
        """Build a TestInput for a single test, picking the right timeout."""
        return TestInput(test_file,
            self._options.slow_time_out_ms if self._test_is_slow(test_file) else self._options.time_out_ms,
            self._is_http_test(test_file),
            should_dump_jsconsolelog_in_stderr=self._test_should_dump_jsconsolelog_in_stderr(test_file))
    def _test_is_slow(self, test_file):
        """A test is slow if expectations or tests-options.json say so."""
        if self._expectations.model().has_modifier(test_file, test_expectations.SLOW):
            return True
        return "slow" in self._tests_options.get(test_file, [])
    def _test_should_dump_jsconsolelog_in_stderr(self, test_file):
        return self._expectations.model().has_modifier(test_file, test_expectations.DUMPJSCONSOLELOGINSTDERR)
    def needs_servers(self, test_names):
        """True if any test needs HTTP servers and --http was not disabled."""
        return any(self._is_http_test(test_name) for test_name in test_names) and self._options.http
    def _get_test_inputs(self, tests_to_run, repeat_each, iterations):
        """Expand the test list by --repeat-each and --iterations."""
        test_inputs = []
        for _ in xrange(iterations):
            for test in tests_to_run:
                for _ in xrange(repeat_each):
                    test_inputs.append(self._test_input_for_file(test))
        return test_inputs
    def _update_worker_count(self, test_names):
        """Clamp --child-processes to what the runner actually needs."""
        test_inputs = self._get_test_inputs(test_names, self._options.repeat_each, self._options.iterations)
        worker_count = self._runner.get_worker_count(test_inputs, int(self._options.child_processes))
        self._options.child_processes = worker_count
    def _set_up_run(self, test_names, device_class=None):
        """Prepare the port for a run; return False on any setup failure."""
        self._printer.write_update("Checking build ...")
        if not self._port.check_build(self.needs_servers(test_names)):
            _log.error("Build check failed")
            return False
        self._options.device_class = device_class
        # This must be started before we check the system dependencies,
        # since the helper may do things to make the setup correct.
        self._printer.write_update("Starting helper ...")
        if not self._port.start_helper(self._options.pixel_tests):
            return False
        self._update_worker_count(test_names)
        self._port.reset_preferences()
        # Check that the system dependencies (themes, fonts, ...) are correct.
        if not self._options.nocheck_sys_deps:
            self._printer.write_update("Checking system dependencies ...")
            if not self._port.check_sys_deps(self.needs_servers(test_names)):
                self._port.stop_helper()
                return False
        if self._options.clobber_old_results:
            self._clobber_old_results()
        # Create the output directory if it doesn't already exist.
        self._port.host.filesystem.maybe_make_directory(self._results_directory)
        self._port.setup_test_run(self._options.device_class)
        return True
    def run(self, args):
        """Run the tests and return a RunDetails object with the results."""
        self._printer.write_update("Collecting tests ...")
        try:
            paths, test_names = self._collect_tests(args)
        except IOError:
            # This is raised if --test-list doesn't exist
            return test_run_results.RunDetails(exit_code=-1)
        self._printer.write_update("Parsing expectations ...")
        self._expectations = test_expectations.TestExpectations(self._port, test_names, force_expectations_pass=self._options.force)
        self._expectations.parse_all_expectations()
        tests_to_run, tests_to_skip = self._prepare_lists(paths, test_names)
        self._printer.print_found(len(test_names), len(tests_to_run), self._options.repeat_each, self._options.iterations)
        start_time = time.time()
        # Check to make sure we're not skipping every test.
        if not tests_to_run:
            _log.critical('No tests to run.')
            return test_run_results.RunDetails(exit_code=-1)
        default_device_tests = []
        # Look for tests with custom device requirements.
        custom_device_tests = defaultdict(list)
        for test_file in tests_to_run:
            custom_device = self._custom_device_for_test(test_file)
            if custom_device:
                custom_device_tests[custom_device].append(test_file)
            else:
                default_device_tests.append(test_file)
        if custom_device_tests:
            for device_class in custom_device_tests:
                _log.debug('{} tests use device {}'.format(len(custom_device_tests[device_class]), device_class))
        initial_results = None
        retry_results = None
        enabled_pixel_tests_in_retry = False
        # Run default-device tests first, then one sub-run per custom
        # device, merging result objects as we go.
        if default_device_tests:
            _log.info('')
            _log.info("Running %s", pluralize(len(tests_to_run), "test"))
            _log.info('')
            if not self._set_up_run(tests_to_run):
                return test_run_results.RunDetails(exit_code=-1)
            initial_results, retry_results, enabled_pixel_tests_in_retry = self._run_test_subset(default_device_tests, tests_to_skip)
        for device_class in custom_device_tests:
            device_tests = custom_device_tests[device_class]
            if device_tests:
                _log.info('')
                _log.info('Running %s for %s', pluralize(len(device_tests), "test"), device_class)
                _log.info('')
                if not self._set_up_run(device_tests, device_class):
                    return test_run_results.RunDetails(exit_code=-1)
                device_initial_results, device_retry_results, device_enabled_pixel_tests_in_retry = self._run_test_subset(device_tests, tests_to_skip)
                initial_results = initial_results.merge(device_initial_results) if initial_results else device_initial_results
                retry_results = retry_results.merge(device_retry_results) if retry_results else device_retry_results
                enabled_pixel_tests_in_retry |= device_enabled_pixel_tests_in_retry
        end_time = time.time()
        return self._end_test_run(start_time, end_time, initial_results, retry_results, enabled_pixel_tests_in_retry)
    def _run_test_subset(self, tests_to_run, tests_to_skip):
        """Run one batch of tests, retrying unexpected failures once.

        Returns (initial_results, retry_results, enabled_pixel_tests_in_retry).
        """
        try:
            enabled_pixel_tests_in_retry = False
            initial_results = self._run_tests(tests_to_run, tests_to_skip, self._options.repeat_each, self._options.iterations, int(self._options.child_processes), retrying=False)
            tests_to_retry = self._tests_to_retry(initial_results, include_crashes=self._port.should_retry_crashes())
            # Don't retry failures when interrupted by user or failures limit exception.
            retry_failures = self._options.retry_failures and not (initial_results.interrupted or initial_results.keyboard_interrupted)
            if retry_failures and tests_to_retry:
                enabled_pixel_tests_in_retry = self._force_pixel_tests_if_needed()
                _log.info('')
                _log.info("Retrying %s ..." % pluralize(len(tests_to_retry), "unexpected failure"))
                _log.info('')
                # Retries run serially (one worker, one iteration).
                retry_results = self._run_tests(tests_to_retry, tests_to_skip=set(), repeat_each=1, iterations=1, num_workers=1, retrying=True)
                if enabled_pixel_tests_in_retry:
                    self._options.pixel_tests = False
            else:
                retry_results = None
        finally:
            self._clean_up_run()
        return (initial_results, retry_results, enabled_pixel_tests_in_retry)
    def _end_test_run(self, start_time, end_time, initial_results, retry_results, enabled_pixel_tests_in_retry):
        """Summarize, report, and upload results; return a RunDetails."""
        # Some crash logs can take a long time to be written out so look
        # for new logs after the test run finishes.
        _log.debug("looking for new crash logs")
        self._look_for_new_crash_logs(initial_results, start_time)
        if retry_results:
            self._look_for_new_crash_logs(retry_results, start_time)
        _log.debug("summarizing results")
        summarized_results = test_run_results.summarize_results(self._port, self._expectations, initial_results, retry_results, enabled_pixel_tests_in_retry)
        results_including_passes = None
        if self._options.results_server_host:
            results_including_passes = test_run_results.summarize_results(self._port, self._expectations, initial_results, retry_results, enabled_pixel_tests_in_retry, include_passes=True, include_time_and_modifiers=True)
        self._printer.print_results(end_time - start_time, initial_results, summarized_results)
        exit_code = -1
        if not self._options.dry_run:
            self._port.print_leaks_summary()
            self._upload_json_files(summarized_results, initial_results, results_including_passes, start_time, end_time)
            results_path = self._filesystem.join(self._results_directory, "results.html")
            self._copy_results_html_file(results_path)
            if initial_results.keyboard_interrupted:
                exit_code = INTERRUPTED_EXIT_STATUS
            else:
                if self._options.show_results and (initial_results.unexpected_results_by_name or
                                                   (self._options.full_results_html and initial_results.total_failures)):
                    self._port.show_results_html_file(results_path)
                exit_code = self._port.exit_code_from_summarized_results(summarized_results)
        return test_run_results.RunDetails(exit_code, summarized_results, initial_results, retry_results, enabled_pixel_tests_in_retry)
    def _run_tests(self, tests_to_run, tests_to_skip, repeat_each, iterations, num_workers, retrying):
        """Hand the expanded test inputs to the LayoutTestRunner."""
        needs_http = any((self._is_http_test(test) and not self._is_web_platform_test(test)) for test in tests_to_run)
        needs_web_platform_test_server = any(self._is_web_platform_test(test) for test in tests_to_run)
        needs_websockets = any(self._is_websocket_test(test) for test in tests_to_run)
        test_inputs = self._get_test_inputs(tests_to_run, repeat_each, iterations)
        return self._runner.run_tests(self._expectations, test_inputs, tests_to_skip, num_workers, needs_http, needs_websockets, needs_web_platform_test_server, retrying)
    def _clean_up_run(self):
        """Flush output streams and tear down the helper and port state."""
        _log.debug("Flushing stdout")
        sys.stdout.flush()
        _log.debug("Flushing stderr")
        sys.stderr.flush()
        _log.debug("Stopping helper")
        self._port.stop_helper()
        _log.debug("Cleaning up port")
        self._port.clean_up_test_run()
    def _force_pixel_tests_if_needed(self):
        """Enable pixel tests for the retry pass; return True if we did."""
        if self._options.pixel_tests:
            return False
        _log.debug("Restarting helper")
        self._port.stop_helper()
        self._options.pixel_tests = True
        return self._port.start_helper()
    def _look_for_new_crash_logs(self, run_results, start_time):
        """Since crash logs can take a long time to be written out if the system is
           under stress do a second pass at the end of the test run.

           run_results: the results of the test run
           start_time: time the tests started at.  We're looking for crash
               logs after that time.
        """
        crashed_processes = []
        for test, result in run_results.unexpected_results_by_name.iteritems():
            if (result.type != test_expectations.CRASH):
                continue
            for failure in result.failures:
                if not isinstance(failure, test_failures.FailureCrash):
                    continue
                crashed_processes.append([test, failure.process_name, failure.pid])
        sample_files = self._port.look_for_new_samples(crashed_processes, start_time)
        if sample_files:
            for test, sample_file in sample_files.iteritems():
                writer = TestResultWriter(self._port._filesystem, self._port, self._port.results_directory(), test)
                writer.copy_sample_file(sample_file)
        crash_logs = self._port.look_for_new_crash_logs(crashed_processes, start_time)
        if crash_logs:
            for test, crash_log in crash_logs.iteritems():
                writer = TestResultWriter(self._port._filesystem, self._port, self._port.results_directory(), test)
                writer.write_crash_log(crash_log)
                # Check if this crashing 'test' is already in list of crashed_processes, if not add it to the run_results
                if not any(process[0] == test for process in crashed_processes):
                    result = test_results.TestResult(test)
                    result.type = test_expectations.CRASH
                    result.is_other_crash = True
                    run_results.add(result, expected=False, test_is_slow=False)
                    _log.debug("Adding results for other crash: " + str(test))
    def _clobber_old_results(self):
        # Just clobber the actual test results directories since the other
        # files in the results directory are explicitly used for cross-run
        # tracking.
        self._printer.write_update("Clobbering old results in %s" %
                                   self._results_directory)
        layout_tests_dir = self._port.layout_tests_dir()
        possible_dirs = self._port.test_dirs()
        for dirname in possible_dirs:
            if self._filesystem.isdir(self._filesystem.join(layout_tests_dir, dirname)):
                self._filesystem.rmtree(self._filesystem.join(self._results_directory, dirname))
    def _tests_to_retry(self, run_results, include_crashes):
        """Return names of unexpectedly failing tests worth retrying."""
        return [result.test_name for result in run_results.unexpected_results_by_name.values() if
                ((result.type != test_expectations.PASS) and
                 (result.type != test_expectations.MISSING) and
                 (result.type != test_expectations.CRASH or include_crashes))]
    def _upload_json_files(self, summarized_results, initial_results, results_including_passes=None, start_time=None, end_time=None):
        """Writes the results of the test run as JSON files into the results
        dir and upload the files to the appengine server.

        Args:
            summarized_results: dict of results
            initial_results: full summary object
        """
        _log.debug("Writing JSON files in %s." % self._results_directory)
        # FIXME: Upload stats.json to the server and delete times_ms.
        times_trie = json_results_generator.test_timings_trie(self._port, initial_results.results_by_name.values())
        times_json_path = self._filesystem.join(self._results_directory, "times_ms.json")
        json_results_generator.write_json(self._filesystem, times_trie, times_json_path)
        stats_trie = self._stats_trie(initial_results)
        stats_path = self._filesystem.join(self._results_directory, "stats.json")
        self._filesystem.write_text_file(stats_path, json.dumps(stats_trie))
        full_results_path = self._filesystem.join(self._results_directory, "full_results.json")
        # We write full_results.json out as jsonp because we need to load it from a file url and Chromium doesn't allow that.
        json_results_generator.write_json(self._filesystem, summarized_results, full_results_path, callback="ADD_RESULTS")
        results_json_path = self._filesystem.join(self._results_directory, "results_including_passes.json")
        if results_including_passes:
            json_results_generator.write_json(self._filesystem, results_including_passes, results_json_path)
        generator = json_layout_results_generator.JSONLayoutResultsGenerator(
            self._port, self._options.builder_name, self._options.build_name,
            self._options.build_number, self._results_directory,
            self._expectations, initial_results,
            self._options.test_results_server,
            "layout-tests",
            self._options.master_name)
        if generator.generate_json_output():
            _log.debug("Finished writing JSON file for the test results server.")
        else:
            _log.debug("Failed to generate JSON file for the test results server.")
            # Bail out early: nothing to upload if generation failed.
            return
        json_files = ["incremental_results.json", "full_results.json", "times_ms.json"]
        generator.upload_json_files(json_files)
        if results_including_passes:
            self.upload_results(results_json_path, start_time, end_time)
        incremental_results_path = self._filesystem.join(self._results_directory, "incremental_results.json")
        # Remove these files from the results directory so they don't take up too much space on the buildbot.
        # The tools use the version we uploaded to the results server anyway.
        self._filesystem.remove(times_json_path)
        self._filesystem.remove(incremental_results_path)
        if results_including_passes:
            self._filesystem.remove(results_json_path)
    def upload_results(self, results_json_path, start_time, end_time):
        """Upload results JSON to the configured results server, if any.

        Requires --results-server-host plus master/builder/build-number/
        build-slave options; logs and returns on any failure.
        """
        hostname = self._options.results_server_host
        if not hostname:
            return
        master_name = self._options.master_name
        builder_name = self._options.builder_name
        build_number = self._options.build_number
        build_slave = self._options.build_slave
        if not master_name or not builder_name or not build_number or not build_slave:
            _log.error("--results-server-host was set, but --master-name, --builder-name, --build-number, or --build-slave was not. Not uploading JSON files.")
            return
        revisions = {}
        # FIXME: This code is duplicated in PerfTestRunner._generate_results_dict
        for (name, path) in self._port.repository_paths():
            scm = SCMDetector(self._port.host.filesystem, self._port.host.executive).detect_scm_system(path) or self._port.host.scm()
            revision = scm.svn_revision(path)
            revisions[name] = {'revision': revision, 'timestamp': scm.timestamp_of_revision(path, revision)}
        _log.info("Uploading JSON files for master: %s builder: %s build: %s slave: %s to %s", master_name, builder_name, build_number, build_slave, hostname)
        attrs = [
            ('master', 'build.webkit.org' if master_name == 'webkit.org' else master_name),  # FIXME: Pass in build.webkit.org.
            ('builder_name', builder_name),
            ('build_number', build_number),
            ('build_slave', build_slave),
            ('revisions', json.dumps(revisions)),
            ('start_time', str(start_time)),
            ('end_time', str(end_time)),
        ]
        uploader = FileUploader("http://%s/api/report" % hostname, 360)
        try:
            response = uploader.upload_as_multipart_form_data(self._filesystem, [('results.json', results_json_path)], attrs)
            if not response:
                _log.error("JSON upload failed; no response returned")
                return
            if response.code != 200:
                _log.error("JSON upload failed, %d: '%s'" % (response.code, response.read()))
                return
            response_text = response.read()
            try:
                response_json = json.loads(response_text)
            except ValueError, error:
                _log.error("JSON upload failed; failed to parse the response: %s", response_text)
                return
            if response_json['status'] != 'OK':
                _log.error("JSON upload failed, %s: %s", response_json['status'], response_text)
                return
            _log.info("JSON uploaded.")
        except Exception, error:
            _log.error("Upload failed: %s" % error)
            return
    def _copy_results_html_file(self, destination_path):
        """Copy the results.html viewer next to the generated JSON."""
        base_dir = self._port.path_from_webkit_base('LayoutTests', 'fast', 'harness')
        results_file = self._filesystem.join(base_dir, 'results.html')
        # Note that the results.html template file won't exist when we're using a MockFileSystem during unit tests,
        # so make sure it exists before we try to copy it.
        if self._filesystem.exists(results_file):
            self._filesystem.copyfile(results_file, destination_path)
    def _stats_trie(self, initial_results):
        """Build the per-test timing/worker stats trie for stats.json."""
        def _worker_number(worker_name):
            return int(worker_name.split('/')[1]) if worker_name else -1
        stats = {}
        for result in initial_results.results_by_name.values():
            if result.type != test_expectations.SKIP:
                stats[result.test_name] = {'results': (_worker_number(result.worker_name), result.test_number, result.pid, int(result.test_run_time * 1000), int(result.total_run_time * 1000))}
        stats_trie = {}
        for name, value in stats.iteritems():
            json_results_generator.add_path_to_trie(name, value, stats_trie)
        return stats_trie
    def _print_expectation_line_for_test(self, format_string, test):
        """Print one formatted expectation line for a single test."""
        line = self._expectations.model().get_expectation_line(test)
        print format_string.format(test, line.expected_behavior, self._expectations.readable_filename_and_line_number(line), line.original_string or '')
    def _print_expectations_for_subset(self, device_class, test_col_width, tests_to_run, tests_to_skip={}):
        # NOTE(review): mutable default argument for tests_to_skip; safe
        # only because it is read, never mutated, here.
        format_string = '{{:{width}}} {{}} {{}} {{}}'.format(width=test_col_width)
        if tests_to_skip:
            print ''
            print 'Tests to skip ({})'.format(len(tests_to_skip))
            for test in sorted(tests_to_skip):
                self._print_expectation_line_for_test(format_string, test)
        print ''
        print 'Tests to run{} ({})'.format(' for ' + device_class if device_class else '', len(tests_to_run))
        for test in sorted(tests_to_run):
            self._print_expectation_line_for_test(format_string, test)
    def print_expectations(self, args):
        """Print the expectations for the given tests without running them.

        Returns 0 on success, -1 if the test list could not be read.
        """
        self._printer.write_update("Collecting tests ...")
        try:
            paths, test_names = self._collect_tests(args)
        except IOError:
            # This is raised if --test-list doesn't exist
            return -1
        self._printer.write_update("Parsing expectations ...")
        self._expectations = test_expectations.TestExpectations(self._port, test_names, force_expectations_pass=self._options.force)
        self._expectations.parse_all_expectations()
        tests_to_run, tests_to_skip = self._prepare_lists(paths, test_names)
        self._printer.print_found(len(test_names), len(tests_to_run), self._options.repeat_each, self._options.iterations)
        test_col_width = len(max(tests_to_run + list(tests_to_skip), key=len)) + 1
        default_device_tests = []
        # Look for tests with custom device requirements.
        custom_device_tests = defaultdict(list)
        for test_file in tests_to_run:
            custom_device = self._custom_device_for_test(test_file)
            if custom_device:
                custom_device_tests[custom_device].append(test_file)
            else:
                default_device_tests.append(test_file)
        if custom_device_tests:
            for device_class in custom_device_tests:
                _log.debug('{} tests use device {}'.format(len(custom_device_tests[device_class]), device_class))
        self._print_expectations_for_subset(None, test_col_width, tests_to_run, tests_to_skip)
        for device_class in custom_device_tests:
            device_tests = custom_device_tests[device_class]
            self._print_expectations_for_subset(device_class, test_col_width, device_tests)
        return 0
| gpl-2.0 |
ShaneHarvey/mongo-python-driver | pymongo/client_session.py | 2 | 40387 | # Copyright 2017 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Logical sessions for ordering sequential operations.
Requires MongoDB 3.6.
.. versionadded:: 3.6
Causally Consistent Reads
=========================
.. code-block:: python
with client.start_session(causal_consistency=True) as session:
collection = client.db.collection
collection.update_one({'_id': 1}, {'$set': {'x': 10}}, session=session)
secondary_c = collection.with_options(
read_preference=ReadPreference.SECONDARY)
# A secondary read waits for replication of the write.
secondary_c.find_one({'_id': 1}, session=session)
If `causal_consistency` is True (the default), read operations that use
the session are causally after previous read and write operations. Using a
causally consistent session, an application can read its own writes and is
guaranteed monotonic reads, even when reading from replica set secondaries.
.. mongodoc:: causal-consistency
.. _transactions-ref:
Transactions
============
.. versionadded:: 3.7
MongoDB 4.0 adds support for transactions on replica set primaries. A
transaction is associated with a :class:`ClientSession`. To start a transaction
on a session, use :meth:`ClientSession.start_transaction` in a with-statement.
Then, execute an operation within the transaction by passing the session to the
operation:
.. code-block:: python
orders = client.db.orders
inventory = client.db.inventory
with client.start_session() as session:
with session.start_transaction():
orders.insert_one({"sku": "abc123", "qty": 100}, session=session)
inventory.update_one({"sku": "abc123", "qty": {"$gte": 100}},
{"$inc": {"qty": -100}}, session=session)
Upon normal completion of ``with session.start_transaction()`` block, the
transaction automatically calls :meth:`ClientSession.commit_transaction`.
If the block exits with an exception, the transaction automatically calls
:meth:`ClientSession.abort_transaction`.
In general, multi-document transactions only support read/write (CRUD)
operations on existing collections. However, MongoDB 4.4 adds support for
creating collections and indexes with some limitations, including an
insert operation that would result in the creation of a new collection.
For a complete description of all the supported and unsupported operations
see the `MongoDB server's documentation for transactions
<http://dochub.mongodb.org/core/transactions>`_.
A session may only have a single active transaction at a time, multiple
transactions on the same session can be executed in sequence.
Sharded Transactions
^^^^^^^^^^^^^^^^^^^^
.. versionadded:: 3.9
PyMongo 3.9 adds support for transactions on sharded clusters running MongoDB
>=4.2. Sharded transactions have the same API as replica set transactions.
When running a transaction against a sharded cluster, the session is
pinned to the mongos server selected for the first operation in the
transaction. All subsequent operations that are part of the same transaction
are routed to the same mongos server. When the transaction is completed, by
running either commitTransaction or abortTransaction, the session is unpinned.
.. mongodoc:: transactions
Snapshot Reads
==============
.. versionadded:: 3.12
MongoDB 5.0 adds support for snapshot reads. Snapshot reads are requested by
passing the ``snapshot`` option to
:meth:`~pymongo.mongo_client.MongoClient.start_session`.
If ``snapshot`` is True, all read operations that use this session read data
from the same snapshot timestamp. The server chooses the latest
majority-committed snapshot timestamp when executing the first read operation
using the session. Subsequent reads on this session read from the same
snapshot timestamp. Snapshot reads are also supported when reading from
replica set secondaries.
.. code-block:: python
# Each read using this session reads data from the same point in time.
with client.start_session(snapshot=True) as session:
order = orders.find_one({"sku": "abc123"}, session=session)
inventory = inventory.find_one({"sku": "abc123"}, session=session)
Snapshot Reads Limitations
^^^^^^^^^^^^^^^^^^^^^^^^^^
Snapshot reads sessions are incompatible with ``causal_consistency=True``.
Only the following read operations are supported in a snapshot reads session:
- :meth:`~pymongo.collection.Collection.find`
- :meth:`~pymongo.collection.Collection.find_one`
- :meth:`~pymongo.collection.Collection.aggregate`
- :meth:`~pymongo.collection.Collection.count_documents`
- :meth:`~pymongo.collection.Collection.distinct` (on unsharded collections)
Classes
=======
"""
import collections
import time
import uuid
from collections.abc import Mapping as _Mapping
from bson.binary import Binary
from bson.int64 import Int64
from bson.son import SON
from bson.timestamp import Timestamp
from pymongo.cursor import _SocketManager
from pymongo.errors import (ConfigurationError,
ConnectionFailure,
InvalidOperation,
OperationFailure,
PyMongoError,
WTimeoutError)
from pymongo.helpers import _RETRYABLE_ERROR_CODES
from pymongo.read_concern import ReadConcern
from pymongo.read_preferences import ReadPreference, _ServerMode
from pymongo.server_type import SERVER_TYPE
from pymongo.write_concern import WriteConcern
class SessionOptions(object):
    """Options for a new :class:`ClientSession`.

    :Parameters:
      - `causal_consistency` (optional): If True, read operations are causally
        ordered within the session. Defaults to True when the ``snapshot``
        option is ``False``.
      - `default_transaction_options` (optional): The default
        TransactionOptions to use for transactions started on this session.
      - `snapshot` (optional): If True, then all reads performed using this
        session will read from the same snapshot. This option is incompatible
        with ``causal_consistency=True``. Defaults to ``False``.

    .. versionchanged:: 3.12
       Added the ``snapshot`` parameter.
    """
    def __init__(self,
                 causal_consistency=None,
                 default_transaction_options=None,
                 snapshot=False):
        # Snapshot reads and causal consistency are mutually exclusive.
        # When neither is requested explicitly, causal consistency is on.
        if snapshot and causal_consistency:
            raise ConfigurationError('snapshot reads do not support '
                                     'causal_consistency=True')
        if causal_consistency is None:
            causal_consistency = not snapshot
        elif snapshot:
            causal_consistency = False
        if default_transaction_options is not None:
            if not isinstance(default_transaction_options, TransactionOptions):
                raise TypeError(
                    "default_transaction_options must be an instance of "
                    "pymongo.client_session.TransactionOptions, not: %r" %
                    (default_transaction_options,))
        self._causal_consistency = causal_consistency
        self._default_transaction_options = default_transaction_options
        self._snapshot = snapshot

    @property
    def causal_consistency(self):
        """Whether causal consistency is configured."""
        return self._causal_consistency

    @property
    def default_transaction_options(self):
        """The default TransactionOptions to use for transactions started on
        this session.

        .. versionadded:: 3.7
        """
        return self._default_transaction_options

    @property
    def snapshot(self):
        """Whether snapshot reads are configured.

        .. versionadded:: 3.12
        """
        return self._snapshot
class TransactionOptions(object):
    """Options for :meth:`ClientSession.start_transaction`.

    :Parameters:
      - `read_concern` (optional): The
        :class:`~pymongo.read_concern.ReadConcern` to use for this transaction.
        If ``None`` (the default) the read concern of the
        :class:`MongoClient` is used.
      - `write_concern` (optional): The
        :class:`~pymongo.write_concern.WriteConcern` to use for this
        transaction. Must be acknowledged. If ``None`` (the default) the
        write concern of the :class:`MongoClient` is used.
      - `read_preference` (optional): The read preference to use. If
        ``None`` (the default) the :attr:`read_preference` of this
        :class:`MongoClient` is used. See :mod:`~pymongo.read_preferences`
        for options. Transactions which read must use
        :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`.
      - `max_commit_time_ms` (optional): The maximum amount of time to allow a
        single commitTransaction command to run. This option is an alias for
        maxTimeMS option on the commitTransaction command. If ``None`` (the
        default) maxTimeMS is not used.

    .. versionchanged:: 3.9
       Added the ``max_commit_time_ms`` option.

    .. versionadded:: 3.7
    """
    def __init__(self, read_concern=None, write_concern=None,
                 read_preference=None, max_commit_time_ms=None):
        self._read_concern = read_concern
        self._write_concern = write_concern
        self._read_preference = read_preference
        self._max_commit_time_ms = max_commit_time_ms
        # Validate each option only when it was supplied; ``None`` always
        # means "inherit from the client".
        if read_concern is not None and not isinstance(
                read_concern, ReadConcern):
            raise TypeError("read_concern must be an instance of "
                            "pymongo.read_concern.ReadConcern, not: %r" %
                            (read_concern,))
        if write_concern is not None:
            if not isinstance(write_concern, WriteConcern):
                raise TypeError("write_concern must be an instance of "
                                "pymongo.write_concern.WriteConcern, not: %r" %
                                (write_concern,))
            # The server cannot report the outcome of an unacknowledged
            # commit, so such write concerns are rejected up front.
            if not write_concern.acknowledged:
                raise ConfigurationError(
                    "transactions do not support unacknowledged write concern"
                    ": %r" % (write_concern,))
        if read_preference is not None and not isinstance(
                read_preference, _ServerMode):
            raise TypeError("%r is not valid for read_preference. See "
                            "pymongo.read_preferences for valid "
                            "options." % (read_preference,))
        if max_commit_time_ms is not None and not isinstance(
                max_commit_time_ms, int):
            raise TypeError(
                "max_commit_time_ms must be an integer or None")

    @property
    def read_concern(self):
        """This transaction's :class:`~pymongo.read_concern.ReadConcern`."""
        return self._read_concern

    @property
    def write_concern(self):
        """This transaction's :class:`~pymongo.write_concern.WriteConcern`."""
        return self._write_concern

    @property
    def read_preference(self):
        """This transaction's :class:`~pymongo.read_preferences.ReadPreference`.
        """
        return self._read_preference

    @property
    def max_commit_time_ms(self):
        """The maxTimeMS to use when running a commitTransaction command.

        .. versionadded:: 3.9
        """
        return self._max_commit_time_ms
def _validate_session_write_concern(session, write_concern):
"""Validate that an explicit session is not used with an unack'ed write.
Returns the session to use for the next operation.
"""
if session:
if write_concern is not None and not write_concern.acknowledged:
# For unacknowledged writes without an explicit session,
# drivers SHOULD NOT use an implicit session. If a driver
# creates an implicit session for unacknowledged writes
# without an explicit session, the driver MUST NOT send the
# session ID.
if session._implicit:
return None
else:
raise ConfigurationError(
'Explicit sessions are incompatible with '
'unacknowledged write concern: %r' % (
write_concern,))
return session
class _TransactionContext(object):
"""Internal transaction context manager for start_transaction."""
def __init__(self, session):
self.__session = session
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.__session.in_transaction:
if exc_val is None:
self.__session.commit_transaction()
else:
self.__session.abort_transaction()
class _TxnState(object):
    # Client-side transaction state machine values (see the Driver
    # Transactions spec). COMMITTED_EMPTY marks a commit of a transaction
    # in which no command was ever sent to the server.
    NONE = 1
    STARTING = 2
    IN_PROGRESS = 3
    COMMITTED = 4
    COMMITTED_EMPTY = 5
    ABORTED = 6
class _Transaction(object):
    """Internal class to hold transaction information in a ClientSession."""
    def __init__(self, opts, client):
        # opts: a TransactionOptions, or None until start_transaction runs.
        self.opts = opts
        self.state = _TxnState.NONE
        # True once the transaction has been pinned to a mongos.
        self.sharded = False
        self.pinned_address = None
        # _SocketManager holding the pinned connection (load balancer mode).
        self.sock_mgr = None
        # recoveryToken returned by a mongos, echoed on commit/abort retries.
        self.recovery_token = None
        # Number of commit/abort attempts; after the first commit attempt
        # the write concern is upgraded (see _finish_transaction).
        self.attempt = 0
        self.client = client
    def active(self):
        # A transaction is active from start_transaction until it is
        # committed or aborted.
        return self.state in (_TxnState.STARTING, _TxnState.IN_PROGRESS)
    def starting(self):
        return self.state == _TxnState.STARTING
    @property
    def pinned_conn(self):
        # The pinned connection, only while the transaction is active.
        if self.active() and self.sock_mgr:
            return self.sock_mgr.sock
        return None
    def pin(self, server, sock_info):
        # Pin to the mongos address; in load balancer mode also pin the
        # exact connection for the transaction's lifetime.
        self.sharded = True
        self.pinned_address = server.description.address
        if server.description.server_type == SERVER_TYPE.LoadBalancer:
            sock_info.pin_txn()
            self.sock_mgr = _SocketManager(sock_info, False)
    def unpin(self):
        # Release the pinned address and return any pinned connection.
        self.pinned_address = None
        if self.sock_mgr:
            self.sock_mgr.close()
        self.sock_mgr = None
    def reset(self):
        self.unpin()
        self.state = _TxnState.NONE
        self.sharded = False
        self.recovery_token = None
        self.attempt = 0
    def __del__(self):
        if self.sock_mgr:
            # Reuse the cursor closing machinery to return the socket to the
            # pool soon.
            self.client._close_cursor_soon(0, None, self.sock_mgr)
            self.sock_mgr = None
def _reraise_with_unknown_commit(exc):
    """Re-raise an exception with the UnknownTransactionCommitResult label.

    Must be called from within an ``except`` block: the bare ``raise``
    re-raises the active exception with its original traceback.
    """
    exc._add_error_label("UnknownTransactionCommitResult")
    raise
def _max_time_expired_error(exc):
    """Return True if exc is a MaxTimeMSExpired error."""
    # Server error code 50 == MaxTimeMSExpired.
    if not isinstance(exc, OperationFailure):
        return False
    return exc.code == 50
# From the transactions spec, all the retryable writes errors plus
# WriteConcernFailed. A commitTransaction failing with one of these codes
# leaves the outcome unknown, so the UnknownTransactionCommitResult label
# is attached (see ClientSession.commit_transaction).
_UNKNOWN_COMMIT_ERROR_CODES = _RETRYABLE_ERROR_CODES | frozenset([
    64,  # WriteConcernFailed
    50,  # MaxTimeMSExpired
])

# From the Convenient API for Transactions spec, with_transaction must
# halt retries after 120 seconds.
# This limit is non-configurable and was chosen to be twice the 60 second
# default value of MongoDB's `transactionLifetimeLimitSeconds` parameter.
_WITH_TRANSACTION_RETRY_TIME_LIMIT = 120
def _within_time_limit(start_time):
    """Are we within the with_transaction retry limit?

    :Parameters:
      - `start_time`: A :func:`time.monotonic` timestamp taken when
        with_transaction began.
    """
    elapsed = time.monotonic() - start_time
    return elapsed < _WITH_TRANSACTION_RETRY_TIME_LIMIT
class ClientSession(object):
    """A session for ordering sequential operations.

    :class:`ClientSession` instances are **not thread-safe or fork-safe**.
    They can only be used by one thread or process at a time. A single
    :class:`ClientSession` cannot be used to run multiple operations
    concurrently.

    Should not be initialized directly by application developers - to create a
    :class:`ClientSession`, call
    :meth:`~pymongo.mongo_client.MongoClient.start_session`.
    """
    def __init__(self, client, server_session, options, implicit):
        # A MongoClient, a _ServerSession, a SessionOptions, and a bool.
        self._client = client
        self._server_session = server_session
        self._options = options
        # Advanced from command replies, see _process_response.
        self._cluster_time = None
        self._operation_time = None
        # atClusterTime of the first read in a snapshot session; pins every
        # subsequent read to the same snapshot.
        self._snapshot_time = None
        # Is this an implicitly created session?
        self._implicit = implicit
        self._transaction = _Transaction(None, client)

    def end_session(self):
        """Finish this session. If a transaction has started, abort it.

        It is an error to use the session after the session has ended.
        """
        self._end_session(lock=True)

    def _end_session(self, lock):
        # `lock`: whether the Topology lock may be taken when returning the
        # server session to the pool (False when called from __del__ paths).
        if self._server_session is not None:
            try:
                if self.in_transaction:
                    self.abort_transaction()
                # It's possible we're still pinned here when the transaction
                # is in the committed state when the session is discarded.
                self._unpin()
            finally:
                # Always return the server session, even if abort failed.
                self._client._return_server_session(self._server_session, lock)
                self._server_session = None

    def _check_ended(self):
        if self._server_session is None:
            raise InvalidOperation("Cannot use ended session")

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._end_session(lock=True)

    @property
    def client(self):
        """The :class:`~pymongo.mongo_client.MongoClient` this session was
        created from.
        """
        return self._client

    @property
    def options(self):
        """The :class:`SessionOptions` this session was created with."""
        return self._options

    @property
    def session_id(self):
        """A BSON document, the opaque server session identifier."""
        self._check_ended()
        return self._server_session.session_id

    @property
    def cluster_time(self):
        """The cluster time returned by the last operation executed
        in this session.
        """
        return self._cluster_time

    @property
    def operation_time(self):
        """The operation time returned by the last operation executed
        in this session.
        """
        return self._operation_time

    def _inherit_option(self, name, val):
        """Return the inherited TransactionOption value.

        Precedence: explicit value > session's default_transaction_options
        > the client's own setting.
        """
        if val:
            return val
        txn_opts = self.options.default_transaction_options
        val = txn_opts and getattr(txn_opts, name)
        if val:
            return val
        return getattr(self.client, name)

    def with_transaction(self, callback, read_concern=None, write_concern=None,
                         read_preference=None, max_commit_time_ms=None):
        """Execute a callback in a transaction.

        This method starts a transaction on this session, executes ``callback``
        once, and then commits the transaction. For example::

          def callback(session):
              orders = session.client.db.orders
              inventory = session.client.db.inventory
              orders.insert_one({"sku": "abc123", "qty": 100}, session=session)
              inventory.update_one({"sku": "abc123", "qty": {"$gte": 100}},
                                   {"$inc": {"qty": -100}}, session=session)

          with client.start_session() as session:
              session.with_transaction(callback)

        To pass arbitrary arguments to the ``callback``, wrap your callable
        with a ``lambda`` like this::

          def callback(session, custom_arg, custom_kwarg=None):
              # Transaction operations...

          with client.start_session() as session:
              session.with_transaction(
                  lambda s: callback(s, "custom_arg", custom_kwarg=1))

        In the event of an exception, ``with_transaction`` may retry the commit
        or the entire transaction, therefore ``callback`` may be invoked
        multiple times by a single call to ``with_transaction``. Developers
        should be mindful of this possibility when writing a ``callback`` that
        modifies application state or has any other side-effects.
        Note that even when the ``callback`` is invoked multiple times,
        ``with_transaction`` ensures that the transaction will be committed
        at-most-once on the server.

        The ``callback`` should not attempt to start new transactions, but
        should simply run operations meant to be contained within a
        transaction. The ``callback`` should also not commit the transaction;
        this is handled automatically by ``with_transaction``. If the
        ``callback`` does commit or abort the transaction without error,
        however, ``with_transaction`` will return without taking further
        action.

        :class:`ClientSession` instances are **not thread-safe or fork-safe**.
        Consequently, the ``callback`` must not attempt to execute multiple
        operations concurrently.

        When ``callback`` raises an exception, ``with_transaction``
        automatically aborts the current transaction. When ``callback`` or
        :meth:`~ClientSession.commit_transaction` raises an exception that
        includes the ``"TransientTransactionError"`` error label,
        ``with_transaction`` starts a new transaction and re-executes
        the ``callback``.

        When :meth:`~ClientSession.commit_transaction` raises an exception with
        the ``"UnknownTransactionCommitResult"`` error label,
        ``with_transaction`` retries the commit until the result of the
        transaction is known.

        This method will cease retrying after 120 seconds has elapsed. This
        timeout is not configurable and any exception raised by the
        ``callback`` or by :meth:`ClientSession.commit_transaction` after the
        timeout is reached will be re-raised. Applications that desire a
        different timeout duration should not use this method.

        :Parameters:
          - `callback`: The callable ``callback`` to run inside a transaction.
            The callable must accept a single argument, this session. Note,
            under certain error conditions the callback may be run multiple
            times.
          - `read_concern` (optional): The
            :class:`~pymongo.read_concern.ReadConcern` to use for this
            transaction.
          - `write_concern` (optional): The
            :class:`~pymongo.write_concern.WriteConcern` to use for this
            transaction.
          - `read_preference` (optional): The read preference to use for this
            transaction. If ``None`` (the default) the :attr:`read_preference`
            of this :class:`Database` is used. See
            :mod:`~pymongo.read_preferences` for options.

        :Returns:
          The return value of the ``callback``.

        .. versionadded:: 3.9
        """
        start_time = time.monotonic()
        # Outer loop: one iteration per transaction attempt.
        while True:
            self.start_transaction(
                read_concern, write_concern, read_preference,
                max_commit_time_ms)
            try:
                ret = callback(self)
            except Exception as exc:
                if self.in_transaction:
                    self.abort_transaction()
                if (isinstance(exc, PyMongoError) and
                        exc.has_error_label("TransientTransactionError") and
                        _within_time_limit(start_time)):
                    # Retry the entire transaction.
                    continue
                raise
            if not self.in_transaction:
                # Assume callback intentionally ended the transaction.
                return ret
            # Inner loop: retry only the commit until its outcome is known.
            while True:
                try:
                    self.commit_transaction()
                except PyMongoError as exc:
                    if (exc.has_error_label("UnknownTransactionCommitResult")
                            and _within_time_limit(start_time)
                            and not _max_time_expired_error(exc)):
                        # Retry the commit.
                        continue
                    if (exc.has_error_label("TransientTransactionError") and
                            _within_time_limit(start_time)):
                        # Retry the entire transaction.
                        break
                    raise
                # Commit succeeded.
                return ret

    def start_transaction(self, read_concern=None, write_concern=None,
                          read_preference=None, max_commit_time_ms=None):
        """Start a multi-statement transaction.

        Takes the same arguments as :class:`TransactionOptions`.

        .. versionchanged:: 3.9
           Added the ``max_commit_time_ms`` option.

        .. versionadded:: 3.7
        """
        self._check_ended()
        if self.options.snapshot:
            raise InvalidOperation("Transactions are not supported in "
                                   "snapshot sessions")
        if self.in_transaction:
            raise InvalidOperation("Transaction already in progress")
        # Resolve unset options from session defaults, then the client.
        read_concern = self._inherit_option("read_concern", read_concern)
        write_concern = self._inherit_option("write_concern", write_concern)
        read_preference = self._inherit_option(
            "read_preference", read_preference)
        if max_commit_time_ms is None:
            opts = self.options.default_transaction_options
            if opts:
                max_commit_time_ms = opts.max_commit_time_ms
        self._transaction.opts = TransactionOptions(
            read_concern, write_concern, read_preference, max_commit_time_ms)
        self._transaction.reset()
        self._transaction.state = _TxnState.STARTING
        # A transaction consumes one txnNumber, like a retryable write.
        self._start_retryable_write()
        return _TransactionContext(self)

    def commit_transaction(self):
        """Commit a multi-statement transaction.

        .. versionadded:: 3.7
        """
        self._check_ended()
        state = self._transaction.state
        if state is _TxnState.NONE:
            raise InvalidOperation("No transaction started")
        elif state in (_TxnState.STARTING, _TxnState.COMMITTED_EMPTY):
            # Server transaction was never started, no need to send a command.
            self._transaction.state = _TxnState.COMMITTED_EMPTY
            return
        elif state is _TxnState.ABORTED:
            raise InvalidOperation(
                "Cannot call commitTransaction after calling abortTransaction")
        elif state is _TxnState.COMMITTED:
            # We're explicitly retrying the commit, move the state back to
            # "in progress" so that in_transaction returns true.
            self._transaction.state = _TxnState.IN_PROGRESS
        try:
            self._finish_transaction_with_retry("commitTransaction")
        except ConnectionFailure as exc:
            # We do not know if the commit was successfully applied on the
            # server or if it satisfied the provided write concern, set the
            # unknown commit error label.
            exc._remove_error_label("TransientTransactionError")
            _reraise_with_unknown_commit(exc)
        except WTimeoutError as exc:
            # We do not know if the commit has satisfied the provided write
            # concern, add the unknown commit error label.
            _reraise_with_unknown_commit(exc)
        except OperationFailure as exc:
            if exc.code not in _UNKNOWN_COMMIT_ERROR_CODES:
                # The server reports errorLabels in this case.
                raise
            # We do not know if the commit was successfully applied on the
            # server or if it satisfied the provided write concern, set the
            # unknown commit error label.
            _reraise_with_unknown_commit(exc)
        finally:
            self._transaction.state = _TxnState.COMMITTED

    def abort_transaction(self):
        """Abort a multi-statement transaction.

        .. versionadded:: 3.7
        """
        self._check_ended()
        state = self._transaction.state
        if state is _TxnState.NONE:
            raise InvalidOperation("No transaction started")
        elif state is _TxnState.STARTING:
            # Server transaction was never started, no need to send a command.
            self._transaction.state = _TxnState.ABORTED
            return
        elif state is _TxnState.ABORTED:
            raise InvalidOperation("Cannot call abortTransaction twice")
        elif state in (_TxnState.COMMITTED, _TxnState.COMMITTED_EMPTY):
            raise InvalidOperation(
                "Cannot call abortTransaction after calling commitTransaction")
        try:
            self._finish_transaction_with_retry("abortTransaction")
        except (OperationFailure, ConnectionFailure):
            # The transactions spec says to ignore abortTransaction errors.
            pass
        finally:
            self._transaction.state = _TxnState.ABORTED
            self._unpin()

    def _finish_transaction_with_retry(self, command_name):
        """Run commit or abort with one retry after any retryable error.

        :Parameters:
          - `command_name`: Either "commitTransaction" or "abortTransaction".
        """
        def func(session, sock_info, retryable):
            return self._finish_transaction(sock_info, command_name)
        # retryable=True: the client may retry once on a retryable error.
        return self._client._retry_internal(True, func, self, None)

    def _finish_transaction(self, sock_info, command_name):
        # Send a single commitTransaction or abortTransaction command.
        self._transaction.attempt += 1
        opts = self._transaction.opts
        wc = opts.write_concern
        cmd = SON([(command_name, 1)])
        if command_name == "commitTransaction":
            if opts.max_commit_time_ms:
                cmd['maxTimeMS'] = opts.max_commit_time_ms
            # Transaction spec says that after the initial commit attempt,
            # subsequent commitTransaction commands should be upgraded to use
            # w:"majority" and set a default value of 10 seconds for wtimeout.
            if self._transaction.attempt > 1:
                wc_doc = wc.document
                wc_doc["w"] = "majority"
                wc_doc.setdefault("wtimeout", 10000)
                wc = WriteConcern(**wc_doc)
        # Echo the mongos recoveryToken so a retried commit/abort can be
        # routed to the correct shard transaction.
        if self._transaction.recovery_token:
            cmd['recoveryToken'] = self._transaction.recovery_token
        return self._client.admin._command(
            sock_info,
            cmd,
            session=self,
            write_concern=wc,
            parse_write_concern_error=True)

    def _advance_cluster_time(self, cluster_time):
        """Internal cluster time helper."""
        # Only move the cluster time forward, never backward.
        if self._cluster_time is None:
            self._cluster_time = cluster_time
        elif cluster_time is not None:
            if cluster_time["clusterTime"] > self._cluster_time["clusterTime"]:
                self._cluster_time = cluster_time

    def advance_cluster_time(self, cluster_time):
        """Update the cluster time for this session.

        :Parameters:
          - `cluster_time`: The
            :data:`~pymongo.client_session.ClientSession.cluster_time` from
            another `ClientSession` instance.
        """
        if not isinstance(cluster_time, _Mapping):
            raise TypeError(
                "cluster_time must be a subclass of collections.Mapping")
        if not isinstance(cluster_time.get("clusterTime"), Timestamp):
            raise ValueError("Invalid cluster_time")
        self._advance_cluster_time(cluster_time)

    def _advance_operation_time(self, operation_time):
        """Internal operation time helper."""
        # Only move the operation time forward, never backward.
        if self._operation_time is None:
            self._operation_time = operation_time
        elif operation_time is not None:
            if operation_time > self._operation_time:
                self._operation_time = operation_time

    def advance_operation_time(self, operation_time):
        """Update the operation time for this session.

        :Parameters:
          - `operation_time`: The
            :data:`~pymongo.client_session.ClientSession.operation_time` from
            another `ClientSession` instance.
        """
        if not isinstance(operation_time, Timestamp):
            raise TypeError("operation_time must be an instance "
                            "of bson.timestamp.Timestamp")
        self._advance_operation_time(operation_time)

    def _process_response(self, reply):
        """Process a response to a command that was run with this session."""
        self._advance_cluster_time(reply.get('$clusterTime'))
        self._advance_operation_time(reply.get('operationTime'))
        if self._options.snapshot and self._snapshot_time is None:
            # Capture the snapshot timestamp chosen by the server for the
            # first read; cursor replies nest it under 'cursor'.
            if 'cursor' in reply:
                ct = reply['cursor'].get('atClusterTime')
            else:
                ct = reply.get('atClusterTime')
            self._snapshot_time = ct
        if self.in_transaction and self._transaction.sharded:
            recovery_token = reply.get('recoveryToken')
            if recovery_token:
                self._transaction.recovery_token = recovery_token

    @property
    def has_ended(self):
        """True if this session is finished."""
        return self._server_session is None

    @property
    def in_transaction(self):
        """True if this session has an active multi-statement transaction.

        .. versionadded:: 3.10
        """
        return self._transaction.active()

    @property
    def _starting_transaction(self):
        """True if this session is starting a multi-statement transaction.
        """
        return self._transaction.starting()

    @property
    def _pinned_address(self):
        """The mongos address this transaction was created on."""
        if self._transaction.active():
            return self._transaction.pinned_address
        return None

    @property
    def _pinned_connection(self):
        """The connection this transaction was started on."""
        return self._transaction.pinned_conn

    def _pin(self, server, sock_info):
        """Pin this session to the given Server or to the given connection."""
        self._transaction.pin(server, sock_info)

    def _unpin(self):
        """Unpin this session from any pinned Server."""
        self._transaction.unpin()

    def _txn_read_preference(self):
        """Return read preference of this transaction or None."""
        if self.in_transaction:
            return self._transaction.opts.read_preference
        return None

    def _apply_to(self, command, is_retryable, read_preference, sock_info):
        """Attach session fields (lsid, txnNumber, ...) to an outgoing command."""
        self._check_ended()
        if self.options.snapshot:
            self._update_read_concern(command, sock_info)
        self._server_session.last_use = time.monotonic()
        command['lsid'] = self._server_session.session_id
        if is_retryable:
            # Retryable write: send the txnNumber, but none of the
            # transaction fields below.
            command['txnNumber'] = self._server_session.transaction_id
            return
        if self.in_transaction:
            if read_preference != ReadPreference.PRIMARY:
                raise InvalidOperation(
                    'read preference in a transaction must be primary, not: '
                    '%r' % (read_preference,))
            if self._transaction.state == _TxnState.STARTING:
                # First command begins a new transaction.
                self._transaction.state = _TxnState.IN_PROGRESS
                command['startTransaction'] = True
                if self._transaction.opts.read_concern:
                    rc = self._transaction.opts.read_concern.document
                    if rc:
                        command['readConcern'] = rc
                self._update_read_concern(command, sock_info)
            command['txnNumber'] = self._server_session.transaction_id
            command['autocommit'] = False

    def _start_retryable_write(self):
        self._check_ended()
        self._server_session.inc_transaction_id()

    def _update_read_concern(self, cmd, sock_info):
        """Add causal-consistency / snapshot fields to cmd's readConcern."""
        if (self.options.causal_consistency
                and self.operation_time is not None):
            cmd.setdefault('readConcern', {})[
                'afterClusterTime'] = self.operation_time
        if self.options.snapshot:
            # Wire version 13 == MongoDB 5.0, the first release with
            # snapshot read support.
            if sock_info.max_wire_version < 13:
                raise ConfigurationError(
                    'Snapshot reads require MongoDB 5.0 or later')
            rc = cmd.setdefault('readConcern', {})
            rc['level'] = 'snapshot'
            if self._snapshot_time is not None:
                rc['atClusterTime'] = self._snapshot_time
class _ServerSession(object):
def __init__(self, generation):
# Ensure id is type 4, regardless of CodecOptions.uuid_representation.
self.session_id = {'id': Binary(uuid.uuid4().bytes, 4)}
self.last_use = time.monotonic()
self._transaction_id = 0
self.dirty = False
self.generation = generation
def mark_dirty(self):
"""Mark this session as dirty.
A server session is marked dirty when a command fails with a network
error. Dirty sessions are later discarded from the server session pool.
"""
self.dirty = True
def timed_out(self, session_timeout_minutes):
idle_seconds = time.monotonic() - self.last_use
# Timed out if we have less than a minute to live.
return idle_seconds > (session_timeout_minutes - 1) * 60
@property
def transaction_id(self):
"""Positive 64-bit integer."""
return Int64(self._transaction_id)
def inc_transaction_id(self):
self._transaction_id += 1
class _ServerSessionPool(collections.deque):
"""Pool of _ServerSession objects.
This class is not thread-safe, access it while holding the Topology lock.
"""
def __init__(self, *args, **kwargs):
super(_ServerSessionPool, self).__init__(*args, **kwargs)
self.generation = 0
def reset(self):
self.generation += 1
self.clear()
def pop_all(self):
ids = []
while self:
ids.append(self.pop().session_id)
return ids
def get_server_session(self, session_timeout_minutes):
# Although the Driver Sessions Spec says we only clear stale sessions
# in return_server_session, PyMongo can't take a lock when returning
# sessions from a __del__ method (like in Cursor.__die), so it can't
# clear stale sessions there. In case many sessions were returned via
# __del__, check for stale sessions here too.
self._clear_stale(session_timeout_minutes)
# The most recently used sessions are on the left.
while self:
s = self.popleft()
if not s.timed_out(session_timeout_minutes):
return s
return _ServerSession(self.generation)
def return_server_session(self, server_session, session_timeout_minutes):
if session_timeout_minutes is not None:
self._clear_stale(session_timeout_minutes)
if server_session.timed_out(session_timeout_minutes):
return
self.return_server_session_no_lock(server_session)
def return_server_session_no_lock(self, server_session):
# Discard sessions from an old pool to avoid duplicate sessions in the
# child process after a fork.
if (server_session.generation == self.generation and
not server_session.dirty):
self.appendleft(server_session)
def _clear_stale(self, session_timeout_minutes):
# Clear stale sessions. The least recently used are on the right.
while self:
if self[-1].timed_out(session_timeout_minutes):
self.pop()
else:
# The remaining sessions also haven't timed out.
break
| apache-2.0 |
from sqlobject import DESC
from sqlobject.main import SQLObjectNotFound

from flask import Module, abort, url_for, redirect, session, escape, request

from muzicast.const import DB_FILE
from muzicast.meta import Genre, GenreStatistics
from muzicast.web import playlist
from muzicast.web.util import render_master_page, page_view
# Flask Module (pre-blueprint API) holding the genre views; mounted by the
# application under a '/genre' URL prefix.
genre = Module(__name__)
def top_genres(n):
    """Return up to ``n`` genres, most-played first.

    Genres with recorded play statistics come first, ordered by descending
    play count. If fewer than ``n`` genres have statistics, the result is
    padded with other genres (in table order) until ``n`` entries are
    reached or the table is exhausted. Returns ``[]`` on lookup failure.
    """
    try:
        top = [stat.genre for stat in
               GenreStatistics.select(
                   orderBy=DESC(GenreStatistics.q.play_count))[:n]]
        if len(top) < n:
            # Pad with genres lacking statistics. Filter duplicates *before*
            # counting toward the quota: the previous implementation sliced
            # Genre.select() first and filtered afterwards, so genres already
            # in `top` consumed padding slots and fewer than n results could
            # be returned even when enough genres exist.
            for extra in Genre.select():
                if extra not in top:
                    top.append(extra)
                    if len(top) >= n:
                        break
        return top
    except SQLObjectNotFound:
        return []
@genre.route('s')
def genres():
    # Routed as '/genres' (module prefix 'genre' + 's'); delegates to the
    # paginated view starting at page 1.
    return genres_page(1)
@genre.route('s/<int:page>')
def genres_page(page):
    # Paginated genre listing; page_view handles slicing Genre.select().
    # The sidebar shows the ten most-played genres.
    return page_view(page, Genre, "genres.html", "genres", top_genres=top_genres(10), title='Genres')
@genre.route('/<id>')
def index(id):
    """Render the detail page for a single genre, 404 if it does not exist."""
    try:
        # Local `genre` shadows the module-level flask Module object; harmless
        # here since the decorator was already evaluated.
        genre = Genre.get(id)
        return render_master_page("genre.html", title="genre", genre=genre)
    except SQLObjectNotFound:
        # NOTE(review): `abort` must be imported from flask at module top —
        # it is referenced here but was missing from the import list.
        abort(404)
@genre.route('/add/<int:id>')
def add_genre_to_playlist(id):
    """Append every track of the genre to the playlist, then bounce back."""
    try:
        genre = Genre.get(id)
        for track in genre.tracks:
            playlist.add_to_playlist(track.id)
        # Return the user to the page they came from.
        return redirect(request.headers['referer'])
    except LookupError:
        # Catches Genre.get failure (sqlobject's SQLObjectNotFound is a
        # LookupError subclass — TODO confirm) and a missing referer header.
        # NOTE(review): `abort` must be imported from flask at module top —
        # it is referenced here but was missing from the import list.
        abort(400)
| mit |
nikste/tensorflow | tensorflow/python/util/nest_test.py | 23 | 13634 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for utilities working with arbitrarily nested structures."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.util import nest
class NestTest(test.TestCase):
  """Unit tests for tensorflow.python.util.nest (flatten/pack utilities)."""

  def testFlattenAndPack(self):
    """flatten/pack_sequence_as round-trip tuples, namedtuples, scalars."""
    structure = ((3, 4), 5, (6, 7, (9, 10), 8))
    flat = ["a", "b", "c", "d", "e", "f", "g", "h"]
    self.assertEqual(nest.flatten(structure), [3, 4, 5, 6, 7, 9, 10, 8])
    self.assertEqual(
        nest.pack_sequence_as(structure, flat),
        (("a", "b"), "c", ("d", "e", ("f", "g"), "h")))
    point = collections.namedtuple("Point", ["x", "y"])
    structure = (point(x=4, y=2), ((point(x=1, y=0),),))
    flat = [4, 2, 1, 0]
    self.assertEqual(nest.flatten(structure), flat)
    restructured_from_flat = nest.pack_sequence_as(structure, flat)
    self.assertEqual(restructured_from_flat, structure)
    self.assertEqual(restructured_from_flat[0].x, 4)
    self.assertEqual(restructured_from_flat[0].y, 2)
    self.assertEqual(restructured_from_flat[1][0][0].x, 1)
    self.assertEqual(restructured_from_flat[1][0][0].y, 0)
    # Scalars flatten to singleton lists and pack back to scalars.
    self.assertEqual([5], nest.flatten(5))
    self.assertEqual([np.array([5])], nest.flatten(np.array([5])))
    self.assertEqual("a", nest.pack_sequence_as(5, ["a"]))
    self.assertEqual(
        np.array([5]), nest.pack_sequence_as("scalar", [np.array([5])]))
    with self.assertRaisesRegexp(ValueError, "Structure is a scalar"):
      nest.pack_sequence_as("scalar", [4, 5])
    with self.assertRaisesRegexp(TypeError, "flat_sequence"):
      nest.pack_sequence_as([4, 5], "bad_sequence")
    with self.assertRaises(ValueError):
      nest.pack_sequence_as([5, 6, [7, 8]], ["a", "b", "c"])

  def testIsSequence(self):
    """Strings, sets, ndarrays and tensors are NOT sequences for nest."""
    self.assertFalse(nest.is_sequence("1234"))
    self.assertTrue(nest.is_sequence([1, 3, [4, 5]]))
    self.assertTrue(nest.is_sequence(((7, 8), (5, 6))))
    self.assertTrue(nest.is_sequence([]))
    self.assertFalse(nest.is_sequence(set([1, 2])))
    ones = array_ops.ones([2, 3])
    self.assertFalse(nest.is_sequence(ones))
    self.assertFalse(nest.is_sequence(math_ops.tanh(ones)))
    self.assertFalse(nest.is_sequence(np.ones((4, 5))))

  def testFlattenDictItems(self):
    """flatten_dict_items pairs up key/value leaves and rejects mismatches."""
    dictionary = {(4, 5, (6, 8)): ("a", "b", ("c", "d"))}
    flat = {4: "a", 5: "b", 6: "c", 8: "d"}
    self.assertEqual(nest.flatten_dict_items(dictionary), flat)
    with self.assertRaises(TypeError):
      nest.flatten_dict_items(4)
    bad_dictionary = {(4, 5, (4, 8)): ("a", "b", ("c", "d"))}
    with self.assertRaisesRegexp(ValueError, "not unique"):
      nest.flatten_dict_items(bad_dictionary)
    another_bad_dictionary = {(4, 5, (6, 8)): ("a", "b", ("c", ("d", "e")))}
    with self.assertRaisesRegexp(
        ValueError, "Key had [0-9]* elements, but value had [0-9]* elements"):
      nest.flatten_dict_items(another_bad_dictionary)

  def testAssertSameStructure(self):
    """assert_same_structure compares shape, not leaf values or types."""
    structure1 = (((1, 2), 3), 4, (5, 6))
    structure2 = ((("foo1", "foo2"), "foo3"), "foo4", ("foo5", "foo6"))
    structure_different_num_elements = ("spam", "eggs")
    structure_different_nesting = (((1, 2), 3), 4, 5, (6,))
    nest.assert_same_structure(structure1, structure2)
    nest.assert_same_structure("abc", 1.0)
    nest.assert_same_structure("abc", np.array([0, 1]))
    nest.assert_same_structure("abc", constant_op.constant([0, 1]))
    with self.assertRaisesRegexp(ValueError,
                                 "don't have the same number of elements"):
      nest.assert_same_structure(structure1, structure_different_num_elements)
    with self.assertRaisesRegexp(ValueError,
                                 "don't have the same number of elements"):
      nest.assert_same_structure([0, 1], np.array([0, 1]))
    with self.assertRaisesRegexp(ValueError,
                                 "don't have the same number of elements"):
      nest.assert_same_structure(0, [0, 1])
    self.assertRaises(TypeError, nest.assert_same_structure, (0, 1), [0, 1])
    with self.assertRaisesRegexp(ValueError,
                                 "don't have the same nested structure"):
      nest.assert_same_structure(structure1, structure_different_nesting)
    # Distinct namedtuple types are considered different structures.
    named_type_0 = collections.namedtuple("named_0", ("a", "b"))
    named_type_1 = collections.namedtuple("named_1", ("a", "b"))
    self.assertRaises(TypeError, nest.assert_same_structure, (0, 1),
                      named_type_0("a", "b"))
    nest.assert_same_structure(named_type_0(3, 4), named_type_0("a", "b"))
    self.assertRaises(TypeError, nest.assert_same_structure,
                      named_type_0(3, 4), named_type_1(3, 4))
    with self.assertRaisesRegexp(ValueError,
                                 "don't have the same nested structure"):
      nest.assert_same_structure(named_type_0(3, 4), named_type_0([3], 4))
    with self.assertRaisesRegexp(ValueError,
                                 "don't have the same nested structure"):
      nest.assert_same_structure([[3], 4], [3, [4]])

  def testMapStructure(self):
    """map_structure applies a callable leaf-wise over aligned structures."""
    structure1 = (((1, 2), 3), 4, (5, 6))
    structure2 = (((7, 8), 9), 10, (11, 12))
    structure1_plus1 = nest.map_structure(lambda x: x + 1, structure1)
    nest.assert_same_structure(structure1, structure1_plus1)
    self.assertAllEqual(
        [2, 3, 4, 5, 6, 7],
        nest.flatten(structure1_plus1))
    structure1_plus_structure2 = nest.map_structure(
        lambda x, y: x + y, structure1, structure2)
    self.assertEqual(
        (((1 + 7, 2 + 8), 3 + 9), 4 + 10, (5 + 11, 6 + 12)),
        structure1_plus_structure2)
    self.assertEqual(3, nest.map_structure(lambda x: x - 1, 4))
    self.assertEqual(7, nest.map_structure(lambda x, y: x + y, 3, 4))
    with self.assertRaisesRegexp(TypeError, "callable"):
      nest.map_structure("bad", structure1_plus1)
    with self.assertRaisesRegexp(ValueError, "same nested structure"):
      nest.map_structure(lambda x, y: None, 3, (3,))
    with self.assertRaisesRegexp(TypeError, "same sequence type"):
      nest.map_structure(lambda x, y: None, ((3, 4), 5), [(3, 4), 5])
    with self.assertRaisesRegexp(ValueError, "same nested structure"):
      nest.map_structure(lambda x, y: None, ((3, 4), 5), (3, (4, 5)))

  def testAssertShallowStructure(self):
    """assert_shallow_structure checks only the top levels of nesting."""
    inp_ab = ["a", "b"]
    inp_abc = ["a", "b", "c"]
    expected_message = (
        "The two structures don't have the same sequence length. Input "
        "structure has length 2, while shallow structure has length 3.")
    with self.assertRaisesRegexp(ValueError, expected_message):
      nest.assert_shallow_structure(inp_abc, inp_ab)
    inp_ab1 = [(1, 1), (2, 2)]
    inp_ab2 = [[1, 1], [2, 2]]
    expected_message = (
        "The two structures don't have the same sequence type. Input structure "
        "has type <(type|class) 'tuple'>, while shallow structure has type "
        "<(type|class) 'list'>.")
    with self.assertRaisesRegexp(TypeError, expected_message):
      nest.assert_shallow_structure(inp_ab2, inp_ab1)

  def testFlattenUpTo(self):
    """flatten_up_to stops flattening at the depth of the shallow tree."""
    input_tree = [[[2, 2], [3, 3]], [[4, 9], [5, 5]]]
    shallow_tree = [[True, True], [False, True]]
    flattened_input_tree = nest.flatten_up_to(shallow_tree, input_tree)
    flattened_shallow_tree = nest.flatten_up_to(shallow_tree, shallow_tree)
    self.assertEqual(flattened_input_tree, [[2, 2], [3, 3], [4, 9], [5, 5]])
    self.assertEqual(flattened_shallow_tree, [True, True, False, True])
    input_tree = [[("a", 1), [("b", 2), [("c", 3), [("d", 4)]]]]]
    shallow_tree = [["level_1", ["level_2", ["level_3", ["level_4"]]]]]
    input_tree_flattened_as_shallow_tree = nest.flatten_up_to(shallow_tree,
                                                              input_tree)
    input_tree_flattened = nest.flatten(input_tree)
    self.assertEqual(input_tree_flattened_as_shallow_tree,
                     [("a", 1), ("b", 2), ("c", 3), ("d", 4)])
    self.assertEqual(input_tree_flattened, ["a", 1, "b", 2, "c", 3, "d", 4])
    ## Shallow non-list edge-case.
    # Using iterable elements.
    input_tree = ["input_tree"]
    shallow_tree = "shallow_tree"
    flattened_input_tree = nest.flatten_up_to(shallow_tree, input_tree)
    flattened_shallow_tree = nest.flatten_up_to(shallow_tree, shallow_tree)
    self.assertEqual(flattened_input_tree, [input_tree])
    self.assertEqual(flattened_shallow_tree, [shallow_tree])
    input_tree = ["input_tree_0", "input_tree_1"]
    shallow_tree = "shallow_tree"
    flattened_input_tree = nest.flatten_up_to(shallow_tree, input_tree)
    flattened_shallow_tree = nest.flatten_up_to(shallow_tree, shallow_tree)
    self.assertEqual(flattened_input_tree, [input_tree])
    self.assertEqual(flattened_shallow_tree, [shallow_tree])
    # Using non-iterable elements.
    input_tree = [0]
    shallow_tree = 9
    flattened_input_tree = nest.flatten_up_to(shallow_tree, input_tree)
    flattened_shallow_tree = nest.flatten_up_to(shallow_tree, shallow_tree)
    self.assertEqual(flattened_input_tree, [input_tree])
    self.assertEqual(flattened_shallow_tree, [shallow_tree])
    input_tree = [0, 1]
    shallow_tree = 9
    flattened_input_tree = nest.flatten_up_to(shallow_tree, input_tree)
    flattened_shallow_tree = nest.flatten_up_to(shallow_tree, shallow_tree)
    self.assertEqual(flattened_input_tree, [input_tree])
    self.assertEqual(flattened_shallow_tree, [shallow_tree])
    ## Both non-list edge-case.
    # Using iterable elements.
    input_tree = "input_tree"
    shallow_tree = "shallow_tree"
    flattened_input_tree = nest.flatten_up_to(shallow_tree, input_tree)
    flattened_shallow_tree = nest.flatten_up_to(shallow_tree, shallow_tree)
    self.assertEqual(flattened_input_tree, [input_tree])
    self.assertEqual(flattened_shallow_tree, [shallow_tree])
    # Using non-iterable elements.
    input_tree = 0
    shallow_tree = 0
    flattened_input_tree = nest.flatten_up_to(shallow_tree, input_tree)
    flattened_shallow_tree = nest.flatten_up_to(shallow_tree, shallow_tree)
    self.assertEqual(flattened_input_tree, [input_tree])
    self.assertEqual(flattened_shallow_tree, [shallow_tree])
    ## Input non-list edge-case.
    # Using iterable elements.
    input_tree = "input_tree"
    shallow_tree = ["shallow_tree"]
    expected_message = ("If shallow structure is a sequence, input must also "
                        "be a sequence. Input has type: <(type|class) 'str'>.")
    with self.assertRaisesRegexp(TypeError, expected_message):
      flattened_input_tree = nest.flatten_up_to(shallow_tree, input_tree)
    flattened_shallow_tree = nest.flatten_up_to(shallow_tree, shallow_tree)
    self.assertEqual(flattened_shallow_tree, shallow_tree)
    input_tree = "input_tree"
    shallow_tree = ["shallow_tree_9", "shallow_tree_8"]
    with self.assertRaisesRegexp(TypeError, expected_message):
      flattened_input_tree = nest.flatten_up_to(shallow_tree, input_tree)
    flattened_shallow_tree = nest.flatten_up_to(shallow_tree, shallow_tree)
    self.assertEqual(flattened_shallow_tree, shallow_tree)
    # Using non-iterable elements.
    input_tree = 0
    shallow_tree = [9]
    expected_message = ("If shallow structure is a sequence, input must also "
                        "be a sequence. Input has type: <(type|class) 'int'>.")
    with self.assertRaisesRegexp(TypeError, expected_message):
      flattened_input_tree = nest.flatten_up_to(shallow_tree, input_tree)
    flattened_shallow_tree = nest.flatten_up_to(shallow_tree, shallow_tree)
    self.assertEqual(flattened_shallow_tree, shallow_tree)
    input_tree = 0
    shallow_tree = [9, 8]
    with self.assertRaisesRegexp(TypeError, expected_message):
      flattened_input_tree = nest.flatten_up_to(shallow_tree, input_tree)
    flattened_shallow_tree = nest.flatten_up_to(shallow_tree, shallow_tree)
    self.assertEqual(flattened_shallow_tree, shallow_tree)

  def testMapStructureUpTo(self):
    """map_structure_up_to maps only down to the shallow structure's depth."""
    ab_tuple = collections.namedtuple("ab_tuple", "a, b")
    op_tuple = collections.namedtuple("op_tuple", "add, mul")
    inp_val = ab_tuple(a=2, b=3)
    inp_ops = ab_tuple(a=op_tuple(add=1, mul=2), b=op_tuple(add=2, mul=3))
    out = nest.map_structure_up_to(
        inp_val, lambda val, ops: (val + ops.add) * ops.mul, inp_val, inp_ops)
    self.assertEqual(out.a, 6)
    self.assertEqual(out.b, 15)
    data_list = [[2, 4, 6, 8], [[1, 3, 5, 7, 9], [3, 5, 7]]]
    name_list = ["evens", ["odds", "primes"]]
    out = nest.map_structure_up_to(
        name_list, lambda name, sec: "first_{}_{}".format(len(sec), name),
        name_list, data_list)
    self.assertEqual(out, ["first_4_evens", ["first_5_odds", "first_3_primes"]])
# Run the nest test suite when this file is executed directly.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
zephyrplugins/zephyr | zephyr.plugin.jython/jython2.5.2rc3/Lib/test/test_urlparse.py | 16 | 14795 | #! /usr/bin/env python
from test import test_support
import unittest
import urlparse
RFC1808_BASE = "http://a/b/c/d;p?q#f"
RFC2396_BASE = "http://a/b/c/d;p?q"
class UrlParseTestCase(unittest.TestCase):
    """Tests for the Python 2 urlparse module (urlparse/urlsplit/urljoin).

    NOTE(review): this is Python 2 code (uses ``unicode``); it will not run
    unmodified under Python 3.
    """

    def checkRoundtrips(self, url, parsed, split):
        """Assert url parses to *parsed*/*split* and unparses back exactly."""
        result = urlparse.urlparse(url)
        self.assertEqual(result, parsed)
        t = (result.scheme, result.netloc, result.path,
             result.params, result.query, result.fragment)
        self.assertEqual(t, parsed)
        # put it back together and it should be the same
        result2 = urlparse.urlunparse(result)
        self.assertEqual(result2, url)
        self.assertEqual(result2, result.geturl())
        # the result of geturl() is a fixpoint; we can always parse it
        # again to get the same result:
        result3 = urlparse.urlparse(result.geturl())
        self.assertEqual(result3.geturl(), result.geturl())
        self.assertEqual(result3, result)
        self.assertEqual(result3.scheme, result.scheme)
        self.assertEqual(result3.netloc, result.netloc)
        self.assertEqual(result3.path, result.path)
        self.assertEqual(result3.params, result.params)
        self.assertEqual(result3.query, result.query)
        self.assertEqual(result3.fragment, result.fragment)
        self.assertEqual(result3.username, result.username)
        self.assertEqual(result3.password, result.password)
        self.assertEqual(result3.hostname, result.hostname)
        self.assertEqual(result3.port, result.port)
        # check the roundtrip using urlsplit() as well
        result = urlparse.urlsplit(url)
        self.assertEqual(result, split)
        t = (result.scheme, result.netloc, result.path,
             result.query, result.fragment)
        self.assertEqual(t, split)
        result2 = urlparse.urlunsplit(result)
        self.assertEqual(result2, url)
        self.assertEqual(result2, result.geturl())
        # check the fixpoint property of re-parsing the result of geturl()
        result3 = urlparse.urlsplit(result.geturl())
        self.assertEqual(result3.geturl(), result.geturl())
        self.assertEqual(result3, result)
        self.assertEqual(result3.scheme, result.scheme)
        self.assertEqual(result3.netloc, result.netloc)
        self.assertEqual(result3.path, result.path)
        self.assertEqual(result3.query, result.query)
        self.assertEqual(result3.fragment, result.fragment)
        self.assertEqual(result3.username, result.username)
        self.assertEqual(result3.password, result.password)
        self.assertEqual(result3.hostname, result.hostname)
        self.assertEqual(result3.port, result.port)

    def test_roundtrips(self):
        """Round-trip a handful of non-http scheme URLs."""
        testcases = [
            ('file:///tmp/junk.txt',
             ('file', '', '/tmp/junk.txt', '', '', ''),
             ('file', '', '/tmp/junk.txt', '', '')),
            ('imap://mail.python.org/mbox1',
             ('imap', 'mail.python.org', '/mbox1', '', '', ''),
             ('imap', 'mail.python.org', '/mbox1', '', '')),
            ('mms://wms.sys.hinet.net/cts/Drama/09006251100.asf',
             ('mms', 'wms.sys.hinet.net', '/cts/Drama/09006251100.asf',
              '', '', ''),
             ('mms', 'wms.sys.hinet.net', '/cts/Drama/09006251100.asf',
              '', '')),
            ('svn+ssh://svn.zope.org/repos/main/ZConfig/trunk/',
             ('svn+ssh', 'svn.zope.org', '/repos/main/ZConfig/trunk/',
              '', '', ''),
             ('svn+ssh', 'svn.zope.org', '/repos/main/ZConfig/trunk/',
              '', ''))
            ]
        for url, parsed, split in testcases:
            self.checkRoundtrips(url, parsed, split)

    def test_http_roundtrips(self):
        # urlparse.urlsplit treats 'http:' as an optimized special case,
        # so we test both 'http:' and 'https:' in all the following.
        # Three cheers for white box knowledge!
        testcases = [
            ('://www.python.org',
             ('www.python.org', '', '', '', ''),
             ('www.python.org', '', '', '')),
            ('://www.python.org#abc',
             ('www.python.org', '', '', '', 'abc'),
             ('www.python.org', '', '', 'abc')),
            ('://www.python.org?q=abc',
             ('www.python.org', '', '', 'q=abc', ''),
             ('www.python.org', '', 'q=abc', '')),
            ('://www.python.org/#abc',
             ('www.python.org', '/', '', '', 'abc'),
             ('www.python.org', '/', '', 'abc')),
            ('://a/b/c/d;p?q#f',
             ('a', '/b/c/d', 'p', 'q', 'f'),
             ('a', '/b/c/d;p', 'q', 'f')),
            ]
        for scheme in ('http', 'https'):
            for url, parsed, split in testcases:
                url = scheme + url
                parsed = (scheme,) + parsed
                split = (scheme,) + split
                self.checkRoundtrips(url, parsed, split)

    def checkJoin(self, base, relurl, expected):
        """Assert urljoin(base, relurl) == expected, with a helpful message."""
        self.assertEqual(urlparse.urljoin(base, relurl), expected,
                         (base, relurl, expected))

    def test_unparse_parse(self):
        for u in ['Python', './Python']:
            self.assertEqual(urlparse.urlunsplit(urlparse.urlsplit(u)), u)
            self.assertEqual(urlparse.urlunparse(urlparse.urlparse(u)), u)

    def test_RFC1808(self):
        # "normal" cases from RFC 1808:
        self.checkJoin(RFC1808_BASE, 'g:h', 'g:h')
        self.checkJoin(RFC1808_BASE, 'g', 'http://a/b/c/g')
        self.checkJoin(RFC1808_BASE, './g', 'http://a/b/c/g')
        self.checkJoin(RFC1808_BASE, 'g/', 'http://a/b/c/g/')
        self.checkJoin(RFC1808_BASE, '/g', 'http://a/g')
        self.checkJoin(RFC1808_BASE, '//g', 'http://g')
        self.checkJoin(RFC1808_BASE, 'g?y', 'http://a/b/c/g?y')
        self.checkJoin(RFC1808_BASE, 'g?y/./x', 'http://a/b/c/g?y/./x')
        self.checkJoin(RFC1808_BASE, '#s', 'http://a/b/c/d;p?q#s')
        self.checkJoin(RFC1808_BASE, 'g#s', 'http://a/b/c/g#s')
        self.checkJoin(RFC1808_BASE, 'g#s/./x', 'http://a/b/c/g#s/./x')
        self.checkJoin(RFC1808_BASE, 'g?y#s', 'http://a/b/c/g?y#s')
        self.checkJoin(RFC1808_BASE, 'g;x', 'http://a/b/c/g;x')
        self.checkJoin(RFC1808_BASE, 'g;x?y#s', 'http://a/b/c/g;x?y#s')
        self.checkJoin(RFC1808_BASE, '.', 'http://a/b/c/')
        self.checkJoin(RFC1808_BASE, './', 'http://a/b/c/')
        self.checkJoin(RFC1808_BASE, '..', 'http://a/b/')
        self.checkJoin(RFC1808_BASE, '../', 'http://a/b/')
        self.checkJoin(RFC1808_BASE, '../g', 'http://a/b/g')
        self.checkJoin(RFC1808_BASE, '../..', 'http://a/')
        self.checkJoin(RFC1808_BASE, '../../', 'http://a/')
        self.checkJoin(RFC1808_BASE, '../../g', 'http://a/g')
        # "abnormal" cases from RFC 1808:
        self.checkJoin(RFC1808_BASE, '', 'http://a/b/c/d;p?q#f')
        self.checkJoin(RFC1808_BASE, '../../../g', 'http://a/../g')
        self.checkJoin(RFC1808_BASE, '../../../../g', 'http://a/../../g')
        self.checkJoin(RFC1808_BASE, '/./g', 'http://a/./g')
        self.checkJoin(RFC1808_BASE, '/../g', 'http://a/../g')
        self.checkJoin(RFC1808_BASE, 'g.', 'http://a/b/c/g.')
        self.checkJoin(RFC1808_BASE, '.g', 'http://a/b/c/.g')
        self.checkJoin(RFC1808_BASE, 'g..', 'http://a/b/c/g..')
        self.checkJoin(RFC1808_BASE, '..g', 'http://a/b/c/..g')
        self.checkJoin(RFC1808_BASE, './../g', 'http://a/b/g')
        self.checkJoin(RFC1808_BASE, './g/.', 'http://a/b/c/g/')
        self.checkJoin(RFC1808_BASE, 'g/./h', 'http://a/b/c/g/h')
        self.checkJoin(RFC1808_BASE, 'g/../h', 'http://a/b/c/h')
        # RFC 1808 and RFC 1630 disagree on these (according to RFC 1808),
        # so we'll not actually run these tests (which expect 1808 behavior).
        #self.checkJoin(RFC1808_BASE, 'http:g', 'http:g')
        #self.checkJoin(RFC1808_BASE, 'http:', 'http:')

    def test_RFC2396(self):
        # cases from RFC 2396
        self.checkJoin(RFC2396_BASE, '?y', 'http://a/b/c/?y')
        self.checkJoin(RFC2396_BASE, ';x', 'http://a/b/c/;x')
        self.checkJoin(RFC2396_BASE, 'g:h', 'g:h')
        self.checkJoin(RFC2396_BASE, 'g', 'http://a/b/c/g')
        self.checkJoin(RFC2396_BASE, './g', 'http://a/b/c/g')
        self.checkJoin(RFC2396_BASE, 'g/', 'http://a/b/c/g/')
        self.checkJoin(RFC2396_BASE, '/g', 'http://a/g')
        self.checkJoin(RFC2396_BASE, '//g', 'http://g')
        self.checkJoin(RFC2396_BASE, 'g?y', 'http://a/b/c/g?y')
        self.checkJoin(RFC2396_BASE, '#s', 'http://a/b/c/d;p?q#s')
        self.checkJoin(RFC2396_BASE, 'g#s', 'http://a/b/c/g#s')
        self.checkJoin(RFC2396_BASE, 'g?y#s', 'http://a/b/c/g?y#s')
        self.checkJoin(RFC2396_BASE, 'g;x', 'http://a/b/c/g;x')
        self.checkJoin(RFC2396_BASE, 'g;x?y#s', 'http://a/b/c/g;x?y#s')
        self.checkJoin(RFC2396_BASE, '.', 'http://a/b/c/')
        self.checkJoin(RFC2396_BASE, './', 'http://a/b/c/')
        self.checkJoin(RFC2396_BASE, '..', 'http://a/b/')
        self.checkJoin(RFC2396_BASE, '../', 'http://a/b/')
        self.checkJoin(RFC2396_BASE, '../g', 'http://a/b/g')
        self.checkJoin(RFC2396_BASE, '../..', 'http://a/')
        self.checkJoin(RFC2396_BASE, '../../', 'http://a/')
        self.checkJoin(RFC2396_BASE, '../../g', 'http://a/g')
        self.checkJoin(RFC2396_BASE, '', RFC2396_BASE)
        self.checkJoin(RFC2396_BASE, '../../../g', 'http://a/../g')
        self.checkJoin(RFC2396_BASE, '../../../../g', 'http://a/../../g')
        self.checkJoin(RFC2396_BASE, '/./g', 'http://a/./g')
        self.checkJoin(RFC2396_BASE, '/../g', 'http://a/../g')
        self.checkJoin(RFC2396_BASE, 'g.', 'http://a/b/c/g.')
        self.checkJoin(RFC2396_BASE, '.g', 'http://a/b/c/.g')
        self.checkJoin(RFC2396_BASE, 'g..', 'http://a/b/c/g..')
        self.checkJoin(RFC2396_BASE, '..g', 'http://a/b/c/..g')
        self.checkJoin(RFC2396_BASE, './../g', 'http://a/b/g')
        self.checkJoin(RFC2396_BASE, './g/.', 'http://a/b/c/g/')
        self.checkJoin(RFC2396_BASE, 'g/./h', 'http://a/b/c/g/h')
        self.checkJoin(RFC2396_BASE, 'g/../h', 'http://a/b/c/h')
        self.checkJoin(RFC2396_BASE, 'g;x=1/./y', 'http://a/b/c/g;x=1/y')
        self.checkJoin(RFC2396_BASE, 'g;x=1/../y', 'http://a/b/c/y')
        self.checkJoin(RFC2396_BASE, 'g?y/./x', 'http://a/b/c/g?y/./x')
        self.checkJoin(RFC2396_BASE, 'g?y/../x', 'http://a/b/c/g?y/../x')
        self.checkJoin(RFC2396_BASE, 'g#s/./x', 'http://a/b/c/g#s/./x')
        self.checkJoin(RFC2396_BASE, 'g#s/../x', 'http://a/b/c/g#s/../x')

    def test_urldefrag(self):
        for url, defrag, frag in [
            ('http://python.org#frag', 'http://python.org', 'frag'),
            ('http://python.org', 'http://python.org', ''),
            ('http://python.org/#frag', 'http://python.org/', 'frag'),
            ('http://python.org/', 'http://python.org/', ''),
            ('http://python.org/?q#frag', 'http://python.org/?q', 'frag'),
            ('http://python.org/?q', 'http://python.org/?q', ''),
            ('http://python.org/p#frag', 'http://python.org/p', 'frag'),
            ('http://python.org/p?q', 'http://python.org/p?q', ''),
            (RFC1808_BASE, 'http://a/b/c/d;p?q', 'f'),
            (RFC2396_BASE, 'http://a/b/c/d;p?q', ''),
            ]:
            self.assertEqual(urlparse.urldefrag(url), (defrag, frag))

    def test_urlsplit_attributes(self):
        url = "HTTP://WWW.PYTHON.ORG/doc/#frag"
        p = urlparse.urlsplit(url)
        self.assertEqual(p.scheme, "http")
        self.assertEqual(p.netloc, "WWW.PYTHON.ORG")
        self.assertEqual(p.path, "/doc/")
        self.assertEqual(p.query, "")
        self.assertEqual(p.fragment, "frag")
        self.assertEqual(p.username, None)
        self.assertEqual(p.password, None)
        self.assertEqual(p.hostname, "www.python.org")
        self.assertEqual(p.port, None)
        # geturl() won't return exactly the original URL in this case
        # since the scheme is always case-normalized
        #self.assertEqual(p.geturl(), url)
        url = "http://User:Pass@www.python.org:080/doc/?query=yes#frag"
        p = urlparse.urlsplit(url)
        self.assertEqual(p.scheme, "http")
        self.assertEqual(p.netloc, "User:Pass@www.python.org:080")
        self.assertEqual(p.path, "/doc/")
        self.assertEqual(p.query, "query=yes")
        self.assertEqual(p.fragment, "frag")
        self.assertEqual(p.username, "User")
        self.assertEqual(p.password, "Pass")
        self.assertEqual(p.hostname, "www.python.org")
        self.assertEqual(p.port, 80)
        self.assertEqual(p.geturl(), url)

    def test_attributes_bad_port(self):
        """Check handling of non-integer ports."""
        p = urlparse.urlsplit("http://www.example.net:foo")
        self.assertEqual(p.netloc, "www.example.net:foo")
        self.assertRaises(ValueError, lambda: p.port)
        p = urlparse.urlparse("http://www.example.net:foo")
        self.assertEqual(p.netloc, "www.example.net:foo")
        self.assertRaises(ValueError, lambda: p.port)

    def test_attributes_without_netloc(self):
        # This example is straight from RFC 3261. It looks like it
        # should allow the username, hostname, and port to be filled
        # in, but doesn't. Since it's a URI and doesn't use the
        # scheme://netloc syntax, the netloc and related attributes
        # should be left empty.
        uri = "sip:alice@atlanta.com;maddr=239.255.255.1;ttl=15"
        p = urlparse.urlsplit(uri)
        self.assertEqual(p.netloc, "")
        self.assertEqual(p.username, None)
        self.assertEqual(p.password, None)
        self.assertEqual(p.hostname, None)
        self.assertEqual(p.port, None)
        self.assertEqual(p.geturl(), uri)
        p = urlparse.urlparse(uri)
        self.assertEqual(p.netloc, "")
        self.assertEqual(p.username, None)
        self.assertEqual(p.password, None)
        self.assertEqual(p.hostname, None)
        self.assertEqual(p.port, None)
        self.assertEqual(p.geturl(), uri)

    def test_caching(self):
        # Test case for bug #1313119
        uri = "http://example.com/doc/"
        unicode_uri = unicode(uri)
        urlparse.urlparse(unicode_uri)
        p = urlparse.urlparse(uri)
        self.assertEqual(type(p.scheme), type(uri))
        self.assertEqual(type(p.hostname), type(uri))
        self.assertEqual(type(p.path), type(uri))

    def test_noslash(self):
        # Issue 1637: http://foo.com?query is legal
        self.assertEqual(urlparse.urlparse("http://example.com?blahblah=/foo"),
                         ('http', 'example.com', '', '', 'blahblah=/foo', ''))
def test_main():
    """Entry point used by Python 2's regrtest framework."""
    test_support.run_unittest(UrlParseTestCase)


if __name__ == "__main__":
    test_main()
| epl-1.0 |
python-rope/rope | rope/base/oi/memorydb.py | 14 | 3098 | from rope.base.oi import objectdb
class MemoryDB(objectdb.FileDict):
    """In-memory object database, optionally persisted via project data files.

    The instance doubles as its own ``files`` mapping (path -> scope dict)
    and registers a write hook so the db is flushed whenever the project
    writes its data files.
    """

    def __init__(self, project, persist=None):
        self.project = project
        # Explicit constructor flag; None means "defer to project prefs".
        self._persist = persist
        self.files = self
        self._load_files()
        self.project.data_files.add_write_hook(self.write)

    def _load_files(self):
        # Populate self._files, reading a previously saved db when persisting.
        self._files = {}
        if self.persist:
            result = self.project.data_files.read_data(
                'objectdb', compress=self.compress, import_=True)
            if result is not None:
                self._files = result

    def keys(self):
        return self._files.keys()

    def __iter__(self):
        for f in self._files:
            yield f

    def __len__(self):
        return len(self._files)

    def __setitem__(self, key, value):
        # Direct item assignment is intentionally unsupported; use create().
        # Bugfix: the old signature `__setitem__(self)` made `db[k] = v`
        # raise TypeError (argument-count mismatch) instead of the intended
        # NotImplementedError.
        raise NotImplementedError()

    def __contains__(self, key):
        return key in self._files

    def __getitem__(self, key):
        return FileInfo(self._files[key])

    def create(self, path):
        self._files[path] = {}

    def rename(self, file, newfile):
        # Silently ignore renames of unknown files.
        if file not in self._files:
            return
        self._files[newfile] = self._files[file]
        del self[file]

    def __delitem__(self, file):
        del self._files[file]

    def write(self):
        """Persist the db through the project's data files when enabled."""
        if self.persist:
            self.project.data_files.write_data('objectdb', self._files,
                                               self.compress)

    @property
    def compress(self):
        # Honor the project's compress_objectdb preference (default False).
        return self.project.prefs.get('compress_objectdb', False)

    @property
    def persist(self):
        # The explicit constructor flag wins over the project preference.
        if self._persist is not None:
            return self._persist
        else:
            return self.project.prefs.get('save_objectdb', False)
class FileInfo(objectdb.FileInfo):
    """Mapping wrapper over one file's scopes dict (scope key -> ScopeInfo)."""

    def __init__(self, scopes):
        self.scopes = scopes

    def create_scope(self, key):
        self.scopes[key] = ScopeInfo()

    def keys(self):
        return self.scopes.keys()

    def __contains__(self, key):
        return key in self.scopes

    def __getitem__(self, key):
        return self.scopes[key]

    def __delitem__(self, key):
        del self.scopes[key]

    def __iter__(self):
        for s in self.scopes:
            yield s

    def __len__(self):
        return len(self.scopes)

    def __setitem__(self, key, value):
        # Item assignment is unsupported; use create_scope().
        # Bugfix: matches the mapping protocol's (self, key, value) arity --
        # the old zero-argument signature raised TypeError rather than the
        # intended NotImplementedError.
        raise NotImplementedError()
class ScopeInfo(objectdb.ScopeInfo):
    """Per-scope inference data: observed calls and per-name objects."""

    def __init__(self):
        self.call_info = {}
        self.per_name = {}

    def get_per_name(self, name):
        # Missing names yield None rather than raising.
        return self.per_name.get(name)

    def save_per_name(self, name, value):
        self.per_name[name] = value

    def get_returned(self, parameters):
        return self.call_info.get(parameters)

    def get_call_infos(self):
        """Yield an objectdb.CallInfo for every recorded call."""
        for parameters, returned in self.call_info.items():
            yield objectdb.CallInfo(parameters, returned)

    def add_call(self, parameters, returned):
        self.call_info[parameters] = returned

    # Only the two dicts are pickled; keep in sync with __init__.
    def __getstate__(self):
        return (self.call_info, self.per_name)

    def __setstate__(self, data):
        self.call_info, self.per_name = data
| lgpl-3.0 |
jundongl/PyFeaST | skfeature/function/sparse_learning_based/UDFS.py | 3 | 3369 | import numpy as np
import scipy
import math
from skfeature.utility.sparse_learning import generate_diagonal_matrix, calculate_l21_norm
from sklearn.metrics.pairwise import pairwise_distances
def udfs(X, **kwargs):
    """
    This function implements l2,1-norm regularized discriminative feature
    selection for unsupervised learning, i.e., min_W Tr(W^T M W) + gamma ||W||_{2,1}, s.t. W^T W = I

    Input
    -----
    X: {numpy array}, shape (n_samples, n_features)
        input data
    kwargs: {dictionary}
        gamma: {float}
            parameter in the objective function of UDFS (default is 0.1)
        n_clusters: {int}
            Number of clusters (default is 5)
        k: {int}
            number of nearest neighbors (default is 5)
        verbose: {boolean}
            True if want to display the objective function value, false if not

    Output
    ------
    W: {numpy array}, shape(n_features, n_clusters)
        feature weight matrix

    Reference
    ---------
    Yang, Yi et al. "l2,1-Norm Regularized Discriminative Feature Selection for Unsupervised Learning." AAAI 2012.
    """
    # Bugfix: `import scipy` at module level does not reliably expose
    # scipy.linalg; import the solver explicitly here.
    from scipy.linalg import eigh

    gamma = kwargs.get('gamma', 0.1)
    k = kwargs.get('k', 5)
    n_clusters = kwargs.get('n_clusters', 5)
    verbose = kwargs.get('verbose', False)

    n_sample, n_feature = X.shape
    M = construct_M(X, k, gamma)

    # D starts as identity and is iteratively reweighted from W.
    D = np.eye(n_feature)
    max_iter = 1000
    obj = np.zeros(max_iter)
    for iter_step in range(max_iter):
        # Update W as the eigenvectors of P corresponding to the first
        # n_clusters smallest eigenvalues (eigh returns ascending order).
        P = M + gamma*D
        eigen_value, eigen_vector = eigh(a=P)
        W = eigen_vector[:, 0:n_clusters]
        # Update D as D_ii = 1 / (2 * ||W(i,:)||).
        D = generate_diagonal_matrix(W)

        obj[iter_step] = calculate_obj(X, W, M, gamma)
        if verbose:
            print('obj at iter {0}: {1}'.format(iter_step+1, obj[iter_step]))
        # Stop once the objective change falls below tolerance.
        if iter_step >= 1 and math.fabs(obj[iter_step] - obj[iter_step-1]) < 1e-3:
            break
    return W
def construct_M(X, k, gamma):
    """
    Construct the matrix M = X^T (sum_i S_i H B_i H S_i^T) X used by UDFS.

    Fixes vs. the original:
    - selection matrix: set Si[idx_new[i, q], q]; the original indexed
      Si[idx_new[q], q], which marks an entire row of neighbor indices of
      sample q instead of the q-th neighbor of sample i
    - 1.0/(k+1) forces true division (the 1/(k+1) form is 0 under Python 2)
    - distances computed with scipy's cdist (euclidean), identical to
      sklearn pairwise_distances' default metric
    """
    from scipy.spatial.distance import cdist

    n_sample, n_feature = X.shape
    Xt = X.T
    # pairwise euclidean distances between all samples
    D = cdist(X, X)
    # sort the distance matrix D in ascending order
    idx = np.argsort(D, axis=1)
    # choose the k-nearest neighbors for each instance (self included)
    idx_new = idx[:, 0:k+1]
    # H is the centering matrix of size (k+1, k+1)
    H = np.eye(k+1) - 1.0/(k+1) * np.ones((k+1, k+1))
    I = np.eye(k+1)
    Mi = np.zeros((n_sample, n_sample))
    for i in range(n_sample):
        Xi = Xt[:, idx_new[i, :]]
        Xi_tilde = np.dot(Xi, H)
        Bi = np.linalg.inv(np.dot(Xi_tilde.T, Xi_tilde) + gamma*I)
        # Si selects the neighborhood of sample i out of all samples.
        Si = np.zeros((n_sample, k+1))
        for q in range(k+1):
            Si[idx_new[i, q], q] = 1
        Mi = Mi + np.dot(np.dot(Si, np.dot(np.dot(H, Bi), H)), Si.T)
    M = np.dot(np.dot(X.T, Mi), X)
    return M
def calculate_obj(X, W, M, gamma):
    """
    This function calculates the objective function of ls_l21 described in the paper
    """
    # objective value: Tr(W^T M W) + gamma * ||W||_{2,1}
    return np.trace(np.dot(np.dot(W.T, M), W)) + gamma*calculate_l21_norm(W) | gpl-2.0 |
Pallokala/ansible-modules-core | cloud/amazon/ec2_group.py | 37 | 16735 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Ansible module documentation, consumed by ansible-doc at runtime.
# NOTE(review): the YAML indentation inside these strings appears to have
# been lost in extraction -- restore the original indentation before use.
DOCUMENTATION = '''
---
module: ec2_group
author: "Andrew de Quincey (@adq)"
version_added: "1.3"
short_description: maintain an ec2 VPC security group.
description:
- maintains ec2 security groups. This module has a dependency on python-boto >= 2.5
options:
name:
description:
- Name of the security group.
required: true
description:
description:
- Description of the security group.
required: true
vpc_id:
description:
- ID of the VPC to create the group in.
required: false
rules:
description:
- List of firewall inbound rules to enforce in this group (see example). If none are supplied, a default all-out rule is assumed. If an empty list is supplied, no inbound rules will be enabled.
required: false
rules_egress:
description:
- List of firewall outbound rules to enforce in this group (see example). If none are supplied, a default all-out rule is assumed. If an empty list is supplied, no outbound rules will be enabled.
required: false
version_added: "1.6"
region:
description:
- the EC2 region to use
required: false
default: null
aliases: []
state:
version_added: "1.4"
description:
- Create or delete a security group
required: false
default: 'present'
choices: [ "present", "absent" ]
aliases: []
purge_rules:
version_added: "1.8"
description:
- Purge existing rules on security group that are not found in rules
required: false
default: 'true'
aliases: []
purge_rules_egress:
version_added: "1.8"
description:
- Purge existing rules_egress on security group that are not found in rules_egress
required: false
default: 'true'
aliases: []
extends_documentation_fragment: aws
notes:
- If a rule declares a group_name and that group doesn't exist, it will be
automatically created. In that case, group_desc should be provided as well.
The module will refuse to create a depended-on group without a description.
'''

# Usage examples shown by ansible-doc (same indentation caveat as above).
EXAMPLES = '''
- name: example ec2 group
ec2_group:
name: example
description: an example EC2 group
vpc_id: 12345
region: eu-west-1a
aws_secret_key: SECRET
aws_access_key: ACCESS
rules:
- proto: tcp
from_port: 80
to_port: 80
cidr_ip: 0.0.0.0/0
- proto: tcp
from_port: 22
to_port: 22
cidr_ip: 10.0.0.0/8
- proto: tcp
from_port: 443
to_port: 443
group_id: amazon-elb/sg-87654321/amazon-elb-sg
- proto: tcp
from_port: 3306
to_port: 3306
group_id: 123412341234/sg-87654321/exact-name-of-sg
- proto: udp
from_port: 10050
to_port: 10050
cidr_ip: 10.0.0.0/8
- proto: udp
from_port: 10051
to_port: 10051
group_id: sg-12345678
- proto: all
# the containing group name may be specified here
group_name: example
rules_egress:
- proto: tcp
from_port: 80
to_port: 80
cidr_ip: 0.0.0.0/0
group_name: example-other
# description to use if example-other needs to be created
group_desc: other example EC2 group
'''
# boto is an optional dependency; HAS_BOTO gates module execution later.
try:
    import boto.ec2
    from boto.ec2.securitygroup import SecurityGroup
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
def make_rule_key(prefix, rule, group_id, cidr_ip):
    """Build a unique, normalised lookup key for a single group rule.

    Accepts either a rule dict (module input) or a
    boto.ec2.securitygroup.IPPermissions instance (AWS state).
    """
    if isinstance(rule, dict):
        proto = rule.get('proto', None)
        from_port = rule.get('from_port', None)
        to_port = rule.get('to_port', None)
        # fix for 11177: "all traffic" rules carry -1/-1 port markers
        if proto not in ['icmp', 'tcp', 'udp'] and from_port == -1 and to_port == -1:
            from_port, to_port = 'none', 'none'
    else:
        proto = getattr(rule, 'ip_protocol', None)
        from_port = getattr(rule, 'from_port', None)
        to_port = getattr(rule, 'to_port', None)
    parts = (prefix, proto, from_port, to_port, group_id, cidr_ip)
    key = '-'.join('%s' % part for part in parts)
    # lower-case for case-insensitive matching, but keep 'None' readable
    return key.lower().replace('-none', '-None')
def addRulesToLookup(rules, prefix, rule_dict):
    """Index boto rule/grant pairs by their canonical rule key.

    rules: iterable of boto IPPermissions objects (each with .grants).
    prefix: 'in' or 'out', distinguishing ingress from egress keys.
    rule_dict: mapping updated in place with key -> (rule, grant).
    """
    # NOTE: the third parameter was previously named `dict`, shadowing the
    # builtin; renamed (every caller in this module passes it positionally).
    for rule in rules:
        for grant in rule.grants:
            rule_dict[make_rule_key(prefix, rule, grant.group_id, grant.cidr_ip)] = (rule, grant)
def validate_rule(module, rule):
    """Fail the module run when a rule specification is malformed.

    Rejects unknown keys, then the mutually exclusive target combinations
    (group_id / group_name / cidr_ip may not be mixed).
    """
    VALID_PARAMS = frozenset((
        'cidr_ip',
        'group_id', 'group_name', 'group_desc',
        'proto', 'from_port', 'to_port',
    ))
    # Unknown keys are reported first, in the rule's own key order.
    for key in rule:
        if key not in VALID_PARAMS:
            module.fail_json(msg='Invalid rule parameter \'{}\''.format(key))

    # The rule target must be specified exactly one way.
    if 'group_id' in rule and 'cidr_ip' in rule:
        module.fail_json(msg='Specify group_id OR cidr_ip, not both')
    elif 'group_name' in rule and 'cidr_ip' in rule:
        module.fail_json(msg='Specify group_name OR cidr_ip, not both')
    elif 'group_id' in rule and 'group_name' in rule:
        module.fail_json(msg='Specify group_id OR group_name, not both')
def get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id):
    """
    Returns tuple of (group_id, ip, target_group_created) after validating
    rule params.
    rule: Dict describing a rule.
    name: Name of the security group being managed.
    groups: Dict of all available security groups (updated in place).
    AWS accepts an ip range or a security group as target of a rule. This
    function validate the rule specification and return either a non-None
    group_id or a non-None ip range.
    """
    # owner-id/sg-id/name triple identifying a group in another AWS account.
    FOREIGN_SECURITY_GROUP_REGEX = '^(\S+)/(sg-\S+)/(\S+)'
    group_id = None
    group_name = None
    ip = None
    target_group_created = False
    # The first three branches reject mutually exclusive combinations;
    # validate_rule() performs the same checks, repeated here defensively.
    if 'group_id' in rule and 'cidr_ip' in rule:
        module.fail_json(msg="Specify group_id OR cidr_ip, not both")
    elif 'group_name' in rule and 'cidr_ip' in rule:
        module.fail_json(msg="Specify group_name OR cidr_ip, not both")
    elif 'group_id' in rule and 'group_name' in rule:
        module.fail_json(msg="Specify group_id OR group_name, not both")
    elif 'group_id' in rule and re.match(FOREIGN_SECURITY_GROUP_REGEX, rule['group_id']):
        # this is a foreign Security Group. Since you can't fetch it you must create an instance of it
        owner_id, group_id, group_name = re.match(FOREIGN_SECURITY_GROUP_REGEX, rule['group_id']).groups()
        group_instance = SecurityGroup(owner_id=owner_id, name=group_name, id=group_id)
        # Register under both id and name so later lookups find it either way.
        groups[group_id] = group_instance
        groups[group_name] = group_instance
    elif 'group_id' in rule:
        group_id = rule['group_id']
    elif 'group_name' in rule:
        group_name = rule['group_name']
        if group_name == name:
            # The rule refers to the group being managed itself.
            group_id = group.id
            groups[group_id] = group
            groups[group_name] = group
        elif group_name in groups:
            group_id = groups[group_name].id
        else:
            # Referenced group does not exist yet: auto-create it, which
            # requires an explicit description in the rule.
            if not rule.get('group_desc', '').strip():
                module.fail_json(msg="group %s will be automatically created by rule %s and no description was provided" % (group_name, rule))
            if not module.check_mode:
                auto_group = ec2.create_security_group(group_name, rule['group_desc'], vpc_id=vpc_id)
                group_id = auto_group.id
                groups[group_id] = auto_group
                groups[group_name] = auto_group
            # Reported even in check mode so `changed` is still signalled.
            target_group_created = True
    elif 'cidr_ip' in rule:
        ip = rule['cidr_ip']
    return group_id, ip, target_group_created
def main():
    """Module entry point: create/delete the EC2 security group and
    reconcile its ingress and egress rules with the requested state."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        name=dict(required=True),
        description=dict(required=True),
        vpc_id=dict(),
        rules=dict(),
        rules_egress=dict(),
        state = dict(default='present', choices=['present', 'absent']),
        purge_rules=dict(default=True, required=False, type='bool'),
        purge_rules_egress=dict(default=True, required=False, type='bool'),
    )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    name = module.params['name']
    description = module.params['description']
    vpc_id = module.params['vpc_id']
    rules = module.params['rules']
    rules_egress = module.params['rules_egress']
    state = module.params.get('state')
    purge_rules = module.params['purge_rules']
    purge_rules_egress = module.params['purge_rules_egress']

    changed = False

    ec2 = ec2_connect(module)

    # find the group if present
    group = None
    groups = {}
    for curGroup in ec2.get_all_security_groups():
        groups[curGroup.id] = curGroup
        if curGroup.name in groups:
            # Prioritise groups from the current VPC
            if vpc_id is None or curGroup.vpc_id == vpc_id:
                groups[curGroup.name] = curGroup
        else:
            groups[curGroup.name] = curGroup

        if curGroup.name == name and (vpc_id is None or curGroup.vpc_id == vpc_id):
            group = curGroup

    # Ensure requested group is absent
    if state == 'absent':
        if group:
            '''found a match, delete it'''
            try:
                group.delete()
            except Exception as e:
                module.fail_json(msg="Unable to delete security group '%s' - %s" % (group, e))
            else:
                group = None
                changed = True
        else:
            '''no match found, no changes required'''

    # Ensure requested group is present
    elif state == 'present':
        if group:
            '''existing group found'''
            # check the group parameters are correct
            group_in_use = False
            rs = ec2.get_all_instances()
            for r in rs:
                for i in r.instances:
                    # BUG FIX: previously compared against the hard-coded
                    # name 'public-ssh' instead of the managed group's own
                    # name, so "in use" detection only ever worked for a
                    # group called public-ssh.
                    group_in_use |= reduce(lambda x, y: x | (y.name == group.name), i.groups, False)

            if group.description != description:
                if group_in_use:
                    module.fail_json(msg="Group description does not match, but it is in use so cannot be changed.")

        # if the group doesn't exist, create it now
        else:
            '''no match found, create it'''
            if not module.check_mode:
                group = ec2.create_security_group(name, description, vpc_id=vpc_id)

                # When a group is created, an egress_rule ALLOW ALL
                # to 0.0.0.0/0 is added automatically but it's not
                # reflected in the object returned by the AWS API
                # call. We re-read the group for getting an updated object
                # amazon sometimes takes a couple seconds to update the
                # security group so wait till it exists
                while len(ec2.get_all_security_groups(filters={ 'group_id': group.id, })) == 0:
                    time.sleep(0.1)

                group = ec2.get_all_security_groups(group_ids=(group.id,))[0]
            changed = True
    else:
        module.fail_json(msg="Unsupported state requested: %s" % state)

    # create a lookup for all existing rules on the group
    if group:
        # Manage ingress rules
        groupRules = {}
        addRulesToLookup(group.rules, 'in', groupRules)

        # Now, go through all provided rules and ensure they are there.
        if rules is not None:
            for rule in rules:
                validate_rule(module, rule)

                group_id, ip, target_group_created = get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id)
                if target_group_created:
                    changed = True

                # Normalise 'all protocols' to the -1/None/None form AWS uses.
                if rule['proto'] in ('all', '-1', -1):
                    rule['proto'] = -1
                    rule['from_port'] = None
                    rule['to_port'] = None

                # If rule already exists, don't later delete it
                ruleId = make_rule_key('in', rule, group_id, ip)
                if ruleId in groupRules:
                    del groupRules[ruleId]
                # Otherwise, add new rule
                else:
                    grantGroup = None
                    if group_id:
                        grantGroup = groups[group_id]

                    if not module.check_mode:
                        group.authorize(rule['proto'], rule['from_port'], rule['to_port'], ip, grantGroup)
                    changed = True

        # Finally, remove anything left in the groupRules -- these will be defunct rules
        if purge_rules:
            for (rule, grant) in groupRules.itervalues():
                grantGroup = None
                if grant.group_id:
                    if grant.owner_id != group.owner_id:
                        # this is a foreign Security Group. Since you can't fetch it you must create an instance of it
                        group_instance = SecurityGroup(owner_id=grant.owner_id, name=grant.name, id=grant.group_id)
                        groups[grant.group_id] = group_instance
                        groups[grant.name] = group_instance
                    grantGroup = groups[grant.group_id]
                if not module.check_mode:
                    group.revoke(rule.ip_protocol, rule.from_port, rule.to_port, grant.cidr_ip, grantGroup)
                changed = True

        # Manage egress rules
        groupRules = {}
        addRulesToLookup(group.rules_egress, 'out', groupRules)

        # Now, go through all provided rules and ensure they are there.
        if rules_egress is not None:
            for rule in rules_egress:
                validate_rule(module, rule)

                group_id, ip, target_group_created = get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id)
                if target_group_created:
                    changed = True

                if rule['proto'] in ('all', '-1', -1):
                    rule['proto'] = -1
                    rule['from_port'] = None
                    rule['to_port'] = None

                # If rule already exists, don't later delete it
                ruleId = make_rule_key('out', rule, group_id, ip)
                if ruleId in groupRules:
                    del groupRules[ruleId]
                # Otherwise, add new rule
                else:
                    grantGroup = None
                    if group_id:
                        grantGroup = groups[group_id].id

                    if not module.check_mode:
                        ec2.authorize_security_group_egress(
                                group_id=group.id,
                                ip_protocol=rule['proto'],
                                from_port=rule['from_port'],
                                to_port=rule['to_port'],
                                src_group_id=grantGroup,
                                cidr_ip=ip)
                    changed = True
        elif vpc_id and not module.check_mode:
            # when using a vpc, but no egress rules are specified,
            # we add in a default allow all out rule, which was the
            # default behavior before egress rules were added
            default_egress_rule = 'out--1-None-None-None-0.0.0.0/0'
            if default_egress_rule not in groupRules:
                ec2.authorize_security_group_egress(
                        group_id=group.id,
                        ip_protocol=-1,
                        from_port=None,
                        to_port=None,
                        src_group_id=None,
                        cidr_ip='0.0.0.0/0'
                        )
                changed = True
            else:
                # make sure the default egress rule is not removed
                del groupRules[default_egress_rule]

        # Finally, remove anything left in the groupRules -- these will be defunct rules
        if purge_rules_egress:
            for (rule, grant) in groupRules.itervalues():
                grantGroup = None
                if grant.group_id:
                    grantGroup = groups[grant.group_id].id
                if not module.check_mode:
                    ec2.revoke_security_group_egress(
                            group_id=group.id,
                            ip_protocol=rule.ip_protocol,
                            from_port=rule.from_port,
                            to_port=rule.to_port,
                            src_group_id=grantGroup,
                            cidr_ip=grant.cidr_ip)
                changed = True

    if group:
        module.exit_json(changed=changed, group_id=group.id)
    else:
        module.exit_json(changed=changed, group_id=None)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| gpl-3.0 |
HenryHu/pybbs | bbsauth.py | 1 | 1665 | """bbsauth -- verifies session token
<http://www.ietf.org/rfc/rfc4616.txt>
Copyright (c) 2009, Coptix, Inc. All rights reserved.
See the LICENSE file for license terms and warranty disclaimer.
"""
from __future__ import absolute_import
from sasl import mechanism as mech, auth
# BUG FIX: ('BBSAuth') is just a parenthesised string, not a tuple, so
# `from bbsauth import *` would iterate it character by character. A
# one-element tuple requires the trailing comma.
__all__ = ('BBSAuth',)
class BBSAuth(mech.Mechanism):
    """The bbsauth mechanism simply submits the optional authorization
    id, the authentication id, and token separated by null
    bytes."""

    # Field separator used on the wire (same framing as SASL PLAIN).
    NULL = u'\x00'

    def __init__(self, auth):
        # auth: backend providing verify_token(), username(),
        # authorization_id() and token().
        self.auth = auth

    def verify(self, *args):
        # Delegate token verification to the auth backend.
        return self.auth.verify_token(*args)

    # Alias so the methods below can build results concisely.
    state = mech.AuthState

    ## Server

    def challenge(self):
        # Empty initial challenge; the client's response is handed to
        # verify_challenge() as the continuation.
        return self.state(self.verify_challenge, None, '')

    def verify_challenge(self, entity, response):
        # Reject malformed UTF-8 outright.
        try:
            token = response.decode('utf-8')
        except ValueError as exc:
            return self.state(False, entity, None)
        try:
            result = self.verify(token)
            if result:
                # Default the entity to the backend's username when the
                # client supplied no authorization id.
                entity = entity or self.auth.username()
                return self.state(result, entity, None)
        except auth.PasswordError as exc:
            return self.state(False, entity, None)
        # NOTE(review): if verify() returns a falsy result without raising,
        # this falls through and returns None -- presumably treated as
        # failure by the caller; confirm against mech.Mechanism.

    ## Client

    def respond(self, data):
        assert data == ''
        auth = self.auth
        zid = auth.authorization_id()
        cid = auth.username()
        # The authorization id is omitted when absent or identical to the
        # authentication id (cf. RFC 4616 framing).
        response = self.NULL.join((
            u'' if (not zid or zid == cid) else zid,
            (cid or u''),
            (auth.token() or u'')
        )).encode('utf-8')
        self.authorized = zid or cid
        return self.state(None, zid or cid, response)
| bsd-2-clause |
arbrandes/edx-platform | openedx/core/djangoapps/user_authn/migrations/0001_data__add_login_service.py | 4 | 1740 | from django.conf import settings
from django.db import migrations
def add_login_service(apps, schema_editor):
    """
    Adds a user and DOT application for the login service.

    Idempotent: returns early when an application with the configured
    client id already exists.
    """
    login_service_name = 'Login Service for JWT Cookies'
    login_service_client_id = settings.JWT_AUTH['JWT_LOGIN_CLIENT_ID']
    login_service_username = settings.JWT_AUTH['JWT_LOGIN_SERVICE_USERNAME']
    login_service_email = login_service_username + '@fake.email'

    Application = apps.get_model(settings.OAUTH2_PROVIDER_APPLICATION_MODEL)
    # Idempotency guard: never create a second application.
    if Application.objects.filter(client_id=login_service_client_id).exists():
        return

    # Get the User model using the AUTH_USER_MODEL settings since that is
    # what the Application model expects at this time in the migration phase.
    User = apps.get_model(settings.AUTH_USER_MODEL)
    login_service_user, created = User.objects.get_or_create(
        username=login_service_username,
        email=login_service_email,
    )
    if created:
        # Make sure the login service user's password is unusable.
        # The set_unusable_password method is available on the other User
        # model (the historical model from apps.get_model lacks it), hence
        # the deliberate local import and re-fetch here.
        from django.contrib.auth.models import User
        user = User.objects.get(username=login_service_username)
        user.set_unusable_password()
        user.save()

    login_service_app = Application.objects.create(
        name=login_service_name,
        client_id=login_service_client_id,
        user=login_service_user,
        client_type='public',
        authorization_grant_type='password',
        redirect_uris='',
    )
class Migration(migrations.Migration):
    # Data-only migration: creates the login-service user/application.
    # Reversal is a no-op since add_login_service is idempotent and the
    # created rows are harmless to keep.
    operations = [
        migrations.RunPython(add_login_service, reverse_code=migrations.RunPython.noop),
    ]
| agpl-3.0 |
be-cloud-be/horizon-addons | server-tools/database_cleanup/models/purge_tables.py | 4 | 4291 | # -*- coding: utf-8 -*-
# © 2014-2016 Therp BV <http://therp.nl>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, fields, models, _
from openerp.exceptions import UserError
from ..identifier_adapter import IdentifierAdapter
class CleanupPurgeLineTable(models.TransientModel):
    # One wizard line per orphaned database table that can be dropped.
    _inherit = 'cleanup.purge.line'
    _name = 'cleanup.purge.line.table'

    wizard_id = fields.Many2one(
        'cleanup.purge.wizard.table', 'Purge Wizard', readonly=True)

    @api.multi
    def purge(self):
        """
        Unlink tables upon manual confirmation.
        """
        # All candidate table names; used below to decide which incoming
        # foreign-key constraints point at tables that are themselves
        # scheduled for dropping.
        tables = self.mapped('name')
        for line in self:
            if line.purged:
                continue
            # Retrieve constraints on the tables to be dropped
            # This query is referenced in numerous places
            # on the Internet but credits probably go to Tom Lane
            # in this post http://www.postgresql.org/\
            # message-id/22895.1226088573@sss.pgh.pa.us
            # Only using the constraint name and the source table,
            # but I'm leaving the rest in for easier debugging
            self.env.cr.execute(
                """
                SELECT conname, confrelid::regclass, af.attname AS fcol,
                    conrelid::regclass, a.attname AS col
                FROM pg_attribute af, pg_attribute a,
                (SELECT conname, conrelid, confrelid,conkey[i] AS conkey,
                    confkey[i] AS confkey
                FROM (select conname, conrelid, confrelid, conkey,
                    confkey, generate_series(1,array_upper(conkey,1)) AS i
                FROM pg_constraint WHERE contype = 'f') ss) ss2
                WHERE af.attnum = confkey AND af.attrelid = confrelid AND
                    a.attnum = conkey AND a.attrelid = conrelid
                    AND confrelid::regclass = '%s'::regclass;
                """, (IdentifierAdapter(line.name, quote=False),))

            # Drop foreign keys coming from other to-be-dropped tables
            # first, so the DROP TABLE below cannot fail on them.
            for constraint in self.env.cr.fetchall():
                if constraint[3] in tables:
                    self.logger.info(
                        'Dropping constraint %s on table %s (to be dropped)',
                        constraint[0], constraint[3])
                    self.env.cr.execute(
                        "ALTER TABLE %s DROP CONSTRAINT %s",
                        (
                            IdentifierAdapter(constraint[3]),
                            IdentifierAdapter(constraint[0])
                        ))

            self.logger.info(
                'Dropping table %s', line.name)
            self.env.cr.execute(
                "DROP TABLE %s", (IdentifierAdapter(line.name),))
            line.write({'purged': True})
        return True
class CleanupPurgeWizardTable(models.TransientModel):
    _inherit = 'cleanup.purge.wizard'
    _name = 'cleanup.purge.wizard.table'
    _description = 'Purge tables'

    @api.model
    def find(self):
        """
        Search for tables that cannot be instantiated.
        Ignore views for now.

        Returns one2many command tuples creating a purge line per
        orphaned table; raises UserError when none are found.
        """
        # Start out with known tables with no model
        known_tables = ['wkf_witm_trans']
        for model in self.env['ir.model'].search([]):
            # Registry entry without a loaded model class; skip it.
            if model.model not in self.env:
                continue
            model_pool = self.env[model.model]
            known_tables.append(model_pool._table)
            # many2many relation tables have no model of their own, so
            # collect them from the column definitions.
            known_tables += [
                column._sql_names(model_pool)[0]
                for column in model_pool._columns.values()
                if (column._type == 'many2many' and
                    hasattr(column, '_rel'))  # unstored function fields of
                # type m2m don't have _rel
            ]

        self.env.cr.execute(
            """
            SELECT table_name FROM information_schema.tables
            WHERE table_schema = 'public' AND table_type = 'BASE TABLE'
            AND table_name NOT IN %s""", (tuple(known_tables),))

        res = [(0, 0, {'name': row[0]}) for row in self.env.cr.fetchall()]
        if not res:
            raise UserError(_('No orphaned tables found'))
        return res

    purge_line_ids = fields.One2many(
        'cleanup.purge.line.table', 'wizard_id', 'Tables to purge')
| agpl-3.0 |
QuLogic/meson | tools/boost_names.py | 1 | 9365 | #!/usr/bin/env python3
# Copyright 2017 Niklas Claesson
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is two implementations for how to get module names from the boost
sources. One relies on json metadata files in the sources, the other relies on
the folder names.
Run the tool in the boost directory and append the stdout to the misc.py:
boost/$ path/to/meson/tools/boost_names.py >> path/to/meson/dependencies/misc.py
"""
import sys
import json
import re
import textwrap
import functools
import typing as T
from pathlib import Path
# Layout of a boost source checkout, relative to the working directory.
lib_dir = Path('libs')
jamroot = Path('Jamroot')

# Directories under libs/ that do not correspond to boost modules.
not_modules = ['config', 'disjoint_sets', 'headers']

# When True, main() also emits the boost_modules metadata dict.
export_modules = False
@functools.total_ordering
class BoostLibrary():
    """One boost library target together with its compile flags.

    Flag lists are stored deduplicated and sorted, so instances built from
    the same flags in any order have identical attributes. Ordering and
    hashing use only the library name; equality additionally supports
    comparison against a plain string.
    """

    @staticmethod
    def _normalized(flags: T.List[str]) -> T.List[str]:
        # Deduplicate and sort so attribute values are order-independent.
        return sorted(set(flags))

    def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
        self.name = name
        self.shared = self._normalized(shared)
        self.static = self._normalized(static)
        self.single = self._normalized(single)
        self.multi = self._normalized(multi)

    def __lt__(self, other: object) -> bool:
        if not isinstance(other, BoostLibrary):
            return NotImplemented
        return self.name < other.name

    def __eq__(self, other: object) -> bool:
        if isinstance(other, str):
            return self.name == other
        if isinstance(other, BoostLibrary):
            return self.name == other.name
        return NotImplemented

    def __hash__(self) -> int:
        return hash(self.name)
@functools.total_ordering
class BoostModule():
    """Descriptive metadata for one boost module and its libraries.

    Ordering (and therefore sorting) is by the module key.
    """

    def __init__(self, name: str, key: str, desc: str, libs: T.List[BoostLibrary]):
        self.name = name
        self.key = key
        self.desc = desc
        self.libs = libs

    def __lt__(self, other: object) -> bool:
        if not isinstance(other, BoostModule):
            return NotImplemented
        return self.key < other.key
def get_boost_version() -> T.Optional[str]:
    """Extract the BOOST_VERSION value from the Jamroot file, if present."""
    match = re.search(r'BOOST_VERSION\s*:\s*([0-9\.]+)\s*;', jamroot.read_text())
    return match.group(1) if match else None
def get_libraries(jamfile: Path) -> T.List[BoostLibrary]:
    """Parse one boost Jamfile and return the BoostLibrary entries it
    declares, with per-link/per-threading defines turned into -D flags."""
    # Extract libraries from the boost Jamfiles. This includes:
    # - library name
    # - compiler flags
    libs: T.List[BoostLibrary] = []
    raw = jamfile.read_text()
    raw = re.sub(r'#.*\n', '\n', raw)  # Remove comments
    raw = re.sub(r'\s+', ' ', raw)     # Force single space
    raw = re.sub(r'}', ';', raw)       # Cheat code blocks by converting } to ;
    cmds = raw.split(';')              # Commands always terminate with a ; (I hope)
    cmds = [x.strip() for x in cmds]   # Some cleanup

    # usage-requirements declared at project level apply to every lib.
    project_usage_requirements: T.List[str] = []

    # "Parse" the relevant sections
    for i in cmds:
        parts = i.split(' ')
        parts = [x for x in parts if x not in ['']]
        if not parts:
            continue

        # Parse project
        if parts[0] in ['project']:
            # Collect `name : value value : ...` attribute groups.
            attributes: T.Dict[str, T.List[str]] = {}
            curr: T.Optional[str] = None
            for j in parts:
                if j == ':':
                    curr = None
                elif curr is None:
                    curr = j
                else:
                    if curr not in attributes:
                        attributes[curr] = []
                    attributes[curr] += [j]
            if 'usage-requirements' in attributes:
                project_usage_requirements = attributes['usage-requirements']

        # Parse libraries
        elif parts[0] in ['lib', 'boost-lib']:
            assert len(parts) >= 2

            # Get and check the library name
            lname = parts[1]
            if parts[0] == 'boost-lib':
                lname = f'boost_{lname}'
            if not lname.startswith('boost_'):
                continue

            # Count `:` to only select the 'usage-requirements'
            # See https://boostorg.github.io/build/manual/master/index.html#bbv2.main-target-rule-syntax
            colon_counter = 0
            usage_requirements: T.List[str] = []
            for j in parts:
                if j == ':':
                    colon_counter += 1
                elif colon_counter >= 4:
                    usage_requirements += [j]

            # Get shared / static defines
            shared: T.List[str] = []
            static: T.List[str] = []
            single: T.List[str] = []
            multi: T.List[str] = []
            for j in usage_requirements + project_usage_requirements:
                m1 = re.match(r'<link>shared:<define>(.*)', j)
                m2 = re.match(r'<link>static:<define>(.*)', j)
                m3 = re.match(r'<threading>single:<define>(.*)', j)
                m4 = re.match(r'<threading>multi:<define>(.*)', j)

                if m1:
                    shared += [f'-D{m1.group(1)}']
                if m2:
                    static += [f'-D{m2.group(1)}']
                if m3:
                    single += [f'-D{m3.group(1)}']
                if m4:
                    multi += [f'-D{m4.group(1)}']

            libs += [BoostLibrary(lname, shared, static, single, multi)]

    return libs
def process_lib_dir(ldir: Path) -> T.List[BoostModule]:
    """Turn one library directory into BoostModule entries.

    Reads meta/libraries.json for the metadata and, when present,
    build/Jamfile.v2 for the compile flags. Returns an empty list (with a
    warning) when the metadata file is missing.
    """
    meta_file = ldir / 'meta' / 'libraries.json'
    bjam_file = ldir / 'build' / 'Jamfile.v2'
    if not meta_file.exists():
        print(f'WARNING: Meta file {meta_file} does not exist')
        return []

    # Compile flags are optional; header-only modules have no Jamfile.
    libs = get_libraries(bjam_file) if bjam_file.exists() else []

    # The json holds either a single object or a list of them.
    meta = json.loads(meta_file.read_text())
    entries = meta if isinstance(meta, list) else [meta]
    return [BoostModule(e['name'], e['key'], e['description'], libs) for e in entries]
def get_modules() -> T.List[BoostModule]:
    """Walk libs/ and collect every boost module's metadata."""
    found: T.List[BoostModule] = []
    for entry in lib_dir.iterdir():
        if not entry.is_dir() or entry.name in not_modules:
            continue

        # Libraries such as numeric keep their real libraries in a
        # 'sublibs' tree and have no top-level 'meta' directory.
        if (entry / 'sublibs').exists() and not (entry / 'meta').exists():
            for sub in entry.iterdir():
                if sub.is_dir():
                    found += process_lib_dir(sub)
        else:
            found += process_lib_dir(entry)

    return found
def main() -> int:
    """Emit the generated boost_libraries (and optionally boost_modules)
    python source to stdout. Returns the process exit code."""
    # Must run from the root of a boost checkout.
    if not lib_dir.is_dir() or not jamroot.exists():
        print("ERROR: script must be run in boost source directory")
        return 1

    vers = get_boost_version()
    modules = get_modules()
    modules = sorted(modules)
    # Flatten, deduplicate and sort all libraries across modules.
    libraries = [x for y in modules for x in y.libs]
    libraries = sorted(set(libraries))

    # Header plus the class definitions the generated dict literals rely
    # on; {{ / }} are literal braces inside the f-string.
    print(textwrap.dedent(f'''\
        #### ---- BEGIN GENERATED ---- ####
        # #
        # Generated with tools/boost_names.py:
        # - boost version: {vers}
        # - modules found: {len(modules)}
        # - libraries found: {len(libraries)}
        #

        class BoostLibrary():
            def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
                self.name = name
                self.shared = shared
                self.static = static
                self.single = single
                self.multi = multi

        class BoostModule():
            def __init__(self, name: str, key: str, desc: str, libs: T.List[str]):
                self.name = name
                self.key = key
                self.desc = desc
                self.libs = libs


        # dict of all know libraries with additional compile options
        boost_libraries = {{\
    '''))

    # One dict entry per library.
    for i in libraries:
        print(textwrap.indent(textwrap.dedent(f"""\
            '{i.name}': BoostLibrary(
                name='{i.name}',
                shared={i.shared},
                static={i.static},
                single={i.single},
                multi={i.multi},
            ),\
        """), '    '))

    if export_modules:
        # Close the libraries dict and open the modules dict.
        print(textwrap.dedent(f'''\
            }}

            # dict of all modules with metadata
            boost_modules = {{\
        '''))

        for mod in modules:
            # Escape single quotes so the description survives as a literal.
            desc_excaped = re.sub(r"'", "\\'", mod.desc)
            print(textwrap.indent(textwrap.dedent(f"""\
                '{mod.key}': BoostModule(
                    name='{mod.name}',
                    key='{mod.key}',
                    desc='{desc_excaped}',
                    libs={[x.name for x in mod.libs]},
                ),\
            """), '    '))

    # Close the final dict and end the generated section.
    print(textwrap.dedent(f'''\
        }}

        # #
        #### ---- END GENERATED ---- ####\
    '''))

    return 0
if __name__ == '__main__':
sys.exit(main())
| apache-2.0 |
freevo/kaa-metadata | src/image/core.py | 1 | 4330 | # -*- coding: iso-8859-1 -*-
# -----------------------------------------------------------------------------
# core.py - basic image class
# -----------------------------------------------------------------------------
# $Id$
#
# -----------------------------------------------------------------------------
# kaa-Metadata - Media Metadata for Python
# Copyright (C) 2003-2006 Thomas Schueppel, Dirk Meyer
#
# First Edition: Thomas Schueppel <stain@acm.org>
# Maintainer: Dirk Meyer <https://github.com/Dischi>
#
# Please see the file AUTHORS for a complete list of authors.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MER-
# CHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# -----------------------------------------------------------------------------
# python imports
import os
import sys
import logging
import xml.sax
# kaa.metadata imports
from ..core import ParseError, Media, MEDIA_IMAGE
# get logging object
log = logging.getLogger('metadata')
# attributes for image files
# Image-specific metadata keys, added on top of the generic Media keys
# by the Image class below.
ATTRIBUTES = ['description', 'people', 'location', 'event', 'width', 'height',
              'thumbnail', 'software', 'hardware', 'dpi', 'city', 'rotation', 'author']
class BinsParser(xml.sax.ContentHandler):
    """SAX handler that reads a bins <album>/<image> XML file and collects
    the <field name="..."> values found inside <description>."""

    def __init__(self, filename):
        xml.sax.ContentHandler.__init__(self)
        self.mode = 0   # 0 = before root, 1 = inside root, 2 = inside <description>
        self.var = None  # name of the <field> currently being read
        self.dict = {}   # collected field name -> text value
        parser = xml.sax.make_parser()
        parser.setContentHandler(self)
        try:
            parser.parse(filename)
        except ParseError:
            # Not a bins file (wrong root element); leave self.dict empty.
            pass
        except Exception as e:
            log.exception('bins parser')

    def items(self):
        # Snapshot of the collected field/value pairs.
        return list(self.dict.items())

    def startElement(self, name, attr):
        if self.mode == 0:
            # The root element must be <album> or <image>.
            if name not in ('album', 'image'):
                raise ParseError
            self.mode = 1
        if self.mode == 2 and name == 'field':
            # Start buffering character data for this field.
            self.var = attr['name']
            self.chars = ''
        if self.mode == 1 and name == 'description':
            self.mode = 2

    def endElement(self, name):
        if self.mode == 2 and name == 'description':
            self.mode = 1
        if self.var:
            # Closing the current <field>: store non-empty content only.
            value = self.chars.strip()
            if value:
                self.dict[self.var] = value
            self.var = None

    def characters(self, c):
        # Accumulate text only while inside a <field> element.
        if self.var:
            self.chars += c
class Image(Media):
    """
    Digital Images, Photos, Pictures.
    """
    # Extend the generic Media key set with the image-specific attributes.
    _keys = Media._keys + ATTRIBUTES
    media = MEDIA_IMAGE

    def _finalize(self):
        """
        Add additional information and correct data.
        FIXME: parse_external_files here is very wrong
        """
        # Only local files can have sidecar metadata files.
        if self.url and self.url.startswith('file://'):
            self.parse_external_files(self.url[7:])
        Media._finalize(self)

    def parse_external_files(self, filename):
        """
        Parse external files like bins and .comments.
        """
        # Parse bins xml files
        binsxml = filename + '.xml'
        if os.path.isfile(binsxml):
            bins = BinsParser(binsxml)
            for key, value in list(bins.items()):
                self._set(key, value)

        # FIXME: this doesn't work anymore
        comment_file = os.path.join(os.path.dirname(filename), '.comments',
                                    os.path.basename(filename) + '.xml')
        # NOTE(review): the `or 1` makes this guard always true, so the
        # legacy .comments handling below is intentionally dead code until
        # the kaa.xml dependency is replaced (see FIXME below).
        if not os.path.isfile(comment_file) or 1:
            return
        # FIXME: replace kaa.xml stuff with sax or minidom
        doc = xml.Document(comment_file, 'Comment')
        for child in doc.children:
            if child.name == 'Place':
                self.location = child.content
            if child.name == 'Note':
                self.description = child.content
| gpl-2.0 |
ubgarbage/gae-blog | blog/tests.py | 1 | 3078 | from django.test.client import Client
from django.test import TestCase
from models import Post, Subscriber
from django.contrib.auth.models import User
from django.contrib.comments.models import Comment
from test_utils import AuthViews
class PostTest(TestCase):
    """Unit tests for the Post model itself."""

    def setUp(self):
        # Every Post needs an author.
        self.user = User.objects.create_user(username="test", email="test@test.com", password="test")
        self.user.save()

    def test_unicode_returns_post_title(self):
        created = Post.objects.create(title='test_title', content='test_content', author=self.user)
        self.assertEqual('test_title', str(created))

    def test_preview_is_generated(self):
        created = Post(title='test preview', content='word1 word2', author=self.user)
        created.save()
        self.assertEqual(created.preview, 'word1 word2')
class BlogViews(AuthViews):
    """Shared smoke test: the view under test must answer 200 to an
    authenticated user. Subclasses set the URL in setUp()."""

    def test_ok_for_auth_users(self):
        self.get_response_check_ok()
class PostsViewTest(BlogViews, TestCase):
    """Tests for the post-list view."""

    def setUp(self):
        self.set_url('/blog/posts/')

    def test_user_see_posts_list(self):
        first = Post(title='test post', content='test post content', author=self.user)
        first.save()
        second = Post(title='test post 2', content='test post content 2', author=self.user)
        second.save()
        response = self.get_response()
        # Both saved posts must appear in the rendered context.
        for expected in (first, second):
            self.assertIn(expected, response.context['posts'])
class PostViewTest(BlogViews, TestCase):
    """Tests for the single-post view."""

    def setUp(self):
        self.client = Client(HTTP_HOST='test_host')
        self.set_url('/blog/posts/post/')
        self.post = Post(title='test post', content='test post content', author=self.user)
        self.post.save()
        self.set_url_params('%s/' % self.post.id)

    def test_returns_post(self):
        response = self.get_response()
        self.assertEqual(self.post, response.context['post'])

    def test_returns_404_on_notfound(self):
        # An id no fixture ever creates.
        self.set_url_params('2222/')
        self.assertEqual(404, self.get_response().status_code)
class SubscribeViewTest(BlogViews, TestCase):
    """Tests for the subscribe view."""

    def setUp(self):
        self.set_url('/blog/subscribe/')

    def test_subscribe_add_user(self):
        self.get_response_check_ok()
        # Raises Subscriber.DoesNotExist if the subscription was not created.
        Subscriber.objects.get(user=self.user)

    def test_subscribe_doesnt_add_if_exists(self):
        Subscriber.objects.create(user=self.user)
        self.get_response_check_ok()
        # Subscribing twice must not duplicate the row.
        self.assertEqual(1, len(Subscriber.objects.all()))
class UnsubscribeViewTest(BlogViews, TestCase):
    """Tests for the unsubscribe view."""

    def setUp(self):
        self.set_url('/blog/unsubscribe/')

    def test_unsubscribe_removes_user_from_subscribers(self):
        Subscriber.objects.create(user=self.user)
        self.get_response_check_ok()
        self.assertEqual(0, len(Subscriber.objects.all()))

    def test_unsubscribe_returns_ok_if_not_subscribed(self):
        # Unsubscribing without a subscription is still a 200.
        self.get_response_check_ok()
| bsd-3-clause |
kvar/ansible | lib/ansible/modules/notification/pushover.py | 37 | 3699 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2012, Jim Richardson <weaselkeeper@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: pushover
version_added: "2.0"
short_description: Send notifications via U(https://pushover.net)
description:
- Send notifications via pushover, to subscriber list of devices, and email
addresses. Requires pushover app on devices.
notes:
- You will require a pushover.net account to use this module. But no account
is required to receive messages.
options:
msg:
description:
- What message you wish to send.
required: true
app_token:
description:
- Pushover issued token identifying your pushover app.
required: true
user_key:
description:
- Pushover issued authentication key for your user.
required: true
title:
description:
- Message title.
required: false
version_added: "2.8"
pri:
description:
- Message priority (see U(https://pushover.net) for details).
required: false
author: "Jim Richardson (@weaselkeeper)"
'''
EXAMPLES = '''
- pushover:
msg: '{{ inventory_hostname }} is acting strange ...'
app_token: wxfdksl
user_key: baa5fe97f2c5ab3ca8f0bb59
delegate_to: localhost
- pushover:
title: 'Alert!'
msg: '{{ inventory_hostname }} has exploded in flames, It is now time to panic'
pri: 1
app_token: wxfdksl
user_key: baa5fe97f2c5ab3ca8f0bb59
delegate_to: localhost
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils.urls import fetch_url
class Pushover(object):
    ''' Instantiates a pushover object, use it to send notifications '''

    # Root of the pushover.net REST API.
    base_uri = 'https://api.pushover.net'

    def __init__(self, module, user, token):
        self.module = module
        self.user = user
        self.token = token

    def run(self, priority, msg, title):
        ''' Do, whatever it is, we do. '''
        # POST one message and return the raw response body; raises
        # Exception on any non-200 API status.
        url = '%s/1/messages.json' % (self.base_uri)

        # parse config
        options = dict(user=self.user,
                       token=self.token,
                       priority=priority,
                       message=msg)
        if title is not None:
            options['title'] = title

        data = urlencode(options)
        headers = {"Content-type": "application/x-www-form-urlencoded"}
        r, info = fetch_url(self.module, url, method='POST', data=data, headers=headers)
        if info['status'] != 200:
            raise Exception(info)

        return r.read()
def main():
    """Ansible entry point: parse arguments and send one pushover message."""
    module = AnsibleModule(
        argument_spec=dict(
            title=dict(type='str'),
            msg=dict(required=True),
            app_token=dict(required=True, no_log=True),
            user_key=dict(required=True, no_log=True),
            pri=dict(required=False, default='0', choices=['-2', '-1', '0', '1', '2']),
        ),
    )
    msg_object = Pushover(module, module.params['user_key'], module.params['app_token'])
    try:
        response = msg_object.run(module.params['pri'], module.params['msg'], module.params['title'])
    except Exception as e:
        # Include the underlying error in the failure message; previously the
        # exception was silently discarded, making failures undiagnosable.
        module.fail_json(msg='Unable to send msg via pushover: %s' % e)
    module.exit_json(msg='message sent successfully: %s' % response, changed=False)


if __name__ == '__main__':
    main()
| gpl-3.0 |
thaim/ansible | lib/ansible/module_utils/network/iosxr/facts/l2_interfaces/l2_interfaces.py | 21 | 4504 | #
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The iosxr l2_interfaces fact class
It is in this file the configuration is collected from the device
for a given resource, parsed, and the facts tree is populated
based on the configuration.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from copy import deepcopy
import re
from ansible.module_utils.network.common import utils
from ansible.module_utils.network.iosxr.utils.utils import get_interface_type
from ansible.module_utils.network.iosxr.argspec.l2_interfaces.l2_interfaces import L2_InterfacesArgs
class L2_InterfacesFacts(object):
    """ The iosxr l2_interfaces fact class
    """
    def __init__(self, module, subspec='config', options='options'):
        self._module = module
        self.argument_spec = L2_InterfacesArgs.argument_spec
        # Narrow the argspec down to the requested subspec/options section
        # before generating the empty facts skeleton.
        spec = deepcopy(self.argument_spec)
        if not subspec:
            facts_argument_spec = spec
        elif options:
            facts_argument_spec = spec[subspec][options]
        else:
            facts_argument_spec = spec[subspec]
        self.generated_spec = utils.generate_dict(facts_argument_spec)

    def populate_facts(self, connection, ansible_facts, data=None):
        """ Populate the facts for l2_interfaces
        :param module: the module instance
        :param connection: the device connection
        :param data: previously collected conf
        :rtype: dictionary
        :returns: facts
        """
        if not data:
            data = connection.get('show running-config interface')
        # Each chunk after 'interface ' describes one interface stanza.
        parsed_objs = []
        for section in data.split('interface '):
            if not section:
                continue
            rendered = self.render_config(self.generated_spec, section)
            if rendered:
                parsed_objs.append(rendered)
        facts = {}
        if parsed_objs:
            facts['l2_interfaces'] = []
            params = utils.validate_config(
                self.argument_spec, {'config': parsed_objs})
            facts['l2_interfaces'] = [
                utils.remove_empties(cfg) for cfg in params['config']]
        ansible_facts['ansible_network_resources'].update(facts)
        return ansible_facts

    def render_config(self, spec, conf):
        """
        Render config as dictionary structure and delete keys from spec for null values
        :param spec: The facts tree, generated from the argspec
        :param conf: The configuration
        :rtype: dictionary
        :returns: The generated config
        """
        rendered = deepcopy(spec)
        # First token is the interface name, unless the stanza is a
        # 'preconfigure' one, in which case the name follows that keyword.
        name_match = re.search(r'^(\S+)', conf)
        intf = name_match.group(1)
        if intf.lower() == "preconfigure":
            pre_match = re.search(r'^(\S+) (.*)', conf)
            if pre_match:
                intf = pre_match.group(2)
        if get_interface_type(intf) == 'unknown':
            return {}
        if intf.lower().startswith('gi'):
            rendered['name'] = intf
        # populate the facts from the configuration
        native = re.search(r"dot1q native vlan (\d+)", conf)
        if native:
            rendered["native_vlan"] = int(native.group(1))
        dot1q = utils.parse_conf_arg(conf, 'encapsulation dot1q')
        rendered['q_vlan'] = []
        if dot1q:
            vlan_tokens = dot1q.split(' ')
            rendered['q_vlan'].append(int(vlan_tokens[0]))
            if len(vlan_tokens) > 1:
                # e.g. 'dot1q 10 second-dot1q 20' -> inner tag at index 2
                rendered['q_vlan'].append(int(vlan_tokens[2]))
        if utils.parse_conf_cmd_arg(conf, 'l2transport', True):
            rendered['l2transport'] = True
        if utils.parse_conf_arg(conf, 'propagate'):
            rendered['propagate'] = True
        rendered['l2protocol'] = []
        for proto in ('cdp', 'pvst', 'stp', 'vtp'):
            value = utils.parse_conf_arg(conf, 'l2protocol %s' % proto)
            if value:
                rendered['l2protocol'].append({proto: value})
        return utils.remove_empties(rendered)
| mit |
shaz13/oppia | core/storage/email/gae_models.py | 2 | 14295 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for the content of sent emails."""
import datetime
from core.platform import models
import feconf
import utils
from google.appengine.ext import ndb
(base_models,) = models.Registry.import_models([models.NAMES.base_model])
class SentEmailModel(base_models.BaseModel):
    """Records the content and metadata of an email sent from Oppia.
    This model is read-only; entries cannot be modified once created. The
    id/key of instances of this class has the form
    [INTENT].[random hash].
    """
    # TODO(sll): Implement functionality to get all emails sent to a particular
    # user with a given intent within a given time period.
    # The user ID of the email recipient.
    recipient_id = ndb.StringProperty(required=True, indexed=True)
    # The email address of the recipient.
    recipient_email = ndb.StringProperty(required=True)
    # The user ID of the email sender. For site-generated emails this is equal
    # to feconf.SYSTEM_COMMITTER_ID.
    sender_id = ndb.StringProperty(required=True)
    # The email address used to send the notification.
    sender_email = ndb.StringProperty(required=True)
    # The intent of the email. Restricted to the closed set of intent
    # constants declared in feconf.
    intent = ndb.StringProperty(required=True, indexed=True, choices=[
        feconf.EMAIL_INTENT_SIGNUP,
        feconf.EMAIL_INTENT_MARKETING,
        feconf.EMAIL_INTENT_DAILY_BATCH,
        feconf.EMAIL_INTENT_EDITOR_ROLE_NOTIFICATION,
        feconf.EMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION,
        feconf.EMAIL_INTENT_SUBSCRIPTION_NOTIFICATION,
        feconf.EMAIL_INTENT_SUGGESTION_NOTIFICATION,
        feconf.EMAIL_INTENT_PUBLICIZE_EXPLORATION,
        feconf.EMAIL_INTENT_UNPUBLISH_EXPLORATION,
        feconf.EMAIL_INTENT_DELETE_EXPLORATION,
        feconf.EMAIL_INTENT_REPORT_BAD_CONTENT,
        feconf.EMAIL_INTENT_QUERY_STATUS_NOTIFICATION,
        feconf.BULK_EMAIL_INTENT_TEST
    ])
    # The subject line of the email.
    subject = ndb.TextProperty(required=True)
    # The HTML content of the email body.
    html_body = ndb.TextProperty(required=True)
    # The datetime the email was sent, in UTC.
    sent_datetime = ndb.DateTimeProperty(required=True, indexed=True)
    # The hash of the recipient id, email subject and message body.
    # Recomputed on every put() (see below) and used by get_by_hash()
    # for duplicate detection.
    email_hash = ndb.StringProperty(indexed=True)
    @classmethod
    def _generate_id(cls, intent):
        """Generates an ID for a new SentEmailModel instance.
        Args:
            intent: str. The intent string, i.e. the purpose of the email.
                Valid intent strings are defined in feconf.py.
        Returns:
            str. The newly-generated ID for the SentEmailModel instance.
        Raises:
            Exception: The id generator for SentEmailModel is producing
                too many collisions.
        """
        id_prefix = '%s.' % intent
        # Retry with fresh random hashes until an unused id is found.
        for _ in range(base_models.MAX_RETRIES):
            new_id = '%s.%s' % (
                id_prefix,
                utils.convert_to_hash(
                    str(utils.get_random_int(base_models.RAND_RANGE)),
                    base_models.ID_LENGTH))
            if not cls.get_by_id(new_id):
                return new_id
        raise Exception(
            'The id generator for SentEmailModel is producing too many '
            'collisions.')
    @classmethod
    def create(
            cls, recipient_id, recipient_email, sender_id, sender_email,
            intent, subject, html_body, sent_datetime):
        """Creates a new SentEmailModel entry.
        Args:
            recipient_id: str. The user ID of the email recipient.
            recipient_email: str. The email address of the recipient.
            sender_id: str. The user ID of the email sender.
            sender_email: str. The email address used to send the notification.
            intent: str. The intent string, i.e. the purpose of the email.
            subject: str. The subject line of the email.
            html_body: str. The HTML content of the email body.
            sent_datetime: datetime.datetime. The datetime the email was sent,
                in UTC.
        """
        instance_id = cls._generate_id(intent)
        email_model_instance = cls(
            id=instance_id, recipient_id=recipient_id,
            recipient_email=recipient_email, sender_id=sender_id,
            sender_email=sender_email, intent=intent, subject=subject,
            html_body=html_body, sent_datetime=sent_datetime)
        # Persist immediately; put() also fills in email_hash.
        email_model_instance.put()
    def put(self):
        """Saves this SentEmailModel instance to the datastore."""
        # Recompute the hash on every save so get_by_hash() lookups
        # always reflect the current recipient/subject/body.
        email_hash = self._generate_hash(
            self.recipient_id, self.subject, self.html_body)
        self.email_hash = email_hash
        super(SentEmailModel, self).put()
    @classmethod
    def get_by_hash(cls, email_hash, sent_datetime_lower_bound=None):
        """Returns all messages with a given email_hash.
        This also takes an optional sent_datetime_lower_bound argument,
        which is a datetime instance. If this is given, only
        SentEmailModel instances sent after sent_datetime_lower_bound
        should be returned.
        Args:
            email_hash: str. The hash value of the email.
            sent_datetime_lower_bound: datetime.datetime. The lower bound on
                sent_datetime of the email to be searched.
        Returns:
            list(SentEmailModel). A list of emails which have the given hash
                value and sent more recently than sent_datetime_lower_bound.
        Raises:
            Exception: sent_datetime_lower_bound is not a valid
                datetime.datetime.
        """
        if sent_datetime_lower_bound is not None:
            if not isinstance(sent_datetime_lower_bound, datetime.datetime):
                raise Exception(
                    'Expected datetime, received %s of type %s' %
                    (sent_datetime_lower_bound,
                     type(sent_datetime_lower_bound)))
        query = cls.query().filter(cls.email_hash == email_hash)
        if sent_datetime_lower_bound is not None:
            query = query.filter(cls.sent_datetime > sent_datetime_lower_bound)
        messages = query.fetch()
        return messages
    @classmethod
    def _generate_hash(cls, recipient_id, email_subject, email_body):
        """Generate hash for a given recipient_id, email_subject and cleaned
        email_body.
        Args:
            recipient_id: str. The user ID of the email recipient.
            email_subject: str. The subject line of the email.
            email_body: str. The HTML content of the email body.
        Returns:
            str. The generated hash value of the given email.
        """
        hash_value = utils.convert_to_hash(
            recipient_id + email_subject + email_body,
            100)
        return hash_value
    @classmethod
    def check_duplicate_message(cls, recipient_id, email_subject, email_body):
        """Check for a given recipient_id, email_subject and cleaned
        email_body, whether a similar message has been sent in the last
        DUPLICATE_EMAIL_INTERVAL_MINS.
        Args:
            recipient_id: str. The user ID of the email recipient.
            email_subject: str. The subject line of the email.
            email_body: str. The HTML content of the email body.
        Returns:
            bool. Whether a similar message has been sent to the same recipient
                in the last DUPLICATE_EMAIL_INTERVAL_MINS.
        """
        email_hash = cls._generate_hash(
            recipient_id, email_subject, email_body)
        datetime_now = datetime.datetime.utcnow()
        time_interval = datetime.timedelta(
            minutes=feconf.DUPLICATE_EMAIL_INTERVAL_MINS)
        sent_datetime_lower_bound = datetime_now - time_interval
        messages = cls.get_by_hash(
            email_hash, sent_datetime_lower_bound=sent_datetime_lower_bound)
        # Hash matches may collide, so confirm the actual fields agree.
        for message in messages:
            if (message.recipient_id == recipient_id and
                    message.subject == email_subject and
                    message.html_body == email_body):
                return True
        return False
class BulkEmailModel(base_models.BaseModel):
    """Records the content of an email sent from Oppia to multiple users.
    This model is read-only; entries cannot be modified once created. The
    id/key of instances of this model is randomly generated string of
    length 12.
    """
    # The user IDs of the email recipients.
    recipient_ids = ndb.JsonProperty(default=[], compressed=True)
    # The user ID of the email sender. For site-generated emails this is equal
    # to feconf.SYSTEM_COMMITTER_ID.
    sender_id = ndb.StringProperty(required=True)
    # The email address used to send the notification.
    sender_email = ndb.StringProperty(required=True)
    # The intent of the email. Restricted to the bulk-email intent
    # constants declared in feconf.
    intent = ndb.StringProperty(required=True, indexed=True, choices=[
        feconf.BULK_EMAIL_INTENT_MARKETING,
        feconf.BULK_EMAIL_INTENT_IMPROVE_EXPLORATION,
        feconf.BULK_EMAIL_INTENT_CREATE_EXPLORATION,
        feconf.BULK_EMAIL_INTENT_CREATOR_REENGAGEMENT,
        feconf.BULK_EMAIL_INTENT_LEARNER_REENGAGEMENT
    ])
    # The subject line of the email.
    subject = ndb.TextProperty(required=True)
    # The HTML content of the email body.
    html_body = ndb.TextProperty(required=True)
    # The datetime the email was sent, in UTC.
    sent_datetime = ndb.DateTimeProperty(required=True, indexed=True)
    @classmethod
    def create(
            cls, instance_id, recipient_ids, sender_id, sender_email,
            intent, subject, html_body, sent_datetime):
        """Creates a new BulkEmailModel entry.
        Args:
            instance_id: str. The ID of the instance.
            recipient_ids: list(str). The user IDs of the email recipients.
            sender_id: str. The user ID of the email sender.
            sender_email: str. The email address used to send the notification.
            intent: str. The intent string, i.e. the purpose of the email.
            subject: str. The subject line of the email.
            html_body: str. The HTML content of the email body.
            sent_datetime: datetime.datetime. The date and time the email
                was sent, in UTC.
        """
        # Note: unlike SentEmailModel.create(), the caller supplies the id.
        email_model_instance = cls(
            id=instance_id, recipient_ids=recipient_ids, sender_id=sender_id,
            sender_email=sender_email, intent=intent, subject=subject,
            html_body=html_body, sent_datetime=sent_datetime)
        email_model_instance.put()
# Length of the hashed reply-to ID embedded in outgoing feedback/suggestion
# email 'reply-to' addresses (see FeedbackEmailReplyToIdModel).
REPLY_TO_ID_LENGTH = 84
class FeedbackEmailReplyToIdModel(base_models.BaseModel):
    """This model stores unique_id for each <user, exploration, thread>
    combination.
    This unique_id is used in reply-to email address in outgoing feedback and
    suggestion emails. The id/key of instances of this model has form of
    [USER_ID].[EXPLORATION_ID].[THREAD_ID]
    """
    # The reply-to ID that is used in the reply-to email address.
    reply_to_id = ndb.StringProperty(indexed=True, required=True)
    @classmethod
    def _generate_id(cls, user_id, exploration_id, thread_id):
        """Returns the instance id: '[user_id].[exploration_id].[thread_id]'."""
        return '.'.join([user_id, exploration_id, thread_id])
    @classmethod
    def _generate_unique_reply_to_id(cls):
        """Generates a random reply-to ID not yet used by any instance.
        Raises:
            Exception: Too many collisions while generating a unique id.
        """
        for _ in range(base_models.MAX_RETRIES):
            new_id = utils.convert_to_hash(
                '%s' % (utils.get_random_int(base_models.RAND_RANGE)),
                REPLY_TO_ID_LENGTH)
            if not cls.get_by_reply_to_id(new_id):
                return new_id
        raise Exception('Unique id generator is producing too many collisions.')
    @classmethod
    def create(cls, user_id, exploration_id, thread_id):
        """Creates a new FeedbackEmailUniqueIDModel entry.
        Args:
            user_id: str. ID of the corresponding user.
            exploration_id: str. ID of the corresponding exploration.
            thread_id: str. ID of the corresponding thread.
        Returns:
            FeedbackEmailReplyToIdModel. A new, not-yet-persisted instance
            whose reply_to_id can be used in a 'reply-to' email address.
        Raises:
            Exception: Model instance for given user_id, exploration_id and
                thread_id already exists.
        """
        instance_id = cls._generate_id(user_id, exploration_id, thread_id)
        if cls.get_by_id(instance_id):
            raise Exception('Unique reply-to ID for given user, exploration and'
                            ' thread already exists.')
        reply_to_id = cls._generate_unique_reply_to_id()
        # NOTE: the caller is responsible for put()-ing the returned instance.
        return cls(id=instance_id, reply_to_id=reply_to_id)
    @classmethod
    def get_by_reply_to_id(cls, reply_to_id):
        """Returns the instance with the given reply_to_id, or None."""
        model = cls.query(cls.reply_to_id == reply_to_id).fetch()
        if not model:
            return None
        return model[0]
    @classmethod
    def get(cls, user_id, exploration_id, thread_id, strict=True):
        """Fetches the instance for the given <user, exploration, thread>."""
        instance_id = cls._generate_id(user_id, exploration_id, thread_id)
        return super(
            FeedbackEmailReplyToIdModel, cls).get(instance_id, strict=strict)
    @classmethod
    def get_multi_by_user_ids(cls, user_ids, exploration_id, thread_id):
        """Returns a dict mapping each user_id to its model (or None)."""
        instance_ids = [cls._generate_id(user_id, exploration_id, thread_id)
                        for user_id in user_ids]
        user_models = cls.get_multi(instance_ids)
        return {
            user_id: model for user_id, model in zip(user_ids, user_models)}
    @property
    def user_id(self):
        """First component of the instance id."""
        return self.id.split('.')[0]
    @property
    def exploration_id(self):
        """Second component of the instance id."""
        return self.id.split('.')[1]
    @property
    def thread_id(self):
        """Third component of the instance id."""
        return self.id.split('.')[2]
| apache-2.0 |
dh7/ML-Tutorial-Notebooks | images2gif.py | 1 | 36077 | # -*- coding: utf-8 -*-
# Copyright (C) 2012, Almar Klein, Ant1, Marius van Voorden
#
# This code is subject to the (new) BSD license:
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
""" Module images2gif
Provides functionality for reading and writing animated GIF images.
Use writeGif to write a series of numpy arrays or PIL images as an
animated GIF. Use readGif to read an animated gif as a series of numpy
arrays.
Note that since July 2004, all patents on the LZW compression patent have
expired. Therefore the GIF format may now be used freely.
Acknowledgements
----------------
Many thanks to Ant1 for:
* noting the use of "palette=PIL.Image.ADAPTIVE", which significantly
improves the results.
* the modifications to save each image with its own palette, or optionally
the global palette (if its the same).
Many thanks to Marius van Voorden for porting the NeuQuant quantization
algorithm of Anthony Dekker to Python (See the NeuQuant class for its
license).
Many thanks to Alex Robinson for implementing the concept of subrectangles,
which (depening on image content) can give a very significant reduction in
file size.
This code is based on gifmaker (in the scripts folder of the source
distribution of PIL)
Usefull links
-------------
* http://tronche.com/computer-graphics/gif/
* http://en.wikipedia.org/wiki/Graphics_Interchange_Format
* http://www.w3.org/Graphics/GIF/spec-gif89a.txt
"""
# todo: This module should be part of imageio (or at least based on)
import os, time
def encode(x):
    """Return *x* unchanged.

    Historical py2/py3 shim: the encode-to-UTF-8 branch was permanently
    disabled behind ``if False:`` and has been removed as unreachable.
    Kept as a function because the GIF-writing code calls it on every
    string it emits.
    """
    return x
try:
import PIL
from PIL import Image
from PIL.GifImagePlugin import getheader, getdata
except ImportError:
PIL = None
try:
import numpy as np
except ImportError:
np = None
def get_cKDTree():
    """Return scipy's cKDTree class, or None when scipy is unavailable."""
    try:
        from scipy.spatial import cKDTree
    except ImportError:
        return None
    return cKDTree
# getheader gives a 87a header and a color palette (two elements in a list).
# getdata()[0] gives the Image Descriptor up to (including) "LZW min code size".
# getdatas()[1:] is the image data itself in chuncks of 256 bytes (well
# technically the first byte says how many bytes follow, after which that
# amount (max 255) follows).
def checkImages(images):
    """ checkImages(images)
    Check numpy images and correct intensity range etc.
    The same for all movie formats.
    """
    validated = []
    for frame in images:
        if PIL and isinstance(frame, PIL.Image.Image):
            # PIL images are accepted as-is.
            validated.append(frame)
        elif np and isinstance(frame, np.ndarray):
            # Normalise dtype: floats are clipped to [0, 1] and scaled
            # to 0-255; anything else is cast straight to uint8.
            if frame.dtype == np.uint8:
                validated.append(frame)
            elif frame.dtype in [np.float32, np.float64]:
                clipped = frame.copy()
                clipped[clipped < 0] = 0
                clipped[clipped > 1] = 1
                clipped *= 255
                validated.append(clipped.astype(np.uint8))
            else:
                validated.append(frame.astype(np.uint8))
            # Validate dimensions: 2-D grayscale, or 3-D with 3/4 channels.
            if frame.ndim == 2:
                pass  # ok
            elif frame.ndim == 3:
                if frame.shape[2] not in [3, 4]:
                    raise ValueError('This array can not represent an image.')
            else:
                raise ValueError('This array can not represent an image.')
        else:
            raise ValueError('Invalid image type: ' + str(type(frame)))
    return validated
def intToBin(i):
    """ Integer to two bytes """
    # Split into low and high byte, emitted little-endian.
    low, high = i % 256, int(i / 256)
    return chr(low) + chr(high)
class GifWriter:
    """ GifWriter()
    Class that contains methods for helping write the animated GIF file.
    """
    def getheaderAnim(self, im):
        """ getheaderAnim(im)
        Get animation header. To replace PILs getheader()[0]
        """
        bb = "GIF89a"
        bb += intToBin(im.size[0])
        bb += intToBin(im.size[1])
        # Packed fields: global color table present, 256 entries;
        # background color index 0; no pixel aspect ratio.
        bb += "\x87\x00\x00"
        return bb
    def getImageDescriptor(self, im, xy=None):
        """ getImageDescriptor(im, xy=None)
        Used for the local color table properties per image.
        Otherwise global color table applies to all frames irrespective of
        whether additional colors comes in play that require a redefined
        palette. Still a maximum of 256 color per frame, obviously.
        Written by Ant1 on 2010-08-22
        Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
        """
        # Default: use full image and place at upper left
        if xy is None:
            xy = (0,0)
        # Image separator,
        bb = '\x2C'
        # Image position and size
        bb += intToBin( xy[0] ) # Left position
        bb += intToBin( xy[1] ) # Top position
        bb += intToBin( im.size[0] ) # image width
        bb += intToBin( im.size[1] ) # image height
        # packed field: local color table flag1, interlace0, sorted table0,
        # reserved00, lct size111=7=2^(7+1)=256.
        bb += '\x87'
        # LZW minimum size code now comes later, begining of [image data] blocks
        return bb
    def getAppExt(self, loops=float('inf')):
        """ getAppExt(loops=float('inf'))
        Application extention. This part specifies the amount of loops.
        If loops is 0 or inf, it goes on infinitely.
        """
        # 0 in the NETSCAPE block should mean "loop forever", but some
        # decoders disagree, so the max 16-bit count is used instead.
        if loops==0 or loops==float('inf'):
            loops = 2**16-1
            #bb = "" # application extension should not be used
            #        # (the extension interprets zero loops
            #        # to mean an infinite number of loops)
            #        # Mmm, does not seem to work
        if True:
            bb = "\x21\xFF\x0B"  # application extension
            bb += "NETSCAPE2.0"
            bb += "\x03\x01"
            bb += intToBin(loops)
            bb += '\x00'  # end
        return bb
    def getGraphicsControlExt(self, duration=0.1, dispose=2):
        """ getGraphicsControlExt(duration=0.1, dispose=2)
        Graphics Control Extension. A sort of header at the start of
        each image. Specifies duration and transparancy.
        Dispose
        -------
        * 0 - No disposal specified.
        * 1 - Do not dispose. The graphic is to be left in place.
        * 2 - Restore to background color. The area used by the graphic
          must be restored to the background color.
        * 3 - Restore to previous. The decoder is required to restore the
          area overwritten by the graphic with what was there prior to
          rendering the graphic.
        * 4-7 -To be defined.
        """
        bb = '\x21\xF9\x04'
        bb += chr((dispose & 3) << 2)  # low bit 1 == transparency,
        # 2nd bit 1 == user input , next 3 bits, the low two of which are used,
        # are dispose.
        bb += intToBin( int(duration*100) ) # in 100th of seconds
        bb += '\x00'  # no transparant color
        bb += '\x00'  # end
        return bb
    def handleSubRectangles(self, images, subRectangles):
        """ handleSubRectangles(images)
        Handle the sub-rectangle stuff. If the rectangles are given by the
        user, the values are checked. Otherwise the subrectangles are
        calculated automatically.
        """
        if isinstance(subRectangles, (tuple,list)):
            # xy given directly
            # Check xy
            xy = subRectangles
            if xy is None:
                xy = (0,0)
            if hasattr(xy, '__len__'):
                if len(xy) == len(images):
                    xy = [xxyy for xxyy in xy]
                else:
                    raise ValueError("len(xy) doesn't match amount of images.")
            else:
                xy = [xy for im in images]
            # First frame always covers the full canvas.
            xy[0] = (0,0)
        else:
            # Calculate xy using some basic image processing
            # Check Numpy
            if np is None:
                raise RuntimeError("Need Numpy to use auto-subRectangles.")
            # First make numpy arrays if required
            for i in range(len(images)):
                im = images[i]
                if isinstance(im, Image.Image):
                    tmp = im.convert() # Make without palette
                    a = np.asarray(tmp)
                    if len(a.shape)==0:
                        raise MemoryError("Too little memory to convert PIL image to array")
                    images[i] = a
            # Determine the sub rectangles
            images, xy = self.getSubRectangles(images)
        # Done
        return images, xy
    def getSubRectangles(self, ims):
        """ getSubRectangles(ims)
        Calculate the minimal rectangles that need updating each frame.
        Returns a two-element tuple containing the cropped images and a
        list of x-y positions.
        Calculating the subrectangles takes extra time, obviously. However,
        if the image sizes were reduced, the actual writing of the GIF
        goes faster. In some cases applying this method produces a GIF faster.
        """
        # Check image count
        if len(ims) < 2:
            return ims, [(0,0) for i in ims]
        # We need numpy
        if np is None:
            raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
        # Prepare
        ims2 = [ims[0]]
        xy = [(0,0)]
        t0 = time.time()
        # Iterate over images
        prev = ims[0]
        for im in ims[1:]:
            # Get difference, sum over colors
            diff = np.abs(im-prev)
            if diff.ndim==3:
                diff = diff.sum(2)
            # Get begin and end for both dimensions
            X = np.argwhere(diff.sum(0))
            Y = np.argwhere(diff.sum(1))
            # Get rect coordinates
            if X.size and Y.size:
                x0, x1 = X[0], X[-1]+1
                y0, y1 = Y[0], Y[-1]+1
            else: # No change ... make it minimal
                x0, x1 = 0, 2
                y0, y1 = 0, 2
            # Cut out and store
            im2 = im[y0:y1,x0:x1]
            prev = im
            ims2.append(im2)
            xy.append((x0,y0))
        # Done
        #print('%1.2f seconds to determine subrectangles of %i images' %
        #    (time.time()-t0, len(ims2)) )
        return ims2, xy
    def convertImagesToPIL(self, images, dither, nq=0):
        """ convertImagesToPIL(images, nq=0)
        Convert images to Paletted PIL images, which can then be
        written to a single animaged GIF.
        """
        # Convert to PIL images
        images2 = []
        for im in images:
            if isinstance(im, Image.Image):
                images2.append(im)
            elif np and isinstance(im, np.ndarray):
                # RGBA arrays drop their alpha channel here.
                if im.ndim==3 and im.shape[2]==3:
                    im = Image.fromarray(im,'RGB')
                elif im.ndim==3 and im.shape[2]==4:
                    im = Image.fromarray(im[:,:,:3],'RGB')
                elif im.ndim==2:
                    im = Image.fromarray(im,'L')
                images2.append(im)
        # Convert to paletted PIL images
        images, images2 = images2, []
        if nq >= 1:
            # NeuQuant algorithm
            for im in images:
                im = im.convert("RGBA") # NQ assumes RGBA
                nqInstance = NeuQuant(im, int(nq)) # Learn colors from image
                if dither:
                    im = im.convert("RGB").quantize(palette=nqInstance.paletteImage())
                else:
                    im = nqInstance.quantize(im) # Use to quantize the image itself
                images2.append(im)
        else:
            # Adaptive PIL algorithm
            AD = Image.ADAPTIVE
            for im in images:
                im = im.convert('P', palette=AD, dither=dither)
                images2.append(im)
        # Done
        return images2
    def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
        """ writeGifToFile(fp, images, durations, loops, xys, disposes)
        Given a set of images writes the bytes to the specified stream.
        """
        # Obtain palette for all images and count each occurance
        palettes, occur = [], []
        for im in images:
            #palette = getheader(im)[1]
            palette = getheader(im)[0][-1]
            if not palette:
                #palette = PIL.ImagePalette.ImageColor
                palette = im.palette.tobytes()
            palettes.append(palette)
        for palette in palettes:
            occur.append( palettes.count( palette ) )
        # Select most-used palette as the global one (or first in case no max)
        globalPalette = palettes[ occur.index(max(occur)) ]
        # Init
        frames = 0
        firstFrame = True
        for im, palette in zip(images, palettes):
            if firstFrame:
                # Write header
                # Gather info
                header = self.getheaderAnim(im)
                appext = self.getAppExt(loops)
                # Write
                fp.write(encode(header))
                fp.write(globalPalette)
                fp.write(encode(appext))
                # Next frame is not the first
                firstFrame = False
            if True:
                # Write palette and image data
                # Gather info
                data = getdata(im)
                imdes, data = data[0], data[1:]
                graphext = self.getGraphicsControlExt(durations[frames],
                                                      disposes[frames])
                # Make image descriptor suitable for using 256 local color palette
                lid = self.getImageDescriptor(im, xys[frames])
                # Write local header. A frame needs its own palette block when
                # its palette differs from the global one or when it is not
                # restored to background (dispose != 2).
                if (palette != globalPalette) or (disposes[frames] != 2):
                    # Use local color palette
                    fp.write(encode(graphext))
                    fp.write(encode(lid)) # write suitable image descriptor
                    fp.write(palette) # write local color table
                    fp.write(encode('\x08')) # LZW minimum size code
                else:
                    # Use global color palette
                    fp.write(encode(graphext))
                    fp.write(imdes) # write suitable image descriptor
                # Write image data
                for d in data:
                    fp.write(d)
            # Prepare for next round
            frames = frames + 1
        fp.write(encode(";"))  # end gif
        return frames
## Exposed functions
def writeGif(filename, images, duration=0.1, repeat=True, dither=False,
                nq=0, subRectangles=True, dispose=None):
    """ writeGif(filename, images, duration=0.1, repeat=True, dither=False,
                    nq=0, subRectangles=True, dispose=None)
    Write an animated gif from the specified images.
    Parameters
    ----------
    filename : string
        The name of the file to write the image to.
    images : list
        Should be a list consisting of PIL images or numpy arrays.
        The latter should be between 0 and 255 for integer types, and
        between 0 and 1 for float types.
    duration : scalar or list of scalars
        The duration for all frames, or (if a list) for each frame.
    repeat : bool or integer
        The amount of loops. If True, loops infinitetely.
    dither : bool
        Whether to apply dithering
    nq : integer
        If nonzero, applies the NeuQuant quantization algorithm to create
        the color palette. This algorithm is superior, but slower than
        the standard PIL algorithm. The value of nq is the quality
        parameter. 1 represents the best quality. 10 is in general a
        good tradeoff between quality and speed. When using this option,
        better results are usually obtained when subRectangles is False.
    subRectangles : False, True, or a list of 2-element tuples
        Whether to use sub-rectangles. If True, the minimal rectangle that
        is required to update each frame is automatically detected. This
        can give significant reductions in file size, particularly if only
        a part of the image changes. One can also give a list of x-y
        coordinates if you want to do the cropping yourself. The default
        is True.
    dispose : int
        How to dispose each frame. 1 means that each frame is to be left
        in place. 2 means the background color should be restored after
        each frame. 3 means the decoder should restore the previous frame.
        If subRectangles==False, the default is 2, otherwise it is 1.
    """
    # Check PIL
    if PIL is None:
        raise RuntimeError("Need PIL to write animated gif files.")
    # Check images
    images = checkImages(images)
    # Instantiate writer object
    gifWriter = GifWriter()
    # Check loops
    if repeat is False:
        loops = 1
    elif repeat is True:
        loops = 0  # zero means infinite
    else:
        loops = int(repeat)
    # Check duration: either one scalar for all frames, or one per frame.
    if hasattr(duration, '__len__'):
        if len(duration) == len(images):
            duration = [d for d in duration]
        else:
            raise ValueError("len(duration) doesn't match amount of images.")
    else:
        duration = [duration for im in images]
    # Check subrectangles
    if subRectangles:
        images, xy = gifWriter.handleSubRectangles(images, subRectangles)
        defaultDispose = 1  # Leave image in place
    else:
        # Normal mode
        xy = [(0, 0) for im in images]
        defaultDispose = 2  # Restore to background color.
    # Check dispose
    if dispose is None:
        dispose = defaultDispose
    if hasattr(dispose, '__len__'):
        if len(dispose) != len(images):
            # BUG FIX: this error previously read "len(xy) doesn't match...",
            # misreporting which argument was actually wrong.
            raise ValueError("len(dispose) doesn't match amount of images.")
    else:
        dispose = [dispose for im in images]
    # Make images in a format that we can write easy
    images = gifWriter.convertImagesToPIL(images, dither, nq)
    # Write; 'with' guarantees the file handle is closed even on error.
    with open(filename, 'wb') as fp:
        gifWriter.writeGifToFile(fp, images, duration, loops, xy, dispose)
def readGif(filename, asNumpy=True):
    """ readGif(filename, asNumpy=True)

    Read images from an animated GIF file.  Returns a list of numpy
    arrays, or, if asNumpy is false, a list of PIL images.

    Raises RuntimeError when PIL or numpy is unavailable, and IOError
    when *filename* does not exist.
    """
    # Check PIL
    if PIL is None:
        raise RuntimeError("Need PIL to read animated gif files.")
    # Check Numpy
    if np is None:
        raise RuntimeError("Need Numpy to read animated gif files.")
    # Check whether it exists
    if not os.path.isfile(filename):
        raise IOError('File not found: '+str(filename))
    # Load file using PIL
    pilIm = PIL.Image.open(filename)
    pilIm.seek(0)
    # Read all frames.  PIL exposes GIF frames through seek(); seeking past
    # the final frame raises EOFError, which ends the loop.
    images = []
    try:
        while True:
            # convert() without arguments drops the palette, so the frame
            # becomes a plain (grey/RGB/RGBA) image before array conversion.
            tmp = pilIm.convert()
            a = np.asarray(tmp)
            # np.asarray returns a 0-d array when PIL could not hand over
            # the pixel buffer (historically an out-of-memory symptom).
            if a.ndim == 0:
                raise MemoryError("Too little memory to convert PIL image to array")
            # Store, and next
            images.append(a)
            pilIm.seek(pilIm.tell()+1)
    except EOFError:
        pass
    # Convert back to PIL images if the caller asked for them.
    if not asNumpy:
        images = [PIL.Image.fromarray(im) for im in images]
    # Done
    return images
class NeuQuant:
    """ NeuQuant(image, samplefac=10, colors=256)

    samplefac should be an integer number of 1 or higher, 1
    being the highest quality, but the slowest performance.
    With a value of 10, one tenth of all pixels are used during
    training. This value seems a nice tradeoff between speed
    and quality.

    colors is the amount of colors to reduce the image to. This
    should best be a power of two.

    See also:
    http://members.ozemail.com.au/~dekker/NEUQUANT.HTML

    License of the NeuQuant Neural-Net Quantization Algorithm
    ---------------------------------------------------------

    Copyright (c) 1994 Anthony Dekker
    Ported to python by Marius van Voorden in 2010

    NEUQUANT Neural-Net quantization algorithm by Anthony Dekker, 1994.
    See "Kohonen neural networks for optimal colour quantization"
    in "network: Computation in Neural Systems" Vol. 5 (1994) pp 351-367.
    for a discussion of the algorithm.
    See also http://members.ozemail.com.au/~dekker/NEUQUANT.HTML

    Any party obtaining a copy of these files from the author, directly or
    indirectly, is granted, free of charge, a full and unrestricted irrevocable,
    world-wide, paid up, royalty-free, nonexclusive right and license to deal
    in this software and documentation files (the "Software"), including without
    limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
    and/or sell copies of the Software, and to permit persons who receive
    copies from any such party to do so, with the only requirement being
    that this copyright notice remain intact.
    """

    # Algorithm constants; all filled in by setconstants().
    NCYCLES = None # Number of learning cycles
    NETSIZE = None # Number of colours used
    SPECIALS = None # Number of reserved colours used
    BGCOLOR = None # Reserved background colour
    CUTNETSIZE = None
    MAXNETPOS = None

    INITRAD = None # For 256 colours, radius starts at 32
    RADIUSBIASSHIFT = None
    RADIUSBIAS = None
    INITBIASRADIUS = None
    RADIUSDEC = None # Factor of 1/30 each cycle

    ALPHABIASSHIFT = None
    INITALPHA = None # biased by 10 bits

    GAMMA = None
    BETA = None
    BETAGAMMA = None

    # Working state; allocated in setconstants().
    network = None # The network itself
    colormap = None # The network itself
    netindex = None # For network lookup - really 256
    bias = None # Bias and freq arrays for learning
    freq = None
    pimage = None

    # Four primes near 500 - assume no image has a length so large
    # that it is divisible by all four primes
    PRIME1 = 499
    PRIME2 = 491
    PRIME3 = 487
    PRIME4 = 503
    MAXPRIME = PRIME4

    pixels = None
    samplefac = None
    a_s = None

    def setconstants(self, samplefac, colors):
        """Initialise all algorithm constants and allocate the working arrays."""
        self.NCYCLES = 100 # Number of learning cycles
        self.NETSIZE = colors # Number of colours used
        self.SPECIALS = 3 # Number of reserved colours used
        self.BGCOLOR = self.SPECIALS-1 # Reserved background colour
        self.CUTNETSIZE = self.NETSIZE - self.SPECIALS
        self.MAXNETPOS = self.NETSIZE - 1

        # NOTE(review): under Python 3 this is true division, so INITRAD is a
        # float (e.g. 32.0 for 256 colours) -- confirm intent vs floor division.
        self.INITRAD = self.NETSIZE/8 # For 256 colours, radius starts at 32
        self.RADIUSBIASSHIFT = 6
        self.RADIUSBIAS = 1 << self.RADIUSBIASSHIFT
        self.INITBIASRADIUS = self.INITRAD * self.RADIUSBIAS
        self.RADIUSDEC = 30 # Factor of 1/30 each cycle

        self.ALPHABIASSHIFT = 10 # Alpha starts at 1
        self.INITALPHA = 1 << self.ALPHABIASSHIFT # biased by 10 bits

        self.GAMMA = 1024.0
        self.BETA = 1.0/1024.0
        self.BETAGAMMA = self.BETA * self.GAMMA

        self.network = np.empty((self.NETSIZE, 3), dtype='float64') # The network itself
        self.colormap = np.empty((self.NETSIZE, 4), dtype='int32') # The network itself
        self.netindex = np.empty(256, dtype='int32') # For network lookup - really 256
        self.bias = np.empty(self.NETSIZE, dtype='float64') # Bias and freq arrays for learning
        self.freq = np.empty(self.NETSIZE, dtype='float64')

        self.pixels = None
        self.samplefac = samplefac

        # Cache for the neighbourhood alpha profiles computed by geta().
        self.a_s = {}

    def __init__(self, image, samplefac=10, colors=256):
        """Train the network on *image* (a PIL RGBA image) and build the
        colour map plus its lookup index."""
        # Check Numpy
        if np is None:
            raise RuntimeError("Need Numpy for the NeuQuant algorithm.")
        # Check image: the prime-stepped sampling needs at least MAXPRIME pixels.
        if image.size[0] * image.size[1] < NeuQuant.MAXPRIME:
            raise IOError("Image is too small")
        if image.mode != "RGBA":
            raise IOError("Image mode should be RGBA.")
        # Initialize
        self.setconstants(samplefac, colors)
        # Each pixel becomes one uint32 holding the packed RGBA channels.
        # NOTE(review): image.tostring() and np.fromstring() are deprecated in
        # modern Pillow/NumPy (tobytes()/frombuffer()) -- confirm library versions.
        self.pixels = np.fromstring(image.tostring(), np.uint32)
        self.setUpArrays()
        self.learn()
        self.fix()
        self.inxbuild()

    def writeColourMap(self, rgb, outstream):
        """Write the colour map to *outstream*, channel order RGB when *rgb*
        is true, otherwise BGR.  Returns the number of colours written.
        Column order (b, g, r) follows how learn() stores channels."""
        for i in range(self.NETSIZE):
            bb = self.colormap[i,0];
            gg = self.colormap[i,1];
            rr = self.colormap[i,2];
            outstream.write(rr if rgb else bb)
            outstream.write(gg)
            outstream.write(bb if rgb else rr)
        return self.NETSIZE

    def setUpArrays(self):
        """Seed the network: fixed special colours (black, white, background)
        plus a grey ramp for the trainable neurons; reset bias/freq."""
        self.network[0,0] = 0.0 # Black
        self.network[0,1] = 0.0
        self.network[0,2] = 0.0

        self.network[1,0] = 255.0 # White
        self.network[1,1] = 255.0
        self.network[1,2] = 255.0

        # RESERVED self.BGCOLOR # Background

        for i in range(self.SPECIALS):
            self.freq[i] = 1.0 / self.NETSIZE
            self.bias[i] = 0.0

        for i in range(self.SPECIALS, self.NETSIZE):
            p = self.network[i]
            # Evenly spaced grey values over the trainable part of the net.
            p[:] = (255.0 * (i-self.SPECIALS)) / self.CUTNETSIZE

            self.freq[i] = 1.0 / self.NETSIZE
            self.bias[i] = 0.0

    # Omitted: setPixels

    def altersingle(self, alpha, i, b, g, r):
        """Move neuron i towards biased (b,g,r) by factor alpha"""
        n = self.network[i] # Alter hit neuron
        n[0] -= (alpha*(n[0] - b))
        n[1] -= (alpha*(n[1] - g))
        n[2] -= (alpha*(n[2] - r))

    def geta(self, alpha, rad):
        """Return (and cache) the neighbourhood learning-rate profile for
        radius *rad*: a symmetric array of length 2*rad-1 that falls off
        quadratically from the centre, with the centre itself zeroed
        (the centre neuron is handled by altersingle)."""
        try:
            return self.a_s[(alpha, rad)]
        except KeyError:
            length = rad*2-1
            mid = int(length//2)
            # Squared distance from the centre position for each neighbour.
            q = np.array(list(range(mid-1,-1,-1))+list(range(-1,mid)))
            a = alpha*(rad*rad - q*q)/(rad*rad)
            a[mid] = 0
            self.a_s[(alpha, rad)] = a
            return a

    def alterneigh(self, alpha, rad, i, b, g, r):
        """Move the neurons in the radius-*rad* neighbourhood of neuron *i*
        towards (b,g,r), weighted by the geta() profile.  Special (reserved)
        neurons are excluded from the neighbourhood."""
        if i-rad >= self.SPECIALS-1:
            lo = i-rad
            start = 0
        else:
            lo = self.SPECIALS-1
            start = (self.SPECIALS-1 - (i-rad))

        if i+rad <= self.NETSIZE:
            hi = i+rad
            end = rad*2-1
        else:
            hi = self.NETSIZE
            # NOTE(review): this yields a negative slice end; with large rad it
            # can make geta(...)[start:end] empty -- confirm against reference.
            end = (self.NETSIZE - (i+rad))

        a = self.geta(alpha, rad)[start:end]

        # Vectorised update of all neighbours at once.
        p = self.network[lo+1:hi]
        p -= np.transpose(np.transpose(p - np.array([b, g, r])) * a)

    #def contest(self, b, g, r):
    #    """ Search for biased BGR values
    #            Finds closest neuron (min dist) and updates self.freq
    #            finds best neuron (min dist-self.bias) and returns position
    #            for frequently chosen neurons, self.freq[i] is high and self.bias[i] is negative
    #            self.bias[i] = self.GAMMA*((1/self.NETSIZE)-self.freq[i])"""
    #
    #    i, j = self.SPECIALS, self.NETSIZE
    #    dists = abs(self.network[i:j] - np.array([b,g,r])).sum(1)
    #    bestpos = i + np.argmin(dists)
    #    biasdists = dists - self.bias[i:j]
    #    bestbiaspos = i + np.argmin(biasdists)
    #    self.freq[i:j] -= self.BETA * self.freq[i:j]
    #    self.bias[i:j] += self.BETAGAMMA * self.freq[i:j]
    #    self.freq[bestpos] += self.BETA
    #    self.bias[bestpos] -= self.BETAGAMMA
    #    return bestbiaspos

    def contest(self, b, g, r):
        """ Search for biased BGR values
        Finds closest neuron (min dist) and updates self.freq
        finds best neuron (min dist-self.bias) and returns position
        for frequently chosen neurons, self.freq[i] is high and self.bias[i] is negative
        self.bias[i] = self.GAMMA*((1/self.NETSIZE)-self.freq[i])"""
        # Only the non-special neurons take part in the contest.
        i, j = self.SPECIALS, self.NETSIZE
        # Manhattan distance of every neuron to the sample colour.
        dists = abs(self.network[i:j] - np.array([b,g,r])).sum(1)
        bestpos = i + np.argmin(dists)
        biasdists = dists - self.bias[i:j]
        bestbiaspos = i + np.argmin(biasdists)
        # Decay all frequencies, then reward the winner; bias moves opposite
        # to frequency so over-used neurons become less likely to win again.
        self.freq[i:j] *= (1-self.BETA)
        self.bias[i:j] += self.BETAGAMMA * self.freq[i:j]
        self.freq[bestpos] += self.BETA
        self.bias[bestpos] -= self.BETAGAMMA
        return bestbiaspos

    def specialFind(self, b, g, r):
        """Return the index of the reserved (special) neuron exactly matching
        (b, g, r), or -1 when none matches."""
        for i in range(self.SPECIALS):
            n = self.network[i]
            if n[0] == b and n[1] == g and n[2] == r:
                return i
        return -1

    def learn(self):
        """Main Kohonen training loop: sample pixels with a prime stride,
        find the winning neuron per sample and pull it (plus its
        neighbourhood) towards the sample colour while alpha and the
        radius decay."""
        biasRadius = self.INITBIASRADIUS
        alphadec = 30 + ((self.samplefac-1)/3)
        lengthcount = self.pixels.size
        # NOTE(review): true division under Python 3 -- samplepixels and delta
        # are floats here; `i%delta == 0` below then almost never fires, and
        # delta would be 0 for very small images (ZeroDivisionError). Confirm
        # whether // was intended.
        samplepixels = lengthcount / self.samplefac
        delta = samplepixels / self.NCYCLES
        alpha = self.INITALPHA

        i = 0;
        # NOTE(review): the reference NeuQuant uses biasRadius >> RADIUSBIASSHIFT
        # here; the `* 2**` multiplies instead and gives a very large radius --
        # confirm against the original algorithm.
        rad = biasRadius * 2**self.RADIUSBIASSHIFT
        if rad <= 1:
            rad = 0

        print("Beginning 1D learning: samplepixels = %1.2f  rad = %i" %
              (samplepixels, rad) )
        step = 0
        pos = 0
        # Pick a prime stride that does not divide the pixel count so the
        # walk visits every pixel exactly once before repeating.
        if lengthcount%NeuQuant.PRIME1 != 0:
            step = NeuQuant.PRIME1
        elif lengthcount%NeuQuant.PRIME2 != 0:
            step = NeuQuant.PRIME2
        elif lengthcount%NeuQuant.PRIME3 != 0:
            step = NeuQuant.PRIME3
        else:
            step = NeuQuant.PRIME4

        i = 0
        printed_string = ''
        while i < samplepixels:
            if i%100 == 99:
                # Crude progress display: backspace over the previous figure.
                tmp = '\b'*len(printed_string)
                printed_string = str((i+1)*100/samplepixels)+"%\n"
                print(tmp + printed_string)
            p = self.pixels[pos]
            # Unpack the packed uint32 pixel into 8-bit channels.
            r = (p >> 16) & 0xff
            g = (p >>  8) & 0xff
            b = (p      ) & 0xff

            if i == 0: # Remember background colour
                self.network[self.BGCOLOR] = [b, g, r]

            j = self.specialFind(b, g, r)
            if j < 0:
                j = self.contest(b, g, r)

            if j >= self.SPECIALS: # Don't learn for specials
                a = (1.0 * alpha) / self.INITALPHA
                self.altersingle(a, j, b, g, r)
                if rad > 0:
                    self.alterneigh(a, rad, j, b, g, r)

            pos = (pos+step)%lengthcount

            i += 1
            # Decay alpha and the neighbourhood radius once per cycle.
            if i%delta == 0:
                alpha -= alpha / alphadec
                biasRadius -= biasRadius / self.RADIUSDEC
                rad = biasRadius * 2**self.RADIUSBIASSHIFT
                if rad <= 1:
                    rad = 0

        finalAlpha = (1.0*alpha)/self.INITALPHA
        print("Finished 1D learning: final alpha = %1.2f!" % finalAlpha)

    def fix(self):
        """Round the trained float network into the int32 colormap, clamped
        to 0..255; column 3 remembers each entry's original index."""
        for i in range(self.NETSIZE):
            for j in range(3):
                x = int(0.5 + self.network[i,j])
                x = max(0, x)
                x = min(255, x)
                self.colormap[i,j] = x
            self.colormap[i,3] = i

    def inxbuild(self):
        """Selection-sort the colormap on the green channel and build
        netindex, a 256-entry lookup that maps a green value to a starting
        position in the sorted map for fast nearest-colour search."""
        previouscol = 0
        startpos = 0
        for i in range(self.NETSIZE):
            p = self.colormap[i]
            q = None
            smallpos = i
            smallval = p[1] # Index on g
            # Find smallest in i..self.NETSIZE-1
            for j in range(i+1, self.NETSIZE):
                q = self.colormap[j]
                if q[1] < smallval: # Index on g
                    smallpos = j
                    smallval = q[1] # Index on g

            q = self.colormap[smallpos]
            # Swap p (i) and q (smallpos) entries
            if i != smallpos:
                p[:],q[:] = q, p.copy()

            # smallval entry is now in position i
            if smallval != previouscol:
                self.netindex[previouscol] = (startpos+i) >> 1
                for j in range(previouscol+1, smallval):
                    self.netindex[j] = i
                previouscol = smallval
                startpos = i
        self.netindex[previouscol] = (startpos+self.MAXNETPOS) >> 1
        for j in range(previouscol+1, 256): # Really 256
            self.netindex[j] = self.MAXNETPOS

    def paletteImage(self):
        """ PIL weird interface for making a paletted image: create an image which
        already has the palette, and use that in Image.quantize. This function
        returns this palette image. """
        if self.pimage is None:
            palette = []
            for i in range(self.NETSIZE):
                palette.extend(self.colormap[i][:3])

            # Pad to a full 256-colour palette as PIL expects.
            palette.extend([0]*(256-self.NETSIZE)*3)

            # a palette image to use for quant
            self.pimage = Image.new("P", (1, 1), 0)
            self.pimage.putpalette(palette)
        return self.pimage

    def quantize(self, image):
        """ Use a kdtree to quickly find the closest palette colors for the pixels """
        if get_cKDTree():
            return self.quantize_with_scipy(image)
        else:
            print('Scipy not available, falling back to slower version.')
            return self.quantize_without_scipy(image)

    def quantize_with_scipy(self, image):
        """Map every pixel to its nearest palette colour via a scipy cKDTree,
        then return the image quantized against the palette image."""
        w,h = image.size
        px = np.asarray(image).copy()
        # Flatten to an (npixels, 3) view; writes through to px.
        px2 = px[:,:,:3].reshape((w*h,3))

        cKDTree = get_cKDTree()
        kdtree = cKDTree(self.colormap[:,:3],leafsize=10)
        result = kdtree.query(px2)
        colorindex = result[1]
        print("Distance: %1.2f" % (result[0].sum()/(w*h)) )
        px2[:] = self.colormap[colorindex,:3]

        return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage())

    def quantize_without_scipy(self, image):
        """ This function can be used if no scipy is available.
        It's 7 times slower though.
        """
        w,h = image.size
        px = np.asarray(image).copy()
        # Memoise per-colour lookups: identical input colours map identically.
        memo = {}
        for j in range(w):
            for i in range(h):
                key = (px[i,j,0],px[i,j,1],px[i,j,2])
                try:
                    val = memo[key]
                except KeyError:
                    val = self.convert(*key)
                    memo[key] = val
                px[i,j,0],px[i,j,1],px[i,j,2] = val
        return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage())

    def convert(self, *color):
        """Return the palette RGB triplet nearest to *color*."""
        i = self.inxsearch(*color)
        return self.colormap[i,:3]

    def inxsearch(self, r, g, b):
        """Search for BGR values 0..255 and return colour index
        (brute-force squared-distance argmin over the whole colormap;
        note the netindex lookup table is not used here)."""
        dists = (self.colormap[:,:3] - np.array([r,g,b]))
        a= np.argmin((dists*dists).sum(1))
        return a
if __name__ == '__main__':
    # Smoke test: build a grey-scale test pattern and write five frames of it,
    # fading to black, as an animated GIF.
    im = np.zeros((200,200), dtype=np.uint8)
    im[10:30,:] = 100
    im[:,80:120] = 255
    im[-50:-40,:] = 50

    # NOTE(review): multiplying by floats produces float64 frames; this relies
    # on writeGif's image checking/conversion to rescale them -- confirm.
    images = [im*1.0, im*0.8, im*0.6, im*0.4, im*0]
    writeGif('lala3.gif',images, duration=0.5, dither=0)
| bsd-2-clause |
danielpronych/pyramid-doxygen | pyramid/tests/test_config/test_views.py | 1 | 165038 | import unittest
from pyramid import testing
from pyramid.tests.test_config import IDummy
from pyramid.tests.test_config import dummy_view
from pyramid.compat import (
im_func,
text_,
)
from pyramid.exceptions import ConfigurationError
from pyramid.exceptions import ConfigurationExecutionError
from pyramid.exceptions import ConfigurationConflictError
class TestViewsConfigurationMixin(unittest.TestCase):
def _makeOne(self, *arg, **kw):
from pyramid.config import Configurator
config = Configurator(*arg, **kw)
return config
def _getViewCallable(self, config, ctx_iface=None, request_iface=None,
name='', exception_view=False):
from zope.interface import Interface
from pyramid.interfaces import IRequest
from pyramid.interfaces import IView
from pyramid.interfaces import IViewClassifier
from pyramid.interfaces import IExceptionViewClassifier
if exception_view:
classifier = IExceptionViewClassifier
else:
classifier = IViewClassifier
if ctx_iface is None:
ctx_iface = Interface
if request_iface is None:
request_iface = IRequest
return config.registry.adapters.lookup(
(classifier, request_iface, ctx_iface), IView, name=name,
default=None)
def _registerRenderer(self, config, name='.txt'):
from pyramid.interfaces import IRendererFactory
from pyramid.interfaces import ITemplateRenderer
from zope.interface import implementer
@implementer(ITemplateRenderer)
class Renderer:
def __init__(self, info):
self.__class__.info = info
def __call__(self, *arg):
return b'Hello!'
config.registry.registerUtility(Renderer, IRendererFactory, name=name)
return Renderer
def _makeRequest(self, config):
request = DummyRequest()
request.registry = config.registry
return request
def _assertNotFound(self, wrapper, *arg):
from pyramid.httpexceptions import HTTPNotFound
self.assertRaises(HTTPNotFound, wrapper, *arg)
def _getRouteRequestIface(self, config, name):
from pyramid.interfaces import IRouteRequest
iface = config.registry.getUtility(IRouteRequest, name)
return iface
def _assertRoute(self, config, name, path, num_predicates=0):
from pyramid.interfaces import IRoutesMapper
mapper = config.registry.getUtility(IRoutesMapper)
routes = mapper.get_routes()
route = routes[0]
self.assertEqual(len(routes), 1)
self.assertEqual(route.name, name)
self.assertEqual(route.path, path)
self.assertEqual(len(routes[0].predicates), num_predicates)
return route
def test_add_view_view_callable_None_no_renderer(self):
config = self._makeOne(autocommit=True)
self.assertRaises(ConfigurationError, config.add_view)
def test_add_view_with_request_type_and_route_name(self):
config = self._makeOne(autocommit=True)
view = lambda *arg: 'OK'
self.assertRaises(ConfigurationError, config.add_view, view, '', None,
None, True, True)
def test_add_view_with_request_type(self):
from pyramid.renderers import null_renderer
from zope.interface import directlyProvides
from pyramid.interfaces import IRequest
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view,
request_type='pyramid.interfaces.IRequest',
renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = DummyRequest()
self._assertNotFound(wrapper, None, request)
directlyProvides(request, IRequest)
result = wrapper(None, request)
self.assertEqual(result, 'OK')
def test_add_view_view_callable_None_with_renderer(self):
config = self._makeOne(autocommit=True)
self._registerRenderer(config, name='dummy')
config.add_view(renderer='dummy')
view = self._getViewCallable(config)
self.assertTrue(b'Hello!' in view(None, None).body)
def test_add_view_with_tmpl_renderer_factory_introspector_missing(self):
config = self._makeOne(autocommit=True)
config.introspection = False
config.introspector = None
config.add_view(renderer='dummy.pt')
view = self._getViewCallable(config)
self.assertRaises(ValueError, view, None, None)
def test_add_view_with_tmpl_renderer_factory_no_renderer_factory(self):
config = self._makeOne(autocommit=True)
introspector = DummyIntrospector()
config.introspector = introspector
config.add_view(renderer='dummy.pt')
self.assertFalse(('renderer factories', '.pt') in
introspector.related[-1])
view = self._getViewCallable(config)
self.assertRaises(ValueError, view, None, None)
def test_add_view_with_tmpl_renderer_factory_with_renderer_factory(self):
config = self._makeOne(autocommit=True)
introspector = DummyIntrospector(True)
config.introspector = introspector
def dummy_factory(helper):
return lambda val, system_vals: 'Hello!'
config.add_renderer('.pt', dummy_factory)
config.add_view(renderer='dummy.pt')
self.assertTrue(
('renderer factories', '.pt') in introspector.related[-1])
view = self._getViewCallable(config)
self.assertTrue(b'Hello!' in view(None, None).body)
def test_add_view_wrapped_view_is_decorated(self):
def view(request): # request-only wrapper
""" """
config = self._makeOne(autocommit=True)
config.add_view(view=view)
wrapper = self._getViewCallable(config)
self.assertEqual(wrapper.__module__, view.__module__)
self.assertEqual(wrapper.__name__, view.__name__)
self.assertEqual(wrapper.__doc__, view.__doc__)
self.assertEqual(wrapper.__discriminator__(None, None).resolve()[0],
'view')
def test_add_view_view_callable_dottedname(self):
from pyramid.renderers import null_renderer
config = self._makeOne(autocommit=True)
config.add_view(view='pyramid.tests.test_config.dummy_view',
renderer=null_renderer)
wrapper = self._getViewCallable(config)
self.assertEqual(wrapper(None, None), 'OK')
def test_add_view_with_function_callable(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config)
result = wrapper(None, None)
self.assertEqual(result, 'OK')
def test_add_view_with_function_callable_requestonly(self):
from pyramid.renderers import null_renderer
def view(request):
return 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config)
result = wrapper(None, None)
self.assertEqual(result, 'OK')
def test_add_view_with_name(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, name='abc', renderer=null_renderer)
wrapper = self._getViewCallable(config, name='abc')
result = wrapper(None, None)
self.assertEqual(result, 'OK')
def test_add_view_with_name_unicode(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
name = text_(b'La Pe\xc3\xb1a', 'utf-8')
config.add_view(view=view, name=name, renderer=null_renderer)
wrapper = self._getViewCallable(config, name=name)
result = wrapper(None, None)
self.assertEqual(result, 'OK')
def test_add_view_with_decorator(self):
from pyramid.renderers import null_renderer
def view(request):
""" ABC """
return 'OK'
def view_wrapper(fn):
def inner(context, request):
return fn(context, request)
return inner
config = self._makeOne(autocommit=True)
config.add_view(view=view, decorator=view_wrapper,
renderer=null_renderer)
wrapper = self._getViewCallable(config)
self.assertFalse(wrapper is view)
self.assertEqual(wrapper.__doc__, view.__doc__)
result = wrapper(None, None)
self.assertEqual(result, 'OK')
def test_add_view_with_decorator_tuple(self):
from pyramid.renderers import null_renderer
def view(request):
""" ABC """
return 'OK'
def view_wrapper1(fn):
def inner(context, request):
return 'wrapped1' + fn(context, request)
return inner
def view_wrapper2(fn):
def inner(context, request):
return 'wrapped2' + fn(context, request)
return inner
config = self._makeOne(autocommit=True)
config.add_view(view=view, decorator=(view_wrapper2, view_wrapper1),
renderer=null_renderer)
wrapper = self._getViewCallable(config)
self.assertFalse(wrapper is view)
self.assertEqual(wrapper.__doc__, view.__doc__)
result = wrapper(None, None)
self.assertEqual(result, 'wrapped2wrapped1OK')
def test_add_view_with_http_cache(self):
import datetime
from pyramid.response import Response
response = Response('OK')
def view(request):
""" ABC """
return response
config = self._makeOne(autocommit=True)
config.add_view(view=view, http_cache=(86400, {'public':True}))
wrapper = self._getViewCallable(config)
self.assertFalse(wrapper is view)
self.assertEqual(wrapper.__doc__, view.__doc__)
request = testing.DummyRequest()
when = datetime.datetime.utcnow() + datetime.timedelta(days=1)
result = wrapper(None, request)
self.assertEqual(result, response)
headers = dict(response.headerlist)
self.assertEqual(headers['Cache-Control'], 'max-age=86400, public')
expires = parse_httpdate(headers['Expires'])
assert_similar_datetime(expires, when)
def test_add_view_as_instance(self):
from pyramid.renderers import null_renderer
class AView:
def __call__(self, context, request):
""" """
return 'OK'
view = AView()
config = self._makeOne(autocommit=True)
config.add_view(view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config)
result = wrapper(None, None)
self.assertEqual(result, 'OK')
def test_add_view_as_instancemethod(self):
from pyramid.renderers import null_renderer
class View:
def index(self, context, request):
return 'OK'
view = View()
config=self._makeOne(autocommit=True)
config.add_view(view=view.index, renderer=null_renderer)
wrapper = self._getViewCallable(config)
result = wrapper(None, None)
self.assertEqual(result, 'OK')
def test_add_view_as_instancemethod_requestonly(self):
from pyramid.renderers import null_renderer
class View:
def index(self, request):
return 'OK'
view = View()
config=self._makeOne(autocommit=True)
config.add_view(view=view.index, renderer=null_renderer)
wrapper = self._getViewCallable(config)
result = wrapper(None, None)
self.assertEqual(result, 'OK')
def test_add_view_as_instance_requestonly(self):
from pyramid.renderers import null_renderer
class AView:
def __call__(self, request):
""" """
return 'OK'
view = AView()
config = self._makeOne(autocommit=True)
config.add_view(view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config)
result = wrapper(None, None)
self.assertEqual(result, 'OK')
def test_add_view_as_oldstyle_class(self):
from pyramid.renderers import null_renderer
class view:
def __init__(self, context, request):
self.context = context
self.request = request
def __call__(self):
return 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
result = wrapper(None, request)
self.assertEqual(result, 'OK')
self.assertEqual(request.__view__.__class__, view)
def test_add_view_as_oldstyle_class_requestonly(self):
from pyramid.renderers import null_renderer
class view:
def __init__(self, request):
self.request = request
def __call__(self):
return 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
result = wrapper(None, request)
self.assertEqual(result, 'OK')
self.assertEqual(request.__view__.__class__, view)
def test_add_view_context_as_class(self):
from pyramid.renderers import null_renderer
from zope.interface import implementedBy
view = lambda *arg: 'OK'
class Foo:
pass
config = self._makeOne(autocommit=True)
config.add_view(context=Foo, view=view, renderer=null_renderer)
foo = implementedBy(Foo)
wrapper = self._getViewCallable(config, foo)
self.assertEqual(wrapper, view)
def test_add_view_context_as_iface(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(context=IDummy, view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config, IDummy)
self.assertEqual(wrapper, view)
def test_add_view_context_as_dottedname(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(context='pyramid.tests.test_config.IDummy',
view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config, IDummy)
self.assertEqual(wrapper, view)
def test_add_view_for__as_dottedname(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(for_='pyramid.tests.test_config.IDummy',
view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config, IDummy)
self.assertEqual(wrapper, view)
def test_add_view_for_as_class(self):
# ``for_`` is older spelling for ``context``
from pyramid.renderers import null_renderer
from zope.interface import implementedBy
view = lambda *arg: 'OK'
class Foo:
pass
config = self._makeOne(autocommit=True)
config.add_view(for_=Foo, view=view, renderer=null_renderer)
foo = implementedBy(Foo)
wrapper = self._getViewCallable(config, foo)
self.assertEqual(wrapper, view)
def test_add_view_for_as_iface(self):
# ``for_`` is older spelling for ``context``
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(for_=IDummy, view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config, IDummy)
self.assertEqual(wrapper, view)
def test_add_view_context_trumps_for(self):
# ``for_`` is older spelling for ``context``
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
class Foo:
pass
config.add_view(context=IDummy, for_=Foo, view=view,
renderer=null_renderer)
wrapper = self._getViewCallable(config, IDummy)
self.assertEqual(wrapper, view)
def test_add_view_register_secured_view(self):
from pyramid.renderers import null_renderer
from zope.interface import Interface
from pyramid.interfaces import IRequest
from pyramid.interfaces import ISecuredView
from pyramid.interfaces import IViewClassifier
view = lambda *arg: 'OK'
view.__call_permissive__ = view
config = self._makeOne(autocommit=True)
config.add_view(view=view, renderer=null_renderer)
wrapper = config.registry.adapters.lookup(
(IViewClassifier, IRequest, Interface),
ISecuredView, name='', default=None)
self.assertEqual(wrapper, view)
def test_add_view_exception_register_secured_view(self):
from pyramid.renderers import null_renderer
from zope.interface import implementedBy
from pyramid.interfaces import IRequest
from pyramid.interfaces import IView
from pyramid.interfaces import IExceptionViewClassifier
view = lambda *arg: 'OK'
view.__call_permissive__ = view
config = self._makeOne(autocommit=True)
config.add_view(view=view, context=RuntimeError, renderer=null_renderer)
wrapper = config.registry.adapters.lookup(
(IExceptionViewClassifier, IRequest, implementedBy(RuntimeError)),
IView, name='', default=None)
self.assertEqual(wrapper, view)
def test_add_view_same_phash_overrides_existing_single_view(self):
from pyramid.renderers import null_renderer
from hashlib import md5
from zope.interface import Interface
from pyramid.interfaces import IRequest
from pyramid.interfaces import IView
from pyramid.interfaces import IViewClassifier
from pyramid.interfaces import IMultiView
phash = md5()
phash.update(b'xhr = True')
view = lambda *arg: 'NOT OK'
view.__phash__ = phash.hexdigest()
config = self._makeOne(autocommit=True)
config.registry.registerAdapter(
view, (IViewClassifier, IRequest, Interface), IView, name='')
def newview(context, request):
return 'OK'
config.add_view(view=newview, xhr=True, renderer=null_renderer)
wrapper = self._getViewCallable(config)
self.assertFalse(IMultiView.providedBy(wrapper))
request = DummyRequest()
request.is_xhr = True
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_exc_same_phash_overrides_existing_single_view(self):
from pyramid.renderers import null_renderer
from hashlib import md5
from zope.interface import implementedBy
from pyramid.interfaces import IRequest
from pyramid.interfaces import IView
from pyramid.interfaces import IExceptionViewClassifier
from pyramid.interfaces import IMultiView
phash = md5()
phash.update(b'xhr = True')
view = lambda *arg: 'NOT OK'
view.__phash__ = phash.hexdigest()
config = self._makeOne(autocommit=True)
config.registry.registerAdapter(
view,
(IExceptionViewClassifier, IRequest, implementedBy(RuntimeError)),
IView, name='')
def newview(context, request):
return 'OK'
config.add_view(view=newview, xhr=True, context=RuntimeError,
renderer=null_renderer)
wrapper = self._getViewCallable(
config, ctx_iface=implementedBy(RuntimeError), exception_view=True)
self.assertFalse(IMultiView.providedBy(wrapper))
request = DummyRequest()
request.is_xhr = True
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_default_phash_overrides_no_phash(self):
from pyramid.renderers import null_renderer
from zope.interface import Interface
from pyramid.interfaces import IRequest
from pyramid.interfaces import IView
from pyramid.interfaces import IViewClassifier
from pyramid.interfaces import IMultiView
view = lambda *arg: 'NOT OK'
config = self._makeOne(autocommit=True)
config.registry.registerAdapter(
view, (IViewClassifier, IRequest, Interface), IView, name='')
def newview(context, request):
return 'OK'
config.add_view(view=newview, renderer=null_renderer)
wrapper = self._getViewCallable(config)
self.assertFalse(IMultiView.providedBy(wrapper))
request = DummyRequest()
request.is_xhr = True
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_exc_default_phash_overrides_no_phash(self):
from pyramid.renderers import null_renderer
from zope.interface import implementedBy
from pyramid.interfaces import IRequest
from pyramid.interfaces import IView
from pyramid.interfaces import IExceptionViewClassifier
from pyramid.interfaces import IMultiView
view = lambda *arg: 'NOT OK'
config = self._makeOne(autocommit=True)
config.registry.registerAdapter(
view,
(IExceptionViewClassifier, IRequest, implementedBy(RuntimeError)),
IView, name='')
def newview(context, request):
return 'OK'
config.add_view(view=newview, context=RuntimeError,
renderer=null_renderer)
wrapper = self._getViewCallable(
config, ctx_iface=implementedBy(RuntimeError), exception_view=True)
self.assertFalse(IMultiView.providedBy(wrapper))
request = DummyRequest()
request.is_xhr = True
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_default_phash_overrides_default_phash(self):
from pyramid.renderers import null_renderer
from pyramid.config.util import DEFAULT_PHASH
from zope.interface import Interface
from pyramid.interfaces import IRequest
from pyramid.interfaces import IView
from pyramid.interfaces import IViewClassifier
from pyramid.interfaces import IMultiView
view = lambda *arg: 'NOT OK'
view.__phash__ = DEFAULT_PHASH
config = self._makeOne(autocommit=True)
config.registry.registerAdapter(
view, (IViewClassifier, IRequest, Interface), IView, name='')
def newview(context, request):
return 'OK'
config.add_view(view=newview, renderer=null_renderer)
wrapper = self._getViewCallable(config)
self.assertFalse(IMultiView.providedBy(wrapper))
request = DummyRequest()
request.is_xhr = True
self.assertEqual(wrapper(None, request), 'OK')
    def test_add_view_exc_default_phash_overrides_default_phash(self):
        """Exception-view variant: add_view replaces an existing exception
        view carrying the default predicate hash (no multiview)."""
        from pyramid.renderers import null_renderer
        from pyramid.config.util import DEFAULT_PHASH
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.interfaces import IView
        from pyramid.interfaces import IExceptionViewClassifier
        from pyramid.interfaces import IMultiView
        view = lambda *arg: 'NOT OK'
        view.__phash__ = DEFAULT_PHASH
        config = self._makeOne(autocommit=True)
        config.registry.registerAdapter(
            view,
            (IExceptionViewClassifier, IRequest, implementedBy(RuntimeError)),
            IView, name='')
        def newview(context, request):
            return 'OK'
        config.add_view(view=newview, context=RuntimeError,
                        renderer=null_renderer)
        wrapper = self._getViewCallable(
            config, ctx_iface=implementedBy(RuntimeError), exception_view=True)
        self.assertFalse(IMultiView.providedBy(wrapper))
        request = DummyRequest()
        request.is_xhr = True
        self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_multiview_replaces_existing_view(self):
from pyramid.renderers import null_renderer
from zope.interface import Interface
from pyramid.interfaces import IRequest
from pyramid.interfaces import IView
from pyramid.interfaces import IViewClassifier
from pyramid.interfaces import IMultiView
view = lambda *arg: 'OK'
view.__phash__ = 'abc'
config = self._makeOne(autocommit=True)
config.registry.registerAdapter(
view, (IViewClassifier, IRequest, Interface), IView, name='')
config.add_view(view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config)
self.assertTrue(IMultiView.providedBy(wrapper))
self.assertEqual(wrapper(None, None), 'OK')
    def test_add_view_exc_multiview_replaces_existing_view(self):
        """Adding an exception view over an existing non-default-phash
        registration produces a multiview answering 'OK'."""
        from pyramid.renderers import null_renderer
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.interfaces import IView
        from pyramid.interfaces import IExceptionViewClassifier
        from pyramid.interfaces import IViewClassifier
        from pyramid.interfaces import IMultiView
        view = lambda *arg: 'OK'
        view.__phash__ = 'abc'
        config = self._makeOne(autocommit=True)
        # Pre-register the same view under both the normal and the
        # exception classifier, mirroring what add_view does for
        # context=RuntimeError.
        config.registry.registerAdapter(
            view,
            (IViewClassifier, IRequest, implementedBy(RuntimeError)),
            IView, name='')
        config.registry.registerAdapter(
            view,
            (IExceptionViewClassifier, IRequest, implementedBy(RuntimeError)),
            IView, name='')
        config.add_view(view=view, context=RuntimeError,
                        renderer=null_renderer)
        wrapper = self._getViewCallable(
            config, ctx_iface=implementedBy(RuntimeError), exception_view=True)
        self.assertTrue(IMultiView.providedBy(wrapper))
        self.assertEqual(wrapper(None, None), 'OK')
def test_add_view_multiview_replaces_existing_securedview(self):
from pyramid.renderers import null_renderer
from zope.interface import Interface
from pyramid.interfaces import IRequest
from pyramid.interfaces import ISecuredView
from pyramid.interfaces import IMultiView
from pyramid.interfaces import IViewClassifier
view = lambda *arg: 'OK'
view.__phash__ = 'abc'
config = self._makeOne(autocommit=True)
config.registry.registerAdapter(
view, (IViewClassifier, IRequest, Interface),
ISecuredView, name='')
config.add_view(view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config)
self.assertTrue(IMultiView.providedBy(wrapper))
self.assertEqual(wrapper(None, None), 'OK')
    def test_add_view_exc_multiview_replaces_existing_securedview(self):
        """Exception-view variant: an existing ISecuredView registration
        becomes part of a multiview when re-added for a context."""
        from pyramid.renderers import null_renderer
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.interfaces import ISecuredView
        from pyramid.interfaces import IMultiView
        from pyramid.interfaces import IViewClassifier
        from pyramid.interfaces import IExceptionViewClassifier
        view = lambda *arg: 'OK'
        view.__phash__ = 'abc'
        config = self._makeOne(autocommit=True)
        # Same secured view registered for both classifiers, as add_view
        # would do for an exception context.
        config.registry.registerAdapter(
            view,
            (IViewClassifier, IRequest, implementedBy(RuntimeError)),
            ISecuredView, name='')
        config.registry.registerAdapter(
            view,
            (IExceptionViewClassifier, IRequest, implementedBy(RuntimeError)),
            ISecuredView, name='')
        config.add_view(view=view, context=RuntimeError, renderer=null_renderer)
        wrapper = self._getViewCallable(
            config, ctx_iface=implementedBy(RuntimeError), exception_view=True)
        self.assertTrue(IMultiView.providedBy(wrapper))
        self.assertEqual(wrapper(None, None), 'OK')
    def test_add_view_with_accept_multiview_replaces_existing_view(self):
        """Adding an accept-qualified view over a plain registration
        builds a multiview with one predicate view and one media view."""
        from pyramid.renderers import null_renderer
        from zope.interface import Interface
        from pyramid.interfaces import IRequest
        from pyramid.interfaces import IView
        from pyramid.interfaces import IMultiView
        from pyramid.interfaces import IViewClassifier
        def view(context, request):
            return 'OK'
        def view2(context, request):
            return 'OK2'
        config = self._makeOne(autocommit=True)
        config.registry.registerAdapter(
            view, (IViewClassifier, IRequest, Interface), IView, name='')
        config.add_view(view=view2, accept='text/html', renderer=null_renderer)
        wrapper = self._getViewCallable(config)
        self.assertTrue(IMultiView.providedBy(wrapper))
        self.assertEqual(len(wrapper.views), 1)
        self.assertEqual(len(wrapper.media_views), 1)
        # No Accept header: the original view answers; with text/html the
        # accept-qualified view wins.
        self.assertEqual(wrapper(None, None), 'OK')
        request = DummyRequest()
        request.accept = DummyAccept('text/html', 'text/html')
        self.assertEqual(wrapper(None, request), 'OK2')
def test_add_view_mixed_case_replaces_existing_view(self):
from pyramid.renderers import null_renderer
def view(context, request): return 'OK'
def view2(context, request): return 'OK2'
def view3(context, request): return 'OK3'
config = self._makeOne(autocommit=True)
config.add_view(view=view, renderer=null_renderer)
config.add_view(view=view2, accept='text/html', renderer=null_renderer)
config.add_view(view=view3, accept='text/HTML', renderer=null_renderer)
wrapper = self._getViewCallable(config)
self.assertTrue(IMultiView.providedBy(wrapper))
self.assertEqual(len(wrapper.media_views.items()),1)
self.assertFalse('text/HTML' in wrapper.media_views)
self.assertEqual(wrapper(None, None), 'OK')
request = DummyRequest()
request.accept = DummyAccept('text/html', 'text/html')
self.assertEqual(wrapper(None, request), 'OK3')
def test_add_views_with_accept_multiview_replaces_existing(self):
from pyramid.renderers import null_renderer
def view(context, request): return 'OK'
def view2(context, request): return 'OK2'
def view3(context, request): return 'OK3'
config = self._makeOne(autocommit=True)
config.add_view(view=view, renderer=null_renderer)
config.add_view(view=view2, accept='text/html', renderer=null_renderer)
config.add_view(view=view3, accept='text/html', renderer=null_renderer)
wrapper = self._getViewCallable(config)
self.assertEqual(len(wrapper.media_views['text/html']), 1)
self.assertEqual(wrapper(None, None), 'OK')
request = DummyRequest()
request.accept = DummyAccept('text/html', 'text/html')
self.assertEqual(wrapper(None, request), 'OK3')
    def test_add_view_exc_with_accept_multiview_replaces_existing_view(self):
        """Exception-view variant: an accept-qualified exception view is
        merged with a prior registration into a multiview."""
        from pyramid.renderers import null_renderer
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.interfaces import IView
        from pyramid.interfaces import IMultiView
        from pyramid.interfaces import IViewClassifier
        from pyramid.interfaces import IExceptionViewClassifier
        def view(context, request):
            return 'OK'
        def view2(context, request):
            return 'OK2'
        config = self._makeOne(autocommit=True)
        # Existing view registered under both classifiers for RuntimeError.
        config.registry.registerAdapter(
            view,
            (IViewClassifier, IRequest, implementedBy(RuntimeError)),
            IView, name='')
        config.registry.registerAdapter(
            view,
            (IExceptionViewClassifier, IRequest, implementedBy(RuntimeError)),
            IView, name='')
        config.add_view(view=view2, accept='text/html', context=RuntimeError,
                        renderer=null_renderer)
        wrapper = self._getViewCallable(
            config, ctx_iface=implementedBy(RuntimeError), exception_view=True)
        self.assertTrue(IMultiView.providedBy(wrapper))
        self.assertEqual(len(wrapper.views), 1)
        self.assertEqual(len(wrapper.media_views), 1)
        self.assertEqual(wrapper(None, None), 'OK')
        request = DummyRequest()
        request.accept = DummyAccept('text/html', 'text/html')
        self.assertEqual(wrapper(None, request), 'OK2')
    def test_add_view_multiview_replaces_existing_view_with___accept__(self):
        """A pre-registered view carrying ``__accept__`` lands in the
        multiview's media table; the new view becomes the default."""
        from pyramid.renderers import null_renderer
        from zope.interface import Interface
        from pyramid.interfaces import IRequest
        from pyramid.interfaces import IView
        from pyramid.interfaces import IMultiView
        from pyramid.interfaces import IViewClassifier
        def view(context, request):
            return 'OK'
        def view2(context, request):
            return 'OK2'
        # __accept__/__phash__ emulate attributes add_view sets on views it
        # registers itself.
        view.__accept__ = 'text/html'
        view.__phash__ = 'abc'
        config = self._makeOne(autocommit=True)
        config.registry.registerAdapter(
            view, (IViewClassifier, IRequest, Interface), IView, name='')
        config.add_view(view=view2, renderer=null_renderer)
        wrapper = self._getViewCallable(config)
        self.assertTrue(IMultiView.providedBy(wrapper))
        self.assertEqual(len(wrapper.views), 1)
        self.assertEqual(len(wrapper.media_views), 1)
        self.assertEqual(wrapper(None, None), 'OK2')
        request = DummyRequest()
        request.accept = DummyAccept('text/html')
        self.assertEqual(wrapper(None, request), 'OK')
    def test_add_view_exc_mulview_replaces_existing_view_with___accept__(self):
        """Exception-view variant: a pre-registered ``__accept__`` view
        becomes the media view; the new view becomes the default."""
        from pyramid.renderers import null_renderer
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.interfaces import IView
        from pyramid.interfaces import IMultiView
        from pyramid.interfaces import IViewClassifier
        from pyramid.interfaces import IExceptionViewClassifier
        def view(context, request):
            return 'OK'
        def view2(context, request):
            return 'OK2'
        view.__accept__ = 'text/html'
        view.__phash__ = 'abc'
        config = self._makeOne(autocommit=True)
        config.registry.registerAdapter(
            view,
            (IViewClassifier, IRequest, implementedBy(RuntimeError)),
            IView, name='')
        config.registry.registerAdapter(
            view,
            (IExceptionViewClassifier, IRequest, implementedBy(RuntimeError)),
            IView, name='')
        config.add_view(view=view2, context=RuntimeError,
                        renderer=null_renderer)
        wrapper = self._getViewCallable(
            config, ctx_iface=implementedBy(RuntimeError), exception_view=True)
        self.assertTrue(IMultiView.providedBy(wrapper))
        self.assertEqual(len(wrapper.views), 1)
        self.assertEqual(len(wrapper.media_views), 1)
        self.assertEqual(wrapper(None, None), 'OK2')
        request = DummyRequest()
        request.accept = DummyAccept('text/html')
        self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_multiview_replaces_multiview(self):
from pyramid.renderers import null_renderer
from zope.interface import Interface
from pyramid.interfaces import IRequest
from pyramid.interfaces import IMultiView
from pyramid.interfaces import IViewClassifier
view = DummyMultiView()
config = self._makeOne(autocommit=True)
config.registry.registerAdapter(
view, (IViewClassifier, IRequest, Interface),
IMultiView, name='')
view2 = lambda *arg: 'OK2'
config.add_view(view=view2, renderer=null_renderer)
wrapper = self._getViewCallable(config)
self.assertTrue(IMultiView.providedBy(wrapper))
self.assertEqual([x[:2] for x in wrapper.views], [(view2, None)])
self.assertEqual(wrapper(None, None), 'OK1')
    def test_add_view_exc_multiview_replaces_multiview(self):
        """Exception-view variant: add_view appends to an existing
        multiview registered for the exception context."""
        from pyramid.renderers import null_renderer
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.interfaces import IMultiView
        from pyramid.interfaces import IViewClassifier
        from pyramid.interfaces import IExceptionViewClassifier
        view = DummyMultiView()
        config = self._makeOne(autocommit=True)
        # Same multiview registered under both classifiers.
        config.registry.registerAdapter(
            view,
            (IViewClassifier, IRequest, implementedBy(RuntimeError)),
            IMultiView, name='')
        config.registry.registerAdapter(
            view,
            (IExceptionViewClassifier, IRequest, implementedBy(RuntimeError)),
            IMultiView, name='')
        view2 = lambda *arg: 'OK2'
        config.add_view(view=view2, context=RuntimeError,
                        renderer=null_renderer)
        wrapper = self._getViewCallable(
            config, ctx_iface=implementedBy(RuntimeError), exception_view=True)
        self.assertTrue(IMultiView.providedBy(wrapper))
        self.assertEqual([x[:2] for x in wrapper.views], [(view2, None)])
        self.assertEqual(wrapper(None, None), 'OK1')
def test_add_view_multiview_context_superclass_then_subclass(self):
from pyramid.renderers import null_renderer
from zope.interface import Interface
from pyramid.interfaces import IRequest
from pyramid.interfaces import IView
from pyramid.interfaces import IMultiView
from pyramid.interfaces import IViewClassifier
class ISuper(Interface):
pass
class ISub(ISuper):
pass
view = lambda *arg: 'OK'
view2 = lambda *arg: 'OK2'
config = self._makeOne(autocommit=True)
config.registry.registerAdapter(
view, (IViewClassifier, IRequest, ISuper), IView, name='')
config.add_view(view=view2, for_=ISub, renderer=null_renderer)
wrapper = self._getViewCallable(config, ISuper, IRequest)
self.assertFalse(IMultiView.providedBy(wrapper))
self.assertEqual(wrapper(None, None), 'OK')
wrapper = self._getViewCallable(config, ISub, IRequest)
self.assertFalse(IMultiView.providedBy(wrapper))
self.assertEqual(wrapper(None, None), 'OK2')
    def test_add_view_multiview_exception_superclass_then_subclass(self):
        """Registering for an exception subclass leaves the superclass
        view intact and serves the subclass view for subclass contexts;
        normal and exception lookups agree."""
        from pyramid.renderers import null_renderer
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.interfaces import IView
        from pyramid.interfaces import IMultiView
        from pyramid.interfaces import IViewClassifier
        from pyramid.interfaces import IExceptionViewClassifier
        class Super(Exception):
            pass
        class Sub(Super):
            pass
        view = lambda *arg: 'OK'
        view2 = lambda *arg: 'OK2'
        config = self._makeOne(autocommit=True)
        config.registry.registerAdapter(
            view, (IViewClassifier, IRequest, Super), IView, name='')
        config.registry.registerAdapter(
            view, (IExceptionViewClassifier, IRequest, Super), IView, name='')
        config.add_view(view=view2, for_=Sub, renderer=null_renderer)
        wrapper = self._getViewCallable(
            config, implementedBy(Super), IRequest)
        wrapper_exc_view = self._getViewCallable(
            config, implementedBy(Super), IRequest, exception_view=True)
        # The exception lookup resolves to the very same callable.
        self.assertEqual(wrapper_exc_view, wrapper)
        self.assertFalse(IMultiView.providedBy(wrapper_exc_view))
        self.assertEqual(wrapper_exc_view(None, None), 'OK')
        wrapper = self._getViewCallable(
            config, implementedBy(Sub), IRequest)
        wrapper_exc_view = self._getViewCallable(
            config, implementedBy(Sub), IRequest, exception_view=True)
        self.assertEqual(wrapper_exc_view, wrapper)
        self.assertFalse(IMultiView.providedBy(wrapper_exc_view))
        self.assertEqual(wrapper_exc_view(None, None), 'OK2')
    def test_add_view_multiview_call_ordering(self):
        """With eight views covering every combination of request_method,
        request_param and containment predicates, the multiview always
        dispatches to the most specific matching view."""
        from pyramid.renderers import null_renderer as nr
        from zope.interface import directlyProvides
        def view1(context, request): return 'view1'
        def view2(context, request): return 'view2'
        def view3(context, request): return 'view3'
        def view4(context, request): return 'view4'
        def view5(context, request): return 'view5'
        def view6(context, request): return 'view6'
        def view7(context, request): return 'view7'
        def view8(context, request): return 'view8'
        config = self._makeOne(autocommit=True)
        # view1: no predicates; view2-4: one predicate each; view5-7: two
        # predicates; view8: all three.
        config.add_view(view=view1, renderer=nr)
        config.add_view(view=view2, request_method='POST', renderer=nr)
        config.add_view(view=view3,request_param='param', renderer=nr)
        config.add_view(view=view4, containment=IDummy, renderer=nr)
        config.add_view(view=view5, request_method='POST',
                        request_param='param', renderer=nr)
        config.add_view(view=view6, request_method='POST', containment=IDummy,
                        renderer=nr)
        config.add_view(view=view7, request_param='param', containment=IDummy,
                        renderer=nr)
        config.add_view(view=view8, request_method='POST',request_param='param',
                        containment=IDummy, renderer=nr)
        wrapper = self._getViewCallable(config)
        ctx = DummyContext()
        request = self._makeRequest(config)
        request.method = 'GET'
        request.params = {}
        self.assertEqual(wrapper(ctx, request), 'view1')
        ctx = DummyContext()
        request = self._makeRequest(config)
        request.params = {}
        request.method = 'POST'
        self.assertEqual(wrapper(ctx, request), 'view2')
        ctx = DummyContext()
        request = self._makeRequest(config)
        request.params = {'param':'1'}
        request.method = 'GET'
        self.assertEqual(wrapper(ctx, request), 'view3')
        ctx = DummyContext()
        directlyProvides(ctx, IDummy)
        request = self._makeRequest(config)
        request.method = 'GET'
        request.params = {}
        self.assertEqual(wrapper(ctx, request), 'view4')
        ctx = DummyContext()
        request = self._makeRequest(config)
        request.method = 'POST'
        request.params = {'param':'1'}
        self.assertEqual(wrapper(ctx, request), 'view5')
        ctx = DummyContext()
        directlyProvides(ctx, IDummy)
        request = self._makeRequest(config)
        request.params = {}
        request.method = 'POST'
        self.assertEqual(wrapper(ctx, request), 'view6')
        ctx = DummyContext()
        directlyProvides(ctx, IDummy)
        request = self._makeRequest(config)
        request.method = 'GET'
        request.params = {'param':'1'}
        self.assertEqual(wrapper(ctx, request), 'view7')
        ctx = DummyContext()
        directlyProvides(ctx, IDummy)
        request = self._makeRequest(config)
        request.method = 'POST'
        request.params = {'param':'1'}
        self.assertEqual(wrapper(ctx, request), 'view8')
    def test_add_view_multiview___discriminator__(self):
        """A multiview's __discriminator__ differs per context when the
        member views differ only by a containment predicate."""
        from pyramid.renderers import null_renderer
        from zope.interface import Interface
        class IFoo(Interface):
            pass
        class IBar(Interface):
            pass
        @implementer(IFoo)
        class Foo(object):
            pass
        @implementer(IBar)
        class Bar(object):
            pass
        foo = Foo()
        bar = Bar()
        from pyramid.interfaces import IRequest
        from pyramid.interfaces import IView
        from pyramid.interfaces import IViewClassifier
        from pyramid.interfaces import IMultiView
        view = lambda *arg: 'OK'
        view.__phash__ = 'abc'
        config = self._makeOne(autocommit=True)
        config.registry.registerAdapter(
            view, (IViewClassifier, IRequest, Interface), IView, name='')
        # Two registrations of the same view distinguished only by
        # containment, so the discriminator depends on the context.
        config.add_view(view=view, renderer=null_renderer,
                        containment=IFoo)
        config.add_view(view=view, renderer=null_renderer,
                        containment=IBar)
        wrapper = self._getViewCallable(config)
        self.assertTrue(IMultiView.providedBy(wrapper))
        request = self._makeRequest(config)
        self.assertNotEqual(
            wrapper.__discriminator__(foo, request),
            wrapper.__discriminator__(bar, request),
            )
    def test_add_view_with_template_renderer(self):
        """A view returning a dict is rendered through the named template
        asset, and the renderer receives correct factory metadata."""
        from pyramid.tests import test_config
        from pyramid.interfaces import ISettings
        class view(object):
            def __init__(self, context, request):
                self.request = request
                self.context = context
            def __call__(self):
                return {'a':'1'}
        config = self._makeOne(autocommit=True)
        renderer = self._registerRenderer(config)
        fixture = 'pyramid.tests.test_config:files/minimal.txt'
        # Disable introspection so the fixture renderer is used as-is.
        config.introspection = False
        config.add_view(view=view, renderer=fixture)
        wrapper = self._getViewCallable(config)
        request = self._makeRequest(config)
        result = wrapper(None, request)
        self.assertEqual(result.body, b'Hello!')
        settings = config.registry.queryUtility(ISettings)
        result = renderer.info
        # The RendererHelper info handed to the factory must describe the
        # asset spec, its type, package and current settings.
        self.assertEqual(result.registry, config.registry)
        self.assertEqual(result.type, '.txt')
        self.assertEqual(result.package, test_config)
        self.assertEqual(result.name, fixture)
        self.assertEqual(result.settings, settings)
def test_add_view_with_default_renderer(self):
class view(object):
def __init__(self, context, request):
self.request = request
self.context = context
def __call__(self):
return {'a':'1'}
config = self._makeOne(autocommit=True)
class moo(object):
def __init__(self, *arg, **kw):
pass
def __call__(self, *arg, **kw):
return b'moo'
config.add_renderer(None, moo)
config.add_view(view=view)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
result = wrapper(None, request)
self.assertEqual(result.body, b'moo')
    def test_add_view_with_template_renderer_no_callable(self):
        """add_view(view=None, renderer=...) registers a renderer-only
        view that serves the template directly."""
        from pyramid.tests import test_config
        from pyramid.interfaces import ISettings
        config = self._makeOne(autocommit=True)
        renderer = self._registerRenderer(config)
        fixture = 'pyramid.tests.test_config:files/minimal.txt'
        config.introspection = False
        config.add_view(view=None, renderer=fixture)
        wrapper = self._getViewCallable(config)
        request = self._makeRequest(config)
        result = wrapper(None, request)
        self.assertEqual(result.body, b'Hello!')
        settings = config.registry.queryUtility(ISettings)
        result = renderer.info
        # Renderer metadata mirrors the asset spec even without a view.
        self.assertEqual(result.registry, config.registry)
        self.assertEqual(result.type, '.txt')
        self.assertEqual(result.package, test_config)
        self.assertEqual(result.name, fixture)
        self.assertEqual(result.settings, settings)
def test_add_view_with_request_type_as_iface(self):
from pyramid.renderers import null_renderer
from zope.interface import directlyProvides
def view(context, request):
return 'OK'
config = self._makeOne(autocommit=True)
config.add_view(request_type=IDummy, view=view, renderer=null_renderer)
wrapper = self._getViewCallable(config, None)
request = self._makeRequest(config)
directlyProvides(request, IDummy)
result = wrapper(None, request)
self.assertEqual(result, 'OK')
def test_add_view_with_request_type_as_noniface(self):
view = lambda *arg: 'OK'
config = self._makeOne()
self.assertRaises(ConfigurationError,
config.add_view, view, '', None, None, object)
def test_add_view_with_route_name(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_route('foo', '/a/b')
config.add_view(view=view, route_name='foo', renderer=null_renderer)
request_iface = self._getRouteRequestIface(config, 'foo')
self.assertNotEqual(request_iface, None)
wrapper = self._getViewCallable(config, request_iface=request_iface)
self.assertNotEqual(wrapper, None)
self.assertEqual(wrapper(None, None), 'OK')
def test_add_view_with_nonexistant_route_name(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne()
config.add_view(view=view, route_name='foo', renderer=null_renderer)
self.assertRaises(ConfigurationExecutionError, config.commit)
    def test_add_view_with_route_name_exception(self):
        """A route-bound exception view is registered for both the normal
        and the exception lookup, and both resolve to the same callable."""
        from pyramid.renderers import null_renderer
        from zope.interface import implementedBy
        view = lambda *arg: 'OK'
        config = self._makeOne(autocommit=True)
        config.add_route('foo', '/a/b')
        config.add_view(view=view, route_name='foo', context=RuntimeError,
                        renderer=null_renderer)
        request_iface = self._getRouteRequestIface(config, 'foo')
        wrapper_exc_view = self._getViewCallable(
            config, ctx_iface=implementedBy(RuntimeError),
            request_iface=request_iface, exception_view=True)
        self.assertNotEqual(wrapper_exc_view, None)
        wrapper = self._getViewCallable(
            config, ctx_iface=implementedBy(RuntimeError),
            request_iface=request_iface)
        self.assertEqual(wrapper_exc_view, wrapper)
        self.assertEqual(wrapper_exc_view(None, None), 'OK')
def test_add_view_with_request_method_true(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, request_method='POST',
renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.method = 'POST'
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_with_request_method_false(self):
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, request_method='POST')
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.method = 'GET'
self._assertNotFound(wrapper, None, request)
def test_add_view_with_request_method_sequence_true(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, request_method=('POST', 'GET'),
renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.method = 'POST'
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_with_request_method_sequence_conflict(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne()
config.add_view(view=view, request_method=('POST', 'GET'),
renderer=null_renderer)
config.add_view(view=view, request_method=('GET', 'POST'),
renderer=null_renderer)
self.assertRaises(ConfigurationConflictError, config.commit)
def test_add_view_with_request_method_sequence_false(self):
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, request_method=('POST', 'HEAD'))
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.method = 'GET'
self._assertNotFound(wrapper, None, request)
def test_add_view_with_request_method_get_implies_head(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, request_method='GET', renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.method = 'HEAD'
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_with_request_param_noval_true(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, request_param='abc', renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.params = {'abc':''}
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_with_request_param_noval_false(self):
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, request_param='abc')
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.params = {}
self._assertNotFound(wrapper, None, request)
def test_add_view_with_request_param_val_true(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, request_param='abc=123',
renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.params = {'abc':'123'}
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_with_request_param_val_false(self):
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, request_param='abc=123')
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.params = {'abc':''}
self._assertNotFound(wrapper, None, request)
def test_add_view_with_xhr_true(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, xhr=True, renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.is_xhr = True
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_with_xhr_false(self):
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, xhr=True)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.is_xhr = False
self._assertNotFound(wrapper, None, request)
def test_add_view_with_header_badregex(self):
view = lambda *arg: 'OK'
config = self._makeOne()
config.add_view(view, header='Host:a\\')
self.assertRaises(ConfigurationError, config.commit)
def test_add_view_with_header_noval_match(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, header='Host', renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.headers = {'Host':'whatever'}
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_with_header_noval_nomatch(self):
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, header='Host')
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.headers = {'NotHost':'whatever'}
self._assertNotFound(wrapper, None, request)
def test_add_view_with_header_val_match(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, header=r'Host:\d', renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.headers = {'Host':'1'}
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_with_header_val_nomatch(self):
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, header=r'Host:\d')
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.headers = {'Host':'abc'}
self._assertNotFound(wrapper, None, request)
def test_add_view_with_header_val_missing(self):
from pyramid.httpexceptions import HTTPNotFound
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, header=r'Host:\d')
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.headers = {'NoHost':'1'}
self.assertRaises(HTTPNotFound, wrapper, None, request)
def test_add_view_with_accept_match(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, accept='text/xml', renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.accept = ['text/xml']
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_with_accept_nomatch(self):
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, accept='text/xml')
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.accept = ['text/html']
self._assertNotFound(wrapper, None, request)
def test_add_view_with_containment_true(self):
from pyramid.renderers import null_renderer
from zope.interface import directlyProvides
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, containment=IDummy, renderer=null_renderer)
wrapper = self._getViewCallable(config)
context = DummyContext()
directlyProvides(context, IDummy)
self.assertEqual(wrapper(context, None), 'OK')
def test_add_view_with_containment_false(self):
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, containment=IDummy)
wrapper = self._getViewCallable(config)
context = DummyContext()
self._assertNotFound(wrapper, context, None)
def test_add_view_with_containment_dottedname(self):
from pyramid.renderers import null_renderer
from zope.interface import directlyProvides
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(
view=view,
containment='pyramid.tests.test_config.IDummy',
renderer=null_renderer)
wrapper = self._getViewCallable(config)
context = DummyContext()
directlyProvides(context, IDummy)
self.assertEqual(wrapper(context, None), 'OK')
def test_add_view_with_path_info_badregex(self):
view = lambda *arg: 'OK'
config = self._makeOne()
config.add_view(view, path_info='\\')
self.assertRaises(ConfigurationError, config.commit)
def test_add_view_with_path_info_match(self):
from pyramid.renderers import null_renderer
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, path_info='/foo', renderer=null_renderer)
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.upath_info = text_(b'/foo')
self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_with_path_info_nomatch(self):
view = lambda *arg: 'OK'
config = self._makeOne(autocommit=True)
config.add_view(view=view, path_info='/foo')
wrapper = self._getViewCallable(config)
request = self._makeRequest(config)
request.upath_info = text_('/')
self._assertNotFound(wrapper, None, request)
    def test_add_view_with_custom_predicates_match(self):
        """custom_predicates that all return True let the view match, and
        using the argument emits exactly one deprecation warning."""
        import warnings
        from pyramid.renderers import null_renderer
        view = lambda *arg: 'OK'
        config = self._makeOne(autocommit=True)
        def pred1(context, request):
            return True
        def pred2(context, request):
            return True
        predicates = (pred1, pred2)
        with warnings.catch_warnings(record=True) as w:
            warnings.filterwarnings('always')
            config.add_view(view=view, custom_predicates=predicates,
                            renderer=null_renderer)
            # custom_predicates is deprecated: one warning expected.
            self.assertEqual(len(w), 1)
        wrapper = self._getViewCallable(config)
        request = self._makeRequest(config)
        self.assertEqual(wrapper(None, request), 'OK')
    def test_add_view_with_custom_predicates_nomatch(self):
        """A single failing custom predicate makes the view not found;
        the deprecated argument still emits one warning."""
        import warnings
        view = lambda *arg: 'OK'
        config = self._makeOne(autocommit=True)
        def pred1(context, request):
            return True
        def pred2(context, request):
            return False
        predicates = (pred1, pred2)
        with warnings.catch_warnings(record=True) as w:
            warnings.filterwarnings('always')
            config.add_view(view=view, custom_predicates=predicates)
            self.assertEqual(len(w), 1)
        wrapper = self._getViewCallable(config)
        request = self._makeRequest(config)
        self._assertNotFound(wrapper, None, request)
    # Ordering: a view with a custom predicate must outrank one registered
    # with only a standard (request_method) predicate.
    def test_add_view_custom_predicate_bests_standard_predicate(self):
        import warnings
        from pyramid.renderers import null_renderer
        view = lambda *arg: 'OK'
        view2 = lambda *arg: 'NOT OK'
        config = self._makeOne(autocommit=True)
        def pred1(context, request):
            return True
        with warnings.catch_warnings(record=True) as w:
            warnings.filterwarnings('always')
            config.add_view(view=view, custom_predicates=(pred1,),
                            renderer=null_renderer)
            config.add_view(view=view2, request_method='GET',
                            renderer=null_renderer)
            self.assertEqual(len(w), 1)  # only custom_predicates warns
        wrapper = self._getViewCallable(config)
        request = self._makeRequest(config)
        request.method = 'GET'
        self.assertEqual(wrapper(None, request), 'OK')
    # Ordering: more predicates beats fewer, regardless of registration order.
    def test_add_view_custom_more_preds_first_bests_fewer_preds_last(self):
        from pyramid.renderers import null_renderer
        view = lambda *arg: 'OK'
        view2 = lambda *arg: 'NOT OK'
        config = self._makeOne(autocommit=True)
        config.add_view(view=view, request_method='GET', xhr=True,
                        renderer=null_renderer)
        config.add_view(view=view2, request_method='GET',
                        renderer=null_renderer)
        wrapper = self._getViewCallable(config)
        request = self._makeRequest(config)
        request.method = 'GET'
        request.is_xhr = True
        self.assertEqual(wrapper(None, request), 'OK')
def test_add_view_same_predicates(self):
view2 = lambda *arg: 'second'
view1 = lambda *arg: 'first'
config = self._makeOne()
config.add_view(view=view1)
config.add_view(view=view2)
self.assertRaises(ConfigurationConflictError, config.commit)
    # An explicit ``permission`` argument must be passed through to the
    # registered authorization policy's ``permits``.
    def test_add_view_with_permission(self):
        from pyramid.renderers import null_renderer
        view1 = lambda *arg: 'OK'
        outerself = self
        class DummyPolicy(object):
            # Assert we receive exactly the values this test sets up.
            def effective_principals(self, r):
                outerself.assertEqual(r, request)
                return ['abc']
            def permits(self, context, principals, permission):
                outerself.assertEqual(context, None)
                outerself.assertEqual(principals, ['abc'])
                outerself.assertEqual(permission, 'view')
                return True
        policy = DummyPolicy()
        config = self._makeOne(authorization_policy=policy,
                               authentication_policy=policy,
                               autocommit=True)
        config.add_view(view=view1, permission='view', renderer=null_renderer)
        view = self._getViewCallable(config)
        request = self._makeRequest(config)
        self.assertEqual(view(None, request), 'OK')
    # With a configurator-wide default_permission and no per-view
    # permission, the default must reach the authorization policy.
    def test_add_view_with_default_permission_no_explicit_permission(self):
        from pyramid.renderers import null_renderer
        view1 = lambda *arg: 'OK'
        outerself = self
        class DummyPolicy(object):
            # Same assertions as above; 'view' now comes from the default.
            def effective_principals(self, r):
                outerself.assertEqual(r, request)
                return ['abc']
            def permits(self, context, principals, permission):
                outerself.assertEqual(context, None)
                outerself.assertEqual(principals, ['abc'])
                outerself.assertEqual(permission, 'view')
                return True
        policy = DummyPolicy()
        config = self._makeOne(authorization_policy=policy,
                               authentication_policy=policy,
                               default_permission='view',
                               autocommit=True)
        config.add_view(view=view1, renderer=null_renderer)
        view = self._getViewCallable(config)
        request = self._makeRequest(config)
        self.assertEqual(view(None, request), 'OK')
    # With neither a default nor an explicit permission, the security
    # policy must never be consulted.
    def test_add_view_with_no_default_permission_no_explicit_permission(self):
        from pyramid.renderers import null_renderer
        view1 = lambda *arg: 'OK'
        class DummyPolicy(object): pass # won't be called
        policy = DummyPolicy()
        config = self._makeOne(authorization_policy=policy,
                               authentication_policy=policy,
                               autocommit=True)
        config.add_view(view=view1, renderer=null_renderer)
        view = self._getViewCallable(config)
        request = self._makeRequest(config)
        self.assertEqual(view(None, request), 'OK')
    # A ``mapper`` argument must be instantiated with itself recorded in
    # its keyword arguments and used to wrap the view.
    def test_add_view_with_mapper(self):
        from pyramid.renderers import null_renderer
        class Mapper(object):
            def __init__(self, **kw):
                # Stash kw on the class so the test can inspect it.
                self.__class__.kw = kw
            def __call__(self, view):
                return view
        config = self._makeOne(autocommit=True)
        def view(context, request): return 'OK'
        config.add_view(view=view, mapper=Mapper, renderer=null_renderer)
        view = self._getViewCallable(config)
        self.assertEqual(view(None, None), 'OK')
        self.assertEqual(Mapper.kw['mapper'], Mapper)
    # ``__view_defaults__`` on a view class must be honored as predicate
    # defaults (here: a containment predicate).
    def test_add_view_with_view_defaults(self):
        from pyramid.renderers import null_renderer
        from pyramid.exceptions import PredicateMismatch
        from zope.interface import directlyProvides
        class view(object):
            __view_defaults__ = {
                'containment':'pyramid.tests.test_config.IDummy'
                }
            def __init__(self, request):
                pass
            def __call__(self):
                return 'OK'
        config = self._makeOne(autocommit=True)
        config.add_view(
            view=view,
            renderer=null_renderer)
        wrapper = self._getViewCallable(config)
        context = DummyContext()
        directlyProvides(context, IDummy)  # satisfies the containment default
        request = self._makeRequest(config)
        self.assertEqual(wrapper(context, request), 'OK')
        context = DummyContext()  # no IDummy -> predicate mismatch
        request = self._makeRequest(config)
        self.assertRaises(PredicateMismatch, wrapper, context, request)
    # Same behavior when the view is given as a dotted name via keyword.
    def test_add_view_with_view_defaults_viewname_is_dottedname_kwarg(self):
        from pyramid.renderers import null_renderer
        from pyramid.exceptions import PredicateMismatch
        from zope.interface import directlyProvides
        config = self._makeOne(autocommit=True)
        config.add_view(
            view='pyramid.tests.test_config.test_views.DummyViewDefaultsClass',
            renderer=null_renderer)
        wrapper = self._getViewCallable(config)
        context = DummyContext()
        directlyProvides(context, IDummy)
        request = self._makeRequest(config)
        self.assertEqual(wrapper(context, request), 'OK')
        context = DummyContext()
        request = self._makeRequest(config)
        self.assertRaises(PredicateMismatch, wrapper, context, request)
    # Same behavior when the dotted name is passed positionally.
    def test_add_view_with_view_defaults_viewname_is_dottedname_nonkwarg(self):
        from pyramid.renderers import null_renderer
        from pyramid.exceptions import PredicateMismatch
        from zope.interface import directlyProvides
        config = self._makeOne(autocommit=True)
        config.add_view(
            'pyramid.tests.test_config.test_views.DummyViewDefaultsClass',
            renderer=null_renderer)
        wrapper = self._getViewCallable(config)
        context = DummyContext()
        directlyProvides(context, IDummy)
        request = self._makeRequest(config)
        self.assertEqual(wrapper(context, request), 'OK')
        context = DummyContext()
        request = self._makeRequest(config)
        self.assertRaises(PredicateMismatch, wrapper, context, request)
    # Distinct __view_defaults__ give distinct predicate discriminators,
    # so the two registrations must not conflict.
    def test_add_view_with_view_config_and_view_defaults_doesnt_conflict(self):
        from pyramid.renderers import null_renderer
        class view(object):
            __view_defaults__ = {
                'containment':'pyramid.tests.test_config.IDummy'
                }
        class view2(object):
            __view_defaults__ = {
                'containment':'pyramid.tests.test_config.IFactory'
                }
        config = self._makeOne(autocommit=False)
        config.add_view(
            view=view,
            renderer=null_renderer)
        config.add_view(
            view=view2,
            renderer=null_renderer)
        config.commit() # does not raise
    # Identical __view_defaults__ yield identical discriminators and
    # therefore a conflict at commit time.
    def test_add_view_with_view_config_and_view_defaults_conflicts(self):
        from pyramid.renderers import null_renderer
        class view(object):
            __view_defaults__ = {
                'containment':'pyramid.tests.test_config.IDummy'
                }
        class view2(object):
            __view_defaults__ = {
                'containment':'pyramid.tests.test_config.IDummy'
                }
        config = self._makeOne(autocommit=False)
        config.add_view(
            view=view,
            renderer=null_renderer)
        config.add_view(
            view=view2,
            renderer=null_renderer)
        self.assertRaises(ConfigurationConflictError, config.commit)
    # derive_view wraps the callable (result is not the original) while
    # preserving its return value.
    def test_derive_view_function(self):
        from pyramid.renderers import null_renderer
        def view(request):
            return 'OK'
        config = self._makeOne()
        result = config.derive_view(view, renderer=null_renderer)
        self.assertFalse(result is view)
        self.assertEqual(result(None, None), 'OK')
    # derive_view also accepts a dotted name resolving to a view callable.
    def test_derive_view_dottedname(self):
        from pyramid.renderers import null_renderer
        config = self._makeOne()
        result = config.derive_view(
            'pyramid.tests.test_config.dummy_view',
            renderer=null_renderer)
        self.assertFalse(result is dummy_view)
        self.assertEqual(result(None, None), 'OK')
    # Without an explicit renderer, the registered default (None-named)
    # renderer must be applied to the view's return value.
    def test_derive_view_with_default_renderer_no_explicit_renderer(self):
        config = self._makeOne()
        class moo(object):
            def __init__(self, view):
                pass
            def __call__(self, *arg, **kw):
                return 'moo'
        config.add_renderer(None, moo)
        config.commit()
        def view(request):
            return 'OK'
        result = config.derive_view(view)
        self.assertFalse(result is view)
        self.assertEqual(result(None, None).body, b'moo')
    # An explicit renderer name must win over the registered default.
    def test_derive_view_with_default_renderer_with_explicit_renderer(self):
        class moo(object): pass
        class foo(object):
            def __init__(self, view):
                pass
            def __call__(self, *arg, **kw):
                return b'foo'
        def view(request):
            return 'OK'
        config = self._makeOne()
        config.add_renderer(None, moo)
        config.add_renderer('foo', foo)
        config.commit()
        result = config.derive_view(view, renderer='foo')
        self.assertFalse(result is view)
        request = self._makeRequest(config)
        self.assertEqual(result(None, request).body, b'foo')
    # With no IStaticURLInfo utility registered, add_static_view must fall
    # back to registering a '__static/' route plus a file-serving view.
    def test_add_static_view_here_no_utility_registered(self):
        from pyramid.renderers import null_renderer
        from zope.interface import Interface
        from pyramid.interfaces import IView
        from pyramid.interfaces import IViewClassifier
        config = self._makeOne(autocommit=True)
        config.add_static_view('static', 'files', renderer=null_renderer)
        request_type = self._getRouteRequestIface(config, '__static/')
        self._assertRoute(config, '__static/', 'static/*subpath')
        wrapped = config.registry.adapters.lookup(
            (IViewClassifier, request_type, Interface), IView, name='')
        from pyramid.request import Request
        request = Request.blank('/static/minimal.txt')
        request.subpath = ('minimal.txt', )
        result = wrapped(None, request)
        self.assertEqual(result.status, '200 OK')
        self.assertTrue(result.body.startswith(b'<div'))
    # A package-qualified spec must be forwarded verbatim to IStaticURLInfo.
    def test_add_static_view_package_relative(self):
        from pyramid.interfaces import IStaticURLInfo
        info = DummyStaticURLInfo()
        config = self._makeOne(autocommit=True)
        config.registry.registerUtility(info, IStaticURLInfo)
        config.add_static_view('static',
                               'pyramid.tests.test_config:files')
        self.assertEqual(
            info.added,
            [(config, 'static', 'pyramid.tests.test_config:files', {})])
    # A bare relative path must be qualified with the calling package.
    def test_add_static_view_package_here_relative(self):
        from pyramid.interfaces import IStaticURLInfo
        info = DummyStaticURLInfo()
        config = self._makeOne(autocommit=True)
        config.registry.registerUtility(info, IStaticURLInfo)
        config.add_static_view('static', 'files')
        self.assertEqual(
            info.added,
            [(config, 'static', 'pyramid.tests.test_config:files', {})])
    # An absolute filesystem path must be passed through unchanged.
    def test_add_static_view_absolute(self):
        import os
        from pyramid.interfaces import IStaticURLInfo
        info = DummyStaticURLInfo()
        config = self._makeOne(autocommit=True)
        config.registry.registerUtility(info, IStaticURLInfo)
        here = os.path.dirname(__file__)
        static_path = os.path.join(here, 'files')
        config.add_static_view('static', static_path)
        self.assertEqual(info.added,
                         [(config, 'static', static_path, {})])
    # add_forbidden_view registers the view as an HTTPForbidden exception
    # view for any request type.
    def test_add_forbidden_view(self):
        from pyramid.renderers import null_renderer
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.httpexceptions import HTTPForbidden
        config = self._makeOne(autocommit=True)
        view = lambda *arg: 'OK'
        config.add_forbidden_view(view, renderer=null_renderer)
        request = self._makeRequest(config)
        view = self._getViewCallable(config,
                                     ctx_iface=implementedBy(HTTPForbidden),
                                     request_iface=IRequest)
        result = view(None, request)
        self.assertEqual(result, 'OK')
    # Third-party view predicates are still allowed.
    def test_add_forbidden_view_allows_other_predicates(self):
        from pyramid.renderers import null_renderer
        config = self._makeOne(autocommit=True)
        # doesn't blow up
        config.add_view_predicate('dummy', DummyPredicate)
        config.add_forbidden_view(renderer=null_renderer, dummy='abc')
    # The following keyword arguments make no sense for a forbidden view
    # and must each be rejected with a ConfigurationError.
    def test_add_forbidden_view_disallows_name(self):
        config = self._makeOne(autocommit=True)
        self.assertRaises(ConfigurationError,
                          config.add_forbidden_view, name='foo')
    def test_add_forbidden_view_disallows_permission(self):
        config = self._makeOne(autocommit=True)
        self.assertRaises(ConfigurationError,
                          config.add_forbidden_view, permission='foo')
    def test_add_forbidden_view_disallows_context(self):
        config = self._makeOne(autocommit=True)
        self.assertRaises(ConfigurationError,
                          config.add_forbidden_view, context='foo')
    def test_add_forbidden_view_disallows_for_(self):
        config = self._makeOne(autocommit=True)
        self.assertRaises(ConfigurationError,
                          config.add_forbidden_view, for_='foo')
    def test_add_forbidden_view_disallows_http_cache(self):
        config = self._makeOne(autocommit=True)
        self.assertRaises(ConfigurationError,
                          config.add_forbidden_view, http_cache='foo')
    # add_notfound_view registers the view as an HTTPNotFound exception
    # view for any request type.
    def test_add_notfound_view(self):
        from pyramid.renderers import null_renderer
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.httpexceptions import HTTPNotFound
        config = self._makeOne(autocommit=True)
        view = lambda *arg: arg
        config.add_notfound_view(view, renderer=null_renderer)
        request = self._makeRequest(config)
        view = self._getViewCallable(config,
                                     ctx_iface=implementedBy(HTTPNotFound),
                                     request_iface=IRequest)
        result = view(None, request)
        self.assertEqual(result, (None, request))
    # Third-party view predicates are still allowed.
    def test_add_notfound_view_allows_other_predicates(self):
        from pyramid.renderers import null_renderer
        config = self._makeOne(autocommit=True)
        # doesn't blow up
        config.add_view_predicate('dummy', DummyPredicate)
        config.add_notfound_view(renderer=null_renderer, dummy='abc')
    # The following keyword arguments make no sense for a notfound view
    # and must each be rejected with a ConfigurationError.
    def test_add_notfound_view_disallows_name(self):
        config = self._makeOne(autocommit=True)
        self.assertRaises(ConfigurationError,
                          config.add_notfound_view, name='foo')
    def test_add_notfound_view_disallows_permission(self):
        config = self._makeOne(autocommit=True)
        self.assertRaises(ConfigurationError,
                          config.add_notfound_view, permission='foo')
    def test_add_notfound_view_disallows_context(self):
        config = self._makeOne(autocommit=True)
        self.assertRaises(ConfigurationError,
                          config.add_notfound_view, context='foo')
    def test_add_notfound_view_disallows_for_(self):
        config = self._makeOne(autocommit=True)
        self.assertRaises(ConfigurationError,
                          config.add_notfound_view, for_='foo')
    def test_add_notfound_view_disallows_http_cache(self):
        config = self._makeOne(autocommit=True)
        self.assertRaises(ConfigurationError,
                          config.add_notfound_view, http_cache='foo')
    # append_slash=True: a miss on '/foo' that would hit the route '/foo/'
    # must redirect there, preserving the query string.
    def test_add_notfound_view_append_slash(self):
        from pyramid.response import Response
        from pyramid.renderers import null_renderer
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.httpexceptions import HTTPNotFound
        config = self._makeOne(autocommit=True)
        config.add_route('foo', '/foo/')
        def view(request): return Response('OK')
        config.add_notfound_view(view, renderer=null_renderer,append_slash=True)
        request = self._makeRequest(config)
        request.environ['PATH_INFO'] = '/foo'
        request.query_string = 'a=1&b=2'
        request.path = '/scriptname/foo'
        view = self._getViewCallable(config,
                                     ctx_iface=implementedBy(HTTPNotFound),
                                     request_iface=IRequest)
        result = view(None, request)
        self.assertEqual(result.location, '/scriptname/foo/?a=1&b=2')
# Since Python 3 has to be all cool and fancy and different...
def _assertBody(self, response, value):
from pyramid.compat import text_type
if isinstance(value, text_type): # pragma: nocover
self.assertEqual(response.text, value)
else: # pragma: nocover
self.assertEqual(response.body, value)
    # A renderer given to add_notfound_view must be applied to the view's
    # return value (here: {} rendered as JSON).
    def test_add_notfound_view_with_renderer(self):
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.httpexceptions import HTTPNotFound
        config = self._makeOne(autocommit=True)
        view = lambda *arg: {}
        config.introspection = False
        config.add_notfound_view(
            view,
            renderer='json')
        request = self._makeRequest(config)
        view = self._getViewCallable(config,
                                     ctx_iface=implementedBy(HTTPNotFound),
                                     request_iface=IRequest)
        result = view(None, request)
        self._assertBody(result, '{}')
    # Same renderer behavior for add_forbidden_view.
    def test_add_forbidden_view_with_renderer(self):
        from zope.interface import implementedBy
        from pyramid.interfaces import IRequest
        from pyramid.httpexceptions import HTTPForbidden
        config = self._makeOne(autocommit=True)
        view = lambda *arg: {}
        config.introspection = False
        config.add_forbidden_view(
            view,
            renderer='json')
        request = self._makeRequest(config)
        view = self._getViewCallable(config,
                                     ctx_iface=implementedBy(HTTPForbidden),
                                     request_iface=IRequest)
        result = view(None, request)
        self._assertBody(result, '{}')
    # set_view_mapper registers its argument as the IViewMapperFactory
    # utility.
    def test_set_view_mapper(self):
        from pyramid.interfaces import IViewMapperFactory
        config = self._makeOne(autocommit=True)
        mapper = object()
        config.set_view_mapper(mapper)
        result = config.registry.getUtility(IViewMapperFactory)
        self.assertEqual(result, mapper)
    # A dotted name must be resolved to the module/object before being
    # registered.
    def test_set_view_mapper_dottedname(self):
        from pyramid.interfaces import IViewMapperFactory
        config = self._makeOne(autocommit=True)
        config.set_view_mapper('pyramid.tests.test_config')
        result = config.registry.getUtility(IViewMapperFactory)
        from pyramid.tests import test_config
        self.assertEqual(result, test_config)
class Test_requestonly(unittest.TestCase):
    """Tests for ``pyramid.config.views.requestonly``."""
    def _callFUT(self, view, attr=None):
        # Late import so a broken pyramid.config.views only fails these tests.
        from pyramid.config.views import requestonly
        return requestonly(view, attr=attr)
    def test_defaults(self):
        # A callable whose sole required argument is the request counts as
        # request-only even with trailing defaulted arguments.
        def sample_view(request, a=1, b=2):
            pass
        self.assertTrue(self._callFUT(sample_view))
    def test_otherattr(self):
        # With ``attr`` supplied, the named method's signature is examined
        # rather than ``__init__``/``__call__``.
        class AView(object):
            def __init__(self, request, a=1, b=2):
                pass
            def bleh(self):
                pass
        self.assertTrue(self._callFUT(AView, 'bleh'))
class Test_isexception(unittest.TestCase):
    """Tests for ``pyramid.config.views.isexception``."""
    def _callFUT(self, ob):
        from pyramid.config.views import isexception
        return isexception(ob)
    def test_is_exception_instance(self):
        # An instance of an Exception subclass qualifies.
        class LocalError(Exception):
            pass
        instance = LocalError()
        self.assertEqual(self._callFUT(instance), True)
    def test_is_exception_class(self):
        # The Exception subclass itself also qualifies.
        class LocalError(Exception):
            pass
        self.assertEqual(self._callFUT(LocalError), True)
    def test_is_IException(self):
        # The IException interface object qualifies directly.
        from pyramid.interfaces import IException
        self.assertEqual(self._callFUT(IException), True)
    def test_is_IException_subinterface(self):
        # Any interface derived from IException qualifies as well.
        from pyramid.interfaces import IException
        class ISubException(IException):
            pass
        self.assertEqual(self._callFUT(ISubException), True)
class TestMultiView(unittest.TestCase):
    """Tests for ``pyramid.config.views.MultiView``.

    A MultiView holds ``(order, view, phash)`` triples in ``views`` and,
    per media type, in ``media_views``; lookup walks them in order.
    """
    def _getTargetClass(self):
        from pyramid.config.views import MultiView
        return MultiView
    def _makeOne(self, name='name'):
        return self._getTargetClass()(name)
    def test_class_implements_ISecuredView(self):
        from zope.interface.verify import verifyClass
        from pyramid.interfaces import ISecuredView
        verifyClass(ISecuredView, self._getTargetClass())
    def test_instance_implements_ISecuredView(self):
        from zope.interface.verify import verifyObject
        from pyramid.interfaces import ISecuredView
        verifyObject(ISecuredView, self._makeOne())
    def test_add(self):
        # add() keeps ``views`` sorted by order; an ``accept`` argument
        # routes the entry into ``media_views`` instead.
        mv = self._makeOne()
        mv.add('view', 100)
        self.assertEqual(mv.views, [(100, 'view', None)])
        mv.add('view2', 99)
        self.assertEqual(mv.views, [(99, 'view2', None), (100, 'view', None)])
        mv.add('view3', 100, 'text/html')
        self.assertEqual(mv.media_views['text/html'], [(100, 'view3', None)])
        mv.add('view4', 99, 'text/html', 'abc')
        self.assertEqual(mv.media_views['text/html'],
                         [(99, 'view4', 'abc'), (100, 'view3', None)])
        mv.add('view5', 100, 'text/xml')
        self.assertEqual(mv.media_views['text/xml'], [(100, 'view5', None)])
        self.assertEqual(set(mv.accepts), set(['text/xml', 'text/html']))
        self.assertEqual(mv.views, [(99, 'view2', None), (100, 'view', None)])
        # a wildcard media type such as text/* lands in plain ``views``
        mv.add('view6', 98, 'text/*')
        self.assertEqual(mv.views, [(98, 'view6', None),
                                    (99, 'view2', None),
                                    (100, 'view', None)])
    def test_add_with_phash(self):
        # Re-adding with the same phash is a no-op; a new phash appends.
        mv = self._makeOne()
        mv.add('view', 100, phash='abc')
        self.assertEqual(mv.views, [(100, 'view', 'abc')])
        mv.add('view', 100, phash='abc')
        self.assertEqual(mv.views, [(100, 'view', 'abc')])
        mv.add('view', 100, phash='def')
        self.assertEqual(mv.views, [(100, 'view', 'abc'),
                                    (100, 'view', 'def')])
        mv.add('view', 100, phash='abc')
        self.assertEqual(mv.views, [(100, 'view', 'abc'),
                                    (100, 'view', 'def')])
    def test_add_with_phash_override_accept(self):
        # Within one media type, a matching phash replaces the earlier view.
        mv = self._makeOne()
        def view1(): pass
        def view2(): pass
        def view3(): pass
        mv.add(view1, 100, accept='text/html', phash='abc')
        mv.add(view2, 100, accept='text/html', phash='abc')
        mv.add(view3, 99, accept='text/html', phash='def')
        self.assertEqual(mv.media_views['text/html'],
                         [(99, view3, 'def'), (100, view2, 'abc')])
    def test_add_with_phash_override_accept2(self):
        # Distinct phashes accumulate instead of replacing.
        mv = self._makeOne()
        def view1(): pass
        def view2(): pass
        def view3(): pass
        mv.add(view1, 100, accept='text/html', phash='abc')
        mv.add(view2, 100, accept='text/html', phash='def')
        mv.add(view3, 99, accept='text/html', phash='ghi')
        self.assertEqual(mv.media_views['text/html'],
                         [(99, view3, 'ghi'),
                          (100, view1, 'abc'),
                          (100, view2, 'def')]
                          )
    def test_multiple_with_functions_as_views(self):
        # this failed on py3 at one point, because functions aren't orderable
        # and we were sorting the views via a plain sort() rather than
        # sort(key=itemgetter(0)).
        def view1(request): pass
        def view2(request): pass
        mv = self._makeOne()
        mv.add(view1, 100, None)
        self.assertEqual(mv.views, [(100, view1, None)])
        mv.add(view2, 100, None)
        self.assertEqual(mv.views, [(100, view1, None), (100, view2, None)])
    def test_get_views_request_has_no_accept(self):
        # Without an ``accept`` attribute on the request, media views are
        # ignored entirely.
        request = DummyRequest()
        mv = self._makeOne()
        mv.views = [(99, lambda *arg: None)]
        self.assertEqual(mv.get_views(request), mv.views)
    def test_get_views_no_self_accepts(self):
        # With no registered accepts, the plain views list is returned.
        request = DummyRequest()
        request.accept = True
        mv = self._makeOne()
        mv.accepts = []
        mv.views = [(99, lambda *arg: None)]
        self.assertEqual(mv.get_views(request), mv.views)
    def test_get_views(self):
        # Matching media views are returned ahead of the plain views.
        request = DummyRequest()
        request.accept = DummyAccept('text/html')
        mv = self._makeOne()
        mv.accepts = ['text/html']
        mv.views = [(99, lambda *arg: None)]
        html_views = [(98, lambda *arg: None)]
        mv.media_views['text/html'] = html_views
        self.assertEqual(mv.get_views(request), html_views + mv.views)
    def test_get_views_best_match_returns_None(self):
        # No acceptable media type -> fall back to the plain views.
        request = DummyRequest()
        request.accept = DummyAccept(None)
        mv = self._makeOne()
        mv.accepts = ['text/html']
        mv.views = [(99, lambda *arg: None)]
        self.assertEqual(mv.get_views(request), mv.views)
    def test_match_not_found(self):
        from pyramid.httpexceptions import HTTPNotFound
        mv = self._makeOne()
        context = DummyContext()
        request = DummyRequest()
        self.assertRaises(HTTPNotFound, mv.match, context, request)
    def test_match_predicate_fails(self):
        # A view whose __predicated__ returns False is skipped by match().
        from pyramid.httpexceptions import HTTPNotFound
        mv = self._makeOne()
        def view(context, request):
            """ """
        view.__predicated__ = lambda *arg: False
        mv.views = [(100, view, None)]
        context = DummyContext()
        request = DummyRequest()
        self.assertRaises(HTTPNotFound, mv.match, context, request)
    def test_match_predicate_succeeds(self):
        mv = self._makeOne()
        def view(context, request):
            """ """
        view.__predicated__ = lambda *arg: True
        mv.views = [(100, view, None)]
        context = DummyContext()
        request = DummyRequest()
        result = mv.match(context, request)
        self.assertEqual(result, view)
    def test_permitted_no_views(self):
        from pyramid.httpexceptions import HTTPNotFound
        mv = self._makeOne()
        context = DummyContext()
        request = DummyRequest()
        self.assertRaises(HTTPNotFound, mv.__permitted__, context, request)
    def test_permitted_no_match_with__permitted__(self):
        # A matched view without __permitted__ is treated as permitted.
        mv = self._makeOne()
        def view(context, request):
            """ """
        mv.views = [(100, view, None)]
        self.assertEqual(mv.__permitted__(None, None), True)
    def test_permitted(self):
        # The matched view's __permitted__ result is propagated.
        mv = self._makeOne()
        def view(context, request):
            """ """
        def permitted(context, request):
            return False
        view.__permitted__ = permitted
        mv.views = [(100, view, None)]
        context = DummyContext()
        request = DummyRequest()
        result = mv.__permitted__(context, request)
        self.assertEqual(result, False)
    def test__call__not_found(self):
        from pyramid.httpexceptions import HTTPNotFound
        mv = self._makeOne()
        context = DummyContext()
        request = DummyRequest()
        self.assertRaises(HTTPNotFound, mv, context, request)
    def test___call__intermediate_not_found(self):
        # A PredicateMismatch from one view makes __call__ try the next.
        from pyramid.exceptions import PredicateMismatch
        mv = self._makeOne()
        context = DummyContext()
        request = DummyRequest()
        request.view_name = ''
        expected_response = DummyResponse()
        def view1(context, request):
            raise PredicateMismatch
        def view2(context, request):
            return expected_response
        mv.views = [(100, view1, None), (99, view2, None)]
        response = mv(context, request)
        self.assertEqual(response, expected_response)
    def test___call__raise_not_found_isnt_interpreted_as_pred_mismatch(self):
        # An HTTPNotFound raised *by* a view must propagate, not trigger
        # fallthrough to the next view.
        from pyramid.httpexceptions import HTTPNotFound
        mv = self._makeOne()
        context = DummyContext()
        request = DummyRequest()
        request.view_name = ''
        def view1(context, request):
            raise HTTPNotFound
        def view2(context, request):
            """ """
        mv.views = [(100, view1, None), (99, view2, None)]
        self.assertRaises(HTTPNotFound, mv, context, request)
    def test___call__(self):
        mv = self._makeOne()
        context = DummyContext()
        request = DummyRequest()
        request.view_name = ''
        expected_response = DummyResponse()
        def view(context, request):
            return expected_response
        mv.views = [(100, view, None)]
        response = mv(context, request)
        self.assertEqual(response, expected_response)
    def test__call_permissive__not_found(self):
        from pyramid.httpexceptions import HTTPNotFound
        mv = self._makeOne()
        context = DummyContext()
        request = DummyRequest()
        # exercise __call_permissive__ itself (previously this invoked
        # plain __call__, leaving the permissive path untested)
        self.assertRaises(HTTPNotFound, mv.__call_permissive__,
                          context, request)
    def test___call_permissive_has_call_permissive(self):
        # If the matched view exposes __call_permissive__, it is invoked
        # instead of the view itself.
        mv = self._makeOne()
        context = DummyContext()
        request = DummyRequest()
        request.view_name = ''
        expected_response = DummyResponse()
        def view(context, request):
            """ """
        def permissive(context, request):
            return expected_response
        view.__call_permissive__ = permissive
        mv.views = [(100, view, None)]
        response = mv.__call_permissive__(context, request)
        self.assertEqual(response, expected_response)
    def test___call_permissive_has_no_call_permissive(self):
        # Without __call_permissive__, the view itself is called.
        mv = self._makeOne()
        context = DummyContext()
        request = DummyRequest()
        request.view_name = ''
        expected_response = DummyResponse()
        def view(context, request):
            return expected_response
        mv.views = [(100, view, None)]
        response = mv.__call_permissive__(context, request)
        self.assertEqual(response, expected_response)
    def test__call__with_accept_match(self):
        # A matching Accept header makes the media view win over ``views``.
        mv = self._makeOne()
        context = DummyContext()
        request = DummyRequest()
        request.accept = DummyAccept('text/html', 'text/xml')
        expected_response = DummyResponse()
        def view(context, request):
            return expected_response
        # use a well-formed (order, view, phash) triple like every other
        # entry; a bare 2-tuple would raise if this list were ever unpacked
        mv.views = [(100, None, None)]
        mv.media_views['text/xml'] = [(100, view, None)]
        mv.accepts = ['text/xml']
        response = mv(context, request)
        self.assertEqual(response, expected_response)
    def test__call__with_accept_miss(self):
        # No acceptable media match -> the plain views list is used.
        mv = self._makeOne()
        context = DummyContext()
        request = DummyRequest()
        request.accept = DummyAccept('text/plain', 'text/html')
        expected_response = DummyResponse()
        def view(context, request):
            return expected_response
        mv.views = [(100, view, None)]
        mv.media_views['text/xml'] = [(100, None, None)]
        mv.accepts = ['text/xml']
        response = mv(context, request)
        self.assertEqual(response, expected_response)
class TestViewDeriver(unittest.TestCase):
    # Fixture plumbing shared by the ViewDeriver tests.
    def setUp(self):
        self.config = testing.setUp()
    def tearDown(self):
        self.config = None
    # Build a ViewDeriver bound to this test's registry.
    def _makeOne(self, **kw):
        kw['registry'] = self.config.registry
        from pyramid.config.views import ViewDeriver
        return ViewDeriver(**kw)
    def _makeRequest(self):
        request = DummyRequest()
        request.registry = self.config.registry
        return request
    # Register and return a DummyLogger as the IDebugLogger utility.
    def _registerLogger(self):
        from pyramid.interfaces import IDebugLogger
        logger = DummyLogger()
        self.config.registry.registerUtility(logger, IDebugLogger)
        return logger
    # Register one DummySecurityPolicy as both authn and authz policy.
    def _registerSecurityPolicy(self, permissive):
        from pyramid.interfaces import IAuthenticationPolicy
        from pyramid.interfaces import IAuthorizationPolicy
        policy = DummySecurityPolicy(permissive)
        self.config.registry.registerUtility(policy, IAuthenticationPolicy)
        self.config.registry.registerUtility(policy, IAuthorizationPolicy)
    # A derived view whose callable returns None (and no renderer is in
    # play) must raise a descriptive ValueError.
    def test_function_returns_non_adaptable(self):
        def view(request):
            return None
        deriver = self._makeOne()
        result = deriver(view)
        self.assertFalse(result is view)
        try:
            result(None, None)
        except ValueError as e:
            self.assertEqual(
                e.args[0],
                'Could not convert return value of the view callable function '
                'pyramid.tests.test_config.test_views.view into a response '
                'object. The value returned was None. You may have forgotten '
                'to return a value from the view callable.'
                )
        else: # pragma: no cover
            raise AssertionError
    # A dict return without a renderer gets a hint about renderers.
    def test_function_returns_non_adaptable_dict(self):
        def view(request):
            return {'a':1}
        deriver = self._makeOne()
        result = deriver(view)
        self.assertFalse(result is view)
        try:
            result(None, None)
        except ValueError as e:
            self.assertEqual(
                e.args[0],
                "Could not convert return value of the view callable function "
                "pyramid.tests.test_config.test_views.view into a response "
                "object. The value returned was {'a': 1}. You may have "
                "forgotten to define a renderer in the view configuration."
                )
        else: # pragma: no cover
            raise AssertionError
    # For an instance-based view, the error names the callable object.
    def test_instance_returns_non_adaptable(self):
        class AView(object):
            def __call__(self, request):
                return None
        view = AView()
        deriver = self._makeOne()
        result = deriver(view)
        self.assertFalse(result is view)
        try:
            result(None, None)
        except ValueError as e:
            msg = e.args[0]
            self.assertTrue(msg.startswith(
                'Could not convert return value of the view callable object '
                '<pyramid.tests.test_config.test_views.'))
            self.assertTrue(msg.endswith(
                '> into a response object. The value returned was None. You '
                'may have forgotten to return a value from the view callable.'))
        else: # pragma: no cover
            raise AssertionError
    # A genuine Response return passes through the deriver untouched.
    def test_function_returns_true_Response_no_renderer(self):
        from pyramid.response import Response
        r = Response('Hello')
        def view(request):
            return r
        deriver = self._makeOne()
        result = deriver(view)
        self.assertFalse(result is view)
        response = result(None, None)
        self.assertEqual(response, r)
    # Even with a renderer configured, a real Response is not re-rendered.
    def test_function_returns_true_Response_with_renderer(self):
        from pyramid.response import Response
        r = Response('Hello')
        def view(request):
            return r
        renderer = object()
        deriver = self._makeOne(renderer=renderer)
        result = deriver(view)
        self.assertFalse(result is view)
        response = result(None, None)
        self.assertEqual(response, r)
    # For class-based views, the conversion error names the method
    # (__call__ by default) and the class.
    def test_requestonly_default_method_returns_non_adaptable(self):
        request = DummyRequest()
        class AView(object):
            def __init__(self, request):
                pass
            def __call__(self):
                return None
        deriver = self._makeOne()
        result = deriver(AView)
        self.assertFalse(result is AView)
        try:
            result(None, request)
        except ValueError as e:
            self.assertEqual(
                e.args[0],
                'Could not convert return value of the view callable '
                'method __call__ of '
                'class pyramid.tests.test_config.test_views.AView into a '
                'response object. The value returned was None. You may have '
                'forgotten to return a value from the view callable.'
                )
        else: # pragma: no cover
            raise AssertionError
    # With attr= pointing at another method, that method is named instead.
    def test_requestonly_nondefault_method_returns_non_adaptable(self):
        request = DummyRequest()
        class AView(object):
            def __init__(self, request):
                pass
            def theviewmethod(self):
                return None
        deriver = self._makeOne(attr='theviewmethod')
        result = deriver(AView)
        self.assertFalse(result is AView)
        try:
            result(None, request)
        except ValueError as e:
            self.assertEqual(
                e.args[0],
                'Could not convert return value of the view callable '
                'method theviewmethod of '
                'class pyramid.tests.test_config.test_views.AView into a '
                'response object. The value returned was None. You may have '
                'forgotten to return a value from the view callable.'
                )
        else: # pragma: no cover
            raise AssertionError
def test_requestonly_function(self):
response = DummyResponse()
def view(request):
return response
deriver = self._makeOne()
result = deriver(view)
self.assertFalse(result is view)
self.assertEqual(result(None, None), response)
def test_requestonly_function_with_renderer(self):
response = DummyResponse()
class moo(object):
def render_view(inself, req, resp, view_inst, ctx):
self.assertEqual(req, request)
self.assertEqual(resp, 'OK')
self.assertEqual(view_inst, view)
self.assertEqual(ctx, context)
return response
def view(request):
return 'OK'
deriver = self._makeOne(renderer=moo())
result = deriver(view)
self.assertFalse(result.__wraps__ is view)
request = self._makeRequest()
context = testing.DummyResource()
self.assertEqual(result(context, request), response)
    def test_requestonly_function_with_renderer_request_override(self):
        """request.override_renderer swaps in the named renderer at call time."""
        def moo(info):
            def inner(value, system):
                self.assertEqual(value, 'OK')
                self.assertEqual(system['request'], request)
                self.assertEqual(system['context'], context)
                return b'moo'
            return inner
        def view(request):
            return 'OK'
        self.config.add_renderer('moo', moo)
        deriver = self._makeOne(renderer='string')
        result = deriver(view)
        self.assertFalse(result is view)
        request = self._makeRequest()
        request.override_renderer = 'moo'
        context = testing.DummyResource()
        self.assertEqual(result(context, request).body, b'moo')
    def test_requestonly_function_with_renderer_request_has_view(self):
        """A pre-set request.__view__ is passed to the renderer and then removed."""
        response = DummyResponse()
        class moo(object):
            def render_view(inself, req, resp, view_inst, ctx):
                # 'request'/'context' are bound later in the enclosing test.
                self.assertEqual(req, request)
                self.assertEqual(resp, 'OK')
                self.assertEqual(view_inst, 'view')
                self.assertEqual(ctx, context)
                return response
        def view(request):
            return 'OK'
        deriver = self._makeOne(renderer=moo())
        result = deriver(view)
        self.assertFalse(result.__wraps__ is view)
        request = self._makeRequest()
        request.__view__ = 'view'
        context = testing.DummyResource()
        r = result(context, request)
        self.assertEqual(r, response)
        self.assertFalse(hasattr(request, '__view__'))
def test_class_without_attr(self):
response = DummyResponse()
class View(object):
def __init__(self, request):
pass
def __call__(self):
return response
deriver = self._makeOne()
result = deriver(View)
request = self._makeRequest()
self.assertEqual(result(None, request), response)
self.assertEqual(request.__view__.__class__, View)
def test_class_with_attr(self):
response = DummyResponse()
class View(object):
def __init__(self, request):
pass
def another(self):
return response
deriver = self._makeOne(attr='another')
result = deriver(View)
request = self._makeRequest()
self.assertEqual(result(None, request), response)
self.assertEqual(request.__view__.__class__, View)
    def test_as_function_context_and_request(self):
        """A (context, request) function is wrapped, but __wraps__ points back to it."""
        def view(context, request):
            return 'OK'
        deriver = self._makeOne()
        result = deriver(view)
        self.assertTrue(result.__wraps__ is view)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        # NOTE(review): calls the raw view, not `result` — presumably intentional,
        # since the bare string 'OK' would not adapt to a response; confirm.
        self.assertEqual(view(None, None), 'OK')
def test_as_function_requestonly(self):
response = DummyResponse()
def view(request):
return response
deriver = self._makeOne()
result = deriver(view)
self.assertFalse(result is view)
self.assertEqual(view.__module__, result.__module__)
self.assertEqual(view.__doc__, result.__doc__)
self.assertEqual(view.__name__, result.__name__)
self.assertFalse(hasattr(result, '__call_permissive__'))
self.assertEqual(result(None, None), response)
    def test_as_newstyle_class_context_and_request(self):
        """A new-style (context, request) class view is wrapped; metadata kept, __view__ set."""
        response = DummyResponse()
        class view(object):
            def __init__(self, context, request):
                pass
            def __call__(self):
                return response
        deriver = self._makeOne()
        result = deriver(view)
        self.assertFalse(result is view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        request = self._makeRequest()
        self.assertEqual(result(None, request), response)
        self.assertEqual(request.__view__.__class__, view)
    def test_as_newstyle_class_requestonly(self):
        """Request-only new-style class view: same wrapping behavior as two-arg form."""
        response = DummyResponse()
        class view(object):
            # NOTE(review): ctor takes (context, request) despite the test's
            # "requestonly" name — verify this is the intended fixture.
            def __init__(self, context, request):
                pass
            def __call__(self):
                return response
        deriver = self._makeOne()
        result = deriver(view)
        self.assertFalse(result is view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        request = self._makeRequest()
        self.assertEqual(result(None, request), response)
        self.assertEqual(request.__view__.__class__, view)
    def test_as_oldstyle_class_context_and_request(self):
        """An old-style (context, request) class view wraps the same as new-style."""
        response = DummyResponse()
        class view:
            def __init__(self, context, request):
                pass
            def __call__(self):
                return response
        deriver = self._makeOne()
        result = deriver(view)
        self.assertFalse(result is view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        request = self._makeRequest()
        self.assertEqual(result(None, request), response)
        self.assertEqual(request.__view__.__class__, view)
    def test_as_oldstyle_class_requestonly(self):
        """Request-only old-style class view: same wrapping behavior as two-arg form."""
        response = DummyResponse()
        class view:
            # NOTE(review): ctor takes (context, request) despite the test's
            # "requestonly" name — verify this is the intended fixture.
            def __init__(self, context, request):
                pass
            def __call__(self):
                return response
        deriver = self._makeOne()
        result = deriver(view)
        self.assertFalse(result is view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        request = self._makeRequest()
        self.assertEqual(result(None, request), response)
        self.assertEqual(request.__view__.__class__, view)
def test_as_instance_context_and_request(self):
response = DummyResponse()
class View:
def __call__(self, context, request):
return response
view = View()
deriver = self._makeOne()
result = deriver(view)
self.assertTrue(result.__wraps__ is view)
self.assertFalse(hasattr(result, '__call_permissive__'))
self.assertEqual(result(None, None), response)
    def test_as_instance_requestonly(self):
        """A request-only instance view is wrapped; __name__ reflects the module path."""
        response = DummyResponse()
        class View:
            def __call__(self, request):
                return response
        view = View()
        deriver = self._makeOne()
        result = deriver(view)
        self.assertFalse(result is view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertTrue('test_views' in result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        self.assertEqual(result(None, None), response)
    def test_with_debug_authorization_no_authpol(self):
        """No authorization policy: access is allowed and the debug log says why."""
        response = DummyResponse()
        view = lambda *arg: response
        self.config.registry.settings = dict(
            debug_authorization=True, reload_templates=True)
        logger = self._registerLogger()
        deriver = self._makeOne(permission='view')
        result = deriver(view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        self.assertEqual(result(None, request), response)
        self.assertEqual(len(logger.messages), 1)
        self.assertEqual(logger.messages[0],
                         "debug_authorization of url url (view name "
                         "'view_name' against context None): Allowed "
                         "(no authorization policy in use)")
    def test_with_debug_authorization_authn_policy_no_authz_policy(self):
        """An authn policy without an authz policy behaves as no authorization policy."""
        response = DummyResponse()
        view = lambda *arg: response
        self.config.registry.settings = dict(debug_authorization=True)
        from pyramid.interfaces import IAuthenticationPolicy
        policy = DummySecurityPolicy(False)
        self.config.registry.registerUtility(policy, IAuthenticationPolicy)
        logger = self._registerLogger()
        deriver = self._makeOne(permission='view')
        result = deriver(view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        self.assertEqual(result(None, request), response)
        self.assertEqual(len(logger.messages), 1)
        self.assertEqual(logger.messages[0],
                         "debug_authorization of url url (view name "
                         "'view_name' against context None): Allowed "
                         "(no authorization policy in use)")
    def test_with_debug_authorization_authz_policy_no_authn_policy(self):
        """An authz policy without an authn policy behaves as no authorization policy."""
        response = DummyResponse()
        view = lambda *arg: response
        self.config.registry.settings = dict(debug_authorization=True)
        from pyramid.interfaces import IAuthorizationPolicy
        policy = DummySecurityPolicy(False)
        self.config.registry.registerUtility(policy, IAuthorizationPolicy)
        logger = self._registerLogger()
        deriver = self._makeOne(permission='view')
        result = deriver(view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        self.assertEqual(result(None, request), response)
        self.assertEqual(len(logger.messages), 1)
        self.assertEqual(logger.messages[0],
                         "debug_authorization of url url (view name "
                         "'view_name' against context None): Allowed "
                         "(no authorization policy in use)")
    def test_with_debug_authorization_no_permission(self):
        """Security policy present but no permission on the view: allowed and logged."""
        response = DummyResponse()
        view = lambda *arg: response
        self.config.registry.settings = dict(
            debug_authorization=True, reload_templates=True)
        self._registerSecurityPolicy(True)
        logger = self._registerLogger()
        deriver = self._makeOne()
        result = deriver(view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        self.assertEqual(result(None, request), response)
        self.assertEqual(len(logger.messages), 1)
        self.assertEqual(logger.messages[0],
                         "debug_authorization of url url (view name "
                         "'view_name' against context None): Allowed ("
                         "no permission registered)")
    def test_debug_auth_permission_authpol_permitted(self):
        """Permitted check: __call_permissive__ is exposed and True is logged."""
        response = DummyResponse()
        view = lambda *arg: response
        self.config.registry.settings = dict(
            debug_authorization=True, reload_templates=True)
        logger = self._registerLogger()
        self._registerSecurityPolicy(True)
        deriver = self._makeOne(permission='view')
        result = deriver(view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertEqual(result.__call_permissive__.__wraps__, view)
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        self.assertEqual(result(None, request), response)
        self.assertEqual(len(logger.messages), 1)
        self.assertEqual(logger.messages[0],
                         "debug_authorization of url url (view name "
                         "'view_name' against context None): True")
    def test_debug_auth_permission_authpol_permitted_no_request(self):
        """With no request object, the debug log falls back to None url/view name."""
        response = DummyResponse()
        view = lambda *arg: response
        self.config.registry.settings = dict(
            debug_authorization=True, reload_templates=True)
        logger = self._registerLogger()
        self._registerSecurityPolicy(True)
        deriver = self._makeOne(permission='view')
        result = deriver(view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertEqual(result.__call_permissive__.__wraps__, view)
        self.assertEqual(result(None, None), response)
        self.assertEqual(len(logger.messages), 1)
        self.assertEqual(logger.messages[0],
                         "debug_authorization of url None (view name "
                         "None against context None): True")
    def test_debug_auth_permission_authpol_denied(self):
        """A denied permission check raises HTTPForbidden and logs False."""
        from pyramid.httpexceptions import HTTPForbidden
        response = DummyResponse()
        view = lambda *arg: response
        self.config.registry.settings = dict(
            debug_authorization=True, reload_templates=True)
        logger = self._registerLogger()
        self._registerSecurityPolicy(False)
        deriver = self._makeOne(permission='view')
        result = deriver(view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertEqual(result.__call_permissive__.__wraps__, view)
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        self.assertRaises(HTTPForbidden, result, None, request)
        self.assertEqual(len(logger.messages), 1)
        self.assertEqual(logger.messages[0],
                         "debug_authorization of url url (view name "
                         "'view_name' against context None): False")
    def test_debug_auth_permission_authpol_denied2(self):
        """__permitted__ reports False when the security policy denies."""
        view = lambda *arg: 'OK'
        self.config.registry.settings = dict(
            debug_authorization=True, reload_templates=True)
        self._registerLogger()
        self._registerSecurityPolicy(False)
        deriver = self._makeOne(permission='view')
        result = deriver(view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        permitted = result.__permitted__(None, None)
        self.assertEqual(permitted, False)
    def test_debug_auth_permission_authpol_overridden(self):
        """NO_PERMISSION_REQUIRED bypasses the security check even when denied."""
        from pyramid.security import NO_PERMISSION_REQUIRED
        response = DummyResponse()
        view = lambda *arg: response
        self.config.registry.settings = dict(
            debug_authorization=True, reload_templates=True)
        logger = self._registerLogger()
        self._registerSecurityPolicy(False)
        deriver = self._makeOne(permission=NO_PERMISSION_REQUIRED)
        result = deriver(view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        self.assertEqual(result(None, request), response)
        self.assertEqual(len(logger.messages), 1)
        self.assertEqual(logger.messages[0],
                         "debug_authorization of url url (view name "
                         "'view_name' against context None): "
                         "Allowed (NO_PERMISSION_REQUIRED)")
    def test_secured_view_authn_policy_no_authz_policy(self):
        """Without debug and with authn-only policy, the view is not secured."""
        response = DummyResponse()
        view = lambda *arg: response
        self.config.registry.settings = {}
        from pyramid.interfaces import IAuthenticationPolicy
        policy = DummySecurityPolicy(False)
        self.config.registry.registerUtility(policy, IAuthenticationPolicy)
        deriver = self._makeOne(permission='view')
        result = deriver(view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        self.assertEqual(result(None, request), response)
    def test_secured_view_authz_policy_no_authn_policy(self):
        """Without debug and with authz-only policy, the view is not secured."""
        response = DummyResponse()
        view = lambda *arg: response
        self.config.registry.settings = {}
        from pyramid.interfaces import IAuthorizationPolicy
        policy = DummySecurityPolicy(False)
        self.config.registry.registerUtility(policy, IAuthorizationPolicy)
        deriver = self._makeOne(permission='view')
        result = deriver(view)
        self.assertEqual(view.__module__, result.__module__)
        self.assertEqual(view.__doc__, result.__doc__)
        self.assertEqual(view.__name__, result.__name__)
        self.assertFalse(hasattr(result, '__call_permissive__'))
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        self.assertEqual(result(None, request), response)
    def test_secured_view_raises_forbidden_no_name(self):
        """The Forbidden message falls back to <lambda> for anonymous views."""
        from pyramid.interfaces import IAuthenticationPolicy
        from pyramid.interfaces import IAuthorizationPolicy
        from pyramid.httpexceptions import HTTPForbidden
        response = DummyResponse()
        view = lambda *arg: response
        self.config.registry.settings = {}
        policy = DummySecurityPolicy(False)
        self.config.registry.registerUtility(policy, IAuthenticationPolicy)
        self.config.registry.registerUtility(policy, IAuthorizationPolicy)
        deriver = self._makeOne(permission='view')
        result = deriver(view)
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        try:
            result(None, request)
        except HTTPForbidden as e:
            self.assertEqual(e.message,
                             'Unauthorized: <lambda> failed permission check')
        else: # pragma: no cover
            raise AssertionError
    def test_secured_view_raises_forbidden_with_name(self):
        """The Forbidden message includes the view function's name."""
        from pyramid.interfaces import IAuthenticationPolicy
        from pyramid.interfaces import IAuthorizationPolicy
        from pyramid.httpexceptions import HTTPForbidden
        def myview(request): pass
        self.config.registry.settings = {}
        policy = DummySecurityPolicy(False)
        self.config.registry.registerUtility(policy, IAuthenticationPolicy)
        self.config.registry.registerUtility(policy, IAuthorizationPolicy)
        deriver = self._makeOne(permission='view')
        result = deriver(myview)
        request = self._makeRequest()
        request.view_name = 'view_name'
        request.url = 'url'
        try:
            result(None, request)
        except HTTPForbidden as e:
            self.assertEqual(e.message,
                             'Unauthorized: myview failed permission check')
        else: # pragma: no cover
            raise AssertionError
    def test_predicate_mismatch_view_has_no_name(self):
        """PredicateMismatch detail uses <lambda> for an anonymous view."""
        from pyramid.exceptions import PredicateMismatch
        response = DummyResponse()
        view = lambda *arg: response
        def predicate1(context, request):
            return False
        predicate1.text = lambda *arg: 'text'
        deriver = self._makeOne(predicates=[predicate1])
        result = deriver(view)
        request = self._makeRequest()
        request.method = 'POST'
        try:
            result(None, None)
        except PredicateMismatch as e:
            self.assertEqual(e.detail,
                             'predicate mismatch for view <lambda> (text)')
        else: # pragma: no cover
            raise AssertionError
    def test_predicate_mismatch_view_has_name(self):
        """PredicateMismatch detail includes the view function's name."""
        from pyramid.exceptions import PredicateMismatch
        def myview(request): pass
        def predicate1(context, request):
            return False
        predicate1.text = lambda *arg: 'text'
        deriver = self._makeOne(predicates=[predicate1])
        result = deriver(myview)
        request = self._makeRequest()
        request.method = 'POST'
        try:
            result(None, None)
        except PredicateMismatch as e:
            self.assertEqual(e.detail,
                             'predicate mismatch for view myview (text)')
        else: # pragma: no cover
            raise AssertionError
    def test_predicate_mismatch_exception_has_text_in_detail(self):
        """PredicateMismatch detail names the specific predicate that failed."""
        from pyramid.exceptions import PredicateMismatch
        def myview(request): pass
        def predicate1(context, request):
            return True
        predicate1.text = lambda *arg: 'pred1'
        def predicate2(context, request):
            return False
        predicate2.text = lambda *arg: 'pred2'
        deriver = self._makeOne(predicates=[predicate1, predicate2])
        result = deriver(myview)
        request = self._makeRequest()
        request.method = 'POST'
        try:
            result(None, None)
        except PredicateMismatch as e:
            self.assertEqual(e.detail,
                             'predicate mismatch for view myview (pred2)')
        else: # pragma: no cover
            raise AssertionError
def test_with_predicates_all(self):
response = DummyResponse()
view = lambda *arg: response
predicates = []
def predicate1(context, request):
predicates.append(True)
return True
def predicate2(context, request):
predicates.append(True)
return True
deriver = self._makeOne(predicates=[predicate1, predicate2])
result = deriver(view)
request = self._makeRequest()
request.method = 'POST'
next = result(None, None)
self.assertEqual(next, response)
self.assertEqual(predicates, [True, True])
def test_with_predicates_checker(self):
view = lambda *arg: 'OK'
predicates = []
def predicate1(context, request):
predicates.append(True)
return True
def predicate2(context, request):
predicates.append(True)
return True
deriver = self._makeOne(predicates=[predicate1, predicate2])
result = deriver(view)
request = self._makeRequest()
request.method = 'POST'
next = result.__predicated__(None, None)
self.assertEqual(next, True)
self.assertEqual(predicates, [True, True])
    def test_with_predicates_notall(self):
        """A failing predicate raises HTTPNotFound; all predicates still run in order."""
        from pyramid.httpexceptions import HTTPNotFound
        view = lambda *arg: 'OK'
        predicates = []
        def predicate1(context, request):
            predicates.append(True)
            return True
        predicate1.text = lambda *arg: 'text'
        def predicate2(context, request):
            predicates.append(True)
            return False
        predicate2.text = lambda *arg: 'text'
        deriver = self._makeOne(predicates=[predicate1, predicate2])
        result = deriver(view)
        request = self._makeRequest()
        request.method = 'POST'
        self.assertRaises(HTTPNotFound, result, None, None)
        self.assertEqual(predicates, [True, True])
    def test_with_wrapper_viewname(self):
        """The wrapper view receives the inner response/body/view via request attributes."""
        from pyramid.response import Response
        from pyramid.interfaces import IView
        from pyramid.interfaces import IViewClassifier
        inner_response = Response('OK')
        def inner_view(context, request):
            return inner_response
        def outer_view(context, request):
            self.assertEqual(request.wrapped_response, inner_response)
            self.assertEqual(request.wrapped_body, inner_response.body)
            self.assertEqual(request.wrapped_view.__original_view__,
                             inner_view)
            return Response(b'outer ' + request.wrapped_body)
        self.config.registry.registerAdapter(
            outer_view, (IViewClassifier, None, None), IView, 'owrap')
        deriver = self._makeOne(viewname='inner',
                                wrapper_viewname='owrap')
        result = deriver(inner_view)
        self.assertFalse(result is inner_view)
        self.assertEqual(inner_view.__module__, result.__module__)
        self.assertEqual(inner_view.__doc__, result.__doc__)
        request = self._makeRequest()
        response = result(None, request)
        self.assertEqual(response.body, b'outer OK')
def test_with_wrapper_viewname_notfound(self):
from pyramid.response import Response
inner_response = Response('OK')
def inner_view(context, request):
return inner_response
deriver = self._makeOne(viewname='inner', wrapper_viewname='owrap')
wrapped = deriver(inner_view)
request = self._makeRequest()
self.assertRaises(ValueError, wrapped, None, request)
    def test_as_newstyle_class_context_and_request_attr_and_renderer(self):
        """attr + renderer on a (context, request) class: renderer sees the dict result."""
        response = DummyResponse()
        class renderer(object):
            def render_view(inself, req, resp, view_inst, ctx):
                # 'request'/'context' are bound later in this test, before the call.
                self.assertEqual(req, request)
                self.assertEqual(resp, {'a':'1'})
                self.assertEqual(view_inst.__class__, View)
                self.assertEqual(ctx, context)
                return response
        class View(object):
            def __init__(self, context, request):
                pass
            def index(self):
                return {'a':'1'}
        deriver = self._makeOne(renderer=renderer(), attr='index')
        result = deriver(View)
        self.assertFalse(result is View)
        self.assertEqual(result.__module__, View.__module__)
        self.assertEqual(result.__doc__, View.__doc__)
        self.assertEqual(result.__name__, View.__name__)
        request = self._makeRequest()
        context = testing.DummyResource()
        self.assertEqual(result(context, request), response)
    def test_as_newstyle_class_requestonly_attr_and_renderer(self):
        """attr + renderer on a request-only class: renderer sees the dict result."""
        response = DummyResponse()
        class renderer(object):
            def render_view(inself, req, resp, view_inst, ctx):
                self.assertEqual(req, request)
                self.assertEqual(resp, {'a':'1'})
                self.assertEqual(view_inst.__class__, View)
                self.assertEqual(ctx, context)
                return response
        class View(object):
            def __init__(self, request):
                pass
            def index(self):
                return {'a':'1'}
        deriver = self._makeOne(renderer=renderer(), attr='index')
        result = deriver(View)
        self.assertFalse(result is View)
        self.assertEqual(result.__module__, View.__module__)
        self.assertEqual(result.__doc__, View.__doc__)
        self.assertEqual(result.__name__, View.__name__)
        request = self._makeRequest()
        context = testing.DummyResource()
        self.assertEqual(result(context, request), response)
    def test_as_oldstyle_cls_context_request_attr_and_renderer(self):
        """attr + renderer on an old-style (context, request) class view."""
        response = DummyResponse()
        class renderer(object):
            def render_view(inself, req, resp, view_inst, ctx):
                self.assertEqual(req, request)
                self.assertEqual(resp, {'a':'1'})
                self.assertEqual(view_inst.__class__, View)
                self.assertEqual(ctx, context)
                return response
        class View:
            def __init__(self, context, request):
                pass
            def index(self):
                return {'a':'1'}
        deriver = self._makeOne(renderer=renderer(), attr='index')
        result = deriver(View)
        self.assertFalse(result is View)
        self.assertEqual(result.__module__, View.__module__)
        self.assertEqual(result.__doc__, View.__doc__)
        self.assertEqual(result.__name__, View.__name__)
        request = self._makeRequest()
        context = testing.DummyResource()
        self.assertEqual(result(context, request), response)
    def test_as_oldstyle_cls_requestonly_attr_and_renderer(self):
        """attr + renderer on an old-style request-only class view."""
        response = DummyResponse()
        class renderer(object):
            def render_view(inself, req, resp, view_inst, ctx):
                self.assertEqual(req, request)
                self.assertEqual(resp, {'a':'1'})
                self.assertEqual(view_inst.__class__, View)
                self.assertEqual(ctx, context)
                return response
        class View:
            def __init__(self, request):
                pass
            def index(self):
                return {'a':'1'}
        deriver = self._makeOne(renderer=renderer(), attr='index')
        result = deriver(View)
        self.assertFalse(result is View)
        self.assertEqual(result.__module__, View.__module__)
        self.assertEqual(result.__doc__, View.__doc__)
        self.assertEqual(result.__name__, View.__name__)
        request = self._makeRequest()
        context = testing.DummyResource()
        self.assertEqual(result(context, request), response)
    def test_as_instance_context_and_request_attr_and_renderer(self):
        """attr + renderer on a pre-built instance with a (context, request) method."""
        response = DummyResponse()
        class renderer(object):
            def render_view(inself, req, resp, view_inst, ctx):
                self.assertEqual(req, request)
                self.assertEqual(resp, {'a':'1'})
                self.assertEqual(view_inst, view)
                self.assertEqual(ctx, context)
                return response
        class View:
            def index(self, context, request):
                return {'a':'1'}
        deriver = self._makeOne(renderer=renderer(), attr='index')
        view = View()
        result = deriver(view)
        self.assertFalse(result is view)
        self.assertEqual(result.__module__, view.__module__)
        self.assertEqual(result.__doc__, view.__doc__)
        request = self._makeRequest()
        context = testing.DummyResource()
        self.assertEqual(result(context, request), response)
    def test_as_instance_requestonly_attr_and_renderer(self):
        """attr + renderer on a pre-built instance with a request-only method."""
        response = DummyResponse()
        class renderer(object):
            def render_view(inself, req, resp, view_inst, ctx):
                self.assertEqual(req, request)
                self.assertEqual(resp, {'a':'1'})
                self.assertEqual(view_inst, view)
                self.assertEqual(ctx, context)
                return response
        class View:
            def index(self, request):
                return {'a':'1'}
        deriver = self._makeOne(renderer=renderer(), attr='index')
        view = View()
        result = deriver(view)
        self.assertFalse(result is view)
        self.assertEqual(result.__module__, view.__module__)
        self.assertEqual(result.__doc__, view.__doc__)
        request = self._makeRequest()
        context = testing.DummyResource()
        self.assertEqual(result(context, request), response)
def test_with_view_mapper_config_specified(self):
response = DummyResponse()
class mapper(object):
def __init__(self, **kw):
self.kw = kw
def __call__(self, view):
def wrapped(context, request):
return response
return wrapped
def view(context, request): return 'NOTOK'
deriver = self._makeOne(mapper=mapper)
result = deriver(view)
self.assertFalse(result.__wraps__ is view)
self.assertEqual(result(None, None), response)
    def test_with_view_mapper_view_specified(self):
        """A __view_mapper__ attribute on the view itself is honored by the deriver."""
        from pyramid.response import Response
        response = Response()
        def mapper(**kw):
            def inner(view):
                def superinner(context, request):
                    self.assertEqual(request, None)
                    return response
                return superinner
            return inner
        def view(context, request): return 'NOTOK'
        view.__view_mapper__ = mapper
        deriver = self._makeOne()
        result = deriver(view)
        self.assertFalse(result.__wraps__ is view)
        self.assertEqual(result(None, None), response)
    def test_with_view_mapper_default_mapper_specified(self):
        """A registry-wide default mapper set via set_view_mapper is used."""
        from pyramid.response import Response
        response = Response()
        def mapper(**kw):
            def inner(view):
                def superinner(context, request):
                    self.assertEqual(request, None)
                    return response
                return superinner
            return inner
        self.config.set_view_mapper(mapper)
        def view(context, request): return 'NOTOK'
        deriver = self._makeOne()
        result = deriver(view)
        self.assertFalse(result.__wraps__ is view)
        self.assertEqual(result(None, None), response)
def test_attr_wrapped_view_branching_default_phash(self):
from pyramid.config.util import DEFAULT_PHASH
def view(context, request): pass
deriver = self._makeOne(phash=DEFAULT_PHASH)
result = deriver(view)
self.assertEqual(result.__wraps__, view)
def test_attr_wrapped_view_branching_nondefault_phash(self):
def view(context, request): pass
deriver = self._makeOne(phash='nondefault')
result = deriver(view)
self.assertNotEqual(result, view)
    def test_http_cached_view_integer(self):
        """http_cache=3600 sets Expires ~1 hour ahead plus Cache-Control max-age."""
        import datetime
        from pyramid.response import Response
        response = Response('OK')
        def inner_view(context, request):
            return response
        deriver = self._makeOne(http_cache=3600)
        result = deriver(inner_view)
        self.assertFalse(result is inner_view)
        self.assertEqual(inner_view.__module__, result.__module__)
        self.assertEqual(inner_view.__doc__, result.__doc__)
        request = self._makeRequest()
        # Captured just before the call so the Expires comparison stays tight.
        when = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
        result = result(None, request)
        self.assertEqual(result, response)
        headers = dict(result.headerlist)
        expires = parse_httpdate(headers['Expires'])
        assert_similar_datetime(expires, when)
        self.assertEqual(headers['Cache-Control'], 'max-age=3600')
    def test_http_cached_view_timedelta(self):
        """An http_cache timedelta behaves like the equivalent number of seconds."""
        import datetime
        from pyramid.response import Response
        response = Response('OK')
        def inner_view(context, request):
            return response
        deriver = self._makeOne(http_cache=datetime.timedelta(hours=1))
        result = deriver(inner_view)
        self.assertFalse(result is inner_view)
        self.assertEqual(inner_view.__module__, result.__module__)
        self.assertEqual(inner_view.__doc__, result.__doc__)
        request = self._makeRequest()
        when = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
        result = result(None, request)
        self.assertEqual(result, response)
        headers = dict(result.headerlist)
        expires = parse_httpdate(headers['Expires'])
        assert_similar_datetime(expires, when)
        self.assertEqual(headers['Cache-Control'], 'max-age=3600')
    def test_http_cached_view_tuple(self):
        """An (seconds, options) tuple adds the extra Cache-Control terms."""
        import datetime
        from pyramid.response import Response
        response = Response('OK')
        def inner_view(context, request):
            return response
        deriver = self._makeOne(http_cache=(3600, {'public':True}))
        result = deriver(inner_view)
        self.assertFalse(result is inner_view)
        self.assertEqual(inner_view.__module__, result.__module__)
        self.assertEqual(inner_view.__doc__, result.__doc__)
        request = self._makeRequest()
        when = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
        result = result(None, request)
        self.assertEqual(result, response)
        headers = dict(result.headerlist)
        expires = parse_httpdate(headers['Expires'])
        assert_similar_datetime(expires, when)
        self.assertEqual(headers['Cache-Control'], 'max-age=3600, public')
    def test_http_cached_view_tuple_seconds_None(self):
        """seconds=None in the tuple: Cache-Control options only, no Expires header."""
        from pyramid.response import Response
        response = Response('OK')
        def inner_view(context, request):
            return response
        deriver = self._makeOne(http_cache=(None, {'public':True}))
        result = deriver(inner_view)
        self.assertFalse(result is inner_view)
        self.assertEqual(inner_view.__module__, result.__module__)
        self.assertEqual(inner_view.__doc__, result.__doc__)
        request = self._makeRequest()
        result = result(None, request)
        self.assertEqual(result, response)
        headers = dict(result.headerlist)
        self.assertFalse('Expires' in headers)
        self.assertEqual(headers['Cache-Control'], 'public')
    def test_http_cached_view_prevent_auto_set(self):
        """cache_control.prevent_auto on the response suppresses both cache headers."""
        from pyramid.response import Response
        response = Response()
        response.cache_control.prevent_auto = True
        def inner_view(context, request):
            return response
        deriver = self._makeOne(http_cache=3600)
        result = deriver(inner_view)
        request = self._makeRequest()
        result = result(None, request)
        self.assertEqual(result, response) # doesn't blow up
        headers = dict(result.headerlist)
        self.assertFalse('Expires' in headers)
        self.assertFalse('Cache-Control' in headers)
    def test_http_cached_prevent_http_cache_in_settings(self):
        """The prevent_http_cache setting disables cache headers globally."""
        self.config.registry.settings['prevent_http_cache'] = True
        from pyramid.response import Response
        response = Response()
        def inner_view(context, request):
            return response
        deriver = self._makeOne(http_cache=3600)
        result = deriver(inner_view)
        request = self._makeRequest()
        result = result(None, request)
        self.assertEqual(result, response)
        headers = dict(result.headerlist)
        self.assertFalse('Expires' in headers)
        self.assertFalse('Cache-Control' in headers)
def test_http_cached_view_bad_tuple(self):
deriver = self._makeOne(http_cache=(None,))
def view(request): pass
self.assertRaises(ConfigurationError, deriver, view)
class TestDefaultViewMapper(unittest.TestCase):
    def setUp(self):
        # Fresh testing configurator per test; its registry is what _makeOne wires in.
        self.config = testing.setUp()
        self.registry = self.config.registry
    def tearDown(self):
        # Drop the registry reference before tearing down the testing setup.
        del self.registry
        testing.tearDown()
    def _makeOne(self, **kw):
        """Build the DefaultViewMapper under test, bound to this test's registry."""
        from pyramid.config.views import DefaultViewMapper
        kw['registry'] = self.registry
        return DefaultViewMapper(**kw)
    def _makeRequest(self):
        """Return a DummyRequest wired to the test registry."""
        request = DummyRequest()
        request.registry = self.registry
        return request
def test_view_as_function_context_and_request(self):
def view(context, request):
return 'OK'
mapper = self._makeOne()
result = mapper(view)
self.assertTrue(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test__view_as_function_with_attr(self):
def view(context, request):
""" """
mapper = self._makeOne(attr='__name__')
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertRaises(TypeError, result, None, request)
def test_view_as_function_requestonly(self):
def view(request):
return 'OK'
mapper = self._makeOne()
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_function_requestonly_with_attr(self):
def view(request):
""" """
mapper = self._makeOne(attr='__name__')
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertRaises(TypeError, result, None, request)
def test_view_as_newstyle_class_context_and_request(self):
class view(object):
def __init__(self, context, request):
pass
def __call__(self):
return 'OK'
mapper = self._makeOne()
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_newstyle_class_context_and_request_with_attr(self):
class view(object):
def __init__(self, context, request):
pass
def index(self):
return 'OK'
mapper = self._makeOne(attr='index')
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_newstyle_class_requestonly(self):
class view(object):
def __init__(self, request):
pass
def __call__(self):
return 'OK'
mapper = self._makeOne()
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_newstyle_class_requestonly_with_attr(self):
class view(object):
def __init__(self, request):
pass
def index(self):
return 'OK'
mapper = self._makeOne(attr='index')
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_oldstyle_class_context_and_request(self):
class view:
def __init__(self, context, request):
pass
def __call__(self):
return 'OK'
mapper = self._makeOne()
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_oldstyle_class_context_and_request_with_attr(self):
class view:
def __init__(self, context, request):
pass
def index(self):
return 'OK'
mapper = self._makeOne(attr='index')
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_oldstyle_class_requestonly(self):
class view:
def __init__(self, request):
pass
def __call__(self):
return 'OK'
mapper = self._makeOne()
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_oldstyle_class_requestonly_with_attr(self):
class view:
def __init__(self, request):
pass
def index(self):
return 'OK'
mapper = self._makeOne(attr='index')
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_instance_context_and_request(self):
class View:
def __call__(self, context, request):
return 'OK'
view = View()
mapper = self._makeOne()
result = mapper(view)
self.assertTrue(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_instance_context_and_request_and_attr(self):
class View:
def index(self, context, request):
return 'OK'
view = View()
mapper = self._makeOne(attr='index')
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_instance_requestonly(self):
class View:
def __call__(self, request):
return 'OK'
view = View()
mapper = self._makeOne()
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
def test_view_as_instance_requestonly_with_attr(self):
class View:
def index(self, request):
return 'OK'
view = View()
mapper = self._makeOne(attr='index')
result = mapper(view)
self.assertFalse(result is view)
request = self._makeRequest()
self.assertEqual(result(None, request), 'OK')
class Test_preserve_view_attrs(unittest.TestCase):
    """Tests for pyramid.config.views.preserve_view_attrs, which copies
    introspection attributes from an original view onto its wrapper."""
    def _callFUT(self, view, wrapped_view):
        from pyramid.config.views import preserve_view_attrs
        return preserve_view_attrs(view, wrapped_view)
    def test_it_same(self):
        # Wrapping a view with itself is a no-op.
        def view(context, request):
            """ """
        result = self._callFUT(view, view)
        self.assertTrue(result is view)
    def test_it_view_is_None(self):
        # With no original, the wrapper is returned untouched.
        def view(context, request):
            """ """
        result = self._callFUT(None, view)
        self.assertTrue(result is view)
    def test_it_different_with_existing_original_view(self):
        # A pre-set __original_view__ on the original is propagated.
        def view1(context, request): pass
        view1.__original_view__ = 'abc'
        def view2(context, request): pass
        result = self._callFUT(view1, view2)
        self.assertEqual(result.__original_view__, 'abc')
        self.assertFalse(result is view1)
    def test_it_different(self):
        class DummyView1:
            """ 1 """
            __name__ = '1'
            __module__ = '1'
            def __call__(self, context, request):
                """ """
            def __call_permissive__(self, context, request):
                """ """
            def __predicated__(self, context, request):
                """ """
            def __permitted__(self, context, request):
                """ """
        class DummyView2:
            """ 2 """
            __name__ = '2'
            __module__ = '2'
            def __call__(self, context, request):
                """ """
            def __call_permissive__(self, context, request):
                """ """
            def __predicated__(self, context, request):
                """ """
            def __permitted__(self, context, request):
                """ """
        view1 = DummyView1()
        view2 = DummyView2()
        result = self._callFUT(view2, view1)
        self.assertEqual(result, view1)
        self.assertTrue(view1.__original_view__ is view2)
        self.assertTrue(view1.__doc__ is view2.__doc__)
        self.assertTrue(view1.__module__ is view2.__module__)
        self.assertTrue(view1.__name__ is view2.__name__)
        # NOTE(review): im_func is a module-level name defined elsewhere in
        # this module -- presumably a py2/py3 compat alias ('im_func' vs.
        # '__func__') for unwrapping bound methods; confirm at file top.
        self.assertTrue(getattr(view1.__call_permissive__, im_func) is
                        getattr(view2.__call_permissive__, im_func))
        self.assertTrue(getattr(view1.__permitted__, im_func) is
                        getattr(view2.__permitted__, im_func))
        self.assertTrue(getattr(view1.__predicated__, im_func) is
                        getattr(view2.__predicated__, im_func))
class TestStaticURLInfo(unittest.TestCase):
    """Tests for pyramid.config.views.StaticURLInfo.

    Registrations appear to be (url, spec, route_name) triples -- inferred
    from the fixtures below: remote registrations carry a URL prefix,
    local ones a route name. TODO confirm against StaticURLInfo itself.
    """
    def _getTargetClass(self):
        from pyramid.config.views import StaticURLInfo
        return StaticURLInfo
    def _makeOne(self):
        return self._getTargetClass()()
    def _makeConfig(self, registrations=None):
        # Configurator double whose registry may be pre-seeded with
        # static URL registrations.
        config = DummyConfig()
        registry = DummyRegistry()
        if registrations is not None:
            registry._static_url_registrations = registrations
        config.registry = registry
        return config
    def _makeRequest(self):
        request = DummyRequest()
        request.registry = DummyRegistry()
        return request
    def _assertRegistrations(self, config, expected):
        self.assertEqual(config.registry._static_url_registrations, expected)
    def test_verifyClass(self):
        from pyramid.interfaces import IStaticURLInfo
        from zope.interface.verify import verifyClass
        verifyClass(IStaticURLInfo, self._getTargetClass())
    def test_verifyObject(self):
        from pyramid.interfaces import IStaticURLInfo
        from zope.interface.verify import verifyObject
        verifyObject(IStaticURLInfo, self._makeOne())
    def test_generate_missing(self):
        # No registration matches the spec: ValueError.
        inst = self._makeOne()
        request = self._makeRequest()
        self.assertRaises(ValueError, inst.generate, 'path', request)
    def test_generate_registration_miss(self):
        # The first (non-matching) registration is skipped.
        inst = self._makeOne()
        registrations = [(None, 'spec', 'route_name'),
                         ('http://example.com/foo/', 'package:path/', None)]
        inst._get_registrations = lambda *x: registrations
        request = self._makeRequest()
        result = inst.generate('package:path/abc', request)
        self.assertEqual(result, 'http://example.com/foo/abc')
    def test_generate_registration_no_registry_on_request(self):
        inst = self._makeOne()
        registrations = [('http://example.com/foo/', 'package:path/', None)]
        inst._get_registrations = lambda *x: registrations
        request = self._makeRequest()
        del request.registry
        result = inst.generate('package:path/abc', request)
        self.assertEqual(result, 'http://example.com/foo/abc')
    def test_generate_slash_in_name1(self):
        inst = self._makeOne()
        registrations = [('http://example.com/foo/', 'package:path/', None)]
        inst._get_registrations = lambda *x: registrations
        request = self._makeRequest()
        result = inst.generate('package:path/abc', request)
        self.assertEqual(result, 'http://example.com/foo/abc')
    def test_generate_slash_in_name2(self):
        inst = self._makeOne()
        registrations = [('http://example.com/foo/', 'package:path/', None)]
        inst._get_registrations = lambda *x: registrations
        request = self._makeRequest()
        result = inst.generate('package:path/', request)
        self.assertEqual(result, 'http://example.com/foo/')
    def test_generate_quoting(self):
        # Percent signs in the asset path must be escaped in the URL.
        config = testing.setUp()
        try:
            config.add_static_view('images', path='mypkg:templates')
            inst = self._makeOne()
            request = testing.DummyRequest()
            request.registry = config.registry
            result = inst.generate('mypkg:templates/foo%2Fbar', request)
            self.assertEqual(result, 'http://example.com/images/foo%252Fbar')
        finally:
            testing.tearDown()
    def test_generate_route_url(self):
        # Local registrations delegate to request.route_url with a subpath.
        inst = self._makeOne()
        registrations = [(None, 'package:path/', '__viewname/')]
        inst._get_registrations = lambda *x: registrations
        def route_url(n, **kw):
            self.assertEqual(n, '__viewname/')
            self.assertEqual(kw, {'subpath':'abc', 'a':1})
            return 'url'
        request = self._makeRequest()
        request.route_url = route_url
        result = inst.generate('package:path/abc', request, a=1)
        self.assertEqual(result, 'url')
    def test_generate_url_unquoted_local(self):
        inst = self._makeOne()
        registrations = [(None, 'package:path/', '__viewname/')]
        inst._get_registrations = lambda *x: registrations
        def route_url(n, **kw):
            self.assertEqual(n, '__viewname/')
            self.assertEqual(kw, {'subpath':'abc def', 'a':1})
            return 'url'
        request = self._makeRequest()
        request.route_url = route_url
        result = inst.generate('package:path/abc def', request, a=1)
        self.assertEqual(result, 'url')
    def test_generate_url_quoted_remote(self):
        inst = self._makeOne()
        registrations = [('http://example.com/', 'package:path/', None)]
        inst._get_registrations = lambda *x: registrations
        request = self._makeRequest()
        result = inst.generate('package:path/abc def', request, a=1)
        self.assertEqual(result, 'http://example.com/abc%20def')
    def test_add_already_exists(self):
        # Re-adding the same URL replaces the previous registration.
        inst = self._makeOne()
        config = self._makeConfig(
            [('http://example.com/', 'package:path/', None)])
        inst.add(config, 'http://example.com', 'anotherpackage:path')
        expected = [('http://example.com/', 'anotherpackage:path/', None)]
        self._assertRegistrations(config, expected)
    def test_add_url_withendslash(self):
        inst = self._makeOne()
        config = self._makeConfig()
        inst.add(config, 'http://example.com/', 'anotherpackage:path')
        expected = [('http://example.com/', 'anotherpackage:path/', None)]
        self._assertRegistrations(config, expected)
    def test_add_url_noendslash(self):
        inst = self._makeOne()
        config = self._makeConfig()
        inst.add(config, 'http://example.com', 'anotherpackage:path')
        expected = [('http://example.com/', 'anotherpackage:path/', None)]
        self._assertRegistrations(config, expected)
    def test_add_url_noscheme(self):
        inst = self._makeOne()
        config = self._makeConfig()
        inst.add(config, '//example.com', 'anotherpackage:path')
        expected = [('//example.com/', 'anotherpackage:path/', None)]
        self._assertRegistrations(config, expected)
    def test_add_viewname(self):
        from pyramid.security import NO_PERMISSION_REQUIRED
        from pyramid.static import static_view
        config = self._makeConfig()
        inst = self._makeOne()
        inst.add(config, 'view', 'anotherpackage:path', cache_max_age=1)
        expected = [(None, 'anotherpackage:path/', '__view/')]
        self._assertRegistrations(config, expected)
        self.assertEqual(config.route_args, ('__view/', 'view/*subpath'))
        self.assertEqual(config.view_kw['permission'], NO_PERMISSION_REQUIRED)
        self.assertEqual(config.view_kw['view'].__class__, static_view)
    def test_add_viewname_with_route_prefix(self):
        config = self._makeConfig()
        config.route_prefix = '/abc'
        inst = self._makeOne()
        inst.add(config, 'view', 'anotherpackage:path',)
        expected = [(None, 'anotherpackage:path/', '__/abc/view/')]
        self._assertRegistrations(config, expected)
        self.assertEqual(config.route_args, ('__/abc/view/', 'view/*subpath'))
    def test_add_viewname_with_permission(self):
        config = self._makeConfig()
        inst = self._makeOne()
        inst.add(config, 'view', 'anotherpackage:path', cache_max_age=1,
                 permission='abc')
        self.assertEqual(config.view_kw['permission'], 'abc')
    def test_add_viewname_with_context(self):
        config = self._makeConfig()
        inst = self._makeOne()
        inst.add(config, 'view', 'anotherpackage:path', cache_max_age=1,
                 context=DummyContext)
        self.assertEqual(config.view_kw['context'], DummyContext)
    def test_add_viewname_with_for_(self):
        # Legacy for_= spelling maps onto context.
        config = self._makeConfig()
        inst = self._makeOne()
        inst.add(config, 'view', 'anotherpackage:path', cache_max_age=1,
                 for_=DummyContext)
        self.assertEqual(config.view_kw['context'], DummyContext)
    def test_add_viewname_with_renderer(self):
        config = self._makeConfig()
        inst = self._makeOne()
        inst.add(config, 'view', 'anotherpackage:path', cache_max_age=1,
                 renderer='mypackage:templates/index.pt')
        self.assertEqual(config.view_kw['renderer'],
                         'mypackage:templates/index.pt')
class Test_view_description(unittest.TestCase):
    """Tests for pyramid.config.views.view_description."""
    def _callFUT(self, view):
        from pyramid.config.views import view_description
        return view_description(view)
    def test_with_text(self):
        # An explicit __text__ attribute is used verbatim.
        def view(): pass
        view.__text__ = 'some text'
        result = self._callFUT(view)
        self.assertEqual(result, 'some text')
    def test_without_text(self):
        # Otherwise a dotted-path description is synthesized.
        def view(): pass
        result = self._callFUT(view)
        self.assertEqual(result,
                         'function pyramid.tests.test_config.test_views.view')
class DummyRegistry:
    # Attribute-bag stand-in for the component registry; tests attach
    # attributes (e.g. _static_url_registrations) ad hoc.
    pass
from zope.interface import implementer
from pyramid.interfaces import IResponse
@implementer(IResponse)
class DummyResponse(object):
    # Minimal IResponse double; tests overwrite these attributes as needed.
    content_type = None
    default_content_type = None
    body = None
class DummyRequest:
    """Request double exposing just the attributes the views under test read."""
    subpath = ()
    matchdict = None
    def __init__(self, environ=None):
        # Fresh mutable state per instance; a shared default dict would
        # leak between tests.
        self.environ = {} if environ is None else environ
        self.params = {}
        self.cookies = {}
        self.response = DummyResponse()
class DummyContext:
    # Placeholder resource/context object; only its identity matters.
    pass
class DummyAccept(object):
    """Accept-header double handing out queued matches one at a time.

    Each successful ``best_match`` consumes the returned entry, so later
    calls see only the remaining preferences.
    """
    def __init__(self, *matches):
        self.matches = list(matches)
    def best_match(self, offered):
        # Scan a snapshot so removal cannot disturb iteration; falls
        # through to an implicit None when nothing fits or none remain.
        for candidate in list(self.matches):
            if candidate in offered:
                self.matches.remove(candidate)
                return candidate
    def __contains__(self, val):
        return val in self.matches
class DummyLogger:
    """Logger double that records every message; warn/debug alias info."""
    def __init__(self):
        self.messages = []
    def info(self, msg):
        self.messages.append(msg)
    warn = info
    debug = info
class DummySecurityPolicy:
    """Security-policy double returning a canned permits() verdict."""
    def __init__(self, permitted=True):
        self.permitted = permitted
    def effective_principals(self, request):
        # No principals are modelled.
        return []
    def permits(self, context, principals, permission):
        return self.permitted
class DummyConfig:
    """Configurator double recording add_route/add_view calls verbatim and
    executing deferred actions immediately."""
    route_prefix = ''
    def add_route(self, *args, **kw):
        # Keep the raw call for later assertions.
        self.route_args, self.route_kw = args, kw
    def add_view(self, *args, **kw):
        self.view_args, self.view_kw = args, kw
    def action(self, discriminator, callable, introspectables=()):
        # Run the deferred callable right away instead of queueing it.
        callable()
    def introspectable(self, *arg):
        return {}
from zope.interface import implementer
from pyramid.interfaces import IMultiView
@implementer(IMultiView)
class DummyMultiView:
    """IMultiView double: records added views; answers 'OK1' when called."""
    def __init__(self):
        self.views = []
        self.name = 'name'
    def add(self, view, order, accept=None, phash=None):
        # NOTE(review): *order* is accepted but deliberately not recorded.
        self.views.append((view, accept, phash))
    def __call__(self, context, request):
        return 'OK1'
    def __permitted__(self, context, request):
        """ """
def parse_httpdate(s):
    """Parse an RFC 1123 HTTP date string into a naive datetime.

    The GMT suffix is matched literally rather than via %Z because
    Jython honors the timezone directive while CPython does not.
    """
    from datetime import datetime
    return datetime.strptime(s, "%a, %d %b %Y %H:%M:%S GMT")
def assert_similar_datetime(one, two):
    """Fail unless *one* and *two* agree on every field down to the minute
    (seconds and finer are deliberately ignored)."""
    for field in ('year', 'month', 'day', 'hour', 'minute'):
        left = getattr(one, field)
        right = getattr(two, field)
        if left != right: # pragma: no cover
            raise AssertionError('%r != %r in %s' % (left, right, field))
class DummyStaticURLInfo:
    """Static-URL-info double that records add() registrations."""
    def __init__(self):
        self.added = []
    def add(self, config, name, spec, **kw):
        self.added.append((config, name, spec, kw))
class DummyViewDefaultsClass(object):
    """Request-only view class carrying __view_defaults__ metadata."""
    __view_defaults__ = {
        'containment':'pyramid.tests.test_config.IDummy'
        }
    def __init__(self, request):
        pass
    def __call__(self):
        return 'OK'
class DummyPredicate(object):
    """View-predicate double: remembers its value; text()/phash() both
    yield the constant 'dummy'."""
    def __init__(self, val, config):
        self.val = val
    def text(self):
        return 'dummy'
    # phash must remain the very same function object as text.
    phash = text
class DummyIntrospector(object):
    """Introspector double: stores introspectables and relations, and
    answers every get() with a canned value."""
    def __init__(self, getval=None):
        self.getval = getval
        self.related = []
        self.introspectables = []
    def add(self, introspectable):
        self.introspectables.append(introspectable)
    def get(self, name, discrim):
        # Same canned answer regardless of the lookup key.
        return self.getval
    def relate(self, a, b):
        self.related.append((a, b))
| bsd-2-clause |
savoirfairelinux/OpenUpgrade | addons/purchase/purchase.py | 17 | 79204 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pytz
from openerp import SUPERUSER_ID, workflow
from datetime import datetime
from dateutil.relativedelta import relativedelta
from operator import attrgetter
from openerp.tools.safe_eval import safe_eval as eval
from openerp.osv import fields, osv
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
from openerp.osv.orm import browse_record, browse_null
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
class purchase_order(osv.osv):
    def _amount_all(self, cr, uid, ids, field_name, arg, context=None):
        """Functional-field getter: untaxed, tax and total amounts per order.

        Taxes are computed line by line via account.tax.compute_all and
        the subtotals are rounded in the order's pricelist currency.
        Returns {order_id: {'amount_untaxed', 'amount_tax', 'amount_total'}}.
        """
        res = {}
        cur_obj=self.pool.get('res.currency')
        for order in self.browse(cr, uid, ids, context=context):
            res[order.id] = {
                'amount_untaxed': 0.0,
                'amount_tax': 0.0,
                'amount_total': 0.0,
            }
            val = val1 = 0.0  # val: accumulated tax, val1: accumulated untaxed
            cur = order.pricelist_id.currency_id
            for line in order.order_line:
               val1 += line.price_subtotal
               for c in self.pool.get('account.tax').compute_all(cr, uid, line.taxes_id, line.price_unit, line.product_qty, line.product_id, order.partner_id)['taxes']:
                    val += c.get('amount', 0.0)
            res[order.id]['amount_tax']=cur_obj.round(cr, uid, cur, val)
            res[order.id]['amount_untaxed']=cur_obj.round(cr, uid, cur, val1)
            res[order.id]['amount_total']=res[order.id]['amount_untaxed'] + res[order.id]['amount_tax']
        return res
    def _set_minimum_planned_date(self, cr, uid, ids, name, value, arg, context=None):
        """Functional-field setter: push *value* onto all lines and the order.

        Lines keep their own later date; only lines planned at or before
        the order's current minimum are moved (raw SQL for speed).
        """
        if not value: return False
        if type(ids)!=type([]):
            ids=[ids]
        for po in self.browse(cr, uid, ids, context=context):
            if po.order_line:
                cr.execute("""update purchase_order_line set
                        date_planned=%s
                    where
                        order_id=%s and
                        (date_planned=%s or date_planned<%s)""", (value,po.id,po.minimum_planned_date,value))
            cr.execute("""update purchase_order set
                    minimum_planned_date=%s where id=%s""", (value, po.id))
        return True
def _minimum_planned_date(self, cr, uid, ids, field_name, arg, context=None):
res={}
purchase_obj=self.browse(cr, uid, ids, context=context)
for purchase in purchase_obj:
res[purchase.id] = False
if purchase.order_line:
min_date=purchase.order_line[0].date_planned
for line in purchase.order_line:
if line.date_planned < min_date:
min_date=line.date_planned
res[purchase.id]=min_date
return res
def _invoiced_rate(self, cursor, user, ids, name, arg, context=None):
res = {}
for purchase in self.browse(cursor, user, ids, context=context):
tot = 0.0
for invoice in purchase.invoice_ids:
if invoice.state not in ('draft','cancel'):
tot += invoice.amount_untaxed
if purchase.amount_untaxed:
res[purchase.id] = tot * 100.0 / purchase.amount_untaxed
else:
res[purchase.id] = 0.0
return res
def _shipped_rate(self, cr, uid, ids, name, arg, context=None):
if not ids: return {}
res = {}
for id in ids:
res[id] = [0.0,0.0]
cr.execute('''SELECT
p.order_id, sum(m.product_qty), m.state
FROM
stock_move m
LEFT JOIN
purchase_order_line p on (p.id=m.purchase_line_id)
WHERE
p.order_id IN %s GROUP BY m.state, p.order_id''',(tuple(ids),))
for oid,nbr,state in cr.fetchall():
if state=='cancel':
continue
if state=='done':
res[oid][0] += nbr or 0.0
res[oid][1] += nbr or 0.0
else:
res[oid][1] += nbr or 0.0
for r in res:
if not res[r][1]:
res[r] = 0.0
else:
res[r] = 100.0 * res[r][0] / res[r][1]
return res
def _get_order(self, cr, uid, ids, context=None):
result = {}
for line in self.pool.get('purchase.order.line').browse(cr, uid, ids, context=context):
result[line.order_id.id] = True
return result.keys()
def _invoiced(self, cursor, user, ids, name, arg, context=None):
res = {}
for purchase in self.browse(cursor, user, ids, context=context):
res[purchase.id] = all(line.invoiced for line in purchase.order_line)
return res
def _get_journal(self, cr, uid, context=None):
if context is None:
context = {}
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
company_id = context.get('company_id', user.company_id.id)
journal_obj = self.pool.get('account.journal')
res = journal_obj.search(cr, uid, [('type', '=', 'purchase'),
('company_id', '=', company_id)],
limit=1)
return res and res[0] or False
def _get_picking_in(self, cr, uid, context=None):
obj_data = self.pool.get('ir.model.data')
return obj_data.get_object_reference(cr, uid, 'stock','picking_type_in') and obj_data.get_object_reference(cr, uid, 'stock','picking_type_in')[1] or False
    def _get_picking_ids(self, cr, uid, ids, field_names, args, context=None):
        """Functional-field getter: ids of stock.pickings generated for each
        order, collected in one SQL pass through the move lines."""
        res = {}
        for po_id in ids:
            res[po_id] = []
        query = """
        SELECT picking_id, po.id FROM stock_picking p, stock_move m, purchase_order_line pol, purchase_order po
            WHERE po.id in %s and po.id = pol.order_id and pol.id = m.purchase_line_id and m.picking_id = p.id
            GROUP BY picking_id, po.id
        """
        cr.execute(query, (tuple(ids), ))
        picks = cr.fetchall()
        for pick_id, po_id in picks:
            res[po_id].append(pick_id)
        return res
def _count_all(self, cr, uid, ids, field_name, arg, context=None):
return {
purchase.id: {
'shipment_count': len(purchase.picking_ids),
'invoice_count': len(purchase.invoice_ids),
}
for purchase in self.browse(cr, uid, ids, context=context)
}
STATE_SELECTION = [
('draft', 'Draft PO'),
('sent', 'RFQ'),
('bid', 'Bid Received'),
('confirmed', 'Waiting Approval'),
('approved', 'Purchase Confirmed'),
('except_picking', 'Shipping Exception'),
('except_invoice', 'Invoice Exception'),
('done', 'Done'),
('cancel', 'Cancelled')
]
_track = {
'state': {
'purchase.mt_rfq_confirmed': lambda self, cr, uid, obj, ctx=None: obj.state == 'confirmed',
'purchase.mt_rfq_approved': lambda self, cr, uid, obj, ctx=None: obj.state == 'approved',
'purchase.mt_rfq_done': lambda self, cr, uid, obj, ctx=None: obj.state == 'done',
},
}
_columns = {
'name': fields.char('Order Reference', size=64, required=True, select=True, help="Unique number of the purchase order, computed automatically when the purchase order is created."),
'origin': fields.char('Source Document', size=64,
help="Reference of the document that generated this purchase order request; a sales order or an internal procurement request."
),
'partner_ref': fields.char('Supplier Reference', states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'done':[('readonly',True)]}, size=64,
help="Reference of the sales order or bid sent by your supplier. It's mainly used to do the matching when you receive the products as this reference is usually written on the delivery order sent by your supplier."),
'date_order':fields.date('Order Date', required=True, states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)]}, select=True, help="Date on which this document has been created."),
'date_approve':fields.date('Date Approved', readonly=1, select=True, help="Date on which purchase order has been approved"),
'partner_id':fields.many2one('res.partner', 'Supplier', required=True, states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'done':[('readonly',True)]},
change_default=True, track_visibility='always'),
'dest_address_id':fields.many2one('res.partner', 'Customer Address (Direct Delivery)',
states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'done':[('readonly',True)]},
help="Put an address if you want to deliver directly from the supplier to the customer. " \
"Otherwise, keep empty to deliver to your own company."
),
'location_id': fields.many2one('stock.location', 'Destination', required=True, domain=[('usage','<>','view')], states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'done':[('readonly',True)]} ),
'pricelist_id':fields.many2one('product.pricelist', 'Pricelist', required=True, states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'done':[('readonly',True)]}, help="The pricelist sets the currency used for this purchase order. It also computes the supplier price for the selected products/quantities."),
'currency_id': fields.many2one('res.currency','Currency', readonly=True, required=True,states={'draft': [('readonly', False)],'sent': [('readonly', False)]}),
'state': fields.selection(STATE_SELECTION, 'Status', readonly=True, help="The status of the purchase order or the quotation request. A request for quotation is a purchase order in a 'Draft' status. Then the order has to be confirmed by the user, the status switch to 'Confirmed'. Then the supplier must confirm the order to change the status to 'Approved'. When the purchase order is paid and received, the status becomes 'Done'. If a cancel action occurs in the invoice or in the reception of goods, the status becomes in exception.", select=True),
'order_line': fields.one2many('purchase.order.line', 'order_id', 'Order Lines', states={'approved':[('readonly',True)],'done':[('readonly',True)]}),
'validator' : fields.many2one('res.users', 'Validated by', readonly=True),
'notes': fields.text('Terms and Conditions'),
'invoice_ids': fields.many2many('account.invoice', 'purchase_invoice_rel', 'purchase_id', 'invoice_id', 'Invoices', help="Invoices generated for a purchase order"),
'picking_ids': fields.function(_get_picking_ids, method=True, type='one2many', relation='stock.picking', string='Picking List', help="This is the list of reception operations that have been generated for this purchase order."),
'shipped':fields.boolean('Received', readonly=True, select=True, help="It indicates that a picking has been done"),
'shipped_rate': fields.function(_shipped_rate, string='Received Ratio', type='float'),
'invoiced': fields.function(_invoiced, string='Invoice Received', type='boolean', help="It indicates that an invoice has been validated"),
'invoiced_rate': fields.function(_invoiced_rate, string='Invoiced', type='float'),
'invoice_method': fields.selection([('manual','Based on Purchase Order lines'),('order','Based on generated draft invoice'),('picking','Based on incoming shipments')], 'Invoicing Control', required=True,
readonly=True, states={'draft':[('readonly',False)], 'sent':[('readonly',False)]},
help="Based on Purchase Order lines: place individual lines in 'Invoice Control / On Purchase Order lines' from where you can selectively create an invoice.\n" \
"Based on generated invoice: create a draft invoice you can validate later.\n" \
"Based on incoming shipments: let you create an invoice when receptions are validated."
),
'minimum_planned_date':fields.function(_minimum_planned_date, fnct_inv=_set_minimum_planned_date, string='Expected Date', type='date', select=True, help="This is computed as the minimum scheduled date of all purchase order lines' products.",
store = {
'purchase.order.line': (_get_order, ['date_planned'], 10),
}
),
'amount_untaxed': fields.function(_amount_all, digits_compute=dp.get_precision('Account'), string='Untaxed Amount',
store={
'purchase.order.line': (_get_order, None, 10),
}, multi="sums", help="The amount without tax", track_visibility='always'),
'amount_tax': fields.function(_amount_all, digits_compute=dp.get_precision('Account'), string='Taxes',
store={
'purchase.order.line': (_get_order, None, 10),
}, multi="sums", help="The tax amount"),
'amount_total': fields.function(_amount_all, digits_compute=dp.get_precision('Account'), string='Total',
store={
'purchase.order.line': (_get_order, None, 10),
}, multi="sums", help="The total amount"),
'fiscal_position': fields.many2one('account.fiscal.position', 'Fiscal Position'),
'payment_term_id': fields.many2one('account.payment.term', 'Payment Term'),
'incoterm_id': fields.many2one('stock.incoterms', 'Incoterm', help="International Commercial Terms are a series of predefined commercial terms used in international transactions."),
'product_id': fields.related('order_line', 'product_id', type='many2one', relation='product.product', string='Product'),
'create_uid': fields.many2one('res.users', 'Responsible'),
'company_id': fields.many2one('res.company', 'Company', required=True, select=1, states={'confirmed': [('readonly', True)], 'approved': [('readonly', True)]}),
'journal_id': fields.many2one('account.journal', 'Journal'),
'bid_date': fields.date('Bid Received On', readonly=True, help="Date on which the bid was received"),
'bid_validity': fields.date('Bid Valid Until', help="Date on which the bid expired"),
'picking_type_id': fields.many2one('stock.picking.type', 'Deliver To', help="This will determine picking type of incoming shipment", required=True,
states={'confirmed': [('readonly', True)], 'approved': [('readonly', True)], 'done': [('readonly', True)]}),
'related_location_id': fields.related('picking_type_id', 'default_location_dest_id', type='many2one', relation='stock.location', string="Related location", store=True),
'shipment_count': fields.function(_count_all, type='integer', string='Incoming Shipments', multi=True),
'invoice_count': fields.function(_count_all, type='integer', string='Invoices', multi=True)
}
_defaults = {
'date_order': fields.date.context_today,
'state': 'draft',
'name': lambda obj, cr, uid, context: '/',
'shipped': 0,
'invoice_method': 'order',
'invoiced': 0,
'pricelist_id': lambda self, cr, uid, context: context.get('partner_id', False) and self.pool.get('res.partner').browse(cr, uid, context['partner_id']).property_product_pricelist_purchase.id,
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'purchase.order', context=c),
'journal_id': _get_journal,
'currency_id': lambda self, cr, uid, context: self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id,
'picking_type_id': _get_picking_in,
}
_sql_constraints = [
('name_uniq', 'unique(name, company_id)', 'Order Reference must be unique per Company!'),
]
_name = "purchase.order"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_description = "Purchase Order"
_order = 'date_order desc, id desc'
def create(self, cr, uid, vals, context=None):
if vals.get('name','/')=='/':
vals['name'] = self.pool.get('ir.sequence').get(cr, uid, 'purchase.order') or '/'
if context is None:
context = {}
context.update({'mail_create_nolog': True})
order = super(purchase_order, self).create(cr, uid, vals, context=context)
self.message_post(cr, uid, [order], body=_("RFQ created"), context=context)
return order
def unlink(self, cr, uid, ids, context=None):
purchase_orders = self.read(cr, uid, ids, ['state'], context=context)
unlink_ids = []
for s in purchase_orders:
if s['state'] in ['draft','cancel']:
unlink_ids.append(s['id'])
else:
raise osv.except_osv(_('Invalid Action!'), _('In order to delete a purchase order, you must cancel it first.'))
# automatically sending subflow.delete upon deletion
self.signal_purchase_cancel(cr, uid, unlink_ids)
return super(purchase_order, self).unlink(cr, uid, unlink_ids, context=context)
def set_order_line_status(self, cr, uid, ids, status, context=None):
line = self.pool.get('purchase.order.line')
order_line_ids = []
proc_obj = self.pool.get('procurement.order')
for order in self.browse(cr, uid, ids, context=context):
order_line_ids += [po_line.id for po_line in order.order_line]
if order_line_ids:
line.write(cr, uid, order_line_ids, {'state': status}, context=context)
if order_line_ids and status == 'cancel':
procs = proc_obj.search(cr, uid, [('purchase_line_id', 'in', order_line_ids)], context=context)
if procs:
proc_obj.write(cr, uid, procs, {'state': 'exception'}, context=context)
return True
def button_dummy(self, cr, uid, ids, context=None):
return True
def onchange_pricelist(self, cr, uid, ids, pricelist_id, context=None):
if not pricelist_id:
return {}
return {'value': {'currency_id': self.pool.get('product.pricelist').browse(cr, uid, pricelist_id, context=context).currency_id.id}}
#Destination address is used when dropshipping
def onchange_dest_address_id(self, cr, uid, ids, address_id):
if not address_id:
return {}
address = self.pool.get('res.partner')
values = {}
supplier = address.browse(cr, uid, address_id)
if supplier:
location_id = supplier.property_stock_customer.id
values.update({'location_id': location_id})
return {'value':values}
def onchange_picking_type_id(self, cr, uid, ids, picking_type_id, context=None):
value = {}
if picking_type_id:
picktype = self.pool.get("stock.picking.type").browse(cr, uid, picking_type_id, context=context)
if picktype.default_location_dest_id:
value.update({'location_id': picktype.default_location_dest_id.id})
value.update({'related_location_id': picktype.default_location_dest_id and picktype.default_location_dest_id.id or False})
return {'value': value}
def onchange_partner_id(self, cr, uid, ids, partner_id):
partner = self.pool.get('res.partner')
if not partner_id:
return {'value': {
'fiscal_position': False,
'payment_term_id': False,
}}
supplier_address = partner.address_get(cr, uid, [partner_id], ['default'])
supplier = partner.browse(cr, uid, partner_id)
return {'value': {
'pricelist_id': supplier.property_product_pricelist_purchase.id,
'fiscal_position': supplier.property_account_position and supplier.property_account_position.id or False,
'payment_term_id': supplier.property_supplier_payment_term.id or False,
}}
def invoice_open(self, cr, uid, ids, context=None):
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
result = mod_obj.get_object_reference(cr, uid, 'account', 'action_invoice_tree2')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
inv_ids = []
for po in self.browse(cr, uid, ids, context=context):
inv_ids+= [invoice.id for invoice in po.invoice_ids]
if not inv_ids:
raise osv.except_osv(_('Error!'), _('Please create Invoices.'))
#choose the view_mode accordingly
if len(inv_ids)>1:
result['domain'] = "[('id','in',["+','.join(map(str, inv_ids))+"])]"
else:
res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_supplier_form')
result['views'] = [(res and res[1] or False, 'form')]
result['res_id'] = inv_ids and inv_ids[0] or False
return result
def view_invoice(self, cr, uid, ids, context=None):
'''
This function returns an action that display existing invoices of given sales order ids. It can either be a in a list or in a form view, if there is only one invoice to show.
'''
mod_obj = self.pool.get('ir.model.data')
wizard_obj = self.pool.get('purchase.order.line_invoice')
#compute the number of invoices to display
inv_ids = []
for po in self.browse(cr, uid, ids, context=context):
if po.invoice_method == 'manual':
if not po.invoice_ids:
context.update({'active_ids' : [line.id for line in po.order_line]})
wizard_obj.makeInvoices(cr, uid, [], context=context)
for po in self.browse(cr, uid, ids, context=context):
inv_ids+= [invoice.id for invoice in po.invoice_ids]
res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_supplier_form')
res_id = res and res[1] or False
return {
'name': _('Supplier Invoices'),
'view_type': 'form',
'view_mode': 'form',
'view_id': [res_id],
'res_model': 'account.invoice',
'context': "{'type':'in_invoice', 'journal_type': 'purchase'}",
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'current',
'res_id': inv_ids and inv_ids[0] or False,
}
def view_picking(self, cr, uid, ids, context=None):
'''
This function returns an action that display existing picking orders of given purchase order ids.
'''
if context is None:
context = {}
mod_obj = self.pool.get('ir.model.data')
dummy, action_id = tuple(mod_obj.get_object_reference(cr, uid, 'stock', 'action_picking_tree'))
action = self.pool.get('ir.actions.act_window').read(cr, uid, action_id, context=context)
pick_ids = []
for po in self.browse(cr, uid, ids, context=context):
pick_ids += [picking.id for picking in po.picking_ids]
#override the context to get rid of the default filtering on picking type
action['context'] = {}
#choose the view_mode accordingly
if len(pick_ids) > 1:
action['domain'] = "[('id','in',[" + ','.join(map(str, pick_ids)) + "])]"
else:
res = mod_obj.get_object_reference(cr, uid, 'stock', 'view_picking_form')
action['views'] = [(res and res[1] or False, 'form')]
action['res_id'] = pick_ids and pick_ids[0] or False
return action
def wkf_approve_order(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'approved', 'date_approve': fields.date.context_today(self,cr,uid,context=context)})
return True
def wkf_bid_received(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state':'bid', 'bid_date': fields.date.context_today(self,cr,uid,context=context)})
def wkf_send_rfq(self, cr, uid, ids, context=None):
'''
This function opens a window to compose an email, with the edi purchase template message loaded by default
'''
if not context:
context= {}
ir_model_data = self.pool.get('ir.model.data')
try:
if context.get('send_rfq', False):
template_id = ir_model_data.get_object_reference(cr, uid, 'purchase', 'email_template_edi_purchase')[1]
else:
template_id = ir_model_data.get_object_reference(cr, uid, 'purchase', 'email_template_edi_purchase_done')[1]
except ValueError:
template_id = False
try:
compose_form_id = ir_model_data.get_object_reference(cr, uid, 'mail', 'email_compose_message_wizard_form')[1]
except ValueError:
compose_form_id = False
ctx = dict(context)
ctx.update({
'default_model': 'purchase.order',
'default_res_id': ids[0],
'default_use_template': bool(template_id),
'default_template_id': template_id,
'default_composition_mode': 'comment',
})
return {
'name': _('Compose Email'),
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'mail.compose.message',
'views': [(compose_form_id, 'form')],
'view_id': compose_form_id,
'target': 'new',
'context': ctx,
}
def print_quotation(self, cr, uid, ids, context=None):
'''
This function prints the request for quotation and mark it as sent, so that we can see more easily the next step of the workflow
'''
assert len(ids) == 1, 'This option should only be used for a single id at a time'
self.signal_send_rfq(cr, uid, ids)
return self.pool['report'].get_action(cr, uid, ids, 'purchase.report_purchasequotation', context=context)
def wkf_confirm_order(self, cr, uid, ids, context=None):
todo = []
for po in self.browse(cr, uid, ids, context=context):
if not po.order_line:
raise osv.except_osv(_('Error!'),_('You cannot confirm a purchase order without any purchase order line.'))
for line in po.order_line:
if line.state=='draft':
todo.append(line.id)
self.pool.get('purchase.order.line').action_confirm(cr, uid, todo, context)
for id in ids:
self.write(cr, uid, [id], {'state' : 'confirmed', 'validator' : uid})
return True
def _choose_account_from_po_line(self, cr, uid, po_line, context=None):
fiscal_obj = self.pool.get('account.fiscal.position')
property_obj = self.pool.get('ir.property')
if po_line.product_id:
acc_id = po_line.product_id.property_account_expense.id
if not acc_id:
acc_id = po_line.product_id.categ_id.property_account_expense_categ.id
if not acc_id:
raise osv.except_osv(_('Error!'), _('Define an expense account for this product: "%s" (id:%d).') % (po_line.product_id.name, po_line.product_id.id,))
else:
acc_id = property_obj.get(cr, uid, 'property_account_expense_categ', 'product.category', context=context).id
fpos = po_line.order_id.fiscal_position or False
return fiscal_obj.map_account(cr, uid, fpos, acc_id)
def _prepare_inv_line(self, cr, uid, account_id, order_line, context=None):
"""Collects require data from purchase order line that is used to create invoice line
for that purchase order line
:param account_id: Expense account of the product of PO line if any.
:param browse_record order_line: Purchase order line browse record
:return: Value for fields of invoice lines.
:rtype: dict
"""
return {
'name': order_line.name,
'account_id': account_id,
'price_unit': order_line.price_unit or 0.0,
'quantity': order_line.product_qty,
'product_id': order_line.product_id.id or False,
'uos_id': order_line.product_uom.id or False,
'invoice_line_tax_id': [(6, 0, [x.id for x in order_line.taxes_id])],
'account_analytic_id': order_line.account_analytic_id.id or False,
'purchase_line_id': order_line.id,
}
def _prepare_invoice(self, cr, uid, order, line_ids, context=None):
"""Prepare the dict of values to create the new invoice for a
purchase order. This method may be overridden to implement custom
invoice generation (making sure to call super() to establish
a clean extension chain).
:param browse_record order: purchase.order record to invoice
:param list(int) line_ids: list of invoice line IDs that must be
attached to the invoice
:return: dict of value to create() the invoice
"""
journal_ids = self.pool['account.journal'].search(
cr, uid, [('type', '=', 'purchase'),
('company_id', '=', order.company_id.id)],
limit=1)
if not journal_ids:
raise osv.except_osv(
_('Error!'),
_('Define purchase journal for this company: "%s" (id:%d).') % \
(order.company_id.name, order.company_id.id))
return {
'name': order.partner_ref or order.name,
'reference': order.partner_ref or order.name,
'account_id': order.partner_id.property_account_payable.id,
'type': 'in_invoice',
'partner_id': order.partner_id.id,
'currency_id': order.currency_id.id,
'journal_id': len(journal_ids) and journal_ids[0] or False,
'invoice_line': [(6, 0, line_ids)],
'origin': order.name,
'fiscal_position': order.fiscal_position.id or False,
'payment_term': order.payment_term_id.id or False,
'company_id': order.company_id.id,
}
def action_cancel_draft(self, cr, uid, ids, context=None):
if not len(ids):
return False
self.write(cr, uid, ids, {'state':'draft','shipped':0})
self.set_order_line_status(cr, uid, ids, 'draft', context=context)
for p_id in ids:
# Deleting the existing instance of workflow for PO
self.delete_workflow(cr, uid, [p_id]) # TODO is it necessary to interleave the calls?
self.create_workflow(cr, uid, [p_id])
return True
    def wkf_po_done(self, cr, uid, ids, context=None):
        # Workflow hook: mark orders as done, then propagate 'done' to all their lines.
        # Returns None; the workflow engine ignores the return value.
        self.write(cr, uid, ids, {'state': 'done'}, context=context)
        self.set_order_line_status(cr, uid, ids, 'done', context=context)
    def action_invoice_create(self, cr, uid, ids, context=None):
        """Generates invoice for given ids of purchase orders and links that invoice ID to purchase order.
        :param ids: list of ids of purchase orders.
        :return: ID of created invoice (the last one when several orders are passed;
                 one invoice is created per order).
        :rtype: int
        """
        if context is None:
            context = {}
        inv_obj = self.pool.get('account.invoice')
        inv_line_obj = self.pool.get('account.invoice.line')
        res = False
        uid_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
        for order in self.browse(cr, uid, ids, context=context):
            # Reset any company forced by a previous iteration of this loop.
            context.pop('force_company', None)
            if order.company_id.id != uid_company_id:
                #if the company of the document is different than the current user company, force the company in the context
                #then re-do a browse to read the property fields for the good company.
                context['force_company'] = order.company_id.id
                order = self.browse(cr, uid, order.id, context=context)
            # generate invoice line correspond to PO line and link that to created invoice (inv_id) and PO line
            inv_lines = []
            for po_line in order.order_line:
                # Expense-account resolution and fiscal-position mapping are delegated.
                acc_id = self._choose_account_from_po_line(cr, uid, po_line, context=context)
                inv_line_data = self._prepare_inv_line(cr, uid, acc_id, po_line, context=context)
                inv_line_id = inv_line_obj.create(cr, uid, inv_line_data, context=context)
                inv_lines.append(inv_line_id)
                po_line.write({'invoice_lines': [(4, inv_line_id)]}, context=context)
            # get invoice data and create invoice
            inv_data = self._prepare_invoice(cr, uid, order, inv_lines, context=context)
            inv_id = inv_obj.create(cr, uid, inv_data, context=context)
            # compute the invoice
            inv_obj.button_compute(cr, uid, [inv_id], context=context, set_total=True)
            # Link this new invoice to related purchase order
            order.write({'invoice_ids': [(4, inv_id)]}, context=context)
            res = inv_id
        return res
    def invoice_done(self, cr, uid, ids, context=None):
        # Workflow hook: once the invoicing step finishes, the order goes back
        # to the 'approved' state.
        self.write(cr, uid, ids, {'state': 'approved'}, context=context)
        return True
def has_stockable_product(self, cr, uid, ids, *args):
for order in self.browse(cr, uid, ids):
for order_line in order.order_line:
if order_line.product_id and order_line.product_id.type in ('product', 'consu'):
return True
return False
def action_cancel(self, cr, uid, ids, context=None):
for purchase in self.browse(cr, uid, ids, context=context):
for pick in purchase.picking_ids:
if pick.state not in ('draft', 'cancel'):
raise osv.except_osv(
_('Unable to cancel the purchase order %s.') % (purchase.name),
_('First cancel all receptions related to this purchase order.'))
self.pool.get('stock.picking') \
.signal_button_cancel(cr, uid, map(attrgetter('id'), purchase.picking_ids))
for inv in purchase.invoice_ids:
if inv and inv.state not in ('cancel', 'draft'):
raise osv.except_osv(
_('Unable to cancel this purchase order.'),
_('You must first cancel all invoices related to this purchase order.'))
self.pool.get('account.invoice') \
.signal_invoice_cancel(cr, uid, map(attrgetter('id'), purchase.invoice_ids))
self.write(cr, uid, ids, {'state': 'cancel'})
self.set_order_line_status(cr, uid, ids, 'cancel', context=context)
self.signal_purchase_cancel(cr, uid, ids)
return True
    def _prepare_order_line_move(self, cr, uid, order, order_line, picking_id, group_id, context=None):
        ''' prepare the stock move data from the PO line. This function returns a list of dictionary ready to be used in stock.move's create()'''
        product_uom = self.pool.get('product.uom')
        price_unit = order_line.price_unit
        # Scale the price when the line's UoM differs from the product's reference UoM.
        if order_line.product_uom.id != order_line.product_id.uom_id.id:
            price_unit *= order_line.product_uom.factor
        if order.currency_id.id != order.company_id.currency_id.id:
            #we don't round the price_unit, as we may want to store the standard price with more digits than allowed by the currency
            price_unit = self.pool.get('res.currency').compute(cr, uid, order.currency_id.id, order.company_id.currency_id.id, price_unit, round=False, context=context)
        res = []
        # Base values shared by every move generated from this line; per-procurement
        # moves below override the quantity/destination/group keys on a copy.
        move_template = {
            'name': order_line.name or '',
            'product_id': order_line.product_id.id,
            'product_uom': order_line.product_uom.id,
            'product_uos': order_line.product_uom.id,
            'date': fields.date.date_to_datetime(self, cr, uid, order.date_order, context),
            'date_expected': fields.date.date_to_datetime(self, cr, uid, order_line.date_planned, context),
            'location_id': order.partner_id.property_stock_supplier.id,
            'location_dest_id': order.location_id.id,
            'picking_id': picking_id,
            'partner_id': order.dest_address_id.id or order.partner_id.id,
            'move_dest_id': False,
            'state': 'draft',
            'purchase_line_id': order_line.id,
            'company_id': order.company_id.id,
            'price_unit': price_unit,
            'picking_type_id': order.picking_type_id.id,
            'group_id': group_id,
            'procurement_id': False,
            'origin': order.name,
            'route_ids': order.picking_type_id.warehouse_id and [(6, 0, [x.id for x in order.picking_type_id.warehouse_id.route_ids])] or [],
            'warehouse_id':order.picking_type_id.warehouse_id.id,
        }
        # One move per linked procurement, each capped by the quantity still to allocate.
        diff_quantity = order_line.product_qty
        for procurement in order_line.procurement_ids:
            procurement_qty = product_uom._compute_qty(cr, uid, procurement.product_uom.id, procurement.product_qty, to_uom_id=order_line.product_uom.id)
            tmp = move_template.copy()
            tmp.update({
                'product_uom_qty': min(procurement_qty, diff_quantity),
                'product_uos_qty': min(procurement_qty, diff_quantity),
                'move_dest_id': procurement.move_dest_id.id,  # chained move of the procurement, if any
                'group_id': procurement.group_id.id or group_id, # NOTE(review): original comment (in French) doubted this grouping; presumably moves should still land in the same picking — confirm
                'procurement_id': procurement.id,
            })
            diff_quantity -= min(procurement_qty, diff_quantity)
            res.append(tmp)
        #if the order line has a bigger quantity than the procurement it was for (manually changed or minimal quantity), then
        #split the future stock move in two because the route followed may be different.
        if diff_quantity > 0:
            move_template['product_uom_qty'] = diff_quantity
            move_template['product_uos_qty'] = diff_quantity
            res.append(move_template)
        return res
def _create_stock_moves(self, cr, uid, order, order_lines, picking_id=False, context=None):
"""Creates appropriate stock moves for given order lines, whose can optionally create a
picking if none is given or no suitable is found, then confirms the moves, makes them
available, and confirms the pickings.
If ``picking_id`` is provided, the stock moves will be added to it, otherwise a standard
incoming picking will be created to wrap the stock moves (default behavior of the stock.move)
Modules that wish to customize the procurements or partition the stock moves over
multiple stock pickings may override this method and call ``super()`` with
different subsets of ``order_lines`` and/or preset ``picking_id`` values.
:param browse_record order: purchase order to which the order lines belong
:param list(browse_record) order_lines: purchase order line records for which picking
and moves should be created.
:param int picking_id: optional ID of a stock picking to which the created stock moves
will be added. A new picking will be created if omitted.
:return: None
"""
stock_move = self.pool.get('stock.move')
todo_moves = []
new_group = self.pool.get("procurement.group").create(cr, uid, {'name': order.name, 'partner_id': order.partner_id.id}, context=context)
for order_line in order_lines:
if not order_line.product_id:
continue
if order_line.product_id.type in ('product', 'consu'):
for vals in self._prepare_order_line_move(cr, uid, order, order_line, picking_id, new_group, context=context):
move = stock_move.create(cr, uid, vals, context=context)
todo_moves.append(move)
todo_moves = stock_move.action_confirm(cr, uid, todo_moves)
stock_move.force_assign(cr, uid, todo_moves)
def test_moves_done(self, cr, uid, ids, context=None):
'''PO is done at the delivery side if all the incoming shipments are done'''
for purchase in self.browse(cr, uid, ids, context=context):
for picking in purchase.picking_ids:
if picking.state != 'done':
return False
return True
def test_moves_except(self, cr, uid, ids, context=None):
''' PO is in exception at the delivery side if one of the picking is canceled
and the other pickings are completed (done or canceled)
'''
at_least_one_canceled = False
alldoneorcancel = True
for purchase in self.browse(cr, uid, ids, context=context):
for picking in purchase.picking_ids:
if picking.state == 'cancel':
at_least_one_canceled = True
if picking.state not in ['done', 'cancel']:
alldoneorcancel = False
return at_least_one_canceled and alldoneorcancel
def move_lines_get(self, cr, uid, ids, *args):
res = []
for order in self.browse(cr, uid, ids, context={}):
for line in order.order_line:
res += [x.id for x in line.move_ids]
return res
def action_picking_create(self, cr, uid, ids, context=None):
for order in self.browse(cr, uid, ids):
picking_id = self.pool.get('stock.picking').create(cr, uid, {'picking_type_id': order.picking_type_id.id, 'partner_id': order.dest_address_id.id or order.partner_id.id}, context=context)
self._create_stock_moves(cr, uid, order, order.order_line, picking_id, context=context)
def picking_done(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'shipped':1,'state':'approved'}, context=context)
# Do check on related procurements:
proc_obj = self.pool.get("procurement.order")
po_lines = []
for po in self.browse(cr, uid, ids, context=context):
po_lines += [x.id for x in po.order_line]
if po_lines:
procs = proc_obj.search(cr, uid, [('purchase_line_id', 'in', po_lines)], context=context)
if procs:
proc_obj.check(cr, uid, procs, context=context)
self.message_post(cr, uid, ids, body=_("Products received"), context=context)
return True
def copy(self, cr, uid, id, default=None, context=None):
if not default:
default = {}
default.update({
'state':'draft',
'shipped':False,
'invoiced':False,
'invoice_ids': [],
'origin': '',
'partner_ref': '',
'name': self.pool.get('ir.sequence').get(cr, uid, 'purchase.order'),
})
return super(purchase_order, self).copy(cr, uid, id, default, context)
    def do_merge(self, cr, uid, ids, context=None):
        """
        To merge similar type of purchase orders.
        Orders will only be merged if:
        * Purchase Orders are in draft
        * Purchase Orders belong to the same partner
        * Purchase Orders have the same stock location and the same pricelist
        Lines will only be merged if:
        * Order lines are exactly the same except for the quantity and unit

         @param self: The object pointer.
         @param cr: A database cursor
         @param uid: ID of the user currently logged in
         @param ids: the ID or list of IDs
         @param context: A standard dictionary

         @return: dict mapping each new purchase order id to the old ids it replaces
        """
        #TOFIX: merged order line should be unlink
        def make_key(br, fields):
            # Build a hashable grouping key from the given browse-record fields.
            # NOTE: the `fields` parameter shadows the ORM `fields` module inside
            # this helper.
            list_key = []
            for field in fields:
                field_val = getattr(br, field)
                if field in ('product_id', 'account_analytic_id'):
                    if not field_val:
                        field_val = False
                if isinstance(field_val, browse_record):
                    field_val = field_val.id
                elif isinstance(field_val, browse_null):
                    field_val = False
                elif isinstance(field_val, list):
                    field_val = ((6, 0, tuple([v.id for v in field_val])),)
                list_key.append((field, field_val))
            list_key.sort()
            return tuple(list_key)
        if context is None:
            context = {}
        # Compute what the new orders should contain
        new_orders = {}
        order_lines_to_move = []
        # Only draft orders are candidates; they group by (partner, location, pricelist).
        for porder in [order for order in self.browse(cr, uid, ids, context=context) if order.state == 'draft']:
            order_key = make_key(porder, ('partner_id', 'location_id', 'pricelist_id'))
            new_order = new_orders.setdefault(order_key, ({}, []))
            new_order[1].append(porder.id)
            order_infos = new_order[0]
            if not order_infos:
                # First order of the group: seed the merged order's header values.
                order_infos.update({
                    'origin': porder.origin,
                    'date_order': porder.date_order,
                    'partner_id': porder.partner_id.id,
                    'dest_address_id': porder.dest_address_id.id,
                    'picking_type_id': porder.picking_type_id.id,
                    'location_id': porder.location_id.id,
                    'pricelist_id': porder.pricelist_id.id,
                    'state': 'draft',
                    'order_line': {},
                    'notes': '%s' % (porder.notes or '',),
                    'fiscal_position': porder.fiscal_position and porder.fiscal_position.id or False,
                })
            else:
                # Later orders of the group: keep the earliest date, concatenate notes/origins.
                if porder.date_order < order_infos['date_order']:
                    order_infos['date_order'] = porder.date_order
                if porder.notes:
                    order_infos['notes'] = (order_infos['notes'] or '') + ('\n%s' % (porder.notes,))
                if porder.origin:
                    order_infos['origin'] = (order_infos['origin'] or '') + ' ' + porder.origin
            # NOTE(review): lines from ALL draft orders accumulate into a single
            # flat list regardless of the group key — looks suspicious when more
            # than one merge group exists; confirm against the upstream module.
            for order_line in porder.order_line:
                order_lines_to_move += [order_line.id]
        allorders = []
        orders_info = {}
        for order_key, (order_data, old_ids) in new_orders.iteritems():
            # skip merges with only one order
            if len(old_ids) < 2:
                allorders += (old_ids or [])
                continue
            # cleanup order line data
            # NOTE(review): 'order_line' is seeded as an empty dict above and
            # never populated, so this loop appears to be dead code.
            for key, value in order_data['order_line'].iteritems():
                del value['uom_factor']
                value.update(dict(key))
            order_data['order_line'] = [(6, 0, order_lines_to_move)]
            # create the new order
            context.update({'mail_create_nolog': True})
            neworder_id = self.create(cr, uid, order_data)
            self.message_post(cr, uid, [neworder_id], body=_("RFQ created"), context=context)
            orders_info.update({neworder_id: old_ids})
            allorders.append(neworder_id)
            # make triggers pointing to the old orders point to the new order
            for old_id in old_ids:
                self.redirect_workflow(cr, uid, [(old_id, neworder_id)])
                self.signal_purchase_cancel(cr, uid, [old_id])
        return orders_info
class purchase_order_line(osv.osv):
def _amount_line(self, cr, uid, ids, prop, arg, context=None):
res = {}
cur_obj=self.pool.get('res.currency')
tax_obj = self.pool.get('account.tax')
for line in self.browse(cr, uid, ids, context=context):
taxes = tax_obj.compute_all(cr, uid, line.taxes_id, line.price_unit, line.product_qty, line.product_id, line.order_id.partner_id)
cur = line.order_id.pricelist_id.currency_id
res[line.id] = cur_obj.round(cr, uid, cur, taxes['total'])
return res
def _get_uom_id(self, cr, uid, context=None):
try:
proxy = self.pool.get('ir.model.data')
result = proxy.get_object_reference(cr, uid, 'product', 'product_uom_unit')
return result[1]
except Exception, ex:
return False
_columns = {
'name': fields.text('Description', required=True),
'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
'date_planned': fields.date('Scheduled Date', required=True, select=True),
'taxes_id': fields.many2many('account.tax', 'purchase_order_taxe', 'ord_id', 'tax_id', 'Taxes'),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
'product_id': fields.many2one('product.product', 'Product', domain=[('purchase_ok','=',True)], change_default=True),
'move_ids': fields.one2many('stock.move', 'purchase_line_id', 'Reservation', readonly=True, ondelete='set null'),
'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price')),
'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute= dp.get_precision('Account')),
'order_id': fields.many2one('purchase.order', 'Order Reference', select=True, required=True, ondelete='cascade'),
'account_analytic_id':fields.many2one('account.analytic.account', 'Analytic Account',),
'company_id': fields.related('order_id','company_id',type='many2one',relation='res.company',string='Company', store=True, readonly=True),
'state': fields.selection([('draft', 'Draft'), ('confirmed', 'Confirmed'), ('done', 'Done'), ('cancel', 'Cancelled')], 'Status', required=True, readonly=True,
help=' * The \'Draft\' status is set automatically when purchase order in draft status. \
\n* The \'Confirmed\' status is set automatically as confirm when purchase order in confirm status. \
\n* The \'Done\' status is set automatically when purchase order is set as done. \
\n* The \'Cancelled\' status is set automatically when user cancel purchase order.'),
'invoice_lines': fields.many2many('account.invoice.line', 'purchase_order_line_invoice_rel', 'order_line_id', 'invoice_id', 'Invoice Lines', readonly=True),
'invoiced': fields.boolean('Invoiced', readonly=True),
'partner_id': fields.related('order_id','partner_id',string='Partner',readonly=True,type="many2one", relation="res.partner", store=True),
'date_order': fields.related('order_id','date_order',string='Order Date',readonly=True,type="date"),
'procurement_ids': fields.one2many('procurement.order', 'purchase_line_id', string='Associated procurements'),
}
_defaults = {
'product_uom' : _get_uom_id,
'product_qty': lambda *a: 1.0,
'state': lambda *args: 'draft',
'invoiced': lambda *a: 0,
}
_table = 'purchase_order_line'
_name = 'purchase.order.line'
_description = 'Purchase Order Line'
def copy_data(self, cr, uid, id, default=None, context=None):
if not default:
default = {}
default.update({'state':'draft', 'move_ids':[], 'invoiced':0, 'invoice_lines':[], 'procurement_ids': False})
return super(purchase_order_line, self).copy_data(cr, uid, id, default, context)
def unlink(self, cr, uid, ids, context=None):
procurement_obj = self.pool.get('procurement.order')
procurement_ids_to_cancel = procurement_obj.search(cr, uid, [('purchase_line_id', 'in', ids)], context=context)
if procurement_ids_to_cancel:
self.pool['procurement.order'].cancel(cr, uid, procurement_ids_to_cancel)
return super(purchase_order_line, self).unlink(cr, uid, ids, context=context)
def onchange_product_uom(self, cr, uid, ids, pricelist_id, product_id, qty, uom_id,
partner_id, date_order=False, fiscal_position_id=False, date_planned=False,
name=False, price_unit=False, state='draft', context=None):
"""
onchange handler of product_uom.
"""
if context is None:
context = {}
if not uom_id:
return {'value': {'price_unit': price_unit or 0.0, 'name': name or '', 'product_uom' : uom_id or False}}
context = dict(context, purchase_uom_check=True)
return self.onchange_product_id(cr, uid, ids, pricelist_id, product_id, qty, uom_id,
partner_id, date_order=date_order, fiscal_position_id=fiscal_position_id, date_planned=date_planned,
name=name, price_unit=price_unit, state=state, context=context)
def _get_date_planned(self, cr, uid, supplier_info, date_order_str, context=None):
"""Return the datetime value to use as Schedule Date (``date_planned``) for
PO Lines that correspond to the given product.supplierinfo,
when ordered at `date_order_str`.
:param browse_record | False supplier_info: product.supplierinfo, used to
determine delivery delay (if False, default delay = 0)
:param str date_order_str: date of order, as a string in
DEFAULT_SERVER_DATE_FORMAT
:rtype: datetime
:return: desired Schedule Date for the PO line
"""
supplier_delay = int(supplier_info.delay) if supplier_info else 0
return datetime.strptime(date_order_str, DEFAULT_SERVER_DATE_FORMAT) + relativedelta(days=supplier_delay)
def action_cancel(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'cancel'}, context=context)
for po_line in self.browse(cr, uid, ids, context=context):
if all([l.state == 'cancel' for l in po_line.order_id.order_line]):
self.pool.get('purchase.order').action_cancel(cr, uid, [po_line.order_id.id], context=context)
def _check_product_uom_group(self, cr, uid, context=None):
group_uom = self.pool.get('ir.model.data').get_object(cr, uid, 'product', 'group_uom')
res = [user for user in group_uom.users if user.id == uid]
return len(res) and True or False
def onchange_product_id(self, cr, uid, ids, pricelist_id, product_id, qty, uom_id,
        partner_id, date_order=False, fiscal_position_id=False, date_planned=False,
        name=False, price_unit=False, state='draft', context=None):
    """
    onchange handler of product_id.

    Recomputes the line description, the unit of measure (with a category
    consistency check), the planned date, the minimal quantity enforced by
    the supplier info, the unit price and the taxes.  Returns the standard
    onchange dict with 'value', 'domain' and possibly 'warning' keys.
    """
    if context is None:
        context = {}

    # Baseline result: preserve caller-supplied values when no product is set.
    res = {'value': {'price_unit': price_unit or 0.0, 'name': name or '', 'product_uom' : uom_id or False}}
    if not product_id:
        return res

    product_product = self.pool.get('product.product')
    product_uom = self.pool.get('product.uom')
    res_partner = self.pool.get('res.partner')
    product_pricelist = self.pool.get('product.pricelist')
    account_fiscal_position = self.pool.get('account.fiscal.position')
    account_tax = self.pool.get('account.tax')

    # - check for the presence of partner_id and pricelist_id
    #if not partner_id:
    #    raise osv.except_osv(_('No Partner!'), _('Select a partner in purchase order to choose a product.'))
    #if not pricelist_id:
    #    raise osv.except_osv(_('No Pricelist !'), _('Select a price list in the purchase order form before choosing a product.'))

    # - determine name and notes based on product in partner lang.
    context_partner = context.copy()
    if partner_id:
        lang = res_partner.browse(cr, uid, partner_id).lang
        context_partner.update( {'lang': lang, 'partner_id': partner_id} )
    product = product_product.browse(cr, uid, product_id, context=context_partner)
    #call name_get() with partner in the context to eventually match name and description in the seller_ids field
    dummy, name = product_product.name_get(cr, uid, product_id, context=context_partner)[0]
    if product.description_purchase:
        name += '\n' + product.description_purchase
    res['value'].update({'name': name})

    # - set a domain on product_uom
    res['domain'] = {'product_uom': [('category_id','=',product.uom_id.category_id.id)]}

    # - check that uom and product uom belong to the same category
    product_uom_po_id = product.uom_po_id.id
    if not uom_id:
        uom_id = product_uom_po_id

    if product.uom_id.category_id.id != product_uom.browse(cr, uid, uom_id, context=context).category_id.id:
        # Warn only when the user is in the multi-UoM group and the caller
        # asked for the check; in all cases fall back to the purchase UoM.
        if context.get('purchase_uom_check') and self._check_product_uom_group(cr, uid, context=context):
            res['warning'] = {'title': _('Warning!'), 'message': _('Selected Unit of Measure does not belong to the same category as the product Unit of Measure.')}
        uom_id = product_uom_po_id

    res['value'].update({'product_uom': uom_id})

    # - determine product_qty and date_planned based on seller info
    if not date_order:
        date_order = fields.date.context_today(self,cr,uid,context=context)

    # Scan all supplier entries; the loop does not break, so the LAST entry
    # matching the partner wins.
    supplierinfo = False
    for supplier in product.seller_ids:
        if partner_id and (supplier.name.id == partner_id):
            supplierinfo = supplier
            if supplierinfo.product_uom.id != uom_id:
                res['warning'] = {'title': _('Warning!'), 'message': _('The selected supplier only sells this product by %s') % supplierinfo.product_uom.name }
            min_qty = product_uom._compute_qty(cr, uid, supplierinfo.product_uom.id, supplierinfo.min_qty, to_uom_id=uom_id)
            if (qty or 0.0) < min_qty: # If the supplier quantity is greater than entered from user, set minimal.
                if qty:
                    res['warning'] = {'title': _('Warning!'), 'message': _('The selected supplier has a minimal quantity set to %s %s, you should not purchase less.') % (supplierinfo.min_qty, supplierinfo.product_uom.name)}
                qty = min_qty

    # supplierinfo may still be False here; _get_date_planned falls back to a
    # zero supplier delay in that case.
    dt = self._get_date_planned(cr, uid, supplierinfo, date_order, context=context).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    qty = qty or 1.0
    res['value'].update({'date_planned': date_planned or dt})
    if qty:
        res['value'].update({'product_qty': qty})

    price = price_unit
    if state not in ('sent','bid'):
        # - determine price_unit and taxes_id
        if pricelist_id:
            price = product_pricelist.price_get(cr, uid, [pricelist_id],
                    product.id, qty or 1.0, partner_id or False, {'uom': uom_id, 'date': date_order})[pricelist_id]
        else:
            price = product.standard_price

    taxes = account_tax.browse(cr, uid, map(lambda x: x.id, product.supplier_taxes_id))
    fpos = fiscal_position_id and account_fiscal_position.browse(cr, uid, fiscal_position_id, context=context) or False
    taxes_ids = account_fiscal_position.map_tax(cr, uid, fpos, taxes)
    res['value'].update({'price_unit': price, 'taxes_id': taxes_ids})
    return res
# Backwards-compatible aliases kept for views/callers that still use the
# historical on_change method names.
product_id_change = onchange_product_id
product_uom_change = onchange_product_uom
def action_confirm(self, cr, uid, ids, context=None):
    """Move the given purchase order lines to the 'confirmed' state."""
    values = {'state': 'confirmed'}
    self.write(cr, uid, ids, values, context=context)
    return True
class procurement_rule(osv.osv):
    """Extend procurement rules with the purchase-driven 'Buy' action."""
    _inherit = 'procurement.rule'

    def _get_action(self, cr, uid, context=None):
        """Prepend 'Buy' to the list of selectable procurement actions."""
        parent_actions = super(procurement_rule, self)._get_action(cr, uid, context=context)
        return [('buy', _('Buy'))] + parent_actions
class procurement_order(osv.osv):
    """Procurement extended with the 'buy' resolution path: a procurement
    whose rule action is 'buy' is satisfied by creating (or topping up) a
    purchase order line.
    """
    _inherit = 'procurement.order'
    _columns = {
        # PO line created/updated to satisfy this procurement.
        'purchase_line_id': fields.many2one('purchase.order.line', 'Purchase Order Line'),
        # Convenience related field to the line's parent order.
        'purchase_id': fields.related('purchase_line_id', 'order_id', type='many2one', relation='purchase.order', string='Purchase Order'),
    }

    def propagate_cancel(self, cr, uid, procurement, context=None):
        """On cancellation of a 'buy' procurement, shrink the linked PO line
        (draft orders only) or cancel it outright."""
        if procurement.rule_id.action == 'buy' and procurement.purchase_line_id:
            purchase_line_obj = self.pool.get('purchase.order.line')
            if procurement.purchase_line_id.product_qty > procurement.product_qty and procurement.purchase_line_id.order_id.state == 'draft':
                purchase_line_obj.write(cr, uid, [procurement.purchase_line_id.id], {'product_qty': procurement.purchase_line_id.product_qty - procurement.product_qty}, context=context)
            else:
                purchase_line_obj.action_cancel(cr, uid, [procurement.purchase_line_id.id], context=context)
        return super(procurement_order, self).propagate_cancel(cr, uid, procurement, context=context)

    def _run(self, cr, uid, procurement, context=None):
        """Dispatch 'buy' procurements to make_po(); defer everything else."""
        if procurement.rule_id and procurement.rule_id.action == 'buy':
            #make a purchase order for the procurement
            return self.make_po(cr, uid, [procurement.id], context=context)[procurement.id]
        return super(procurement_order, self)._run(cr, uid, procurement, context=context)

    def _check(self, cr, uid, procurement, context=None):
        """A 'buy' procurement is done once its purchase order is shipped."""
        if procurement.purchase_line_id and procurement.purchase_line_id.order_id.shipped: # TOCHECK: does it work for several deliveries?
            return True
        return super(procurement_order, self)._check(cr, uid, procurement, context=context)

    def _check_supplier_info(self, cr, uid, ids, context=None):
        ''' Check the supplier info field of a product and write an error message on the procurement if needed.
        Returns True if all needed information is there, False if some configuration mistake is detected.
        '''
        partner_obj = self.pool.get('res.partner')
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        for procurement in self.browse(cr, uid, ids, context=context):
            message = ''
            partner = procurement.product_id.seller_id #Taken Main Supplier of Product of Procurement.
            if not procurement.product_id.seller_ids:
                message = _('No supplier defined for this product !')
            elif not partner:
                message = _('No default supplier defined for this product')
            elif not partner_obj.address_get(cr, uid, [partner.id], ['delivery'])['delivery']:
                message = _('No address defined for the supplier')
            if message:
                if procurement.message != message:
                    # Raw SQL on purpose: avoids triggering write-side effects
                    # and recomputations just to store a status message.
                    cr.execute('update procurement_order set message=%s where id=%s', (message, procurement.id))
                return False
            if user.company_id and user.company_id.partner_id:
                if partner.id == user.company_id.partner_id.id:
                    raise osv.except_osv(_('Configuration Error!'), _('The product "%s" has been defined with your company as reseller which seems to be a configuration error!' % procurement.product_id.name))
        return True

    def create_procurement_purchase_order(self, cr, uid, procurement, po_vals, line_vals, context=None):
        """Create the purchase order from the procurement, using
        the provided field values, after adding the given purchase
        order line in the purchase order.

        :params procurement: the procurement object generating the purchase order
        :params dict po_vals: field values for the new purchase order (the
                              ``order_line`` field will be overwritten with one
                              single line, as passed in ``line_vals``).
        :params dict line_vals: field values of the single purchase order line that
                                the purchase order will contain.
        :return: id of the newly created purchase order
        :rtype: int
        """
        po_vals.update({'order_line': [(0,0,line_vals)]})
        return self.pool.get('purchase.order').create(cr, uid, po_vals, context=context)

    def _get_purchase_schedule_date(self, cr, uid, procurement, company, context=None):
        """Return the datetime value to use as Schedule Date (``date_planned``) for the
        Purchase Order Lines created to satisfy the given procurement.

        :param browse_record procurement: the procurement for which a PO will be created.
        :param browse_report company: the company to which the new PO will belong to.
        :rtype: datetime
        :return: the desired Schedule Date for the PO lines
        """
        procurement_date_planned = datetime.strptime(procurement.date_planned, DEFAULT_SERVER_DATETIME_FORMAT)
        schedule_date = (procurement_date_planned - relativedelta(days=company.po_lead))
        return schedule_date

    def _get_purchase_order_date(self, cr, uid, procurement, company, schedule_date, context=None):
        """Return the datetime value to use as Order Date (``date_order``) for the
        Purchase Order created to satisfy the given procurement.

        :param browse_record procurement: the procurement for which a PO will be created.
        :param browse_report company: the company to which the new PO will belong to.
        :param datetime schedule_date: desired Scheduled Date for the Purchase Order lines.
        :rtype: datetime
        :return: the desired Order Date for the PO
        """
        seller_delay = int(procurement.product_id.seller_delay)
        return schedule_date - relativedelta(days=seller_delay)

    def _get_product_supplier(self, cr, uid, procurement, context=None):
        ''' returns the main supplier of the procurement's product given as argument'''
        return procurement.product_id.seller_id

    def _get_po_line_values_from_proc(self, cr, uid, procurement, partner, company, schedule_date, context=None):
        """Build the value dict for the PO line satisfying ``procurement``."""
        if context is None:
            context = {}
        uom_obj = self.pool.get('product.uom')
        pricelist_obj = self.pool.get('product.pricelist')
        prod_obj = self.pool.get('product.product')
        acc_pos_obj = self.pool.get('account.fiscal.position')

        seller_qty = procurement.product_id.seller_qty
        pricelist_id = partner.property_product_pricelist_purchase.id
        # Quantities are expressed in the product's purchase UoM on the PO line.
        uom_id = procurement.product_id.uom_po_id.id
        qty = uom_obj._compute_qty(cr, uid, procurement.product_uom.id, procurement.product_qty, uom_id)
        if seller_qty:
            qty = max(qty, seller_qty)
        price = pricelist_obj.price_get(cr, uid, [pricelist_id], procurement.product_id.id, qty, partner.id, {'uom': uom_id})[pricelist_id]

        #Passing partner_id to context for purchase order line integrity of Line name
        new_context = context.copy()
        new_context.update({'lang': partner.lang, 'partner_id': partner.id})
        product = prod_obj.browse(cr, uid, procurement.product_id.id, context=new_context)
        taxes_ids = procurement.product_id.supplier_taxes_id
        taxes = acc_pos_obj.map_tax(cr, uid, partner.property_account_position, taxes_ids)
        name = product.partner_ref
        if product.description_purchase:
            name += '\n' + product.description_purchase

        return {
            'name': name,
            'product_qty': qty,
            'product_id': procurement.product_id.id,
            'product_uom': uom_id,
            'price_unit': price or 0.0,
            'date_planned': schedule_date.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
            'taxes_id': [(6, 0, taxes)],
        }

    def make_po(self, cr, uid, ids, context=None):
        """ Resolve the purchase from procurement, which may result in a new PO creation, a new PO line creation or a quantity change on existing PO line.
        Note that some operations (as the PO creation) are made as SUPERUSER because the current user may not have rights to do it (mto product launched by a sale for example)

        @return: dictionary giving for each procurement its related resolving PO line.
        """
        res = {}
        company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
        po_obj = self.pool.get('purchase.order')
        po_line_obj = self.pool.get('purchase.order.line')
        seq_obj = self.pool.get('ir.sequence')
        # Track which resolution path each procurement took, for chatter messages.
        pass_ids = []
        linked_po_ids = []
        sum_po_line_ids = []
        for procurement in self.browse(cr, uid, ids, context=context):
            partner = self._get_product_supplier(cr, uid, procurement, context=context)
            if not partner:
                self.message_post(cr, uid, [procurement.id], _('There is no supplier associated to product %s') % (procurement.product_id.name))
                res[procurement.id] = False
            else:
                schedule_date = self._get_purchase_schedule_date(cr, uid, procurement, company, context=context)
                line_vals = self._get_po_line_values_from_proc(cr, uid, procurement, partner, company, schedule_date, context=context)
                #look for any other draft PO for the same supplier, to attach the new line on instead of creating a new draft one
                available_draft_po_ids = po_obj.search(cr, uid, [
                    ('partner_id', '=', partner.id), ('state', '=', 'draft'), ('picking_type_id', '=', procurement.rule_id.picking_type_id.id),
                    ('location_id', '=', procurement.location_id.id), ('company_id', '=', procurement.company_id.id), ('dest_address_id', '=', procurement.partner_dest_id.id)], context=context)
                if available_draft_po_ids:
                    po_id = available_draft_po_ids[0]
                    #look for any other PO line in the selected PO with same product and UoM to sum quantities instead of creating a new po line
                    available_po_line_ids = po_line_obj.search(cr, uid, [('order_id', '=', po_id), ('product_id', '=', line_vals['product_id']), ('product_uom', '=', line_vals['product_uom'])], context=context)
                    if available_po_line_ids:
                        po_line = po_line_obj.browse(cr, uid, available_po_line_ids[0], context=context)
                        po_line_obj.write(cr, SUPERUSER_ID, po_line.id, {'product_qty': po_line.product_qty + line_vals['product_qty']}, context=context)
                        po_line_id = po_line.id
                        sum_po_line_ids.append(procurement.id)
                    else:
                        line_vals.update(order_id=po_id)
                        po_line_id = po_line_obj.create(cr, SUPERUSER_ID, line_vals, context=context)
                        linked_po_ids.append(procurement.id)
                else:
                    purchase_date = self._get_purchase_order_date(cr, uid, procurement, company, schedule_date, context=context)
                    name = seq_obj.get(cr, uid, 'purchase.order') or _('PO: %s') % procurement.name
                    po_vals = {
                        'name': name,
                        'origin': procurement.origin,
                        'partner_id': partner.id,
                        'location_id': procurement.location_id.id,
                        'picking_type_id': procurement.rule_id.picking_type_id.id,
                        'pricelist_id': partner.property_product_pricelist_purchase.id,
                        'date_order': purchase_date.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
                        'company_id': procurement.company_id.id,
                        'fiscal_position': partner.property_account_position and partner.property_account_position.id or False,
                        'payment_term_id': partner.property_supplier_payment_term.id or False,
                        'dest_address_id': procurement.partner_dest_id.id,
                    }
                    po_id = self.create_procurement_purchase_order(cr, SUPERUSER_ID, procurement, po_vals, line_vals, context=context)
                    po_line_id = po_obj.browse(cr, uid, po_id, context=context).order_line[0].id
                    pass_ids.append(procurement.id)
                res[procurement.id] = po_line_id
                self.write(cr, uid, [procurement.id], {'purchase_line_id': po_line_id}, context=context)
        if pass_ids:
            self.message_post(cr, uid, pass_ids, body=_("Draft Purchase Order created"), context=context)
        if linked_po_ids:
            self.message_post(cr, uid, linked_po_ids, body=_("Purchase line created and linked to an existing Purchase Order"), context=context)
        if sum_po_line_ids:
            self.message_post(cr, uid, sum_po_line_ids, body=_("Quantity added in existing Purchase Order Line"), context=context)
        return res
class mail_mail(osv.Model):
    """Trigger the RFQ workflow signal when mail is sent for a draft purchase order."""
    _name = 'mail.mail'
    _inherit = 'mail.mail'

    def _postprocess_sent_message(self, cr, uid, mail, context=None, mail_sent=True):
        """After an outgoing mail is sent for a draft RFQ, fire 'send_rfq'."""
        if mail_sent and mail.model == 'purchase.order':
            po_obj = self.pool.get('purchase.order')
            order = po_obj.browse(cr, uid, mail.res_id, context=context)
            if order.state == 'draft':
                po_obj.signal_send_rfq(cr, uid, [mail.res_id])
        return super(mail_mail, self)._postprocess_sent_message(cr, uid, mail=mail, context=context, mail_sent=mail_sent)
class product_template(osv.Model):
    """Add purchasability flag and purchase counter to product templates."""
    _name = 'product.template'
    _inherit = 'product.template'

    def _purchase_count(self, cr, uid, ids, field_name, arg, context=None):
        """Functional field: per template, the sum of the purchase counts of
        all its variants."""
        res = dict.fromkeys(ids, 0)
        for template in self.browse(cr, uid, ids, context=context):
            # Generator avoids building an intermediate list just to sum it.
            res[template.id] = sum(p.purchase_count for p in template.product_variant_ids)
        return res

    _columns = {
        'purchase_ok': fields.boolean('Can be Purchased', help="Specify if the product can be selected in a purchase order line."),
        'purchase_count': fields.function(_purchase_count, string='# Purchases', type='integer'),
    }
    _defaults = {
        # Explicit boolean instead of the integer 1 for a boolean field.
        'purchase_ok': True,
    }
class product_product(osv.Model):
    """Expose, per product variant, the number of purchase orders containing it."""
    _name = 'product.product'
    _inherit = 'product.product'

    def _purchase_count(self, cr, uid, ids, field_name, arg, context=None):
        """Count the purchase orders that have a line for each product id."""
        purchase_obj = self.pool['purchase.order']
        counts = {}
        for product_id in ids:
            domain = [('order_line.product_id', '=', product_id)]
            counts[product_id] = purchase_obj.search_count(cr, uid, domain, context=context)
        return counts

    _columns = {
        'purchase_count': fields.function(_purchase_count, string='# Purchases', type='integer'),
    }
class mail_compose_message(osv.Model):
    """Fire the RFQ-sent signal when a message is composed on a purchase order."""
    _inherit = 'mail.compose.message'

    def send_mail(self, cr, uid, ids, context=None):
        """Trigger 'send_rfq' on the target purchase order before sending."""
        context = context or {}
        composing_on_po = (context.get('default_model') == 'purchase.order'
                           and context.get('default_res_id'))
        if composing_on_po:
            context = dict(context, mail_post_autofollow=True)
            self.pool.get('purchase.order').signal_send_rfq(cr, uid, [context['default_res_id']])
        return super(mail_compose_message, self).send_mail(cr, uid, ids, context=context)
class account_invoice(osv.Model):
    """ Override account_invoice to add Chatter messages on the related purchase
    orders, logging the invoice reception or payment. """
    _inherit = 'account.invoice'

    def invoice_validate(self, cr, uid, ids, context=None):
        """On validation, notify the linked POs, flag fully-invoiced lines and
        re-trigger the PO workflow."""
        res = super(account_invoice, self).invoice_validate(cr, uid, ids, context=context)
        purchase_order_obj = self.pool.get('purchase.order')
        # read access on purchase.order object is not required
        if not purchase_order_obj.check_access_rights(cr, uid, 'read', raise_exception=False):
            user_id = SUPERUSER_ID
        else:
            user_id = uid
        po_ids = purchase_order_obj.search(cr, user_id, [('invoice_ids', 'in', ids)], context=context)
        # NOTE(review): browse() below uses uid while the search used user_id;
        # presumably record rules make this safe — confirm.
        for order in purchase_order_obj.browse(cr, uid, po_ids, context=context):
            purchase_order_obj.message_post(cr, user_id, order.id, body=_("Invoice received"), context=context)
            invoiced = []
            for po_line in order.order_line:
                # A line counts as invoiced once any non-draft, non-cancelled
                # invoice line references it.
                if any(line.invoice_id.state not in ['draft', 'cancel'] for line in po_line.invoice_lines):
                    invoiced.append(po_line.id)
            if invoiced:
                self.pool['purchase.order.line'].write(cr, uid, invoiced, {'invoiced': True})
            # Nudge the PO workflow so it can progress to 'invoiced'/'done'.
            workflow.trg_write(uid, 'purchase.order', order.id, cr)
        return res

    def confirm_paid(self, cr, uid, ids, context=None):
        """On payment confirmation, post an 'Invoice paid' note on linked POs."""
        res = super(account_invoice, self).confirm_paid(cr, uid, ids, context=context)
        purchase_order_obj = self.pool.get('purchase.order')
        # read access on purchase.order object is not required
        if not purchase_order_obj.check_access_rights(cr, uid, 'read', raise_exception=False):
            user_id = SUPERUSER_ID
        else:
            user_id = uid
        po_ids = purchase_order_obj.search(cr, user_id, [('invoice_ids', 'in', ids)], context=context)
        for po_id in po_ids:
            purchase_order_obj.message_post(cr, user_id, po_id, body=_("Invoice paid"), context=context)
        return res
class account_invoice_line(osv.Model):
    """ Override account_invoice_line to add the link to the purchase order line it is related to"""
    _inherit = 'account.invoice.line'
    _columns = {
        # Link back to the originating PO line; cleared (not cascaded) when
        # that line is deleted, and indexed (select=True) for reverse lookups.
        'purchase_line_id': fields.many2one('purchase.order.line',
            'Purchase Order Line', ondelete='set null', select=True,
            readonly=True),
    }
class product_product(osv.osv):
    """Default new products to the warehouse 'Buy' procurement route."""
    _inherit = "product.product"

    def _get_buy_route(self, cr, uid, context=None):
        """Return the default route list containing the 'Buy' route id."""
        data_obj = self.pool.get('ir.model.data')
        ref = data_obj.get_object_reference(cr, uid, 'purchase', 'route_warehouse0_buy')
        return [ref[1]]

    _defaults = {
        'route_ids': _get_buy_route,
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
tochikuji/pyPyrTools | pyrtools/showIm.py | 1 | 3701 | import numpy
from PIL import Image
import scipy.stats
import matplotlib.pyplot as plt
def showIm(*args):
    """Display a matrix as a grayscale image.

    showIm(matrix, range='auto', zoom=1, label='', nshades)

    ``range`` maps values to the min/max colormap entries ('auto', 'auto2',
    'auto3' or an explicit two-element tuple), ``zoom`` scales the displayed
    size, ``label`` is used as the figure title.  Called with no arguments,
    prints usage and returns.
    """
    # check and set input parameters
    if len(args) == 0:
        print("showIm( matrix, range, zoom, label, nshades )")
        print("  matrix is string. It should be the name of a 2D array.")
        print("  range is a two element tuple.  It specifies the values that ")
        print("    map to the min and max colormap values.  Passing a value ")
        print("    of 'auto' (default) sets range=[min,max].  'auto2' sets ")
        print("    range=[mean-2*stdev, mean+2*stdev].  'auto3' sets ")
        print("    range=[p1-(p2-p1)/8, p2+(p2-p1)/8], where p1 is the 10th ")
        print("    percientile value of the sorted matix samples, and p2 is ")
        print("    the 90th percentile value.")
        print("  zoom specifies the number of matrix samples per screen pixel.")
        print("    It will be rounded to an integer, or 1 divided by an ")
        print("    integer.")
        print("  label - A string that is used as a figure title.")
        print("  NSHADES (optional) specifies the number of gray shades, ")
        print("    and defaults to the size of the current colormap. ")
        # BUGFIX: the original fell through and crashed on the undefined
        # 'matrix' below; bail out after printing usage.
        return

    matrix = numpy.array(args[0])

    if len(args) > 1:  # range entered
        if isinstance(args[1], str):
            # BUGFIX: compare strings with '==', not 'is' (identity only works
            # by CPython interning accident).
            if args[1] == "auto":
                imRange = (numpy.amin(matrix), numpy.amax(matrix))
            elif args[1] == "auto2":
                imRange = (matrix.mean() - 2 * matrix.std(),
                           matrix.mean() + 2 * matrix.std())
            elif args[1] == "auto3":
                # p1 = numpy.percentile(matrix, 10) not in python 2.6.6?!
                # p2 = numpy.percentile(matrix, 90)
                p1 = scipy.stats.scoreatpercentile(numpy.hstack(matrix), 10)
                p2 = scipy.stats.scoreatpercentile(numpy.hstack(matrix), 90)
                imRange = (p1 - (p2 - p1) / 8.0, p2 + (p2 - p1) / 8.0)
            else:
                print("Error: range of %s is not recognized." % args[1])
                print("       please use a two element tuple or ")
                print("       'auto', 'auto2' or 'auto3'")
                print("       enter 'showIm' for more info about options")
                return
        else:
            imRange = args[1][0], args[1][1]
    else:
        imRange = (numpy.amin(matrix), numpy.amax(matrix))

    zoom = args[2] if len(args) > 2 else 1
    label = args[3] if len(args) > 3 else ''
    if len(args) > 4:  # colormap entered
        print("colormap parameter is not supported.")
        print("Such specification does not make any sense.")

    # show image
    # create canvas (mpl)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.set_title(label)

    # BUGFIX: matrix.shape is (rows, cols) == (height, width); the original
    # swapped them, distorting non-square images when zooming.
    height = matrix.shape[0] * zoom
    width = matrix.shape[1] * zoom

    # normalize image to [0, 255] using the requested display range
    # (BUGFIX: the original ignored imRange and always used the data min/max).
    pmin, pmax = imRange
    if pmax == pmin:
        # flat image / degenerate range: avoid division by zero
        matrix = numpy.zeros_like(matrix)
    else:
        matrix = numpy.clip((matrix - pmin) / (pmax - pmin), 0.0, 1.0) * 255
    img = Image.fromarray(matrix.astype(numpy.uint8))

    # zoom
    if zoom != 1:
        img.thumbnail((width, height), Image.BICUBIC)

    ax.imshow(img, cmap='gray')
    plt.show()
| mit |
sencha/chromium-spacewalk | tools/vim/chromium.ycm_extra_conf.py | 6 | 7192 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Autocompletion config for YouCompleteMe in Chromium.
#
# USAGE:
#
# 1. Install YCM [https://github.com/Valloric/YouCompleteMe]
# (Googlers should check out [go/ycm])
#
# 2. Point to this config file in your .vimrc:
# let g:ycm_global_ycm_extra_conf =
# '<chrome_depot>/src/tools/vim/chromium.ycm_extra_conf.py'
#
# 3. Profit
#
#
# Usage notes:
#
# * You must use ninja & clang to build Chromium.
#
# * You must have run gyp_chromium and built Chromium recently.
#
#
# Hacking notes:
#
# * The purpose of this script is to construct an accurate enough command line
# for YCM to pass to clang so it can build and extract the symbols.
#
# * Right now, we only pull the -I and -D flags. That seems to be sufficient
# for everything I've used it for.
#
# * That whole ninja & clang thing? We could support other configs if someone
# were willing to write the correct commands and a parser.
#
# * This has only been tested on gPrecise.
import os
import subprocess
# Flags from YCM's default config.
# Baseline clang arguments always handed to YCM before any project-specific
# -I/-D flags extracted from ninja: mark the completer, force C++11, and make
# clang treat every file (including headers) as C++.
flags = [
  '-DUSE_CLANG_COMPLETER',
  '-std=c++11',
  '-x',
  'c++',
]
def PathExists(*args):
  """Return True if the path formed by joining all |args| components exists."""
  joined = os.path.join(*args)
  return os.path.exists(joined)
def FindChromeSrcFromFilename(filename):
  """Searches for the root of the Chromium checkout.

  Simply checks parent directories until it finds .gclient and src/.

  Args:
    filename: (String) Path to source file being edited.

  Returns:
    (String) Path of 'src/', or None if unable to find.
  """
  curdir = os.path.normpath(os.path.dirname(filename))
  while True:
    has_src = PathExists(curdir, 'src') and PathExists(curdir, 'src', 'DEPS')
    has_marker = (PathExists(curdir, '.gclient')
                  or PathExists(curdir, 'src', '.git'))
    if has_src and has_marker:
      return os.path.join(curdir, 'src')
    parent = os.path.normpath(os.path.join(curdir, '..'))
    if parent == curdir:
      # Reached the filesystem root without finding a checkout.
      return None
    curdir = parent
# Largely copied from ninja-build.vim (guess_configuration)
def GetNinjaOutputDirectory(chrome_root):
"""Returns <chrome_root>/<output_dir>/(Release|Debug).
The configuration chosen is the one most recently generated/built. Detects
a custom output_dir specified by GYP_GENERATOR_FLAGS."""
output_dir = 'out'
generator_flags = os.getenv('GYP_GENERATOR_FLAGS', '').split(' ')
for flag in generator_flags:
name_value = flag.split('=', 1)
if len(name_value) == 2 and name_value[0] == 'output_dir':
output_dir = name_value[1]
root = os.path.join(chrome_root, output_dir)
debug_path = os.path.join(root, 'Debug')
release_path = os.path.join(root, 'Release')
def is_release_15s_newer(test_path):
try:
debug_mtime = os.path.getmtime(os.path.join(debug_path, test_path))
except os.error:
debug_mtime = 0
try:
rel_mtime = os.path.getmtime(os.path.join(release_path, test_path))
except os.error:
rel_mtime = 0
return rel_mtime - debug_mtime >= 15
if is_release_15s_newer('build.ninja') or is_release_15s_newer('protoc'):
return release_path
return debug_path
def GetClangCommandFromNinjaForFilename(chrome_root, filename):
  """Returns the command line to build |filename|.

  Asks ninja how it would build the source file. If the specified file is a
  header, tries to find its companion source file first.

  Args:
    chrome_root: (String) Path to src/.
    filename: (String) Path to source file being edited.

  Returns:
    (List of Strings) Command line arguments for clang.
  """
  if not chrome_root:
    return []

  # Generally, everyone benefits from including Chromium's src/, because all of
  # Chromium's includes are relative to that.
  chrome_flags = ['-I' + os.path.join(chrome_root)]

  # Default file to get a reasonable approximation of the flags for a Blink
  # file.
  blink_root = os.path.join(chrome_root, 'third_party', 'WebKit')
  default_blink_file = os.path.join(blink_root, 'Source', 'core', 'Init.cpp')

  # Header files can't be built. Instead, try to match a header file to its
  # corresponding source file.
  if filename.endswith('.h'):
    # Add config.h to Blink headers, which won't have it by default.
    if filename.startswith(blink_root):
      chrome_flags.append('-include')
      chrome_flags.append(os.path.join(blink_root, 'Source', 'config.h'))
    alternates = ['.cc', '.cpp']
    for alt_extension in alternates:
      alt_name = filename[:-2] + alt_extension
      if os.path.exists(alt_name):
        filename = alt_name
        break
    else:
      if filename.startswith(blink_root):
        # If this is a Blink file, we can at least try to get a reasonable
        # approximation.
        filename = default_blink_file
      else:
        # If this is a standalone .h file with no source, the best we can do is
        # try to use the default flags.
        return chrome_flags

  # Ninja needs the path to the source file from the output build directory.
  # Cut off the common part and /.
  subdir_filename = filename[len(chrome_root)+1:]
  rel_filename = os.path.join('..', '..', subdir_filename)

  out_dir = GetNinjaOutputDirectory(chrome_root)

  # Ask ninja how it would build our source file.
  # (The trailing '^' asks for the command that has |rel_filename| as input.)
  p = subprocess.Popen(['ninja', '-v', '-C', out_dir, '-t',
                        'commands', rel_filename + '^'],
                       stdout=subprocess.PIPE)
  stdout, stderr = p.communicate()
  if p.returncode:
    return chrome_flags

  # NOTE(review): communicate() returns bytes on Python 3; the str splits
  # below assume Python 2 — confirm the intended interpreter.
  # Ninja might execute several commands to build something. We want the last
  # clang command.
  clang_line = None
  for line in reversed(stdout.split('\n')):
    if 'clang' in line:
      clang_line = line
      break
  else:
    return chrome_flags

  # Parse flags that are important for YCM's purposes.
  for flag in clang_line.split(' '):
    if flag.startswith('-I'):
      # Relative paths need to be resolved, because they're relative to the
      # output dir, not the source.
      if flag[2] == '/':
        chrome_flags.append(flag)
      else:
        abs_path = os.path.normpath(os.path.join(out_dir, flag[2:]))
        chrome_flags.append('-I' + abs_path)
    elif flag.startswith('-std'):
      chrome_flags.append(flag)
    elif flag.startswith('-') and flag[1] in 'DWFfmO':
      if flag == '-Wno-deprecated-register' or flag == '-Wno-header-guard':
        # These flags causes libclang (3.3) to crash. Remove it until things
        # are fixed.
        continue
      chrome_flags.append(flag)

  return chrome_flags
def FlagsForFile(filename):
  """This is the main entry point for YCM. Its interface is fixed.

  Args:
    filename: (String) Path to source file being edited.

  Returns:
    (Dictionary)
      'flags': (List of Strings) Command line flags.
      'do_cache': (Boolean) True if the result should be cached.
  """
  chrome_root = FindChromeSrcFromFilename(filename)
  chrome_flags = GetClangCommandFromNinjaForFilename(chrome_root, filename)
  return {
      'flags': flags + chrome_flags,
      'do_cache': True,
  }
| bsd-3-clause |
ntoll/yotta | yotta/lib/vcs.py | 2 | 7644 | # Copyright 2014 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import os
import subprocess
import tempfile
import logging
import hgapi
import errno
# fsutils, , misc filesystem utils, internal
import fsutils
git_logger = logging.getLogger('git')
hg_logger = logging.getLogger('hg')
class VCSError(Exception):
    """Raised when a version-control operation fails.

    ``returncode`` carries the exit status of the failed command, when one
    is available (``None`` otherwise).
    """
    def __init__(self, message, returncode=None):
        super(VCSError, self).__init__(message)
        self.returncode = returncode
class VCS(object):
    """Abstract interface over a version-control working copy.

    Concrete backends (Git, HG) implement cloning, cleanliness checks,
    commits and tag listing.  All methods raise NotImplementedError here,
    except markForCommit which defaults to a no-op.
    """
    @classmethod
    def cloneToTemporaryDir(cls, remote):
        raise NotImplementedError()
    @classmethod
    def cloneToDirectory(cls, remote, directory, tag=None):
        raise NotImplementedError()
    # BUGFIX: isClean was defined twice in the original; the duplicate
    # (which silently shadowed the first) has been removed.
    def isClean(self):
        raise NotImplementedError()
    def commit(self, message, tag=None):
        raise NotImplementedError()
    def tags(self):
        raise NotImplementedError()
    def markForCommit(self, path):
        # Staging is optional for some backends; default is a no-op.
        pass
    def remove(self):
        raise NotImplementedError()
    def __nonzero__(self):
        raise NotImplementedError()
    # python 3 truthiness
    def __bool__(self):
        return self.__nonzero__()
class Git(VCS):
def __init__(self, path):
self.worktree = path
self.gitdir = os.path.join(path, '.git')
@classmethod
def cloneToTemporaryDir(cls, remote):
return cls.cloneToDirectory(remote, tempfile.mkdtemp())
@classmethod
def cloneToDirectory(cls, remote, directory, tag=None):
commands = [
['git', 'clone', remote, directory]
]
cls._execCommands(commands)
r = Git(directory)
if tag is not None:
r.updateToTag(tag)
return r
def fetchAllBranches(self):
remote_branches = []
local_branches = []
# list remote branches
out, err = self._execCommands([self._gitCmd('branch', '-r')])
for line in out.split(b'\n'):
branch_info = line.split(b' -> ')
# skip HEAD:
if len(branch_info) > 1:
continue
remote_branch = branch_info[0].strip()
branch = b'/'.join(remote_branch.split(b'/')[1:])
remote_branches.append((remote_branch, branch))
# list already-existing local branches
out, err = self._execCommands([self._gitCmd('branch')])
for line in out.split(b'\n'):
local_branches.append(line.strip(b' *'))
for remote, branchname in remote_branches:
# don't try to replace existing local branches
if branchname in local_branches:
continue
try:
out, err = self._execCommands([
self._gitCmd('checkout', '-b', branchname, remote)
])
except VCSError as e:
git_logger.error('failed to fetch remote branch %s %s' % (remote, branchname))
raise
def remove(self):
fsutils.rmRf(self.worktree)
def workingDirectory(self):
return self.worktree
def _gitCmd(self, *args):
return ['git','--work-tree=%s' % self.worktree,'--git-dir=%s'%self.gitdir.replace('\\', '/')] + list(args);
@classmethod
def _execCommands(cls, commands):
out, err = None, None
for cmd in commands:
try:
child = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError as e:
if e.errno == errno.ENOENT:
if cmd[0] == 'git':
raise VCSError(
'git is not installed, or not in your path. Please follow the installation instructions at http://docs.yottabuild.org/#installing'
)
else:
raise VCSError('%s is not installed' % (cmd[0]))
else:
raise VCSError('command %s failed' % (cmd))
out, err = child.communicate()
returncode = child.returncode
if returncode:
raise VCSError("command failed: %s:%s" % (cmd, err or out), returncode=returncode)
return out, err
def isClean(self):
commands = [
self._gitCmd('diff', '--quiet', '--exit-code'),
self._gitCmd('diff', '--cached', '--quiet', '--exit-code'),
]
try:
out, err = self._execCommands(commands)
except VCSError as e:
if e.returncode:
return False
else:
raise
return True
def markForCommit(self, relative_path):
commands = [
self._gitCmd('add', os.path.join(self.worktree, relative_path)),
]
self._execCommands(commands)
def updateToTag(self, tag):
commands = [
self._gitCmd('checkout', tag),
]
self._execCommands(commands)
def tags(self):
commands = [
self._gitCmd('tag', '-l')
]
out, err = self._execCommands(commands)
# I think utf-8 is the right encoding? commit messages are utf-8
# encoded, couldn't find any documentation on tag names.
return out.decode('utf-8').split(u'\n')
def branches(self):
    """Return local branch names, with the "* " / leading-space
    decoration of `git branch --list` stripped. The trailing empty
    entry produced by the final newline is preserved."""
    out, err = self._execCommands([self._gitCmd('branch', '--list')])
    names = []
    for line in out.decode('utf-8').split('\n'):
        # drop the leading "* " (current branch) or "  " markers
        names.append(line.lstrip(' *'))
    return names
def commit(self, message, tag=None):
    """Commit staged changes with *message*; when *tag* is given, also
    create an annotated tag of that name on the new commit."""
    batch = [self._gitCmd('commit', '-m', message)]
    if tag:
        batch.append(self._gitCmd('tag', tag, '-a', '-m', tag))
    self._execCommands(batch)
def __nonzero__(self):
    # Python 2 truthiness hook (__bool__ is the Python 3 equivalent):
    # an instance always evaluates as True.
    return True
# FIXME: hgapi will throw HgException when something goes wrong, it may be worth trying
# to catch that in some methods
class HG(VCS):
    """Mercurial implementation of the VCS interface, backed by hgapi."""

    def __init__(self, path):
        # `path` is the root of an existing local mercurial repository.
        self.worktree = path
        self.repo = hgapi.Repo(path)

    @classmethod
    def cloneToTemporaryDir(cls, remote):
        # Clone `remote` into a fresh temporary directory; the caller is
        # responsible for calling remove() when done.
        return cls.cloneToDirectory(remote, tempfile.mkdtemp())

    @classmethod
    def cloneToDirectory(cls, remote, directory, tag=None):
        # hg doesn't automatically create the directories needed by destination
        try:
            os.makedirs(directory)
        except:
            # directory may already exist; real problems will surface when
            # the clone itself fails
            pass
        hg_logger.debug('will clone %s into %s', remote, directory)
        hgapi.Repo.hg_clone(remote, directory)
        r = HG(directory)
        if tag is not None:
            r.updateToTag(tag)
        return r

    def remove(self):
        # Recursively delete the whole clone from disk.
        fsutils.rmRf(self.worktree)

    def workingDirectory(self):
        # Path of the working copy this object operates on.
        return self.worktree

    def isClean(self):
        # An empty `hg status` means there are no pending changes.
        return not bool(self.repo.hg_status(empty=True))

    def markForCommit(self, relative_path):
        # Schedule a single file (relative to the working copy) for the
        # next commit().
        self.repo.hg_add(os.path.join(self.worktree, relative_path))

    def updateToTag(self, tag):
        # Update the working copy to the given tag/revision.
        self.repo.hg_update(tag)

    def tags(self):
        # All tag names except the implicit 'tip' tag mercurial always
        # maintains.
        l = list(self.repo.hg_tags().keys())
        l.remove('tip')
        return l

    def commit(self, message, tag=None):
        # Commit pending changes, optionally tagging the new revision.
        self.repo.hg_commit(message)
        if tag:
            self.repo.hg_tag(tag)

    def __nonzero__(self):
        # Python 2 truthiness hook: an HG instance is always truthy.
        return True
def getVCS(path):
    """Return a version-control wrapper for *path*: Git when a `.git`
    entry exists, HG when a `.hg` directory exists, otherwise None.

    (Crude heuristic, but it does the job.)
    """
    # `.git` may be a plain file (submodules/worktrees), hence exists()
    # rather than isdir() for git.
    git_marker = os.path.join(path, '.git')
    if os.path.exists(git_marker):
        return Git(path)
    hg_marker = os.path.join(path, '.hg')
    if os.path.isdir(hg_marker):
        return HG(path)
    return None
| apache-2.0 |
pcaro/jurko-suds | suds/sax/date.py | 9 | 14022 | # -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jurko Gospodnetić ( jurko.gospodnetic@pke.hr )
# based on code by: Glen Walker
# based on code by: Nathan Van Gheem ( vangheem@gmail.com )
"""Classes for conversion between XML dates and Python objects."""
from suds import UnicodeMixin
import datetime
import re
import time
# Regular expression snippets shared by the date/time/datetime patterns
# below. Deliberately lenient: single-digit fields and any number of year
# digits are accepted.
_SNIPPET_DATE = \
    r"(?P<year>\d{1,})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
_SNIPPET_TIME = \
    r"(?P<hour>\d{1,2}):(?P<minute>[0-5]?[0-9]):(?P<second>[0-5]?[0-9])" \
    r"(?:\.(?P<subsecond>\d+))?"
# Timezone: either a +hh[:mm]/-hh[:mm] offset or a literal Z/z for UTC.
_SNIPPET_ZONE = \
    r"(?:(?P<tz_sign>[-+])(?P<tz_hour>\d{1,2})" \
    r"(?::(?P<tz_minute>[0-5]?[0-9]))?)" \
    r"|(?P<tz_utc>[Zz])"

# Complete anchored patterns. xsd:dateTime separates date and time with
# 'T', but a space separator is tolerated here as well.
_PATTERN_DATE = r"^%s(?:%s)?$" % (_SNIPPET_DATE, _SNIPPET_ZONE)
_PATTERN_TIME = r"^%s(?:%s)?$" % (_SNIPPET_TIME, _SNIPPET_ZONE)
_PATTERN_DATETIME = r"^%s[T ]%s(?:%s)?$" % (_SNIPPET_DATE, _SNIPPET_TIME,
    _SNIPPET_ZONE)

_RE_DATE = re.compile(_PATTERN_DATE)
_RE_TIME = re.compile(_PATTERN_TIME)
_RE_DATETIME = re.compile(_PATTERN_DATETIME)
class Date(UnicodeMixin):
    """
    An XML date object supporting the xsd:date datatype.

    @ivar value: The object value.
    @type value: B{datetime}.I{date}

    """

    def __init__(self, value):
        """
        @param value: The date value of the object.
        @type value: (datetime.date|str)
        @raise ValueError: When I{value} is invalid.

        """
        # datetime.datetime is a subclass of datetime.date, so it must be
        # tested first in order to strip the time portion.
        if isinstance(value, datetime.datetime):
            self.value = value.date()
        elif isinstance(value, datetime.date):
            self.value = value
        elif isinstance(value, basestring):
            # NOTE: basestring makes this code Python 2 only.
            self.value = self.__parse(value)
        else:
            raise ValueError("invalid type for Date(): %s" % type(value))

    @staticmethod
    def __parse(value):
        """
        Parse the string date.

        Supports the subset of ISO8601 used by xsd:date, but is lenient with
        what is accepted, handling most reasonable syntax.

        Any timezone is parsed but ignored because a) it is meaningless without
        a time and b) B{datetime}.I{date} does not support timezone
        information.

        @param value: A date string.
        @type value: str
        @return: A date object.
        @rtype: B{datetime}.I{date}

        """
        match_result = _RE_DATE.match(value)
        if match_result is None:
            raise ValueError("date data has invalid format '%s'" % (value,))
        return _date_from_match(match_result)

    def __unicode__(self):
        # ISO 8601 representation, e.g. u"2013-07-24".
        return self.value.isoformat()
class DateTime(UnicodeMixin):
    """
    An XML datetime object supporting the xsd:dateTime datatype.

    @ivar value: The object value.
    @type value: B{datetime}.I{datetime}

    """

    def __init__(self, value):
        """
        @param value: The datetime value of the object.
        @type value: (datetime.datetime|str)
        @raise ValueError: When I{value} is invalid.

        """
        if isinstance(value, datetime.datetime):
            self.value = value
        elif isinstance(value, basestring):
            # NOTE: basestring makes this code Python 2 only.
            self.value = self.__parse(value)
        else:
            raise ValueError("invalid type for DateTime(): %s" % type(value))

    @staticmethod
    def __parse(value):
        """
        Parse the string datetime.

        Supports the subset of ISO8601 used by xsd:dateTime, but is lenient
        with what is accepted, handling most reasonable syntax.

        Subsecond information is rounded to microseconds due to a restriction
        in the python datetime.datetime/time implementation.

        @param value: A datetime string.
        @type value: str
        @return: A datetime object.
        @rtype: B{datetime}.I{datetime}

        """
        match_result = _RE_DATETIME.match(value)
        if match_result is None:
            raise ValueError("date data has invalid format '%s'" % (value,))

        date = _date_from_match(match_result)
        # round_up signals that the truncated sub-second part must be
        # rounded up by one microsecond (see _time_from_match()).
        time, round_up = _time_from_match(match_result)
        tzinfo = _tzinfo_from_match(match_result)

        value = datetime.datetime.combine(date, time)
        value = value.replace(tzinfo=tzinfo)
        if round_up:
            # Added after combining so the timedelta can roll the result
            # over into the next day if needed.
            value += datetime.timedelta(microseconds=1)
        return value

    def __unicode__(self):
        return self.value.isoformat()
class Time(UnicodeMixin):
    """
    An XML time object supporting the xsd:time datatype.

    @ivar value: The object value.
    @type value: B{datetime}.I{time}

    """

    def __init__(self, value):
        """
        @param value: The time value of the object.
        @type value: (datetime.time|str)
        @raise ValueError: When I{value} is invalid.

        """
        if isinstance(value, datetime.time):
            self.value = value
        elif isinstance(value, basestring):
            # NOTE: basestring makes this code Python 2 only.
            self.value = self.__parse(value)
        else:
            raise ValueError("invalid type for Time(): %s" % type(value))

    @staticmethod
    def __parse(value):
        """
        Parse the string date.

        Supports the subset of ISO8601 used by xsd:time, but is lenient with
        what is accepted, handling most reasonable syntax.

        Subsecond information is rounded to microseconds due to a restriction
        in the python datetime.time implementation.

        @param value: A time string.
        @type value: str
        @return: A time object.
        @rtype: B{datetime}.I{time}

        """
        match_result = _RE_TIME.match(value)
        if match_result is None:
            raise ValueError("date data has invalid format '%s'" % (value,))

        time, round_up = _time_from_match(match_result)
        tzinfo = _tzinfo_from_match(match_result)
        if round_up:
            # datetime.time cannot overflow past midnight by itself; the
            # helper silently wraps 23:59:59.999999 around to 00:00:00.0.
            time = _bump_up_time_by_microsecond(time)
        return time.replace(tzinfo=tzinfo)

    def __unicode__(self):
        return self.value.isoformat()
class FixedOffsetTimezone(datetime.tzinfo, UnicodeMixin):
    """
    A timezone with a fixed offset and no daylight savings adjustment.

    http://docs.python.org/library/datetime.html#datetime.tzinfo

    """

    def __init__(self, offset):
        """
        @param offset: The fixed offset of the timezone, given either as a
            whole number of hours or as a timedelta with minute precision.
        @type offset: I{int} or B{datetime}.I{timedelta}
        @raise TypeError: When I{offset} has an unsupported type.
        @raise ValueError: When I{offset} is not a whole number of minutes.

        """
        if type(offset) == int:
            offset = datetime.timedelta(hours=offset)
        elif type(offset) != datetime.timedelta:
            raise TypeError("timezone offset must be an int or "
                "datetime.timedelta")
        if offset.microseconds or (offset.seconds % 60 != 0):
            raise ValueError("timezone offset must have minute precision")
        self.__offset = offset

    def dst(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.dst

        """
        # Fixed-offset timezones never apply a daylight savings shift.
        return datetime.timedelta(0)

    def utcoffset(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.utcoffset

        """
        return self.__offset

    def tzname(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname

        Returns the offset formatted as "+HH:MM"/"-HH:MM" (with a ":SS"
        suffix in the theoretical sub-minute case).

        """
        # total_seconds was introduced in Python 2.7
        if hasattr(self.__offset, "total_seconds"):
            total_seconds = self.__offset.total_seconds()
        else:
            total_seconds = (self.__offset.days * 24 * 60 * 60) + \
                (self.__offset.seconds)

        # BUGFIX: break the offset down on its absolute value and apply the
        # sign explicitly. The previous floor-division based breakdown
        # misreported negative offsets having a minute component, e.g.
        # -05:30 came out as "-06:30".
        if total_seconds < 0:
            sign = "-"
            total_seconds = -total_seconds
        else:
            sign = "+"

        hours = total_seconds // (60 * 60)
        total_seconds -= hours * 60 * 60
        minutes = total_seconds // 60
        total_seconds -= minutes * 60
        seconds = total_seconds // 1
        total_seconds -= seconds
        if seconds:
            return "%s%02d:%02d:%02d" % (sign, hours, minutes, seconds)
        return "%s%02d:%02d" % (sign, hours, minutes)

    def __unicode__(self):
        return "FixedOffsetTimezone %s" % (self.tzname(None),)
class UtcTimezone(FixedOffsetTimezone):
    """
    The UTC timezone.

    http://docs.python.org/library/datetime.html#datetime.tzinfo

    """

    def __init__(self):
        # UTC is simply a fixed offset of zero.
        FixedOffsetTimezone.__init__(self, datetime.timedelta(0))

    def tzname(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname

        """
        return "UTC"

    def __unicode__(self):
        return "UtcTimezone"
class LocalTimezone(datetime.tzinfo):
    """
    The local timezone of the operating system.

    http://docs.python.org/library/datetime.html#datetime.tzinfo

    """

    def __init__(self):
        # time.timezone/time.altzone are seconds *west* of UTC, hence the
        # negation to obtain conventional UTC offsets.
        self.__offset = datetime.timedelta(seconds=-time.timezone)
        self.__dst_offset = None
        if time.daylight:
            self.__dst_offset = datetime.timedelta(seconds=-time.altzone)

    def dst(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.dst

        """
        if self.__is_daylight_time(dt):
            return self.__dst_offset - self.__offset
        return datetime.timedelta(0)

    def tzname(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname

        """
        if self.__is_daylight_time(dt):
            return time.tzname[1]
        return time.tzname[0]

    def utcoffset(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.utcoffset

        """
        if self.__is_daylight_time(dt):
            return self.__dst_offset
        return self.__offset

    def __is_daylight_time(self, dt):
        # Ask the C library whether DST is in effect for dt's wall-clock
        # time (dt is interpreted as naive local time).
        if not time.daylight:
            return False
        time_tuple = dt.replace(tzinfo=None).timetuple()
        time_tuple = time.localtime(time.mktime(time_tuple))
        return time_tuple.tm_isdst > 0

    def __unicode__(self):
        dt = datetime.datetime.now()
        return "LocalTimezone %s offset: %s dst: %s" % (self.tzname(dt),
            self.utcoffset(dt), self.dst(dt))
def _bump_up_time_by_microsecond(time):
    """
    Helper function bumping up the given datetime.time by a microsecond,
    cycling around silently to 00:00:00.0 in case of an overflow.

    @param time: Time object.
    @type time: B{datetime}.I{time}
    @return: Time object.
    @rtype: B{datetime}.I{time}

    """
    # Piggy-back on datetime arithmetic: attach the time to a throwaway
    # date, add one microsecond, then discard the date part again.
    anchor = datetime.datetime.combine(datetime.date(2000, 1, 1), time)
    bumped = anchor + datetime.timedelta(microseconds=1)
    return bumped.time()
def _date_from_match(match_object):
    """
    Create a date object from a regular expression match.

    The regular expression match is expected to be from _RE_DATE or
    _RE_DATETIME.

    @param match_object: The regular expression match.
    @type match_object: B{re}.I{MatchObject}
    @return: A date object.
    @rtype: B{datetime}.I{date}

    """
    # All three groups are mandatory in the source patterns.
    parts = (match_object.group("year"),
        match_object.group("month"),
        match_object.group("day"))
    return datetime.date(*[int(part) for part in parts])
def _time_from_match(match_object):
    """
    Create a time object from a regular expression match.

    Returns the time object and information whether the resulting time should
    be bumped up by one microsecond due to microsecond rounding.

    Subsecond information is rounded to microseconds due to a restriction in
    the python datetime.datetime/time implementation.

    The regular expression match is expected to be from _RE_DATETIME or
    _RE_TIME.

    @param match_object: The regular expression match.
    @type match_object: B{re}.I{MatchObject}
    @return: Time object + rounding flag.
    @rtype: tuple of B{datetime}.I{time} and bool

    """
    hour = int(match_object.group('hour'))
    minute = int(match_object.group('minute'))
    second = int(match_object.group('second'))

    subsecond = match_object.group('subsecond')
    round_up = False
    microsecond = 0
    if subsecond:
        # Keep at most 6 digits (microsecond precision); the 7th digit, if
        # present, decides whether to round the truncated value up.
        round_up = len(subsecond) > 6 and int(subsecond[6]) >= 5
        subsecond = subsecond[:6]
        # Right-pad to exactly 6 digits so that e.g. ".5" means 500000 us.
        microsecond = int(subsecond + "0" * (6 - len(subsecond)))
    return datetime.time(hour, minute, second, microsecond), round_up
def _tzinfo_from_match(match_object):
    """
    Create a timezone information object from a regular expression match.

    Returns None when the matched string carries no timezone information,
    i.e. for a naive date/time.

    The regular expression match is expected to be from _RE_DATE, _RE_DATETIME
    or _RE_TIME.

    @param match_object: The regular expression match.
    @type match_object: B{re}.I{MatchObject}
    @return: A timezone information object.
    @rtype: B{datetime}.I{tzinfo}

    """
    tz_utc = match_object.group("tz_utc")
    if tz_utc:
        return UtcTimezone()

    tz_sign = match_object.group("tz_sign")
    if not tz_sign:
        return

    # The pattern guarantees tz_hour whenever tz_sign matched; the `or 0`
    # fallbacks keep the conversion robust regardless.
    h = int(match_object.group("tz_hour") or 0)
    m = int(match_object.group("tz_minute") or 0)
    if h == 0 and m == 0:
        # +00:00 / -00:00 are just UTC spelled differently.
        return UtcTimezone()

    # Python limitation - timezone offsets larger than one day (in absolute)
    # will cause operations depending on tzinfo.utcoffset() to fail, e.g.
    # comparing two timezone aware datetime.datetime/time objects.
    if h >= 24:
        raise ValueError("timezone indicator too large")

    tz_delta = datetime.timedelta(hours=h, minutes=m)
    if tz_sign == "-":
        tz_delta *= -1
    return FixedOffsetTimezone(tz_delta)
| lgpl-3.0 |
JioCloud/python-novaclient | novaclient/v1_1/virtual_interfaces.py | 17 | 1039 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Virtual Interfaces (1.1 extension).
"""
from novaclient import base
class VirtualInterface(base.Resource):
    """A virtual network interface attached to a server."""

    def __repr__(self):
        # BUGFIX: the previous implementation was a bare `pass`, i.e. it
        # returned None, which makes repr() raise TypeError. Return a
        # proper string, degrading gracefully when no id is set.
        return "<VirtualInterface: %s>" % getattr(self, 'id', 'unknown')
class VirtualInterfaceManager(base.ManagerWithFind):
    """Manager for listing the virtual interfaces of a server."""

    # Each entry of a listing is materialized as a VirtualInterface.
    resource_class = VirtualInterface

    def list(self, instance_id):
        """
        List the virtual interfaces attached to the given server.

        :param instance_id: ID of the server to query.
        """
        return self._list('/servers/%s/os-virtual-interfaces' % instance_id,
            'virtual_interfaces')
| apache-2.0 |
tejo-esperanto/pasportaservo | tests/models/test_gender_model.py | 1 | 1457 | from django.test import tag
from django_webtest import WebTest
from ..factories import GenderFactory
@tag('models')
class GenderModelTests(WebTest):
    """Unit tests for the Gender model's field options, equality semantics
    and string representation.

    Note: the deprecated `assertEquals`/`assertNotEquals` aliases (removed
    in Python 3.12) were replaced by `assertEqual`/`assertNotEqual`, and
    the misspelled `test_eqality` method was renamed to `test_equality`.
    """

    def test_field_max_lengths(self):
        # Both the canonical and the English name are capped at 255 chars.
        gender = GenderFactory.build()
        self.assertEqual(gender._meta.get_field('name_en').max_length, 255)
        self.assertEqual(gender._meta.get_field('name').max_length, 255)

    def test_field_uniqueness(self):
        gender = GenderFactory.build()
        self.assertTrue(gender._meta.get_field('name_en').unique)
        self.assertTrue(gender._meta.get_field('name').unique)

    def test_equality(self):
        # Equality is expected to depend on `name` alone; `name_en` and
        # non-string/non-Gender operands must not compare equal.
        gender = GenderFactory.build(name="forrest", name_en="gump")
        self.assertEqual(gender, GenderFactory.build(name="forrest"))
        self.assertEqual(gender, GenderFactory.build(name="forrest", name_en="curran"))
        self.assertEqual(gender, "forrest")
        self.assertNotEqual(gender, GenderFactory.build(name="bubba"))
        self.assertNotEqual(gender, GenderFactory.build(name="bubba", name_en="gump"))
        # Comparison against plain strings works in both directions.
        self.assertEqual("forrest", gender)
        self.assertNotEqual("bubba", gender)
        self.assertFalse(gender == 7)
        self.assertFalse(gender == id(gender))
        self.assertFalse(gender == ["forrest"])
        self.assertFalse(gender == (term for term in ["forrest"]))

    def test_str(self):
        gender = GenderFactory.build()
        self.assertEqual(str(gender), gender.name)
| agpl-3.0 |
synasius/django | django/contrib/gis/db/backends/oracle/introspection.py | 539 | 1977 | import sys
import cx_Oracle
from django.db.backends.oracle.introspection import DatabaseIntrospection
from django.utils import six
class OracleIntrospection(DatabaseIntrospection):
    # Associating any OBJECTVAR instances with GeometryField. Of course,
    # this won't work right on Oracle objects that aren't MDSYS.SDO_GEOMETRY,
    # but it is the only object type supported within Django anyways.
    data_types_reverse = DatabaseIntrospection.data_types_reverse.copy()
    data_types_reverse[cx_Oracle.OBJECT] = 'GeometryField'

    def get_geometry_type(self, table_name, geo_col):
        """
        Determine the geometry field type and its extra parameters (srid,
        dim) for the given geometry column, by consulting Oracle's
        USER_SDO_GEOM_METADATA view.
        """
        cursor = self.connection.cursor()
        try:
            # Querying USER_SDO_GEOM_METADATA to get the SRID and dimension information.
            try:
                cursor.execute(
                    'SELECT "DIMINFO", "SRID" FROM "USER_SDO_GEOM_METADATA" '
                    'WHERE "TABLE_NAME"=%s AND "COLUMN_NAME"=%s',
                    (table_name.upper(), geo_col.upper())
                )
                row = cursor.fetchone()
            except Exception as msg:
                # Re-raise with extra context while preserving the original
                # traceback (six.reraise for Python 2/3 compatibility).
                new_msg = (
                    'Could not find entry in USER_SDO_GEOM_METADATA '
                    'corresponding to "%s"."%s"\n'
                    'Error message: %s.') % (table_name, geo_col, msg)
                six.reraise(Exception, Exception(new_msg), sys.exc_info()[2])

            # TODO: Research way to find a more specific geometry field type for
            # the column's contents.
            field_type = 'GeometryField'

            # Getting the field parameters.
            field_params = {}
            dim, srid = row
            if srid != 4326:
                # srid 4326 is treated as the default and omitted.
                field_params['srid'] = srid
            # Length of object array ( SDO_DIM_ARRAY ) is number of dimensions.
            dim = len(dim)
            if dim != 2:
                field_params['dim'] = dim
        finally:
            # Always release the cursor, even when the metadata lookup fails.
            cursor.close()
        return field_type, field_params
| bsd-3-clause |
azumimuo/family-xbmc-addon | script.video.F4mProxy/lib/flvlib/scripts/debug_flv.py | 98 | 3580 | import sys
import logging
from optparse import OptionParser
from flvlib import __versionstr__
from flvlib import tags
from flvlib import helpers
from flvlib.astypes import MalformedFLV
# Module logger; only ERROR and above are emitted by this module because
# the logger's own level is set explicitly here.
log = logging.getLogger('flvlib.debug-flv')
log.setLevel(logging.ERROR)
def debug_file(filename, quiet=False, metadata=False):
try:
f = open(filename, 'rb')
except IOError, (errno, strerror):
log.error("Failed to open `%s': %s", filename, strerror)
return False
flv = tags.FLV(f)
if not quiet:
print "=== `%s' ===" % filename
try:
tag_generator = flv.iter_tags()
for i, tag in enumerate(tag_generator):
if quiet:
# If we're quiet, we just want to catch errors
continue
# Print the tag information
print "#%05d %s" % (i + 1, tag)
# Print the content of onMetaData tags
if (isinstance(tag, tags.ScriptTag)
and tag.name == "onMetaData"):
helpers.pprint(tag.variable)
if metadata:
return True
except MalformedFLV, e:
message = e[0] % e[1:]
log.error("The file `%s' is not a valid FLV file: %s",
filename, message)
return False
except tags.EndOfFile:
log.error("Unexpected end of file on file `%s'", filename)
return False
f.close()
return True
def process_options():
    """Parse command line options.

    Returns the optparse (options, args) pair; args[0] is the script name,
    the remaining entries are file paths. Exits via parser.error() when no
    file path was supplied. As a side effect, configures strict parsing
    and the flvlib package log level.
    """
    usage = "%prog [options] files ..."
    description = ("Checks FLV files for comformance with the FLV "
        "specification. Outputs a list of tags and, "
        "if present, the content of the onMetaData script tag.")
    version = "%%prog flvlib %s" % __versionstr__
    parser = OptionParser(usage=usage, description=description,
        version=version)
    parser.add_option("-s", "--strict", action="store_true",
        help="be strict while parsing the FLV file")
    parser.add_option("-q", "--quiet", action="store_true",
        help="do not output anything unless there are errors")
    parser.add_option("-m", "--metadata", action="store_true",
        help="exit immediately after printing an onMetaData tag")
    parser.add_option("-v", "--verbose", action="count",
        default=0, dest="verbosity",
        help="be more verbose, each -v increases verbosity")
    options, args = parser.parse_args(sys.argv)

    if len(args) < 2:
        parser.error("You have to provide at least one file path")

    if options.strict:
        tags.STRICT_PARSING = True

    # Clamp verbosity to the highest supported level and map it onto a
    # logging level for the whole flvlib package.
    if options.verbosity > 3:
        options.verbosity = 3
    level = ({0: logging.ERROR, 1: logging.WARNING,
        2: logging.INFO, 3: logging.DEBUG}[options.verbosity])
    logging.getLogger('flvlib').setLevel(level)

    return options, args
def debug_files():
    """Parse every file named on the command line and return True only
    when all of them were clean."""
    options, args = process_options()
    # Evaluate every file first (no short-circuiting) so that each one
    # gets reported, then combine the outcomes.
    results = [debug_file(name, options.quiet, options.metadata)
        for name in args[1:]]
    return all(results)
def main():
    """Script entry point: run the checks and translate the outcome (and
    interrupts/environment errors) into conventional exit codes."""
    try:
        outcome = debug_files()
    except KeyboardInterrupt:
        # give the right exit status, 128 + signal number
        # signal.SIGINT = 2
        sys.exit(128 + 2)
    except EnvironmentError, (errno, strerror):
        try:
            print >>sys.stderr, strerror
        except StandardError:
            # even printing the error failed; still exit with status 2
            pass
        sys.exit(2)

    # Exit status 0 on a fully clean run, 1 when any file had problems.
    if outcome:
        sys.exit(0)
    else:
        sys.exit(1)
| gpl-2.0 |
jamespacileo/django-france | django/contrib/localflavor/pl/pl_administrativeunits.py | 433 | 13194 | # -*- coding: utf-8 -*-
"""
Polish administrative units as in http://pl.wikipedia.org/wiki/Podzia%C5%82_administracyjny_Polski
"""
# (value, label) choice pairs, grouped by voivodeship: city counties first,
# then land powiats. Values are ASCII slugs, labels the Polish names.
#
# NOTE(review): several labels contained spelling errors and are corrected
# here (wloclawski, zninski, sulecinski, kozienicki, glubczycki,
# kedzierzynsko-kozielski, siemiatycki, wejherowski, chodzieski). The slug
# values themselves are left untouched, since they may already be stored
# in databases.
ADMINISTRATIVE_UNIT_CHOICES = (
    # dolnośląskie
    ('wroclaw', u'Wrocław'),
    ('jeleniagora', u'Jelenia Góra'),
    ('legnica', u'Legnica'),
    ('boleslawiecki', u'bolesławiecki'),
    ('dzierzoniowski', u'dzierżoniowski'),
    ('glogowski', u'głogowski'),
    ('gorowski', u'górowski'),
    ('jaworski', u'jaworski'),
    ('jeleniogorski', u'jeleniogórski'),
    ('kamiennogorski', u'kamiennogórski'),
    ('klodzki', u'kłodzki'),
    ('legnicki', u'legnicki'),
    ('lubanski', u'lubański'),
    ('lubinski', u'lubiński'),
    ('lwowecki', u'lwówecki'),
    ('milicki', u'milicki'),
    ('olesnicki', u'oleśnicki'),
    ('olawski', u'oławski'),
    ('polkowicki', u'polkowicki'),
    ('strzelinski', u'strzeliński'),
    ('sredzki', u'średzki'),
    ('swidnicki', u'świdnicki'),
    ('trzebnicki', u'trzebnicki'),
    ('walbrzyski', u'wałbrzyski'),
    ('wolowski', u'wołowski'),
    ('wroclawski', u'wrocławski'),
    ('zabkowicki', u'ząbkowicki'),
    ('zgorzelecki', u'zgorzelecki'),
    ('zlotoryjski', u'złotoryjski'),
    # kujawsko-pomorskie
    ('bydgoszcz', u'Bydgoszcz'),
    ('torun', u'Toruń'),
    ('wloclawek', u'Włocławek'),
    ('grudziadz', u'Grudziądz'),
    ('aleksandrowski', u'aleksandrowski'),
    ('brodnicki', u'brodnicki'),
    ('bydgoski', u'bydgoski'),
    ('chelminski', u'chełmiński'),
    ('golubsko-dobrzynski', u'golubsko-dobrzyński'),
    ('grudziadzki', u'grudziądzki'),
    ('inowroclawski', u'inowrocławski'),
    ('lipnowski', u'lipnowski'),
    ('mogilenski', u'mogileński'),
    ('nakielski', u'nakielski'),
    ('radziejowski', u'radziejowski'),
    ('rypinski', u'rypiński'),
    ('sepolenski', u'sępoleński'),
    ('swiecki', u'świecki'),
    ('torunski', u'toruński'),
    ('tucholski', u'tucholski'),
    ('wabrzeski', u'wąbrzeski'),
    ('wloclawski', u'włocławski'),  # was u'wrocławski' (wrong voivodeship)
    ('zninski', u'żniński'),  # was u'źniński'
    # lubelskie
    ('lublin', u'Lublin'),
    ('biala-podlaska', u'Biała Podlaska'),
    ('chelm', u'Chełm'),
    ('zamosc', u'Zamość'),
    ('bialski', u'bialski'),
    ('bilgorajski', u'biłgorajski'),
    ('chelmski', u'chełmski'),
    ('hrubieszowski', u'hrubieszowski'),
    ('janowski', u'janowski'),
    ('krasnostawski', u'krasnostawski'),
    ('krasnicki', u'kraśnicki'),
    ('lubartowski', u'lubartowski'),
    ('lubelski', u'lubelski'),
    ('leczynski', u'łęczyński'),
    ('lukowski', u'łukowski'),
    ('opolski', u'opolski'),
    ('parczewski', u'parczewski'),
    ('pulawski', u'puławski'),
    ('radzynski', u'radzyński'),
    ('rycki', u'rycki'),
    ('swidnicki', u'świdnicki'),
    ('tomaszowski', u'tomaszowski'),
    ('wlodawski', u'włodawski'),
    ('zamojski', u'zamojski'),
    # lubuskie
    ('gorzow-wielkopolski', u'Gorzów Wielkopolski'),
    ('zielona-gora', u'Zielona Góra'),
    ('gorzowski', u'gorzowski'),
    ('krosnienski', u'krośnieński'),
    ('miedzyrzecki', u'międzyrzecki'),
    ('nowosolski', u'nowosolski'),
    ('slubicki', u'słubicki'),
    ('strzelecko-drezdenecki', u'strzelecko-drezdenecki'),
    ('sulecinski', u'sulęciński'),  # was u'suleńciński'
    ('swiebodzinski', u'świebodziński'),
    ('wschowski', u'wschowski'),
    ('zielonogorski', u'zielonogórski'),
    ('zaganski', u'żagański'),
    ('zarski', u'żarski'),
    # łódzkie
    ('lodz', u'Łódź'),
    ('piotrkow-trybunalski', u'Piotrków Trybunalski'),
    ('skierniewice', u'Skierniewice'),
    ('belchatowski', u'bełchatowski'),
    ('brzezinski', u'brzeziński'),
    ('kutnowski', u'kutnowski'),
    ('laski', u'łaski'),
    ('leczycki', u'łęczycki'),
    ('lowicki', u'łowicki'),
    # NOTE(review): this slug contains a space, unlike every other value;
    # left as-is for backward compatibility with stored data.
    ('lodzki wschodni', u'łódzki wschodni'),
    ('opoczynski', u'opoczyński'),
    ('pabianicki', u'pabianicki'),
    ('pajeczanski', u'pajęczański'),
    ('piotrkowski', u'piotrkowski'),
    ('poddebicki', u'poddębicki'),
    ('radomszczanski', u'radomszczański'),
    ('rawski', u'rawski'),
    ('sieradzki', u'sieradzki'),
    ('skierniewicki', u'skierniewicki'),
    ('tomaszowski', u'tomaszowski'),
    ('wielunski', u'wieluński'),
    ('wieruszowski', u'wieruszowski'),
    ('zdunskowolski', u'zduńskowolski'),
    ('zgierski', u'zgierski'),
    # małopolskie
    ('krakow', u'Kraków'),
    ('tarnow', u'Tarnów'),
    ('nowy-sacz', u'Nowy Sącz'),
    ('bochenski', u'bocheński'),
    ('brzeski', u'brzeski'),
    ('chrzanowski', u'chrzanowski'),
    ('dabrowski', u'dąbrowski'),
    ('gorlicki', u'gorlicki'),
    ('krakowski', u'krakowski'),
    ('limanowski', u'limanowski'),
    ('miechowski', u'miechowski'),
    ('myslenicki', u'myślenicki'),
    ('nowosadecki', u'nowosądecki'),
    ('nowotarski', u'nowotarski'),
    ('olkuski', u'olkuski'),
    ('oswiecimski', u'oświęcimski'),
    ('proszowicki', u'proszowicki'),
    ('suski', u'suski'),
    ('tarnowski', u'tarnowski'),
    ('tatrzanski', u'tatrzański'),
    ('wadowicki', u'wadowicki'),
    ('wielicki', u'wielicki'),
    # mazowieckie
    ('warszawa', u'Warszawa'),
    ('ostroleka', u'Ostrołęka'),
    ('plock', u'Płock'),
    ('radom', u'Radom'),
    ('siedlce', u'Siedlce'),
    ('bialobrzeski', u'białobrzeski'),
    ('ciechanowski', u'ciechanowski'),
    ('garwolinski', u'garwoliński'),
    ('gostyninski', u'gostyniński'),
    ('grodziski', u'grodziski'),
    ('grojecki', u'grójecki'),
    ('kozienicki', u'kozienicki'),  # was u'kozenicki'
    ('legionowski', u'legionowski'),
    ('lipski', u'lipski'),
    ('losicki', u'łosicki'),
    ('makowski', u'makowski'),
    ('minski', u'miński'),
    ('mlawski', u'mławski'),
    ('nowodworski', u'nowodworski'),
    ('ostrolecki', u'ostrołęcki'),
    ('ostrowski', u'ostrowski'),
    ('otwocki', u'otwocki'),
    ('piaseczynski', u'piaseczyński'),
    ('plocki', u'płocki'),
    ('plonski', u'płoński'),
    ('pruszkowski', u'pruszkowski'),
    ('przasnyski', u'przasnyski'),
    ('przysuski', u'przysuski'),
    ('pultuski', u'pułtuski'),
    ('radomski', u'radomski'),
    ('siedlecki', u'siedlecki'),
    ('sierpecki', u'sierpecki'),
    ('sochaczewski', u'sochaczewski'),
    ('sokolowski', u'sokołowski'),
    ('szydlowiecki', u'szydłowiecki'),
    ('warszawski-zachodni', u'warszawski zachodni'),
    ('wegrowski', u'węgrowski'),
    ('wolominski', u'wołomiński'),
    ('wyszkowski', u'wyszkowski'),
    ('zwolenski', u'zwoleński'),
    ('zurominski', u'żuromiński'),
    ('zyrardowski', u'żyrardowski'),
    # opolskie
    ('opole', u'Opole'),
    ('brzeski', u'brzeski'),
    ('glubczycki', u'głubczycki'),  # was u'głubczyski'
    ('kedzierzynsko-kozielski', u'kędzierzyńsko-kozielski'),  # was u'kędzierzyński-kozielski'
    ('kluczborski', u'kluczborski'),
    ('krapkowicki', u'krapkowicki'),
    ('namyslowski', u'namysłowski'),
    ('nyski', u'nyski'),
    ('oleski', u'oleski'),
    ('opolski', u'opolski'),
    ('prudnicki', u'prudnicki'),
    ('strzelecki', u'strzelecki'),
    # podkarpackie
    ('rzeszow', u'Rzeszów'),
    ('krosno', u'Krosno'),
    ('przemysl', u'Przemyśl'),
    ('tarnobrzeg', u'Tarnobrzeg'),
    ('bieszczadzki', u'bieszczadzki'),
    ('brzozowski', u'brzozowski'),
    ('debicki', u'dębicki'),
    ('jaroslawski', u'jarosławski'),
    ('jasielski', u'jasielski'),
    ('kolbuszowski', u'kolbuszowski'),
    ('krosnienski', u'krośnieński'),
    ('leski', u'leski'),
    ('lezajski', u'leżajski'),
    ('lubaczowski', u'lubaczowski'),
    ('lancucki', u'łańcucki'),
    ('mielecki', u'mielecki'),
    ('nizanski', u'niżański'),
    ('przemyski', u'przemyski'),
    ('przeworski', u'przeworski'),
    ('ropczycko-sedziszowski', u'ropczycko-sędziszowski'),
    ('rzeszowski', u'rzeszowski'),
    ('sanocki', u'sanocki'),
    ('stalowowolski', u'stalowowolski'),
    ('strzyzowski', u'strzyżowski'),
    ('tarnobrzeski', u'tarnobrzeski'),
    # podlaskie
    ('bialystok', u'Białystok'),
    ('lomza', u'Łomża'),
    ('suwalki', u'Suwałki'),
    ('augustowski', u'augustowski'),
    ('bialostocki', u'białostocki'),
    ('bielski', u'bielski'),
    ('grajewski', u'grajewski'),
    ('hajnowski', u'hajnowski'),
    ('kolnenski', u'kolneński'),
    # NOTE(review): slug contains a non-ASCII 'ł', unlike every other
    # value; left as-is for backward compatibility with stored data.
    ('łomzynski', u'łomżyński'),
    ('moniecki', u'moniecki'),
    ('sejnenski', u'sejneński'),
    ('siemiatycki', u'siemiatycki'),  # was u'siematycki'
    ('sokolski', u'sokólski'),
    ('suwalski', u'suwalski'),
    ('wysokomazowiecki', u'wysokomazowiecki'),
    ('zambrowski', u'zambrowski'),
    # pomorskie
    ('gdansk', u'Gdańsk'),
    ('gdynia', u'Gdynia'),
    ('slupsk', u'Słupsk'),
    ('sopot', u'Sopot'),
    ('bytowski', u'bytowski'),
    ('chojnicki', u'chojnicki'),
    ('czluchowski', u'człuchowski'),
    ('kartuski', u'kartuski'),
    ('koscierski', u'kościerski'),
    ('kwidzynski', u'kwidzyński'),
    ('leborski', u'lęborski'),
    ('malborski', u'malborski'),
    ('nowodworski', u'nowodworski'),
    ('gdanski', u'gdański'),
    ('pucki', u'pucki'),
    ('slupski', u'słupski'),
    ('starogardzki', u'starogardzki'),
    ('sztumski', u'sztumski'),
    ('tczewski', u'tczewski'),
    ('wejherowski', u'wejherowski'),  # was u'wejcherowski'
    # śląskie
    ('katowice', u'Katowice'),
    ('bielsko-biala', u'Bielsko-Biała'),
    ('bytom', u'Bytom'),
    ('chorzow', u'Chorzów'),
    ('czestochowa', u'Częstochowa'),
    ('dabrowa-gornicza', u'Dąbrowa Górnicza'),
    ('gliwice', u'Gliwice'),
    ('jastrzebie-zdroj', u'Jastrzębie Zdrój'),
    ('jaworzno', u'Jaworzno'),
    ('myslowice', u'Mysłowice'),
    ('piekary-slaskie', u'Piekary Śląskie'),
    ('ruda-slaska', u'Ruda Śląska'),
    ('rybnik', u'Rybnik'),
    ('siemianowice-slaskie', u'Siemianowice Śląskie'),
    ('sosnowiec', u'Sosnowiec'),
    ('swietochlowice', u'Świętochłowice'),
    ('tychy', u'Tychy'),
    ('zabrze', u'Zabrze'),
    ('zory', u'Żory'),
    ('bedzinski', u'będziński'),
    ('bielski', u'bielski'),
    ('bierunsko-ledzinski', u'bieruńsko-lędziński'),
    ('cieszynski', u'cieszyński'),
    ('czestochowski', u'częstochowski'),
    ('gliwicki', u'gliwicki'),
    ('klobucki', u'kłobucki'),
    ('lubliniecki', u'lubliniecki'),
    ('mikolowski', u'mikołowski'),
    ('myszkowski', u'myszkowski'),
    ('pszczynski', u'pszczyński'),
    ('raciborski', u'raciborski'),
    ('rybnicki', u'rybnicki'),
    ('tarnogorski', u'tarnogórski'),
    ('wodzislawski', u'wodzisławski'),
    ('zawiercianski', u'zawierciański'),
    ('zywiecki', u'żywiecki'),
    # świętokrzyskie
    ('kielce', u'Kielce'),
    ('buski', u'buski'),
    ('jedrzejowski', u'jędrzejowski'),
    ('kazimierski', u'kazimierski'),
    ('kielecki', u'kielecki'),
    ('konecki', u'konecki'),
    ('opatowski', u'opatowski'),
    ('ostrowiecki', u'ostrowiecki'),
    ('pinczowski', u'pińczowski'),
    ('sandomierski', u'sandomierski'),
    ('skarzyski', u'skarżyski'),
    ('starachowicki', u'starachowicki'),
    ('staszowski', u'staszowski'),
    ('wloszczowski', u'włoszczowski'),
    # warmińsko-mazurskie
    ('olsztyn', u'Olsztyn'),
    ('elblag', u'Elbląg'),
    ('bartoszycki', u'bartoszycki'),
    ('braniewski', u'braniewski'),
    ('dzialdowski', u'działdowski'),
    ('elblaski', u'elbląski'),
    ('elcki', u'ełcki'),
    ('gizycki', u'giżycki'),
    ('goldapski', u'gołdapski'),
    ('ilawski', u'iławski'),
    ('ketrzynski', u'kętrzyński'),
    ('lidzbarski', u'lidzbarski'),
    ('mragowski', u'mrągowski'),
    ('nidzicki', u'nidzicki'),
    ('nowomiejski', u'nowomiejski'),
    ('olecki', u'olecki'),
    ('olsztynski', u'olsztyński'),
    ('ostrodzki', u'ostródzki'),
    ('piski', u'piski'),
    ('szczycienski', u'szczycieński'),
    ('wegorzewski', u'węgorzewski'),
    # wielkopolskie
    ('poznan', u'Poznań'),
    ('kalisz', u'Kalisz'),
    ('konin', u'Konin'),
    ('leszno', u'Leszno'),
    ('chodzieski', u'chodzieski'),  # was u'chodziejski'
    ('czarnkowsko-trzcianecki', u'czarnkowsko-trzcianecki'),
    ('gnieznienski', u'gnieźnieński'),
    ('gostynski', u'gostyński'),
    ('grodziski', u'grodziski'),
    ('jarocinski', u'jarociński'),
    ('kaliski', u'kaliski'),
    ('kepinski', u'kępiński'),
    ('kolski', u'kolski'),
    ('koninski', u'koniński'),
    ('koscianski', u'kościański'),
    ('krotoszynski', u'krotoszyński'),
    ('leszczynski', u'leszczyński'),
    ('miedzychodzki', u'międzychodzki'),
    ('nowotomyski', u'nowotomyski'),
    ('obornicki', u'obornicki'),
    ('ostrowski', u'ostrowski'),
    ('ostrzeszowski', u'ostrzeszowski'),
    ('pilski', u'pilski'),
    ('pleszewski', u'pleszewski'),
    ('poznanski', u'poznański'),
    ('rawicki', u'rawicki'),
    ('slupecki', u'słupecki'),
    ('szamotulski', u'szamotulski'),
    ('sredzki', u'średzki'),
    ('sremski', u'śremski'),
    ('turecki', u'turecki'),
    ('wagrowiecki', u'wągrowiecki'),
    ('wolsztynski', u'wolsztyński'),
    ('wrzesinski', u'wrzesiński'),
    ('zlotowski', u'złotowski'),
    # zachodniopomorskie
    ('bialogardzki', u'białogardzki'),
    ('choszczenski', u'choszczeński'),
    ('drawski', u'drawski'),
    ('goleniowski', u'goleniowski'),
    ('gryficki', u'gryficki'),
    ('gryfinski', u'gryfiński'),
    ('kamienski', u'kamieński'),
    ('kolobrzeski', u'kołobrzeski'),
    ('koszalinski', u'koszaliński'),
    ('lobeski', u'łobeski'),
    ('mysliborski', u'myśliborski'),
    ('policki', u'policki'),
    ('pyrzycki', u'pyrzycki'),
    ('slawienski', u'sławieński'),
    ('stargardzki', u'stargardzki'),
    ('szczecinecki', u'szczecinecki'),
    ('swidwinski', u'świdwiński'),
    ('walecki', u'wałecki'),
)
| bsd-3-clause |
vulcansteel/autorest | AutoRest/Generators/Python/Azure.Python.Tests/Expected/AcceptanceTests/StorageManagementClient/storage_management_client/models/usage_name.py | 1 | 1109 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class UsageName(Model):
    """The Usage Names.

    :param str value: Gets a string describing the resource name.
    :param str localized_value: Gets a localized string describing the
     resource name.
    """
    # No attributes are mandatory for deserialization.
    _required = []
    # Maps Python attribute names to their wire-format keys and types.
    _attribute_map = {
        'value': {'key': 'value', 'type': 'str'},
        'localized_value': {'key': 'localizedValue', 'type': 'str'},
    }
    def __init__(self, *args, **kwargs):
        """UsageName

        Attributes start as None; Model.__init__ may populate them
        from the supplied arguments.
        """
        self.value = None
        self.localized_value = None
        super(UsageName, self).__init__(*args, **kwargs)
| mit |
hale36/SRTV | lib/sqlalchemy/testing/assertions.py | 75 | 15418 | # testing/assertions.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import
from . import util as testutil
from sqlalchemy import pool, orm, util
from sqlalchemy.engine import default, create_engine, url
from sqlalchemy import exc as sa_exc
from sqlalchemy.util import decorator
from sqlalchemy import types as sqltypes, schema
import warnings
import re
from .warnings import resetwarnings
from .exclusions import db_spec, _is_excluded
from . import assertsql
from . import config
import itertools
from .util import fail
import contextlib
def emits_warning(*messages):
    """Mark a test as emitting a warning.

    Called with no arguments, all SAWarning failures are squelched for the
    decorated test.  Otherwise each string given is installed as a
    warnings.filterwarnings() message prefix against SAWarning.
    """
    # TODO: it would be nice to assert that a named warning was
    # emitted. should work with some monkeypatching of warnings,
    # and may work on non-CPython if they keep to the spirit of
    # warnings.showwarning's docstring.
    # - update: jython looks ok, it uses cpython's module
    @decorator
    def decorate(fn, *args, **kw):
        # todo: should probably be strict about this, too
        # Pending-deprecation noise is always ignored; SAWarning is either
        # blanket-ignored (no messages) or ignored per given message prefix.
        filter_specs = [
            dict(action='ignore',
                 category=sa_exc.SAPendingDeprecationWarning)
        ]
        if messages:
            filter_specs.extend(
                dict(action='ignore',
                     message=msg,
                     category=sa_exc.SAWarning)
                for msg in messages
            )
        else:
            filter_specs.append(
                dict(action='ignore', category=sa_exc.SAWarning))
        for spec in filter_specs:
            warnings.filterwarnings(**spec)
        try:
            return fn(*args, **kw)
        finally:
            # Restore the global warning filters regardless of outcome.
            resetwarnings()
    return decorate
def emits_warning_on(db, *warnings):
    """Mark a test as emitting a warning on a specific dialect.

    With no message arguments, squelches all SAWarning failures; otherwise
    the strings are matched against the start of the warning text via
    warnings.filterwarnings().
    """
    spec = db_spec(db)
    @decorator
    def decorate(fn, *args, **kw):
        # Decide whether the current database matches the spec; a plain
        # string is matched against the active engine, anything else goes
        # through the exclusion machinery.
        if isinstance(db, util.string_types):
            applies = spec(config._current)
        else:
            applies = _is_excluded(*db)
        if not applies:
            return fn(*args, **kw)
        # Matching dialect: run the test under the warning filter.
        wrapped = emits_warning(*warnings)(fn)
        return wrapped(*args, **kw)
    return decorate
def uses_deprecated(*messages):
    """Mark a test as immune from fatal deprecation warnings.

    With no arguments, squelches all SADeprecationWarning failures.
    Or pass one or more strings; these will be matched to the root
    of the warning description by warnings.filterwarnings().

    As a special case, you may pass a function name prefixed with //
    and it will be re-written as needed to match the standard warning
    verbiage emitted by the sqlalchemy.util.deprecated decorator.
    """
    @decorator
    def decorate(fn, *args, **kw):
        # Delegate the filter setup/teardown to the context manager form.
        ctx = expect_deprecated(*messages)
        with ctx:
            return fn(*args, **kw)
    return decorate
@contextlib.contextmanager
def expect_deprecated(*messages):
    """Context manager squelching SADeprecationWarning failures.

    Message prefixes starting with ``//`` are rewritten to the standard
    "Call to deprecated function <name>" verbiage.
    """
    # todo: should probably be strict about this, too
    specs = [dict(action='ignore',
                  category=sa_exc.SAPendingDeprecationWarning)]
    if not messages:
        specs.append(dict(action='ignore',
                          category=sa_exc.SADeprecationWarning))
    else:
        for msg in messages:
            if msg.startswith('//'):
                msg = 'Call to deprecated function ' + msg[2:]
            specs.append(dict(action='ignore',
                              message=msg,
                              category=sa_exc.SADeprecationWarning))
    for spec in specs:
        warnings.filterwarnings(**spec)
    try:
        yield
    finally:
        # Always restore the global warning filters.
        resetwarnings()
def global_cleanup_assertions():
    """Run end-of-suite sanity checks.

    Currently a single hardcoded check (no stray pool connections); a
    modular registry could live here to also verify things like PG
    prepared transactions or that all tables were dropped.
    """
    _assert_no_stray_pool_connections()
_STRAY_CONNECTION_FAILURES = 0
def _assert_no_stray_pool_connections():
    """Fail when checked-out pool connections survive test cleanup.

    Tolerates one transient stray connection (occasionally observed on
    slow hosts, e.g. EC2) after forcing a real GC sweep; a second
    occurrence, or refs that survive the sweep, raise AssertionError.
    """
    global _STRAY_CONNECTION_FAILURES
    # lazy gc on cPython means "do nothing." pool connections
    # shouldn't be in cycles, should go away.
    testutil.lazy_gc()
    # however, once in awhile, on an EC2 machine usually,
    # there's a ref in there. usually just one.
    if pool._refs:
        # OK, let's be somewhat forgiving. Increment a counter,
        # we'll allow a couple of these at most.
        _STRAY_CONNECTION_FAILURES += 1
        print("Encountered a stray connection in test cleanup: %s"
              % str(pool._refs))
        # then do a real GC sweep. We shouldn't even be here
        # so a single sweep should really be doing it, otherwise
        # there's probably a real unreachable cycle somewhere.
        testutil.gc_collect()
    # if we've already had two of these occurrences, or
    # after a hard gc sweep we still have pool._refs?!
    # now we have to raise.
    if _STRAY_CONNECTION_FAILURES >= 2 or pool._refs:
        err = str(pool._refs)
        # but clean out the pool refs collection directly,
        # reset the counter,
        # so the error doesn't at least keep happening.
        pool._refs.clear()
        _STRAY_CONNECTION_FAILURES = 0
        # fixed typo in the assertion message ("conections")
        assert False, "Stray connections in cleanup: %s" % err
def eq_(a, b, msg=None):
    """Assert a == b, with repr messaging on failure."""
    assert a == b, msg or "{!r} != {!r}".format(a, b)
def ne_(a, b, msg=None):
    """Assert a != b, with repr messaging on failure."""
    assert a != b, msg or "{!r} == {!r}".format(a, b)
def is_(a, b, msg=None):
    """Assert a is b, with repr messaging on failure."""
    assert a is b, msg or "{!r} is not {!r}".format(a, b)
def is_not_(a, b, msg=None):
    """Assert a is not b, with repr messaging on failure."""
    assert a is not b, msg or "{!r} is {!r}".format(a, b)
def startswith_(a, fragment, msg=None):
    """Assert a.startswith(fragment), with repr messaging on failure."""
    assert a.startswith(fragment), \
        msg or "{!r} does not start with {!r}".format(a, fragment)
def assert_raises(except_cls, callable_, *args, **kw):
    """Assert that invoking ``callable_`` raises ``except_cls``."""
    raised = False
    try:
        callable_(*args, **kw)
    except except_cls:
        raised = True
    # assert outside the block so it works for AssertionError too !
    assert raised, "Callable did not raise an exception"
def assert_raises_message(except_cls, msg, callable_, *args, **kwargs):
    """Assert the call raises ``except_cls`` with text matching regex ``msg``."""
    try:
        callable_(*args, **kwargs)
        assert False, "Callable did not raise an exception"
    except except_cls as err:
        text = util.text_type(err)
        assert re.search(msg, text, re.UNICODE), "%r !~ %s" % (msg, err)
        print(text.encode('utf-8'))
class AssertsCompiledSQL(object):
    """Mixin providing assertions about the SQL string a clause compiles to."""
    def assert_compile(self, clause, result, params=None,
                       checkparams=None, dialect=None,
                       checkpositional=None,
                       use_default_dialect=False,
                       allow_dialect_select=False,
                       literal_binds=False):
        """Compile *clause* and assert the rendered SQL equals *result*.

        :param clause: Core construct or ORM Query to compile.
        :param result: expected SQL string (newlines/tabs are stripped
         before comparison).
        :param params: bind parameter dict; also used for column_keys.
        :param checkparams: expected constructed parameter dict.
        :param dialect: dialect object, 'default', or a dialect URL name.
        :param checkpositional: expected positional parameter tuple.
        :param use_default_dialect: force the generic DefaultDialect.
        :param allow_dialect_select: let the clause choose its own dialect.
        :param literal_binds: render bound values inline.
        """
        # Resolve the dialect: explicit flags win, then self.__dialect__,
        # then the configured database's dialect; strings are looked up
        # as dialect names.
        if use_default_dialect:
            dialect = default.DefaultDialect()
        elif allow_dialect_select:
            dialect = None
        else:
            if dialect is None:
                dialect = getattr(self, '__dialect__', None)
            if dialect is None:
                dialect = config.db.dialect
            elif dialect == 'default':
                dialect = default.DefaultDialect()
            elif isinstance(dialect, util.string_types):
                dialect = url.URL(dialect).get_dialect()()
        kw = {}
        compile_kwargs = {}
        if params is not None:
            kw['column_keys'] = list(params)
        if literal_binds:
            compile_kwargs['literal_binds'] = True
        # An ORM Query is reduced to its SELECT statement with labels on.
        if isinstance(clause, orm.Query):
            context = clause._compile_context()
            context.statement.use_labels = True
            clause = context.statement
        if compile_kwargs:
            kw['compile_kwargs'] = compile_kwargs
        c = clause.compile(dialect=dialect, **kw)
        param_str = repr(getattr(c, 'params', {}))
        # Emit the compiled SQL for debugging; encoding differs on py2/py3.
        if util.py3k:
            param_str = param_str.encode('utf-8').decode('ascii', 'ignore')
            print(("\nSQL String:\n" + util.text_type(c) + param_str).encode('utf-8'))
        else:
            print("\nSQL String:\n" + util.text_type(c).encode('utf-8') + param_str)
        # Normalize whitespace before comparing against the expectation.
        cc = re.sub(r'[\n\t]', '', util.text_type(c))
        eq_(cc, result, "%r != %r on dialect %r" % (cc, result, dialect))
        if checkparams is not None:
            eq_(c.construct_params(params), checkparams)
        if checkpositional is not None:
            p = c.construct_params(params)
            eq_(tuple([p[x] for x in c.positiontup]), checkpositional)
class ComparesTables(object):
    """Mixin comparing a Table definition against its reflected counterpart."""
    def assert_tables_equal(self, table, reflected_table, strict_types=False):
        """Assert columns, types, FKs and primary key match after reflection.

        :param strict_types: when True, require exact type classes rather
         than type-affinity compatibility.
        """
        assert len(table.c) == len(reflected_table.c)
        for c, reflected_c in zip(table.c, reflected_table.c):
            eq_(c.name, reflected_c.name)
            assert reflected_c is reflected_table.c[c.name]
            eq_(c.primary_key, reflected_c.primary_key)
            eq_(c.nullable, reflected_c.nullable)
            if strict_types:
                msg = "Type '%s' doesn't correspond to type '%s'"
                assert type(reflected_c.type) is type(c.type), \
                    msg % (reflected_c.type, c.type)
            else:
                self.assert_types_base(reflected_c, c)
            if isinstance(c.type, sqltypes.String):
                eq_(c.type.length, reflected_c.type.length)
            # Foreign keys are compared by referenced column name only.
            eq_(
                set([f.column.name for f in c.foreign_keys]),
                set([f.column.name for f in reflected_c.foreign_keys])
            )
            if c.server_default:
                assert isinstance(reflected_c.server_default,
                                  schema.FetchedValue)
        assert len(table.primary_key) == len(reflected_table.primary_key)
        for c in table.primary_key:
            assert reflected_table.primary_key.columns[c.name] is not None
    def assert_types_base(self, c1, c2):
        """Assert the two columns' types share the same type affinity."""
        assert c1.type._compare_type_affinity(c2.type),\
            "On column %r, type '%s' doesn't correspond to type '%s'" % \
            (c1.name, c1.type, c2.type)
class AssertsExecutionResults(object):
    """Mixin providing assertions about execution results and emitted SQL."""
    def assert_result(self, result, class_, *objects):
        """Assert *result* rows are *class_* instances matching *objects*."""
        result = list(result)
        print(repr(result))
        self.assert_list(result, class_, objects)
    def assert_list(self, result, class_, list):
        """Assert *result* matches the sequence of attribute specs, in order."""
        # NOTE: parameter name "list" shadows the builtin; kept as-is.
        self.assert_(len(result) == len(list),
                     "result list is not the same size as test list, " +
                     "for class " + class_.__name__)
        for i in range(0, len(list)):
            self.assert_row(class_, result[i], list[i])
    def assert_row(self, class_, rowobj, desc):
        """Assert one row object matches the attribute-spec dict *desc*."""
        self.assert_(rowobj.__class__ is class_,
                     "item class is not " + repr(class_))
        for key, value in desc.items():
            # Tuple values describe nested structures: (cls, [specs...])
            # for a collection, (cls, spec) for a single related object.
            if isinstance(value, tuple):
                if isinstance(value[1], list):
                    self.assert_list(getattr(rowobj, key), value[0], value[1])
                else:
                    self.assert_row(value[0], getattr(rowobj, key), value[1])
            else:
                self.assert_(getattr(rowobj, key) == value,
                             "attribute %s value %s does not match %s" % (
                                 key, getattr(rowobj, key), value))
    def assert_unordered_result(self, result, cls, *expected):
        """As assert_result, but the order of objects is not considered.

        The algorithm is very expensive but not a big deal for the small
        numbers of rows that the test suite manipulates.
        """
        # Hash dicts by identity so the expected specs can live in a set.
        class immutabledict(dict):
            def __hash__(self):
                return id(self)
        found = util.IdentitySet(result)
        expected = set([immutabledict(e) for e in expected])
        for wrong in util.itertools_filterfalse(lambda o: type(o) == cls, found):
            fail('Unexpected type "%s", expected "%s"' % (
                type(wrong).__name__, cls.__name__))
        if len(found) != len(expected):
            fail('Unexpected object count "%s", expected "%s"' % (
                len(found), len(expected)))
        NOVALUE = object()
        def _compare_item(obj, spec):
            # True when every key/value in spec matches obj's attributes;
            # tuple values recurse into nested unordered comparisons.
            for key, value in spec.items():
                if isinstance(value, tuple):
                    try:
                        self.assert_unordered_result(
                            getattr(obj, key), value[0], *value[1])
                    except AssertionError:
                        return False
                else:
                    if getattr(obj, key, NOVALUE) != value:
                        return False
            return True
        # Greedily pair each expected spec with some remaining found item.
        for expected_item in expected:
            for found_item in found:
                if _compare_item(found_item, expected_item):
                    found.remove(found_item)
                    break
            else:
                fail(
                    "Expected %s instance with attributes %s not found." % (
                        cls.__name__, repr(expected_item)))
        return True
    def assert_sql_execution(self, db, callable_, *rules):
        """Run *callable_* and assert emitted SQL satisfies the given rules."""
        assertsql.asserter.add_rules(rules)
        try:
            callable_()
            assertsql.asserter.statement_complete()
        finally:
            assertsql.asserter.clear_rules()
    def assert_sql(self, db, callable_, list_, with_sequences=None):
        """Assert exact SQL statements; dicts mean "all of these, any order"."""
        # Sequence-capable databases may expect a different statement list.
        if with_sequences is not None and config.db.dialect.supports_sequences:
            rules = with_sequences
        else:
            rules = list_
        newrules = []
        for rule in rules:
            if isinstance(rule, dict):
                newrule = assertsql.AllOf(*[
                    assertsql.ExactSQL(k, v) for k, v in rule.items()
                ])
            else:
                newrule = assertsql.ExactSQL(*rule)
            newrules.append(newrule)
        self.assert_sql_execution(db, callable_, *newrules)
    def assert_sql_count(self, db, callable_, count):
        """Assert that exactly *count* statements are emitted by *callable_*."""
        self.assert_sql_execution(
            db, callable_, assertsql.CountStatements(count))
    @contextlib.contextmanager
    def assert_execution(self, *rules):
        """Context-manager form of assert_sql_execution."""
        assertsql.asserter.add_rules(rules)
        try:
            yield
            assertsql.asserter.statement_complete()
        finally:
            assertsql.asserter.clear_rules()
    def assert_statement_count(self, count):
        """Context manager asserting *count* statements within the block."""
        return self.assert_execution(assertsql.CountStatements(count))
| gpl-3.0 |
keras-team/keras-io | examples/vision/xray_classification_with_tpus.py | 1 | 12745 | """
Title: Pneumonia Classification on TPU
Author: Amy MiHyun Jang
Date created: 2020/07/28
Last modified: 2020/08/24
Description: Medical image classification on TPU.
"""
"""
## Introduction + Set-up
This tutorial will explain how to build an X-ray image classification model
to predict whether an X-ray scan shows presence of pneumonia.
"""
import re
import os
import random
import numpy as np
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
# Prefer a TPU cluster when one is reachable; otherwise fall back to the
# default (CPU/GPU) distribution strategy.
try:
    tpu = tf.distribute.cluster_resolver.TPUClusterResolver()
    print("Device:", tpu.master())
    tf.config.experimental_connect_to_cluster(tpu)
    tf.tpu.experimental.initialize_tpu_system(tpu)
    strategy = tf.distribute.experimental.TPUStrategy(tpu)
# NOTE(review): bare "except" also swallows KeyboardInterrupt/SystemExit;
# consider narrowing (TPUClusterResolver raises ValueError when no TPU
# is found — confirm).
except:
    strategy = tf.distribute.get_strategy()
print("Number of replicas:", strategy.num_replicas_in_sync)
"""
We need a Google Cloud link to our data to load the data using a TPU.
Below, we define key configuration parameters we'll use in this example.
To run on TPU, this example must be on Colab with the TPU runtime selected.
"""
AUTOTUNE = tf.data.experimental.AUTOTUNE
# 25 images per replica per global batch.
BATCH_SIZE = 25 * strategy.num_replicas_in_sync
IMAGE_SIZE = [180, 180]
# Index 0 -> NORMAL, index 1 -> PNEUMONIA (aligned with the boolean label).
CLASS_NAMES = ["NORMAL", "PNEUMONIA"]
"""
## Load the data
The Chest X-ray data we are using from
[*Cell*](https://www.cell.com/cell/fulltext/S0092-8674(18)30154-5) divides the data into
training and test files. Let's first load in the training TFRecords.
"""
# Images and their file paths are stored as parallel TFRecord files;
# zipping them yields (image bytes, path) pairs.
train_images = tf.data.TFRecordDataset(
    "gs://download.tensorflow.org/data/ChestXRay2017/train/images.tfrec"
)
train_paths = tf.data.TFRecordDataset(
    "gs://download.tensorflow.org/data/ChestXRay2017/train/paths.tfrec"
)
ds = tf.data.Dataset.zip((train_images, train_paths))
"""
Let's count how many healthy/normal chest X-rays we have and how many
pneumonia chest X-rays we have:
"""
# Eagerly iterate the path dataset to count examples per class; the class
# name is embedded in each file path.
COUNT_NORMAL = len(
    [
        filename
        for filename in train_paths
        if "NORMAL" in filename.numpy().decode("utf-8")
    ]
)
print("Normal images count in training set: " + str(COUNT_NORMAL))
COUNT_PNEUMONIA = len(
    [
        filename
        for filename in train_paths
        if "PNEUMONIA" in filename.numpy().decode("utf-8")
    ]
)
print("Pneumonia images count in training set: " + str(COUNT_PNEUMONIA))
"""
Notice that there are way more images that are classified as pneumonia than normal. This
shows that we have an imbalance in our data. We will correct for this imbalance later on
in our notebook.
"""
"""
We want to map each filename to the corresponding (image, label) pair. The following
methods will help us do that.
As we only have two labels, we will encode the label so that `1` or `True` indicates
pneumonia and `0` or `False` indicates normal.
"""
def get_label(file_path):
    """Return True when the path's class directory is PNEUMONIA.

    Path layout is .../<class-dir>/<file>, so the class directory is the
    second-to-last path component.
    """
    components = tf.strings.split(file_path, "/")
    return components[-2] == "PNEUMONIA"
def decode_img(img):
    """Decode a compressed JPEG byte string into a 3-channel tensor
    resized to IMAGE_SIZE."""
    decoded = tf.image.decode_jpeg(img, channels=3)
    return tf.image.resize(decoded, IMAGE_SIZE)
def process_path(image, path):
    """Map a (raw image bytes, file path) pair to an (image, label) pair."""
    return decode_img(image), get_label(path)
ds = ds.map(process_path, num_parallel_calls=AUTOTUNE)
"""
Let's split the data into a training and validation datasets.
"""
# Shuffle once, then carve a fixed-size training split off the front.
ds = ds.shuffle(10000)
train_ds = ds.take(4200)
val_ds = ds.skip(4200)
"""
Let's visualize the shape of an (image, label) pair.
"""
for image, label in train_ds.take(1):
    print("Image shape: ", image.numpy().shape)
    print("Label: ", label.numpy())
"""
Load and format the test data as well.
"""
test_images = tf.data.TFRecordDataset(
    "gs://download.tensorflow.org/data/ChestXRay2017/test/images.tfrec"
)
test_paths = tf.data.TFRecordDataset(
    "gs://download.tensorflow.org/data/ChestXRay2017/test/paths.tfrec"
)
test_ds = tf.data.Dataset.zip((test_images, test_paths))
test_ds = test_ds.map(process_path, num_parallel_calls=AUTOTUNE)
test_ds = test_ds.batch(BATCH_SIZE)
"""
## Visualize the dataset
First, let's use buffered prefetching so we can yield data from disk without having I/O
become blocking.
Please note that large image datasets should not be cached in memory. We do it here
because the dataset is not very large and we want to train on TPU.
"""
def prepare_for_training(ds, cache=True):
    """Cache, batch and prefetch a dataset for efficient training.

    ``cache`` may be True (in-memory cache — fine here because the dataset
    is small), a filename string (on-disk cache), or falsy to skip caching.
    """
    if cache:
        ds = ds.cache(cache) if isinstance(cache, str) else ds.cache()
    ds = ds.batch(BATCH_SIZE)
    # Overlap host-side data preparation with accelerator-side training.
    return ds.prefetch(buffer_size=AUTOTUNE)
"""
Call the next batch iteration of the training data.
"""
train_ds = prepare_for_training(train_ds)
val_ds = prepare_for_training(val_ds)
image_batch, label_batch = next(iter(train_ds))
"""
Define the method to show the images in the batch.
"""
def show_batch(image_batch, label_batch):
    """Plot the first 25 images of a batch in a 5x5 grid, titled by label."""
    plt.figure(figsize=(10, 10))
    for idx in range(25):
        plt.subplot(5, 5, idx + 1)
        # Pixel values are in [0, 255]; rescale for imshow.
        plt.imshow(image_batch[idx] / 255)
        plt.title("PNEUMONIA" if label_batch[idx] else "NORMAL")
        plt.axis("off")
"""
As the method takes in NumPy arrays as its parameters, call the numpy function on the
batches to return the tensor in NumPy array form.
"""
show_batch(image_batch.numpy(), label_batch.numpy())
"""
## Build the CNN
To make our model more modular and easier to understand, let's define some blocks. As
we're building a convolution neural network, we'll create a convolution block and a dense
layer block.
The architecture for this CNN has been inspired by this
[article](https://towardsdatascience.com/deep-learning-for-detecting-pneumonia-from-x-ray-images-fc9a3d9fdba8).
"""
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.layers.experimental import preprocessing
def conv_block(filters, inputs):
    """Two separable 3x3 convolutions, batch norm, then 2x2 max pooling."""
    net = layers.SeparableConv2D(filters, 3, activation="relu", padding="same")(inputs)
    net = layers.SeparableConv2D(filters, 3, activation="relu", padding="same")(net)
    net = layers.BatchNormalization()(net)
    return layers.MaxPool2D()(net)
def dense_block(units, dropout_rate, inputs):
    """ReLU dense layer followed by batch norm and dropout."""
    net = layers.Dense(units, activation="relu")(inputs)
    net = layers.BatchNormalization()(net)
    return layers.Dropout(dropout_rate)(net)
"""
The following method will define the function to build our model for us.
The images originally have values that range from [0, 255]. CNNs work better with smaller
numbers so we will scale this down for our input.
The Dropout layers are important, as they
reduce the likelihood of the model overfitting. We want to end the model with a `Dense`
layer with one node, as this will be the binary output that determines if an X-ray shows
presence of pneumonia.
"""
def build_model():
    """Assemble the CNN: rescaling, a plain conv stem, separable-conv
    blocks of increasing width, then a dropout-regularized dense head
    ending in a single sigmoid unit (binary pneumonia/normal output)."""
    inputs = keras.Input(shape=(IMAGE_SIZE[0], IMAGE_SIZE[1], 3))
    # Scale pixel values from [0, 255] down to [0, 1].
    net = preprocessing.Rescaling(1.0 / 255)(inputs)
    net = layers.Conv2D(16, 3, activation="relu", padding="same")(net)
    net = layers.Conv2D(16, 3, activation="relu", padding="same")(net)
    net = layers.MaxPool2D()(net)
    for filters in (32, 64, 128):
        net = conv_block(filters, net)
    net = layers.Dropout(0.2)(net)
    net = conv_block(256, net)
    net = layers.Dropout(0.2)(net)
    net = layers.Flatten()(net)
    for units, rate in ((512, 0.7), (128, 0.5), (64, 0.3)):
        net = dense_block(units, rate, net)
    outputs = layers.Dense(1, activation="sigmoid")(net)
    return keras.Model(inputs=inputs, outputs=outputs)
"""
## Correct for data imbalance
We saw earlier in this example that the data was imbalanced, with more images classified
as pneumonia than normal. We will correct for that by using class weighting:
"""
# Log-odds of the class ratio; printed for reference.
initial_bias = np.log([COUNT_PNEUMONIA / COUNT_NORMAL])
print("Initial bias: {:.5f}".format(initial_bias[0]))
# Inverse-frequency class weights so each class contributes equally
# to the loss despite the imbalance.
TRAIN_IMG_COUNT = COUNT_NORMAL + COUNT_PNEUMONIA
weight_for_0 = (1 / COUNT_NORMAL) * (TRAIN_IMG_COUNT) / 2.0
weight_for_1 = (1 / COUNT_PNEUMONIA) * (TRAIN_IMG_COUNT) / 2.0
class_weight = {0: weight_for_0, 1: weight_for_1}
print("Weight for class 0: {:.2f}".format(weight_for_0))
print("Weight for class 1: {:.2f}".format(weight_for_1))
"""
The weight for class `0` (Normal) is a lot higher than the weight for class `1`
(Pneumonia). Because there are fewer normal images, each normal image will be weighted
more to balance the data as the CNN works best when the training data is balanced.
"""
"""
## Train the model
"""
"""
### Defining callbacks
The checkpoint callback saves the best weights of the model, so next time we want to use
the model, we do not have to spend time training it. The early stopping callback stops
the training process when the model starts becoming stagnant, or even worse, when the
model starts overfitting.
"""
# Persist the best weights seen so far; stop after 10 stagnant epochs and
# roll back to the best weights.
checkpoint_cb = tf.keras.callbacks.ModelCheckpoint("xray_model.h5", save_best_only=True)
early_stopping_cb = tf.keras.callbacks.EarlyStopping(
    patience=10, restore_best_weights=True
)
"""
We also want to tune our learning rate. Too high of a learning rate will cause the model
to diverge. Too small of a learning rate will cause the model to be too slow. We
implement the exponential learning rate scheduling method below.
"""
# Decay the learning rate by 4% every 100k steps (staircase).
initial_learning_rate = 0.015
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate, decay_steps=100000, decay_rate=0.96, staircase=True
)
"""
### Fit the model
For our metrics, we want to include precision and recall as they will provide us with a
more informed picture of how good our model is. Accuracy tells us what fraction of the
labels is correct. Since our data is not balanced, accuracy might give a skewed sense of
a good model (i.e. a model that always predicts PNEUMONIA will be 74% accurate but is not
a good model).
Precision is the number of true positives (TP) over the sum of TP and false positives
(FP). It shows what fraction of labeled positives are actually correct.
Recall is the number of TP over the sum of TP and false negatives (FN). It shows what
fraction of actual positives are correct.
Since there are only two possible labels for the image, we will be using the
binary crossentropy loss. When we fit the model, remember to specify the class weights,
which we defined earlier. Because we are using a TPU, training will be quick - less than
2 minutes.
"""
# Model creation and compilation must happen inside the strategy scope so
# variables are placed on the TPU replicas.
with strategy.scope():
    model = build_model()
    METRICS = [
        tf.keras.metrics.BinaryAccuracy(),
        tf.keras.metrics.Precision(name="precision"),
        tf.keras.metrics.Recall(name="recall"),
    ]
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=lr_schedule),
        loss="binary_crossentropy",
        metrics=METRICS,
    )
# Early stopping typically ends training well before 100 epochs.
history = model.fit(
    train_ds,
    epochs=100,
    validation_data=val_ds,
    class_weight=class_weight,
    callbacks=[checkpoint_cb, early_stopping_cb],
)
"""
## Visualizing model performance
Let's plot the model accuracy and loss for the training and the validating set. Note that
no random seed is specified for this notebook. For your notebook, there might be slight
variance.
"""
# One subplot per tracked metric, train vs. validation curves.
fig, ax = plt.subplots(1, 4, figsize=(20, 3))
ax = ax.ravel()
for i, met in enumerate(["precision", "recall", "binary_accuracy", "loss"]):
    ax[i].plot(history.history[met])
    ax[i].plot(history.history["val_" + met])
    ax[i].set_title("Model {}".format(met))
    ax[i].set_xlabel("epochs")
    ax[i].set_ylabel(met)
    ax[i].legend(["train", "val"])
"""
We see that the accuracy for our model is around 95%.
"""
"""
## Predict and evaluate results
Let's evaluate the model on our test data!
"""
# Report loss and metrics on the held-out test set.
model.evaluate(test_ds, return_dict=True)
"""
We see that our accuracy on our test data is lower than the accuracy for our validating
set. This may indicate overfitting.
Our recall is greater than our precision, indicating that almost all pneumonia images are
correctly identified but some normal images are falsely identified. We should aim to
increase our precision.
"""
# Display the first image of one test batch with its true label; the
# boolean label indexes CLASS_NAMES (False -> NORMAL, True -> PNEUMONIA).
for image, label in test_ds.take(1):
    plt.imshow(image[0] / 255.0)
    plt.title(CLASS_NAMES[label[0].numpy()])
prediction = model.predict(test_ds.take(1))[0]
# NOTE(review): `prediction` appears to be a length-1 array, so each
# printed score is an array rather than a scalar — confirm formatting.
scores = [1 - prediction, prediction]
for score, name in zip(scores, CLASS_NAMES):
    print("This image is %.2f percent %s" % ((100 * score), name))
| apache-2.0 |
Logan1x/Python-Scripts | bin/findlargefiles.py | 5 | 2553 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# findlargefiles.py Searches a file location and subdirectories for
# files larger than a given size.
"""
findlargefiles.py Searches a file location and subdirectories for
files larger than a given size.
Useful for phones which might hide files in FileExplorer.
Directly prints results if run directly.
May also be imported, yielding results one by one.
Created on Sun Sep 3 20:35:12 2017
@author: david.antonini // toonarmycaptain
"""
import os
def search_folder(location, min_file_size):
    """Walk *location* recursively, yielding files >= *min_file_size* MB.

    :param location: root directory to walk.
    :param min_file_size: minimum size threshold, in megabytes.
    :yields: (foldername, filename, size_in_bytes) for each match.

    Each match is printed as it is found; summary statistics are printed
    once the walk completes. Files that vanish mid-walk (e.g. broken
    symlinks) are counted and reported rather than aborting the search.
    """
    file_not_found_errors_count = 0
    files_found_count = 0
    total_size = 0
    # Convert the MB threshold to bytes once, outside the loops.
    min_size_bytes = min_file_size * 1024 ** 2
    print(f'Files larger than {min_file_size:.2f} MB in location: {location}')
    for foldername, subfolders, filenames in os.walk(location):
        for filename in filenames:
            path = os.path.join(foldername, filename)
            try:
                # Only getsize() can raise FileNotFoundError; keep the
                # try body minimal so match handling isn't swallowed.
                actual_size = os.path.getsize(path)
            except FileNotFoundError:
                file_not_found_errors_count += 1
                # Original printed a placeholder here; report the path.
                print(f'FileNotFoundError: {path}')
                continue
            if min_size_bytes <= actual_size:
                print(f'{foldername}\\{filename} - '
                      f'{(actual_size/1024**2):.2f} MB')
                yield foldername, filename, actual_size
                files_found_count += 1
                total_size += actual_size
    print(f'Files found: {files_found_count}')
    print(f'Total size: {(total_size/1024**2):.2f} MB')
    if file_not_found_errors_count > 0:
        print(f'FileNotFoundErrors: {file_not_found_errors_count}')
if __name__ == '__main__':
    print('This program searches for files larger than a given size '
          'in a given location.')
    # Prompt until an existing path is supplied.
    while True:
        location = input("Where would you like to search? ")
        if os.path.exists(location):
            break
        else:
            print('Please enter a valid path.')
    # Prompt until a numeric size is supplied.
    while True:
        try:
            min_file_size = float(input('Please enter file size in MB: '))
            break
        except ValueError:
            print('Please enter numeric input only.')
    # Fixed: the original first called search_folder() without consuming
    # the returned generator (a no-op) and then iterated a second call;
    # a single consuming pass suffices.
    for foldername, filename, actual_size in search_folder(location,
                                                           min_file_size):
        print(f'{foldername}\\{filename} - {(actual_size/1024**2):.2f} MB')
| mit |
Manuel1510/Manuelbot | requests/packages/chardet/langhungarianmodel.py | 2763 | 12536 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
Latin2_HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
win1250HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 94.7368%
# first 1024 sequences:5.2623%
# rest sequences: 0.8894%
# negative sequences: 0.0009%
HungarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
)
# Model wiring for Hungarian text in ISO-8859-2 (Latin-2): pairs the Latin-2
# char-to-order map with the shared bigram language model above.
Latin2HungarianModel = {
  'charToOrderMap': Latin2_HungarianCharToOrderMap,
  'precedenceMatrix': HungarianLangModel,
  # Matches the "first 512 sequences" ratio in the generated-model comments.
  'mTypicalPositiveRatio': 0.947368,
  # Hungarian uses the basic Latin alphabet, so ASCII letters stay relevant.
  'keepEnglishLetter': True,
  'charsetName': "ISO-8859-2"
}
# Same language model as above, but keyed to the windows-1250 byte layout.
Win1250HungarianModel = {
  'charToOrderMap': win1250HungarianCharToOrderMap,
  'precedenceMatrix': HungarianLangModel,
  'mTypicalPositiveRatio': 0.947368,
  'keepEnglishLetter': True,
  'charsetName': "windows-1250"
}
# flake8: noqa
| unlicense |
DirkHoffmann/indico | indico/modules/events/registration/fields/base.py | 1 | 6511 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from copy import deepcopy
from wtforms.validators import DataRequired, Optional
from indico.modules.events.registration.models.registrations import RegistrationData
class RegistrationFormFieldBase:
    """Base class for a registration form field definition."""

    #: unique name of the field type
    name = None
    #: wtform field class
    wtf_field_class = None
    #: additional options for the WTForm class
    wtf_field_kwargs = {}
    #: the validator to use when the field is required
    required_validator = DataRequired
    #: the validator to use when the field is not required
    not_required_validator = Optional
    #: the data fields that need to be versioned
    versioned_data_fields = frozenset({'is_billable', 'price'})

    def __init__(self, form_item):
        # `form_item` is the form-item database object this field wraps
        self.form_item = form_item

    @property
    def default_value(self):
        # Value used when the registrant provided no data.
        return ''

    @property
    def validators(self):
        """Return a list of validators for this field."""
        return None

    @property
    def filter_choices(self):
        # Choices offered when filtering registrations by this field;
        # None means the field is not filterable.
        return None

    def calculate_price(self, reg_data, versioned_data):
        """Calculate the price of the field.

        :param reg_data: The user data for the field
        :param versioned_data: The versioned field data to use
        """
        # Plain (non-billable) fields never contribute to the total.
        return 0

    def create_sql_filter(self, data_list):
        """
        Create a SQL criterion to check whether the field's value is
        in `data_list`. The function is expected to return an
        operation on ``Registrationdata.data``.
        """
        # '#>>' with an empty path extracts the stored JSON value as text.
        return RegistrationData.data.op('#>>')('{}').in_(data_list)

    def create_wtf_field(self):
        # Build the WTForms field, layering the required/optional validator
        # on top of any field-specific validators.
        validators = list(self.validators) if self.validators is not None else []
        if self.form_item.is_required:
            validators.append(self.required_validator())
        elif self.not_required_validator:
            validators.append(self.not_required_validator())
        return self.wtf_field_class(self.form_item.title, validators, **self.wtf_field_kwargs)

    def has_data_changed(self, value, old_data):
        # Compare the submitted value with the previously stored data.
        return value != old_data.data

    def process_form_data(self, registration, value, old_data=None, billable_items_locked=False):
        """Convert form data into database-usable dictionary.

        :param registration: The registration the data is used for
        :param value: The value from the WTForm
        :param old_data: The existing `RegistrationData` in case a
                         registration is being modified.
        :param billable_items_locked: Whether modifications to any
                                      billable item should be ignored.
        """
        if old_data is not None and not self.has_data_changed(value, old_data):
            # Unchanged data: return an empty dict so nothing is written.
            return {}
        else:
            return {
                'field_data': self.form_item.current_data,
                'data': value
            }

    @classmethod
    def process_field_data(cls, data, old_data=None, old_versioned_data=None):
        """Process the settings of the field.

        :param data: The field data from the client
        :param old_data: The old unversioned field data (if available)
        :param old_versioned_data: The old versioned field data (if
                                   available)
        :return: A ``(unversioned_data, versioned_data)`` tuple
        """
        data = dict(data)
        if 'places_limit' in data:
            # Normalize to int; a falsy value means "no limit" (0).
            data['places_limit'] = int(data['places_limit']) if data['places_limit'] else 0
        # Split the settings into versioned and unversioned parts.
        versioned_data = {k: v for k, v in data.items() if k in cls.versioned_data_fields}
        unversioned_data = {k: v for k, v in data.items() if k not in cls.versioned_data_fields}
        return unversioned_data, versioned_data

    @classmethod
    def unprocess_field_data(cls, versioned_data, unversioned_data):
        # Merge both parts back into a single settings dict.
        return dict(versioned_data, **unversioned_data)

    @property
    def view_data(self):
        return self.unprocess_field_data(self.form_item.versioned_data, self.form_item.data)

    def get_friendly_data(self, registration_data, for_humans=False, for_search=False):
        """Return the data contained in the field.

        If for_humans is True, return a human-readable string representation.
        If for_search is True, return a string suitable for comparison in search.
        """
        return registration_data.data

    def iter_placeholder_info(self):
        # Yields (key, description) pairs describing available placeholders.
        yield None, f'Value of "{self.form_item.title}" ({self.form_item.parent.title})'

    def render_placeholder(self, data, key=None):
        return self.get_friendly_data(data)

    def get_places_used(self):
        """Return the number of used places for the field."""
        return 0
class RegistrationFormBillableField(RegistrationFormFieldBase):
    """Field base class whose value can carry a price (is_billable/price)."""

    @classmethod
    def process_field_data(cls, data, old_data=None, old_versioned_data=None):
        data = deepcopy(data)
        data.setdefault('is_billable', False)
        # Normalize the price to a float; missing/falsy means free (0).
        data['price'] = float(data['price']) if data.get('price') else 0
        return super().process_field_data(data, old_data, old_versioned_data)

    def calculate_price(self, reg_data, versioned_data):
        # Only billable fields contribute their configured price.
        return versioned_data.get('price', 0) if versioned_data.get('is_billable') else 0

    def process_form_data(self, registration, value, old_data=None, billable_items_locked=False, new_data_version=None):
        if new_data_version is None:
            new_data_version = self.form_item.current_data
        # NOTE(review): when billable_items_locked is True, `old_data` is
        # dereferenced unconditionally -- presumably locked modifications
        # always come with existing data; confirm with callers.
        if billable_items_locked and old_data.price != self.calculate_price(value, new_data_version.versioned_data):
            # Ignore changes that would alter the price while billing is locked.
            return {}
        return super().process_form_data(registration, value, old_data)
class RegistrationFormBillableItemsField(RegistrationFormBillableField):
    """Billable field whose prices live on individual items rather than on
    the field itself."""

    @classmethod
    def process_field_data(cls, data, old_data=None, old_versioned_data=None):
        unversioned_data, versioned_data = super().process_field_data(
            data, old_data, old_versioned_data)
        # we don't have field-level billing data here
        del versioned_data['is_billable']
        del versioned_data['price']
        return unversioned_data, versioned_data

    def calculate_price(self, reg_data, versioned_data):
        # billable items need custom logic
        raise NotImplementedError
| gpl-3.0 |
PLOS/rhino | test/api/RequestObject/articlecc.py | 1 | 3666 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Public Library of Science
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
"""
Base class for Article crud controller JSON related services
"""
from test.Base.MySQL import MySQL
from test.Base.api import needs
from .zip_ingestion import ZIPIngestionJson
from ..resources import *
from ..resources import NOT_SCAPE_RELATED_ARTICLE_DOI
# Endpoint templates for the article CRUD controller under test.
ARTICLE_API = API_BASE_URL + '/articles/'
ARTICLE_RELATED_ARTICLE_API = ARTICLE_API + RELATED_ARTICLE_DOI
ARTICLE_REVISION_API = ARTICLE_API + RELATED_ARTICLE_DOI + '/revisions'
# curl-style header flag; presumably used when composing requests -- TODO confirm
HEADER = '-H'

__author__ = 'fcabrales@plos.org'
class ArticlesJSON(ZIPIngestionJson):
    """API tests for article revisions: exercises the revision endpoints and
    cross-checks responses against the ambra database."""

    def verify_article_revision(self):
        """
        Validate setting article revision using articleRevision table
        """
        db_article_revision = self.get_article_sql_revision(NOT_SCAPE_RELATED_ARTICLE_DOI)
        self.get_article_revisions()
        # Compare the API response with the revision number stored in the DB.
        self.verify_article_revision_db_expected(db_article_revision[0], 'revisionNumber')

    def add_article_revision(self, expected_response_code):
        """
        Calls article API to write revision for an article
        POST /articles/{doi}/revisions
        :param expected_response_code
        """
        response = self.doPost('%s?revision=%s&ingestion=%s' % (ARTICLE_REVISION_API, REVISION,
                                                                INGESTION_NUMBER))
        self.verify_http_code_is(response, expected_response_code)

    """
    Below SQL statements will query ambra articleRevision table for revision number by articleDoi
    """
    def get_article_sql_revision(self, article_doi):
        article_revision = MySQL().query(
            'SELECT ar.revisionNumber FROM articleRevision as ar JOIN articleIngestion ai ON '
            'ar.ingestionId = ai.ingestionId JOIN article a ON ai.articleId = a.articleId '
            'where a.doi = %s', [article_doi])
        # query() returns rows; the first row holds the revision number.
        return article_revision[0]

    # Article API
    def get_article_revisions(self):
        """
        Calls article API to get an article revisions
        GET /articles/{article_doi}/revisions
        """
        response = self.doGet(ARTICLE_REVISION_API, None, headers=DEFAULT_HEADERS)
        self.parse_response_as_json(response)

    @needs('parsed', 'parse_response_as_json()')
    def verify_article_revision_db_expected(self, expected_results, attribute):
        actual_results = self.parsed.get_article_revision_number()
        assert actual_results[0] == expected_results, \
            ('%s is not correct! actual: %s expected: %s' % (
                attribute, actual_results[0], expected_results))
| mit |
aifil/odoo | addons/l10n_in_hr_payroll/report/report_hr_yearly_salary_detail.py | 47 | 5890 | #-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import time
import datetime
from openerp.report import report_sxw
from openerp.osv import osv
class employees_yearly_salary_report(report_sxw.rml_parse):
    """RML parser for the yearly salary detail report: collects payslip line
    amounts per salary-rule category, month by month, for each selected
    employee (legacy Python 2 / OpenERP-style code)."""

    def __init__(self, cr, uid, name, context):
        super(employees_yearly_salary_report, self).__init__(cr, uid, name, context)
        # Helpers exposed to the RML template.
        self.localcontext.update({
            'time': time,
            'get_employee': self.get_employee,
            'get_employee_detail': self.get_employee_detail,
            'cal_monthly_amt': self.cal_monthly_amt,
            'get_periods': self.get_periods,
            'get_total': self.get_total,
            'get_allow': self.get_allow,
            'get_deduct': self.get_deduct,
        })
        self.context = context

    def get_periods(self, form):
        # Builds self.mnths ("M-YYYY" strings, padded with '' up to 12
        # entries) and returns the matching month-name header row.
        self.mnths = []
        # Get start year-month-date and end year-month-date
        first_year = int(form['date_from'][0:4])
        last_year = int(form['date_to'][0:4])
        first_month = int(form['date_from'][5:7])
        last_month = int(form['date_to'][5:7])
        no_months = (last_year-first_year) * 12 + last_month - first_month + 1
        current_month = first_month
        current_year = first_year
        # Get name of the months from integer
        mnth_name = []
        for count in range(0, no_months):
            m = datetime.date(current_year, current_month, 1).strftime('%b')
            mnth_name.append(m)
            self.mnths.append(str(current_month) + '-' + str(current_year))
            if current_month == 12:
                current_month = 0
                current_year = last_year
            current_month = current_month + 1
        # Pad to twelve columns so the report layout stays fixed.
        for c in range(0, (12-no_months)):
            mnth_name.append('')
            self.mnths.append('')
        return [mnth_name]

    def get_employee(self, form):
        # Browse records for the employees selected in the wizard.
        return self.pool.get('hr.employee').browse(self.cr,self.uid, form.get('employee_ids', []), context=self.context)

    def get_employee_detail(self, form, obj):
        # Splits the employee's salary rows into allowances (positive yearly
        # total) and deductions (negative), keeping Gross/Net rows apart and
        # accumulating self.total as a side effect.
        self.allow_list = []
        self.deduct_list = []
        self.total = 0.00
        gross = False
        net = False
        payslip_lines = self.cal_monthly_amt(form, obj.id)
        for line in payslip_lines:
            # NOTE(review): `for line[0] in line` rebinds line[0] to each row
            # of the category list in turn -- fragile but apparently intended.
            for line[0] in line:
                if line[0][0] == "Gross":
                    gross = line[0]
                elif line[0][0] == "Net":
                    net = line[0]
                elif line[0][13] > 0.0 and line[0][0] != "Net":
                    # line[0][13] / last element is the yearly total column.
                    self.total += line[0][len(line[0])-1]
                    self.allow_list.append(line[0])
                elif line[0][13] < 0.0:
                    self.total += line[0][len(line[0])-1]
                    self.deduct_list.append(line[0])
        if gross:
            self.allow_list.append(gross)
        if net:
            self.deduct_list.append(net)
        return None

    def cal_monthly_amt(self, form, emp_id):
        # Aggregates payslip line totals per salary-rule category and month
        # ('mm-yyyy'), ordered following the rule category tree.
        category_obj = self.pool.get('hr.salary.rule.category')
        result = []
        res = []
        salaries = {}
        self.cr.execute('''SELECT rc.code, pl.name, sum(pl.total), \
            to_char(date_to,'mm-yyyy') as to_date FROM hr_payslip_line as pl \
            LEFT JOIN hr_salary_rule_category AS rc on (pl.category_id = rc.id) \
            LEFT JOIN hr_payslip as p on pl.slip_id = p.id \
            LEFT JOIN hr_employee as emp on emp.id = p.employee_id \
            WHERE p.employee_id = %s \
            GROUP BY rc.parent_id, pl.sequence, pl.id, pl.category_id,pl.name,p.date_to,rc.code \
            ORDER BY pl.sequence, rc.parent_id''',(emp_id,))
        salary = self.cr.fetchall()
        # salaries maps {category_code: {line_name: {'mm-yyyy': amount}}}
        for category in salary:
            if category[0] not in salaries:
                salaries.setdefault(category[0], {})
                salaries[category[0]].update({category[1]: {category[3]: category[2]}})
            elif category[1] not in salaries[category[0]]:
                salaries[category[0]].setdefault(category[1], {})
                salaries[category[0]][category[1]].update({category[3]: category[2]})
            else:
                salaries[category[0]][category[1]].update({category[3]: category[2]})
        category_ids = category_obj.search(self.cr,self.uid, [], context=self.context)
        categories = category_obj.read(self.cr, self.uid, category_ids, ['code'], context=self.context)
        for code in map(lambda x: x['code'], categories):
            if code in salaries:
                res = self.salary_list(salaries[code])
                result.append(res)
        return result

    def salary_list(self, salaries):
        # Turns {line_name: {month: amount}} into report rows of the shape
        # [name, 12 monthly cells, yearly total].
        cat_salary_all = []
        for category_name,amount in salaries.items():
            cat_salary = []
            total = 0.0
            cat_salary.append(category_name)
            for mnth in self.mnths:
                if mnth <> 'None':
                    # Zero-pad single-digit months to match 'mm-yyyy' keys.
                    if len(mnth) != 7:
                        mnth = '0' + str(mnth)
                    if mnth in amount and amount[mnth]:
                        cat_salary.append(amount[mnth])
                        total += amount[mnth]
                    else:
                        cat_salary.append(0.00)
                else:
                    cat_salary.append('')
            cat_salary.append(total)
            cat_salary_all.append(cat_salary)
        return cat_salary_all

    def get_allow(self):
        # Allowance rows collected by get_employee_detail().
        return self.allow_list

    def get_deduct(self):
        # Deduction rows collected by get_employee_detail().
        return self.deduct_list

    def get_total(self):
        # Grand total accumulated by get_employee_detail().
        return self.total
class wrapped_report_payslip(osv.AbstractModel):
    """Report wrapper exposing the legacy rml_parse class above under the
    report_hryearlysalary template name."""
    _name = 'report.l10n_in_hr_payroll.report_hryearlysalary'
    _inherit = 'report.abstract_report'
    _template = 'l10n_in_hr_payroll.report_hryearlysalary'
    _wrapped_report_class = employees_yearly_salary_report
| gpl-3.0 |
dcroc16/skunk_works | google_appengine/lib/webob-1.2.3/webob/etag.py | 80 | 4531 | """
Does parsing of ETag-related headers: If-None-Matches, If-Matches
Also If-Range parsing
"""
from webob.datetime_utils import (
parse_date,
serialize_date,
)
from webob.descriptors import _rx_etag
from webob.util import (
header_docstring,
warn_deprecation,
)
__all__ = ['AnyETag', 'NoETag', 'ETagMatcher', 'IfRange', 'etag_property']
def etag_property(key, default, rfc_section, strong=True):
    """Build a request property exposing the environ entry ``key`` as a
    parsed ETag matcher (``default`` when the header is absent or empty)."""
    doc = header_docstring(key, rfc_section)
    doc += " Converts it as a Etag."
    def fget(req):
        value = req.environ.get(key)
        if not value:
            return default
        else:
            return ETagMatcher.parse(value, strong=strong)
    def fset(req, val):
        # Store the raw header string back into the environ.
        if val is None:
            req.environ[key] = None
        else:
            req.environ[key] = str(val)
    def fdel(req):
        del req.environ[key]
    return property(fget, fset, fdel, doc=doc)
def _warn_weak_match_deprecated():
    # Runtime warning emitted by the deprecated weak_match() methods below.
    warn_deprecation("weak_match is deprecated", '1.2', 3)
def _warn_if_range_match_deprecated(*args, **kw): # pragma: no cover
    # Stand-in for the removed IfRange.match/match_response API; always raises.
    raise DeprecationWarning("IfRange.match[_response] API is deprecated")
class _AnyETag(object):
    """Wildcard ETag (``*``) -- also stands in for a missing ETag when
    matching should be permissive ('safe')."""

    def __str__(self):
        return '*'

    def __repr__(self):
        return '<ETag *>'

    def __contains__(self, other):
        # The wildcard matches every tag.
        return True

    def __nonzero__(self):
        return False

    __bool__ = __nonzero__  # python 3

    def weak_match(self, other):
        _warn_weak_match_deprecated()


AnyETag = _AnyETag()
class _NoETag(object):
    """Absent ETag in a context where matching must be strict ('unsafe') --
    it matches nothing."""

    def __str__(self):
        return ''

    def __repr__(self):
        return '<No ETag>'

    def __contains__(self, other):
        # A missing tag matches nothing.
        return False

    def __nonzero__(self):
        return False

    __bool__ = __nonzero__  # python 3

    def weak_match(self, other):  # pragma: no cover
        _warn_weak_match_deprecated()


NoETag = _NoETag()
# TODO: convert into a simple tuple
class ETagMatcher(object):
    """Matcher for an explicit list of entity tags."""

    def __init__(self, etags):
        self.etags = etags

    def __contains__(self, other):
        return other in self.etags

    def __str__(self):
        # Render each tag quoted and comma-separated, header-style.
        return ', '.join('"%s"' % tag for tag in self.etags)

    def __repr__(self):
        return '<ETag %s>' % ' or '.join(self.etags)

    def weak_match(self, other):  # pragma: no cover
        _warn_weak_match_deprecated()

    @classmethod
    def parse(cls, value, strong=True):
        """
        Parse this from a header value
        """
        if value == '*':
            return AnyETag
        if not value:
            return cls([])
        found = _rx_etag.findall(value)
        if not found:
            # No well-formed quoted tags; treat the raw value as one tag.
            return cls([value])
        if strong:
            return cls([tag for weak, tag in found if not weak])
        return cls([tag for weak, tag in found])
class IfRange(object):
    """Parsed ``If-Range`` header wrapping an ETag condition."""
    def __init__(self, etag):
        self.etag = etag
    @classmethod
    def parse(cls, value):
        """
        Parse this from a header value.
        """
        if not value:
            # Missing header: match anything.
            return cls(AnyETag)
        elif value.endswith(' GMT'):
            # Must be a date
            return IfRangeDate(parse_date(value))
        else:
            return cls(ETagMatcher.parse(value))
    def __contains__(self, resp):
        """
        Return True if the If-Range header matches the given etag or last_modified
        """
        return resp.etag_strong in self.etag
    def __nonzero__(self):
        return bool(self.etag)
    def __repr__(self):
        return '%s(%r)' % (
            self.__class__.__name__,
            self.etag
        )
    def __str__(self):
        return str(self.etag) if self.etag else ''
    # Deprecated API: calling either name raises DeprecationWarning.
    match = match_response = _warn_if_range_match_deprecated
    __bool__ = __nonzero__ # python 3
class IfRangeDate(object):
    """Parsed ``If-Range`` header wrapping a Last-Modified date condition."""
    def __init__(self, date):
        self.date = date
    def __contains__(self, resp):
        # Matches when the response's Last-Modified is not newer than the
        # validator date the client holds.
        last_modified = resp.last_modified
        #if isinstance(last_modified, str):
        #    last_modified = parse_date(last_modified)
        return last_modified and (last_modified <= self.date)
    def __repr__(self):
        return '%s(%r)' % (
            self.__class__.__name__,
            self.date
            #serialize_date(self.date)
        )
    def __str__(self):
        return serialize_date(self.date)
    # Deprecated API: calling either name raises DeprecationWarning.
    match = match_response = _warn_if_range_match_deprecated
| mit |
itsmemattchung/github3.py | github3/decorators.py | 10 | 3027 | # -*- coding: utf-8 -*-
"""
github3.decorators
==================
This module provides decorators to the rest of the library
"""
from functools import wraps
from requests.models import Response
import os
try: # (No coverage)
# python2
from StringIO import StringIO # (No coverage)
except ImportError: # (No coverage)
# python3
from io import BytesIO as StringIO
class RequestsStringIO(StringIO):
    """StringIO/BytesIO subclass whose ``read`` tolerates the extra
    positional/keyword arguments that requests' adapters may pass along."""
    def read(self, n=-1, *args, **kwargs):
        # StringIO is an old-style class (on Python 2), so can't use super;
        # extra args are deliberately ignored.
        return StringIO.read(self, n)
def requires_auth(func):
    """Decorator marking object methods that need an authenticated session."""
    @wraps(func)
    def auth_wrapper(self, *args, **kwargs):
        authenticated = hasattr(self, 'session') and self.session.has_auth()
        if not authenticated:
            from .exceptions import error_for
            # Fabricate a 401 so callers receive the usual exception type
            response = generate_fake_error_response(
                '{"message": "Requires authentication"}'
            )
            raise error_for(response)
        return func(self, *args, **kwargs)
    return auth_wrapper
def requires_basic_auth(func):
    """Specific (basic) authentication decorator.

    This is used to note which object methods require username/password
    authorization and won't work with token based authorization.
    """
    @wraps(func)
    def auth_wrapper(self, *args, **kwargs):
        basic_authed = hasattr(self, 'session') and self.session.auth
        if not basic_authed:
            from .exceptions import error_for
            # Fabricate a 401 so callers receive the usual exception type
            response = generate_fake_error_response(
                '{"message": "Requires username/password authentication"}'
            )
            raise error_for(response)
        return func(self, *args, **kwargs)
    return auth_wrapper
def requires_app_credentials(func):
    """Require client_id and client_secret to be associated.

    This is used to note and enforce which methods require a client_id and
    client_secret to be used.
    """
    @wraps(func)
    def auth_wrapper(self, *args, **kwargs):
        client_id, client_secret = self.session.retrieve_client_credentials()
        if client_id and client_secret:
            return func(self, *args, **kwargs)
        else:
            from .exceptions import error_for
            # Mock a 401 response.  The message previously claimed
            # username/password were required (copy-paste from the basic-auth
            # decorator); this decorator actually enforces app credentials.
            r = generate_fake_error_response(
                '{"message": "Requires client credentials (client_id and '
                'client_secret)"}'
            )
            raise error_for(r)
    return auth_wrapper
def generate_fake_error_response(msg, status_code=401, encoding='utf-8'):
    """Build a synthetic requests.Response carrying ``msg`` as its body so
    error_for() can turn it into the appropriate exception."""
    r = Response()
    r.status_code = status_code
    r.encoding = encoding
    r.raw = RequestsStringIO(msg.encode())
    # Mark the body as already consumed and cache it, mimicking a response
    # whose content has been read.
    r._content_consumed = True
    r._content = r.raw.read()
    return r
# Use identity (no-op) decorators when generating documentation, so all
# function signatures are displayed correctly in the rendered docs.
if os.getenv('GENERATING_DOCUMENTATION', None) == 'github3':
    requires_auth = requires_basic_auth = lambda x: x # noqa # (No coverage)
| bsd-3-clause |
johankaito/fufuka | microblog/old-flask/lib/python2.7/codecs.py | 265 | 35266 | """ codecs -- Python Codec Registry, API and helpers.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""#"
import __builtin__, sys
### Registry and builtin stateless codec functions
try:
from _codecs import *
except ImportError, why:
raise SystemError('Failed to load the builtin codecs: %s' % why)
__all__ = ["register", "lookup", "open", "EncodedFile", "BOM", "BOM_BE",
"BOM_LE", "BOM32_BE", "BOM32_LE", "BOM64_BE", "BOM64_LE",
"BOM_UTF8", "BOM_UTF16", "BOM_UTF16_LE", "BOM_UTF16_BE",
"BOM_UTF32", "BOM_UTF32_LE", "BOM_UTF32_BE",
"strict_errors", "ignore_errors", "replace_errors",
"xmlcharrefreplace_errors",
"register_error", "lookup_error"]
### Constants
#
# Byte Order Mark (BOM = ZERO WIDTH NO-BREAK SPACE = U+FEFF)
# and its possible byte string values
# for UTF8/UTF16/UTF32 output and little/big endian machines
#
# UTF-8
BOM_UTF8 = '\xef\xbb\xbf'
# UTF-16, little endian
BOM_LE = BOM_UTF16_LE = '\xff\xfe'
# UTF-16, big endian
BOM_BE = BOM_UTF16_BE = '\xfe\xff'
# UTF-32, little endian
BOM_UTF32_LE = '\xff\xfe\x00\x00'
# UTF-32, big endian
BOM_UTF32_BE = '\x00\x00\xfe\xff'
if sys.byteorder == 'little':
# UTF-16, native endianness
BOM = BOM_UTF16 = BOM_UTF16_LE
# UTF-32, native endianness
BOM_UTF32 = BOM_UTF32_LE
else:
# UTF-16, native endianness
BOM = BOM_UTF16 = BOM_UTF16_BE
# UTF-32, native endianness
BOM_UTF32 = BOM_UTF32_BE
# Old broken names (don't use in new code)
BOM32_LE = BOM_UTF16_LE
BOM32_BE = BOM_UTF16_BE
BOM64_LE = BOM_UTF32_LE
BOM64_BE = BOM_UTF32_BE
### Codec base classes (defining the API)
class CodecInfo(tuple):
    """Codec registry entry: behaves like the legacy 4-tuple
    ``(encode, decode, streamreader, streamwriter)`` while also exposing all
    codec components as named attributes."""

    def __new__(cls, encode, decode, streamreader=None, streamwriter=None,
                incrementalencoder=None, incrementaldecoder=None, name=None):
        # The tuple part carries only the original four entries for
        # backwards compatibility with code that unpacks lookup() results.
        info = tuple.__new__(cls, (encode, decode, streamreader, streamwriter))
        info.name = name
        info.encode = encode
        info.decode = decode
        info.incrementalencoder = incrementalencoder
        info.incrementaldecoder = incrementaldecoder
        info.streamwriter = streamwriter
        info.streamreader = streamreader
        return info

    def __repr__(self):
        return "<%s.%s object for encoding %s at 0x%x>" % (
            self.__class__.__module__, self.__class__.__name__,
            self.name, id(self))
class Codec:
    """Interface definition for stateless encoders/decoders.

    ``encode``/``decode`` accept an ``errors`` argument selecting the
    error-handling scheme.  The predefined values are:

        'strict'  - raise a ValueError (or a subclass)
        'ignore'  - skip the offending character and continue
        'replace' - substitute a replacement character ('?' on encoding,
                    U+FFFD on decoding for the builtin Unicode codecs)
        'xmlcharrefreplace' - XML character reference (encoding only)
        'backslashreplace'  - backslashed escape sequences (encoding only)

    Additional values may be registered via register_error().
    """
    def encode(self, input, errors='strict'):
        """Encode ``input`` and return ``(output object, length consumed)``.

        Implementations must be stateless; codecs needing state should use
        the stream codecs instead.  Zero-length input must yield an empty
        object of the output type.
        """
        raise NotImplementedError

    def decode(self, input, errors='strict'):
        """Decode ``input`` and return ``(output object, length consumed)``.

        ``input`` may be any object providing the read-buffer slot (e.g. a
        string, buffer object or memory-mapped file).  Implementations must
        be stateless and must handle zero-length input by returning an
        empty object of the output type.
        """
        raise NotImplementedError
class IncrementalEncoder(object):
    """Abstract base for encoders fed their input piecewise.

    Call encode() repeatedly with successive chunks; the encoder keeps
    whatever state it needs between calls.
    """
    def __init__(self, errors='strict'):
        """Create an IncrementalEncoder instance.

        ``errors`` selects the error-handling scheme (see the module
        docstring for the recognized values).
        """
        self.errors = errors
        self.buffer = ""

    def encode(self, input, final=False):
        """Encode ``input`` (``final`` flags the last chunk) and return the
        resulting object."""
        raise NotImplementedError

    def reset(self):
        """Restore the encoder to its initial state."""

    def getstate(self):
        """Return an opaque token describing the current encoder state."""
        return 0

    def setstate(self, state):
        """Restore a state previously produced by getstate()."""
class BufferedIncrementalEncoder(IncrementalEncoder):
    """
    This subclass of IncrementalEncoder can be used as the baseclass for an
    incremental encoder if the encoder must keep some of the output in a
    buffer between calls to encode().
    """
    def __init__(self, errors='strict'):
        IncrementalEncoder.__init__(self, errors)
        # Unencoded input that is kept between calls to encode().
        self.buffer = ""

    def _buffer_encode(self, input, errors, final):
        # Subclasses must override this: encode input and return an
        # (output, length consumed) tuple.
        raise NotImplementedError

    def encode(self, input, final=False):
        # Prepend whatever input was left over from the previous call.
        pending = self.buffer + input
        result, consumed = self._buffer_encode(pending, self.errors, final)
        # Everything past `consumed` is kept until the next call.
        self.buffer = pending[consumed:]
        return result

    def reset(self):
        IncrementalEncoder.reset(self)
        self.buffer = ""

    def getstate(self):
        # An empty buffer is reported as 0 (the stateless marker).
        return self.buffer or 0

    def setstate(self, state):
        self.buffer = state or ""
class IncrementalDecoder(object):
    """
    An IncrementalDecoder decodes an input in multiple steps. The input can
    be passed piece by piece to the decode() method. The IncrementalDecoder
    remembers the state of the decoding process between calls to decode().
    """
    def __init__(self, errors='strict'):
        """
        Create an IncrementalDecoder instance.

        The errors keyword argument selects the error handling scheme;
        see the module docstring for the list of possible values.
        """
        self.errors = errors

    def decode(self, input, final=False):
        """
        Decode input and return the resulting object.
        """
        # Abstract: concrete incremental decoders override this.
        raise NotImplementedError

    def reset(self):
        """
        Reset the decoder to its initial state.
        """

    def getstate(self):
        """
        Return the current state of the decoder.

        This must be a (buffered_input, additional_state_info) tuple.
        buffered_input must be a bytes object containing bytes that
        were passed to decode() that have not yet been converted.
        additional_state_info must be a non-negative integer
        representing the state of the decoder WITHOUT yet having
        processed the contents of buffered_input. In the initial state
        and after reset(), getstate() must return (b"", 0).
        """
        return (b"", 0)

    def setstate(self, state):
        """
        Set the current state of the decoder.

        state must have been returned by getstate(). The effect of
        setstate((b"", 0)) must be equivalent to reset().
        """
class BufferedIncrementalDecoder(IncrementalDecoder):
    """
    This subclass of IncrementalDecoder can be used as the baseclass for an
    incremental decoder if the decoder must be able to handle incomplete
    byte sequences.
    """
    def __init__(self, errors='strict'):
        IncrementalDecoder.__init__(self, errors)
        # Undecoded input that is kept between calls to decode().
        self.buffer = ""

    def _buffer_decode(self, input, errors, final):
        # Subclasses must override this: decode input and return an
        # (output, length consumed) tuple.
        raise NotImplementedError

    def decode(self, input, final=False):
        # Prepend whatever input was left over from the previous call.
        pending = self.buffer + input
        result, consumed = self._buffer_decode(pending, self.errors, final)
        # Everything past `consumed` is kept until the next call.
        self.buffer = pending[consumed:]
        return result

    def reset(self):
        IncrementalDecoder.reset(self)
        self.buffer = ""

    def getstate(self):
        # The additional state info is always 0 for buffered decoders.
        return (self.buffer, 0)

    def setstate(self, state):
        # Ignore the additional state info.
        self.buffer = state[0]
#
# The StreamWriter and StreamReader class provide generic working
# interfaces which can be used to implement new encoding submodules
# very easily. See encodings/utf_8.py for an example on how this is
# done.
#
class StreamWriter(Codec):
    def __init__(self, stream, errors='strict'):
        """ Creates a StreamWriter instance.
            stream must be a file-like object open for writing
            (binary) data.
            The StreamWriter may use different error handling
            schemes by providing the errors keyword argument. These
            parameters are predefined:
             'strict' - raise a ValueError (or a subclass)
             'ignore' - ignore the character and continue with the next
             'replace'- replace with a suitable replacement character
             'xmlcharrefreplace' - Replace with the appropriate XML
                                   character reference.
             'backslashreplace'  - Replace with backslashed escape
                                   sequences (only for encoding).
            The set of allowed parameter values can be extended via
            register_error.
        """
        self.stream = stream
        self.errors = errors
    def write(self, object):
        """ Writes the object's contents encoded to self.stream.
        """
        # self.encode() is the stateless Codec.encode provided by the
        # concrete subclass; the consumed length is not needed here.
        data, consumed = self.encode(object, self.errors)
        self.stream.write(data)
    def writelines(self, list):
        """ Writes the concatenated list of strings to the stream
            using .write().
        """
        self.write(''.join(list))
    def reset(self):
        """ Flushes and resets the codec buffers used for keeping state.
            Calling this method should ensure that the data on the
            output is put into a clean state, that allows appending
            of new fresh data without having to rescan the whole
            stream to recover state.
        """
        pass
    def seek(self, offset, whence=0):
        # Only a rewind to the very start returns the codec to a known
        # clean state, hence the reset() in that case only.
        self.stream.seek(offset, whence)
        if whence == 0 and offset == 0:
            self.reset()
    def __getattr__(self, name,
                    getattr=getattr):
        """ Inherit all other methods from the underlying stream.
        """
        # The getattr default argument binds the builtin at class
        # creation time, protecting against interpreter shutdown.
        return getattr(self.stream, name)
    def __enter__(self):
        return self
    def __exit__(self, type, value, tb):
        self.stream.close()
###
class StreamReader(Codec):
    def __init__(self, stream, errors='strict'):
        """ Creates a StreamReader instance.
            stream must be a file-like object open for reading
            (binary) data.
            The StreamReader may use different error handling
            schemes by providing the errors keyword argument. These
            parameters are predefined:
             'strict' - raise a ValueError (or a subclass)
             'ignore' - ignore the character and continue with the next
             'replace'- replace with a suitable replacement character;
            The set of allowed parameter values can be extended via
            register_error.
        """
        self.stream = stream
        self.errors = errors
        # Undecoded bytes carried over between read() calls.
        self.bytebuffer = ""
        # For str->str decoding this will stay a str
        # For str->unicode decoding the first read will promote it to unicode
        self.charbuffer = ""
        # List of decoded lines cached by readline(), or None when the
        # reader is in plain character-buffer mode.
        self.linebuffer = None
    def decode(self, input, errors='strict'):
        # Must be overridden by the concrete codec: stateless decode of
        # input, returning an (output, length consumed) tuple.
        raise NotImplementedError
    def read(self, size=-1, chars=-1, firstline=False):
        """ Decodes data from the stream self.stream and returns the
            resulting object.
            chars indicates the number of characters to read from the
            stream. read() will never return more than chars
            characters, but it might return less, if there are not enough
            characters available.
            size indicates the approximate maximum number of bytes to
            read from the stream for decoding purposes. The decoder
            can modify this setting as appropriate. The default value
            -1 indicates to read and decode as much as possible. size
            is intended to prevent having to decode huge files in one
            step.
            If firstline is true, and a UnicodeDecodeError happens
            after the first line terminator in the input only the first line
            will be returned, the rest of the input will be kept until the
            next call to read().
            The method should use a greedy read strategy meaning that
            it should read as much data as is allowed within the
            definition of the encoding and the given size, e.g. if
            optional encoding endings or state markers are available
            on the stream, these should be read too.
        """
        # If we have lines cached, first merge them back into characters
        if self.linebuffer:
            self.charbuffer = "".join(self.linebuffer)
            self.linebuffer = None
        # read until we get the required number of characters (if available)
        while True:
            # can the request be satisfied from the character buffer?
            if chars < 0:
                if size < 0:
                    if self.charbuffer:
                        break
                elif len(self.charbuffer) >= size:
                    break
            else:
                if len(self.charbuffer) >= chars:
                    break
            # we need more data
            if size < 0:
                newdata = self.stream.read()
            else:
                newdata = self.stream.read(size)
            # decode bytes (those remaining from the last call included)
            data = self.bytebuffer + newdata
            try:
                newchars, decodedbytes = self.decode(data, self.errors)
            except UnicodeDecodeError, exc:
                if firstline:
                    # Retry on the prefix before the error; if that prefix
                    # holds at least one full line, return it and keep the
                    # rest (including the offending bytes) for later.
                    newchars, decodedbytes = self.decode(data[:exc.start], self.errors)
                    lines = newchars.splitlines(True)
                    if len(lines)<=1:
                        raise
                else:
                    raise
            # keep undecoded bytes until the next call
            self.bytebuffer = data[decodedbytes:]
            # put new characters in the character buffer
            self.charbuffer += newchars
            # there was no data available
            if not newdata:
                break
        if chars < 0:
            # Return everything we've got
            result = self.charbuffer
            self.charbuffer = ""
        else:
            # Return the first chars characters
            result = self.charbuffer[:chars]
            self.charbuffer = self.charbuffer[chars:]
        return result
    def readline(self, size=None, keepends=True):
        """ Read one line from the input stream and return the
            decoded data.
            size, if given, is passed as size argument to the
            read() method.
        """
        # If we have lines cached from an earlier read, return
        # them unconditionally
        if self.linebuffer:
            line = self.linebuffer[0]
            del self.linebuffer[0]
            if len(self.linebuffer) == 1:
                # revert to charbuffer mode; we might need more data
                # next time
                self.charbuffer = self.linebuffer[0]
                self.linebuffer = None
            if not keepends:
                line = line.splitlines(False)[0]
            return line
        readsize = size or 72
        line = ""
        # If size is given, we call read() only once
        while True:
            data = self.read(readsize, firstline=True)
            if data:
                # If we're at a "\r" read one extra character (which might
                # be a "\n") to get a proper line ending. If the stream is
                # temporarily exhausted we return the wrong line ending.
                if data.endswith("\r"):
                    data += self.read(size=1, chars=1)
            line += data
            lines = line.splitlines(True)
            if lines:
                if len(lines) > 1:
                    # More than one line result; the first line is a full line
                    # to return
                    line = lines[0]
                    del lines[0]
                    if len(lines) > 1:
                        # cache the remaining lines
                        lines[-1] += self.charbuffer
                        self.linebuffer = lines
                        self.charbuffer = None
                    else:
                        # only one remaining line, put it back into charbuffer
                        self.charbuffer = lines[0] + self.charbuffer
                    if not keepends:
                        line = line.splitlines(False)[0]
                    break
                line0withend = lines[0]
                line0withoutend = lines[0].splitlines(False)[0]
                if line0withend != line0withoutend: # We really have a line end
                    # Put the rest back together and keep it until the next call
                    self.charbuffer = "".join(lines[1:]) + self.charbuffer
                    if keepends:
                        line = line0withend
                    else:
                        line = line0withoutend
                    break
            # we didn't get anything or this was our only try
            if not data or size is not None:
                if line and not keepends:
                    line = line.splitlines(False)[0]
                break
            # Grow the chunk size (capped) so long lines need few reads.
            if readsize<8000:
                readsize *= 2
        return line
    def readlines(self, sizehint=None, keepends=True):
        """ Read all lines available on the input stream
            and return them as list of lines.
            Line breaks are implemented using the codec's decoder
            method and are included in the list entries.
            sizehint, if given, is ignored since there is no efficient
            way to finding the true end-of-line.
        """
        data = self.read()
        return data.splitlines(keepends)
    def reset(self):
        """ Resets the codec buffers used for keeping state.
            Note that no stream repositioning should take place.
            This method is primarily intended to be able to recover
            from decoding errors.
        """
        self.bytebuffer = ""
        self.charbuffer = u""
        self.linebuffer = None
    def seek(self, offset, whence=0):
        """ Set the input stream's current position.
            Resets the codec buffers used for keeping state.
        """
        self.stream.seek(offset, whence)
        self.reset()
    def next(self):
        """ Return the next decoded line from the input stream."""
        line = self.readline()
        if line:
            return line
        raise StopIteration
    def __iter__(self):
        return self
    def __getattr__(self, name,
                    getattr=getattr):
        """ Inherit all other methods from the underlying stream.
        """
        # The getattr default argument binds the builtin at class
        # creation time, protecting against interpreter shutdown.
        return getattr(self.stream, name)
    def __enter__(self):
        return self
    def __exit__(self, type, value, tb):
        self.stream.close()
###
class StreamReaderWriter:
    """ StreamReaderWriter instances allow wrapping streams which
        work in both read and write modes.
        The design is such that one can use the factory functions
        returned by the codec.lookup() function to construct the
        instance.
    """
    # Optional attributes set by the file wrappers below
    encoding = 'unknown'
    def __init__(self, stream, Reader, Writer, errors='strict'):
        """ Creates a StreamReaderWriter instance.
            stream must be a Stream-like object.
            Reader, Writer must be factory functions or classes
            providing the StreamReader, StreamWriter interface resp.
            Error handling is done in the same way as defined for the
            StreamWriter/Readers.
        """
        self.stream = stream
        self.reader = Reader(stream, errors)
        self.writer = Writer(stream, errors)
        self.errors = errors
    def read(self, size=-1):
        # Delegate to the wrapped StreamReader.
        return self.reader.read(size)
    def readline(self, size=None):
        return self.reader.readline(size)
    def readlines(self, sizehint=None):
        return self.reader.readlines(sizehint)
    def next(self):
        """ Return the next decoded line from the input stream."""
        return self.reader.next()
    def __iter__(self):
        return self
    def write(self, data):
        # Delegate to the wrapped StreamWriter.
        return self.writer.write(data)
    def writelines(self, list):
        return self.writer.writelines(list)
    def reset(self):
        self.reader.reset()
        self.writer.reset()
    def seek(self, offset, whence=0):
        # The reader state is invalid after any seek; the writer only
        # needs resetting when rewinding to the very start.
        self.stream.seek(offset, whence)
        self.reader.reset()
        if whence == 0 and offset == 0:
            self.writer.reset()
    def __getattr__(self, name,
                    getattr=getattr):
        """ Inherit all other methods from the underlying stream.
        """
        return getattr(self.stream, name)
    # these are needed to make "with codecs.open(...)" work properly
    def __enter__(self):
        return self
    def __exit__(self, type, value, tb):
        self.stream.close()
###
class StreamRecoder:
    """ StreamRecoder instances provide a frontend - backend
        view of encoding data.
        They use the complete set of APIs returned by the
        codecs.lookup() function to implement their task.
        Data written to the stream is first decoded into an
        intermediate format (which is dependent on the given codec
        combination) and then written to the stream using an instance
        of the provided Writer class.
        In the other direction, data is read from the stream using a
        Reader instance and then return encoded data to the caller.
    """
    # Optional attributes set by the file wrappers below
    data_encoding = 'unknown'
    file_encoding = 'unknown'
    def __init__(self, stream, encode, decode, Reader, Writer,
                 errors='strict'):
        """ Creates a StreamRecoder instance which implements a two-way
            conversion: encode and decode work on the frontend (the
            input to .read() and output of .write()) while
            Reader and Writer work on the backend (reading and
            writing to the stream).
            You can use these objects to do transparent direct
            recodings from e.g. latin-1 to utf-8 and back.
            stream must be a file-like object.
            encode, decode must adhere to the Codec interface, Reader,
            Writer must be factory functions or classes providing the
            StreamReader, StreamWriter interface resp.
            encode and decode are needed for the frontend translation,
            Reader and Writer for the backend translation. Unicode is
            used as intermediate encoding.
            Error handling is done in the same way as defined for the
            StreamWriter/Readers.
        """
        self.stream = stream
        self.encode = encode
        self.decode = decode
        self.reader = Reader(stream, errors)
        self.writer = Writer(stream, errors)
        self.errors = errors
    def read(self, size=-1):
        # Backend decode via the reader, then frontend re-encode.
        data = self.reader.read(size)
        data, bytesencoded = self.encode(data, self.errors)
        return data
    def readline(self, size=None):
        if size is None:
            data = self.reader.readline()
        else:
            data = self.reader.readline(size)
        data, bytesencoded = self.encode(data, self.errors)
        return data
    def readlines(self, sizehint=None):
        # sizehint is accepted for file-API compatibility but ignored.
        data = self.reader.read()
        data, bytesencoded = self.encode(data, self.errors)
        return data.splitlines(1)
    def next(self):
        """ Return the next decoded line from the input stream."""
        data = self.reader.next()
        data, bytesencoded = self.encode(data, self.errors)
        return data
    def __iter__(self):
        return self
    def write(self, data):
        # Frontend decode to the intermediate format, then backend write.
        data, bytesdecoded = self.decode(data, self.errors)
        return self.writer.write(data)
    def writelines(self, list):
        data = ''.join(list)
        data, bytesdecoded = self.decode(data, self.errors)
        return self.writer.write(data)
    def reset(self):
        self.reader.reset()
        self.writer.reset()
    def __getattr__(self, name,
                    getattr=getattr):
        """ Inherit all other methods from the underlying stream.
        """
        return getattr(self.stream, name)
    def __enter__(self):
        return self
    def __exit__(self, type, value, tb):
        self.stream.close()
### Shortcuts
def open(filename, mode='rb', encoding=None, errors='strict', buffering=1):
    """ Open an encoded file using the given mode and return
        a wrapped version providing transparent encoding/decoding.
        Note: The wrapped version will only accept the object format
        defined by the codecs, i.e. Unicode objects for most builtin
        codecs. Output is also codec dependent and will usually be
        Unicode as well.
        Files are always opened in binary mode, even if no binary mode
        was specified. This is done to avoid data loss due to encodings
        using 8-bit values. The default file mode is 'rb' meaning to
        open the file in binary read mode.
        encoding specifies the encoding which is to be used for the
        file.
        errors may be given to define the error handling. It defaults
        to 'strict' which causes ValueErrors to be raised in case an
        encoding error occurs.
        buffering has the same meaning as for the builtin open() API.
        It defaults to line buffered.
        The returned wrapped file object provides an extra attribute
        .encoding which allows querying the used encoding. This
        attribute is only available if an encoding was specified as
        parameter.
        """
    if encoding is not None:
        if 'U' in mode:
            # No automatic conversion of '\n' is done on reading and writing
            mode = mode.strip().replace('U', '')
            if mode[:1] not in set('rwa'):
                mode = 'r' + mode
        if 'b' not in mode:
            # Force opening of the file in binary mode
            mode = mode + 'b'
    file = __builtin__.open(filename, mode, buffering)
    if encoding is None:
        return file
    info = lookup(encoding)
    srw = StreamReaderWriter(file, info.streamreader, info.streamwriter, errors)
    # Add attributes to simplify introspection
    srw.encoding = encoding
    return srw
def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'):
    """ Return a wrapped version of file which provides transparent
        encoding translation.
        Strings written to the wrapped file are interpreted according
        to the given data_encoding and then written to the original
        file as string using file_encoding. The intermediate encoding
        will usually be Unicode but depends on the specified codecs.
        Strings are read from the file using file_encoding and then
        passed back to the caller as string using data_encoding.
        If file_encoding is not given, it defaults to data_encoding.
        errors may be given to define the error handling. It defaults
        to 'strict' which causes ValueErrors to be raised in case an
        encoding error occurs.
        The returned wrapped file object provides two extra attributes
        .data_encoding and .file_encoding which reflect the given
        parameters of the same name. The attributes can be used for
        introspection by Python programs.
    """
    if file_encoding is None:
        file_encoding = data_encoding
    data_info = lookup(data_encoding)
    file_info = lookup(file_encoding)
    # data codec handles the frontend, file codec the backend stream.
    sr = StreamRecoder(file, data_info.encode, data_info.decode,
                       file_info.streamreader, file_info.streamwriter, errors)
    # Add attributes to simplify introspection
    sr.data_encoding = data_encoding
    sr.file_encoding = file_encoding
    return sr
### Helpers for codec lookup
def getencoder(encoding):
    """ Look up the codec for the given encoding and return
        its encoder function.

        Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.encode
def getdecoder(encoding):
    """ Look up the codec for the given encoding and return
        its decoder function.

        Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.decode
def getincrementalencoder(encoding):
    """ Look up the codec for the given encoding and return
        its IncrementalEncoder class or factory function.

        Raises a LookupError in case the encoding cannot be found
        or the codec doesn't provide an incremental encoder.
    """
    factory = lookup(encoding).incrementalencoder
    if factory is None:
        # The codec exists but has no incremental encoder.
        raise LookupError(encoding)
    return factory
def getincrementaldecoder(encoding):
    """ Look up the codec for the given encoding and return
        its IncrementalDecoder class or factory function.

        Raises a LookupError in case the encoding cannot be found
        or the codec doesn't provide an incremental decoder.
    """
    factory = lookup(encoding).incrementaldecoder
    if factory is None:
        # The codec exists but has no incremental decoder.
        raise LookupError(encoding)
    return factory
def getreader(encoding):
    """ Look up the codec for the given encoding and return
        its StreamReader class or factory function.

        Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.streamreader
def getwriter(encoding):
    """ Look up the codec for the given encoding and return
        its StreamWriter class or factory function.

        Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.streamwriter
def iterencode(iterator, encoding, errors='strict', **kwargs):
    """
    Encoding iterator.

    Encodes the input strings from the iterator using an
    IncrementalEncoder.  errors and kwargs are passed through to the
    IncrementalEncoder constructor.
    """
    encoder = getincrementalencoder(encoding)(errors, **kwargs)
    for chunk in iterator:
        encoded = encoder.encode(chunk)
        if encoded:
            yield encoded
    # Flush any pending encoder state with a final, empty encode call.
    tail = encoder.encode("", True)
    if tail:
        yield tail
def iterdecode(iterator, encoding, errors='strict', **kwargs):
    """
    Decoding iterator.

    Decodes the input strings from the iterator using an
    IncrementalDecoder.  errors and kwargs are passed through to the
    IncrementalDecoder constructor.
    """
    decoder = getincrementaldecoder(encoding)(errors, **kwargs)
    for chunk in iterator:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded
    # Flush any pending decoder state with a final, empty decode call.
    tail = decoder.decode("", True)
    if tail:
        yield tail
### Helpers for charmap-based codecs
def make_identity_dict(rng):
    """ make_identity_dict(rng) -> dict

        Return a dictionary where elements of the rng sequence are
        mapped to themselves.
    """
    # Build the mapping in one pass with the dict constructor instead
    # of a manual accumulation loop (works on Python 2.4+ and 3.x).
    return dict((i, i) for i in rng)
def make_encoding_map(decoding_map):
    """ Creates an encoding map from a decoding map.

        If a target mapping in the decoding map occurs multiple
        times, then that target is mapped to None (undefined mapping),
        causing an exception when encountered by the charmap codec
        during translation.

        One example where this happens is cp875.py which decodes
        multiple character to \u001a.
    """
    encoding_map = {}
    for key, value in decoding_map.items():
        # A target seen more than once is poisoned with None so that
        # the charmap codec raises when asked to encode it.
        if value in encoding_map:
            encoding_map[value] = None
        else:
            encoding_map[value] = key
    return encoding_map
### error handlers
# Pre-fetch the standard error handler callables so they can be
# referenced directly as module attributes (e.g. codecs.strict_errors).
try:
    strict_errors = lookup_error("strict")
    ignore_errors = lookup_error("ignore")
    replace_errors = lookup_error("replace")
    xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace")
    backslashreplace_errors = lookup_error("backslashreplace")
except LookupError:
    # In --disable-unicode builds, these error handlers are missing
    strict_errors = None
    ignore_errors = None
    replace_errors = None
    xmlcharrefreplace_errors = None
    backslashreplace_errors = None
# Tell modulefinder that using codecs probably needs the encodings
# package
_false = 0
if _false:
    # Never executed; the import exists purely so static analysis
    # (modulefinder/freeze tools) bundles the encodings package.
    import encodings
### Tests
if __name__ == '__main__':
    # Make stdout translate Latin-1 output into UTF-8 output
    sys.stdout = EncodedFile(sys.stdout, 'latin-1', 'utf-8')
    # Have stdin translate Latin-1 input into UTF-8 input
    sys.stdin = EncodedFile(sys.stdin, 'utf-8', 'latin-1')
| apache-2.0 |
Russell-IO/ansible | test/units/modules/remote_management/oneview/test_oneview_enclosure_facts.py | 68 | 4920 | # Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible.compat.tests import unittest
from oneview_module_loader import OneViewModuleBase
from ansible.modules.remote_management.oneview.oneview_enclosure_facts import EnclosureFactsModule
from hpe_test_utils import FactsParamsTestCase
ERROR_MSG = 'Fake message error'
# Module params: fetch all enclosures (no name filter).
PARAMS_GET_ALL = dict(
    config='config.json',
    name=None
)
# Module params: fetch a single enclosure by name, no extra options.
PARAMS_GET_BY_NAME = dict(
    config='config.json',
    name="Test-Enclosure",
    options=[]
)
# Module params: fetch by name plus the three optional fact groups.
PARAMS_GET_BY_NAME_WITH_OPTIONS = dict(
    config='config.json',
    name="Test-Enclosure",
    options=['utilization', 'environmentalConfiguration', 'script']
)
# Module params: utilization option with explicit query parameters.
PARAMS_GET_UTILIZATION_WITH_PARAMS = dict(
    config='config.json',
    name="Test-Enclosure",
    options=[dict(utilization=dict(fields='AveragePower',
                                   filter=['startDate=2016-06-30T03:29:42.000Z',
                                           'endDate=2016-07-01T03:29:42.000Z'],
                                   view='day',
                                   refresh=True))]
)
# Canned API responses used by the mocked OneView client below.
PRESENT_ENCLOSURES = [{
    "name": "Test-Enclosure",
    "uri": "/rest/enclosures/c6bf9af9-48e7-4236-b08a-77684dc258a5"
}]
ENCLOSURE_SCRIPT = '# script content'
ENCLOSURE_UTILIZATION = {
    "isFresh": "True"
}
ENCLOSURE_ENVIRONMENTAL_CONFIG = {
    "calibratedMaxPower": "2500"
}
class EnclosureFactsSpec(unittest.TestCase,
                         FactsParamsTestCase):
    def setUp(self):
        # Wire up the mocked OneView client and expose its enclosures
        # resource for the individual tests.
        self.configure_mocks(self, EnclosureFactsModule)
        self.enclosures = self.mock_ov_client.enclosures
        FactsParamsTestCase.configure_client_mock(self, self.enclosures)
    def test_should_get_all_enclosures(self):
        # No name given -> module must return every enclosure.
        self.enclosures.get_all.return_value = PRESENT_ENCLOSURES
        self.mock_ansible_module.params = PARAMS_GET_ALL
        EnclosureFactsModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(enclosures=(PRESENT_ENCLOSURES))
        )
    def test_should_get_enclosure_by_name(self):
        # Name given, no options -> only the matching enclosure facts.
        self.enclosures.get_by.return_value = PRESENT_ENCLOSURES
        self.mock_ansible_module.params = PARAMS_GET_BY_NAME
        EnclosureFactsModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(enclosures=(PRESENT_ENCLOSURES))
        )
    def test_should_get_enclosure_by_name_with_options(self):
        # All three option groups requested -> extra facts are included.
        self.enclosures.get_by.return_value = PRESENT_ENCLOSURES
        self.enclosures.get_script.return_value = ENCLOSURE_SCRIPT
        self.enclosures.get_utilization.return_value = ENCLOSURE_UTILIZATION
        self.enclosures.get_environmental_configuration.return_value = ENCLOSURE_ENVIRONMENTAL_CONFIG
        self.mock_ansible_module.params = PARAMS_GET_BY_NAME_WITH_OPTIONS
        EnclosureFactsModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(enclosures=PRESENT_ENCLOSURES,
                               enclosure_script=ENCLOSURE_SCRIPT,
                               enclosure_environmental_configuration=ENCLOSURE_ENVIRONMENTAL_CONFIG,
                               enclosure_utilization=ENCLOSURE_UTILIZATION)
        )
    def test_should_get_all_utilization_data(self):
        # utilization requested without parameters -> empty-string args.
        self.enclosures.get_by.return_value = PRESENT_ENCLOSURES
        self.enclosures.get_script.return_value = ENCLOSURE_SCRIPT
        self.enclosures.get_utilization.return_value = ENCLOSURE_UTILIZATION
        self.enclosures.get_environmental_configuration.return_value = ENCLOSURE_ENVIRONMENTAL_CONFIG
        self.mock_ansible_module.params = PARAMS_GET_BY_NAME_WITH_OPTIONS
        EnclosureFactsModule().run()
        self.enclosures.get_utilization.assert_called_once_with(PRESENT_ENCLOSURES[0]['uri'], fields='', filter='',
                                                                view='', refresh='')
    def test_should_get_utilization_with_parameters(self):
        # utilization requested with parameters -> passed through verbatim.
        self.enclosures.get_by.return_value = PRESENT_ENCLOSURES
        self.enclosures.get_script.return_value = ENCLOSURE_SCRIPT
        self.enclosures.get_utilization.return_value = ENCLOSURE_UTILIZATION
        self.enclosures.get_environmental_configuration.return_value = ENCLOSURE_ENVIRONMENTAL_CONFIG
        self.mock_ansible_module.params = PARAMS_GET_UTILIZATION_WITH_PARAMS
        EnclosureFactsModule().run()
        date_filter = ["startDate=2016-06-30T03:29:42.000Z", "endDate=2016-07-01T03:29:42.000Z"]
        self.enclosures.get_utilization.assert_called_once_with(
            PRESENT_ENCLOSURES[0]['uri'], fields='AveragePower', filter=date_filter, view='day', refresh=True)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
Yong-Lee/django | tests/custom_methods/tests.py | 228 | 1197 | from __future__ import unicode_literals
from datetime import date
from django.test import TestCase
from .models import Article
class MethodsTests(TestCase):
    def test_custom_methods(self):
        # Two articles published on the same day; each custom method on
        # one must find the other (and not itself).
        a = Article.objects.create(
            headline="Area man programs in Python", pub_date=date(2005, 7, 27)
        )
        b = Article.objects.create(
            headline="Beatles reunite", pub_date=date(2005, 7, 27)
        )
        # pub_date is fixed in 2005, so "published today" must be False.
        self.assertFalse(a.was_published_today())
        self.assertQuerysetEqual(
            a.articles_from_same_day_1(), [
                "Beatles reunite",
            ],
            lambda a: a.headline,
        )
        self.assertQuerysetEqual(
            a.articles_from_same_day_2(), [
                "Beatles reunite",
            ],
            lambda a: a.headline
        )
        self.assertQuerysetEqual(
            b.articles_from_same_day_1(), [
                "Area man programs in Python",
            ],
            lambda a: a.headline,
        )
        self.assertQuerysetEqual(
            b.articles_from_same_day_2(), [
                "Area man programs in Python",
            ],
            lambda a: a.headline
        )
| bsd-3-clause |
FedorSelitsky/eventrack | eventrack/settings/base.py | 1 | 5825 | import os
from celery.schedules import crontab
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.postgres',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'artist.apps.ArtistConfig',
'event.apps.EventConfig',
'venue.apps.VenueConfig',
'user.apps.UserConfig',
'rest_framework',
'corsheaders',
'channels',
'drf_yasg',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'eventrack.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'eventrack.wsgi.application'
# User model
# https://docs.djangoproject.com/en/stable/ref/settings/#auth-user-model
AUTH_USER_MODEL = 'user.User'
# Password validation
# https://docs.djangoproject.com/en/stable/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Login URL
# https://docs.djangoproject.com/en/stable/ref/settings/#login-url
LOGIN_URL = 'user:signin'
# Internationalization
# https://docs.djangoproject.com/en/stable/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Minsk'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Logging
# https://docs.djangoproject.com/en/stable/topics/logging/#configuring-logging
# NOTE(review): the formatter entry uses key 'verbose' instead of
# 'format', and logger entries carry a 'formatter' key that dictConfig
# does not support on loggers — confirm this config actually applies
# the intended format.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'default': {
            'verbose': '[%(asctime)s] (%(process)d/%(thread)d) %(name)s %(levelname)s: %(message)s'
        },
    },
    'handlers': {
        'console': {
            'level': 'ERROR',
            'class': 'logging.StreamHandler',
        },
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler',
        },
    },
    'root': {
        'handlers': ['mail_admins', 'console'],
        'level': 'ERROR',
        'formatter': 'verbose',
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins', 'console'],
            'level': 'ERROR',
            'propagate': False,
            'formatter': 'verbose',
        },
        'django.security': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': False,
            'formatter': 'verbose',
        },
    },
}
# ASGI
# https://channels.readthedocs.io/en/stable/deploying.html#configuring-the-asgi-application
ASGI_APPLICATION = 'eventrack.routing.application'
CHANNEL_LAYERS = {
    'default': {
        'BACKEND': 'channels_redis.core.RedisChannelLayer',
        'CONFIG': {
            'hosts': [os.getenv('REDIS_HOST', 'redis://localhost:6379')],
        },
    }
}
# CORS
# https://github.com/ottoyiu/django-cors-headers#configuration
# CORS is wide open, but only for read-only methods on /api/ URLs.
CORS_ORIGIN_ALLOW_ALL = True
CORS_URLS_REGEX = r'^/api/.*$'
CORS_ALLOW_METHODS = (
    'GET',
    'HEAD',
    'OPTIONS',
)
# Django REST Framework
# https://www.django-rest-framework.org/api-guide/pagination/#pagenumberpagination
REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'eventrack.pagination.PagePagination',
    'PAGE_SIZE': 50
}
# Artists count
ARTISTS_COUNT = 5
# Venues count
VENUES_COUNT = 5
# Celery
# http://docs.celeryproject.org/en/stable/userguide/configuration.html
CELERY_TIMEZONE = TIME_ZONE
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
# Periodic task schedule (crontab times are in CELERY_TIMEZONE).
CELERY_BEAT_SCHEDULE = {
    'fetch_artists': {
        'task': 'artist.tasks.fetch_artists',
        'schedule': crontab(minute=0, hour=0),
    },
    'update_artists': {
        'task': 'artist.tasks.update_artists',
        'schedule': crontab(minute=0, hour='*/1'),
    },
    'fetch_events': {
        'task': 'event.tasks.fetch_events',
        'schedule': crontab(minute=0, hour='*/12'),
    },
    'update_venues': {
        'task': 'venue.tasks.update_venues',
        'schedule': crontab(minute=0, hour='*/2'),
    },
}
# Songkick API
# https://www.songkick.com/developer
SONGKICK_API_KEY = os.getenv('SONGKICK_API_KEY', 'YOUR_API_KEY')
# Bandsintown API
# https://app.swaggerhub.com/apis/Bandsintown/PublicAPI/3.0.0
BANDSINTOWN_APP_ID = os.getenv('BANDSINTOWN_APP_ID', 'YOUR_APP_ID')
# ReDoc
# https://drf-yasg.readthedocs.io/en/stable/settings.html#redoc-settings
REDOC_SETTINGS = {
    'HIDE_HOSTNAME': True,
    'REQUIRED_PROPS_FIRST': True,
}
| mit |
matteocrippa/dsl-n55u-bender | release/src/router/samba-3.5.8/source4/scripting/python/samba/shares.py | 24 | 1715 | #!/usr/bin/python
# Unix SMB/CIFS implementation.
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2009
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Share management."""
# TODO: Rather than accessing Loadparm directly here, we should really
# have bindings to the param/shares.c and use those.
class SharesContainer(object):
    """Dict-like view over the shares defined in a Loadparm instance.

    The special [global] section is hidden from lookups, length and
    iteration: it configures the server rather than exporting a share.
    """

    def __init__(self, lp):
        self._lp = lp

    def __getitem__(self, name):
        # [global] is not a share
        if name == "global":
            raise KeyError
        return Share(self._lp[name])

    def __len__(self):
        total = len(self._lp)
        # Discount the [global] section when present.
        return total - 1 if "global" in self._lp else total

    def keys(self):
        return [svc for svc in self._lp.services() if svc != "global"]

    def __iter__(self):
        return iter(self.keys())
class Share(object):
    """A single file share, exposing its parameters dict-style."""

    def __init__(self, service):
        # Underlying per-share parameter store (a Loadparm service).
        self._service = service

    def __getitem__(self, name):
        # Parameter lookups go straight to the service.
        return self._service[name]

    def __setitem__(self, name, value):
        # Parameter assignment goes straight to the service.
        self._service[name] = value
| gpl-2.0 |
gtolan/mrkttrdr | node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py | 1825 | 17014 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GYP backend that generates Eclipse CDT settings files.
This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
files that can be imported into an Eclipse CDT project. The XML file contains a
list of include paths and symbols (i.e. defines).
Because a full .cproject definition is not created by this generator, it's not
possible to properly define the include dirs and symbols for each file
individually. Instead, one set of includes/symbols is generated for the entire
project. This works fairly well (and is a vast improvement in general), but may
still result in a few indexer issues here and there.
This generator has no automated tests, so expect it to be broken.
"""
from xml.sax.saxutils import escape
import os.path
import subprocess
import gyp
import gyp.common
import gyp.msvs_emulation
import shlex
import xml.etree.cElementTree as ET
# Flipped to True by CalculateGeneratorInputInfo when the
# 'adjust_static_libraries' generator flag is set.
generator_wants_static_library_dependencies_adjusted = False
# This generator defines no path-related default variables of its own;
# the loops below fill in the entries gyp requires.
generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
  # Some gyp steps fail if these are empty(!), so we convert them to variables
  generator_default_variables[dirname] = '$' + dirname
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
               'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
               'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
               'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
               'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
               'CONFIGURATION_NAME']:
  # These variables are irrelevant for the CDT settings export; map to ''.
  generator_default_variables[unused] = ''
# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
# part of the path when dealing with generated headers. This value will be
# replaced dynamically for each configuration.
generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
    '$SHARED_INTERMEDIATE_DIR'
def CalculateVariables(default_variables, params):
  """Fill in generator-specific default variables (called by gyp)."""
  generator_flags = params.get('generator_flags', {})
  # Generator flags act as defaults; existing entries win.
  for key, val in generator_flags.items():
    default_variables.setdefault(key, val)
  flavor = gyp.common.GetFlavor(params)
  default_variables.setdefault('OS', flavor)
  if flavor == 'win':
    # Copy additional generator configuration data from VS, which is shared
    # by the Eclipse generator.
    import gyp.generator.msvs as msvs_generator
    # NOTE(review): these two names are assigned but never read afterwards in
    # this function — presumably intended to update the module-level globals;
    # confirm against the msvs generator before changing.
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])
    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
def CalculateGeneratorInputInfo(params):
  """Calculate the generator specific info that gets fed to input (called by
  gyp)."""
  flags = params.get('generator_flags', {})
  adjust = flags.get('adjust_static_libraries', False)
  if adjust:
    global generator_wants_static_library_dependencies_adjusted
    generator_wants_static_library_dependencies_adjusted = True
def GetAllIncludeDirectories(target_list, target_dicts,
                             shared_intermediate_dirs, config_name, params,
                             compiler_path):
  """Calculate the set of include directories to be used.

  Returns:
    A list including all the include_dir's specified for every target followed
    by any include directories that were added as cflag compiler options.
  """
  gyp_includes_set = set()
  compiler_includes_list = []
  # Find compiler's default include dirs.
  if compiler_path:
    command = shlex.split(compiler_path)
    command.extend(['-E', '-xc++', '-v', '-'])
    proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # The include search list is printed on stderr.
    output = proc.communicate()[1]
    # Extract the list of include dirs from the output, which has this format:
    #   ...
    #   #include "..." search starts here:
    #   #include <...> search starts here:
    #    /usr/include/c++/4.6
    #    /usr/local/include
    #   End of search list.
    #   ...
    in_include_list = False
    for line in output.splitlines():
      if line.startswith('#include'):
        in_include_list = True
        continue
      if line.startswith('End of search list.'):
        break
      if in_include_list:
        include_dir = line.strip()
        if include_dir not in compiler_includes_list:
          compiler_includes_list.append(include_dir)
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    generator_flags = params.get('generator_flags', {})
  for target_name in target_list:
    target = target_dicts[target_name]
    if config_name in target['configurations']:
      config = target['configurations'][config_name]
      # Look for any include dirs that were explicitly added via cflags. This
      # may be done in gyp files to force certain includes to come at the end.
      # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
      # remove this.
      if flavor == 'win':
        msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
        cflags = msvs_settings.GetCflags(config_name)
      else:
        cflags = config['cflags']
      for cflag in cflags:
        if cflag.startswith('-I'):
          include_dir = cflag[2:]
          if include_dir not in compiler_includes_list:
            compiler_includes_list.append(include_dir)
      # Find standard gyp include dirs.
      # FIX: dict.has_key() was removed in Python 3; the `in` operator is
      # equivalent on both Python 2 and 3.
      if 'include_dirs' in config:
        include_dirs = config['include_dirs']
        for shared_intermediate_dir in shared_intermediate_dirs:
          for include_dir in include_dirs:
            include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
                                              shared_intermediate_dir)
            if not os.path.isabs(include_dir):
              # Relative include dirs are relative to the .gyp file's dir.
              base_dir = os.path.dirname(target_name)
              include_dir = base_dir + '/' + include_dir
              include_dir = os.path.abspath(include_dir)
            gyp_includes_set.add(include_dir)
  # Generate a list that has all the include dirs.
  all_includes_list = list(gyp_includes_set)
  all_includes_list.sort()
  for compiler_include in compiler_includes_list:
    if not compiler_include in gyp_includes_set:
      all_includes_list.append(compiler_include)
  # All done.
  return all_includes_list
def GetCompilerPath(target_list, data, options):
  """Determine a command that can be used to invoke the compiler.
  Returns:
    If this is a gyp project that has explicit make settings, try to determine
    the compiler from that. Otherwise, see if a compiler was specified via the
    CC_target environment variable.
  """
  # First, see if the compiler is configured in make's settings.
  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings_dict = data[build_file].get('make_global_settings', {})
  # NOTE(review): despite the name, this appears to be a sequence of
  # [key, value] pairs (a plain dict would not tuple-unpack here) — confirm
  # against gyp's input format.
  for key, value in make_global_settings_dict:
    if key in ['CC', 'CXX']:
      return os.path.join(options.toplevel_dir, value)
  # Check to see if the compiler was specified as an environment variable.
  for key in ['CC_target', 'CC', 'CXX']:
    compiler = os.environ.get(key)
    if compiler:
      return compiler
  # Fall back to assuming gcc is on PATH.
  return 'gcc'
def GetAllDefines(target_list, target_dicts, data, config_name, params,
                  compiler_path):
  """Calculate the defines for a project.
  Returns:
    A dict that includes explicit defines declared in gyp files along with all
    of the default defines that the compiler uses.
  """
  # Get defines declared in the gyp files.
  all_defines = {}
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    generator_flags = params.get('generator_flags', {})
  for target_name in target_list:
    target = target_dicts[target_name]
    if flavor == 'win':
      msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
      extra_defines = msvs_settings.GetComputedDefines(config_name)
    else:
      extra_defines = []
    if config_name in target['configurations']:
      config = target['configurations'][config_name]
      target_defines = config['defines']
    else:
      target_defines = []
    for define in target_defines + extra_defines:
      # Defines come as 'NAME' or 'NAME=VALUE'; a bare name defaults to '1',
      # matching C preprocessor behavior.
      split_define = define.split('=', 1)
      if len(split_define) == 1:
        split_define.append('1')
      if split_define[0].strip() in all_defines:
        # Already defined; first definition wins.
        continue
      all_defines[split_define[0].strip()] = split_define[1].strip()
  # Get default compiler defines (if possible).
  if flavor == 'win':
    return all_defines # Default defines already processed in the loop above.
  if compiler_path:
    # Run the preprocessor with -dM to dump its builtin macro definitions.
    command = shlex.split(compiler_path)
    command.extend(['-E', '-dM', '-'])
    cpp_proc = subprocess.Popen(args=command, cwd='.',
                                stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    cpp_output = cpp_proc.communicate()[0]
    # NOTE(review): assumes text-mode output ('#define NAME VALUE' lines);
    # on Python 3 communicate() returns bytes — confirm intended interpreter.
    cpp_lines = cpp_output.split('\n')
    for cpp_line in cpp_lines:
      if not cpp_line.strip():
        continue
      cpp_line_parts = cpp_line.split(' ', 2)
      key = cpp_line_parts[1]
      if len(cpp_line_parts) >= 3:
        val = cpp_line_parts[2]
      else:
        val = '1'
      all_defines[key] = val
  return all_defines
def WriteIncludePaths(out, eclipse_langs, include_dirs):
  """Write the includes section of a CDT settings export file."""
  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.'
            'settingswizards.IncludePaths">\n')
  # The unnamed leading language entry is a placeholder CDT expects.
  out.write('    <language name="holder for library settings"></language>\n')
  for language in eclipse_langs:
    out.write('    <language name="%s">\n' % language)
    entries = ['      <includepath workspace_path="false">%s</includepath>\n'
               % include_dir for include_dir in include_dirs]
    for entry in entries:
      out.write(entry)
    out.write('    </language>\n')
  out.write('  </section>\n')
def WriteMacros(out, eclipse_langs, defines):
  """Write the macros section of a CDT settings export file.

  Args:
    out: writable file-like object receiving the XML.
    eclipse_langs: Eclipse CDT language names to emit the macros for.
    defines: dict mapping macro names to their (string) values.
  """
  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.' \
            'settingswizards.Macros">\n')
  # The unnamed leading language entry is a placeholder CDT expects.
  out.write('    <language name="holder for library settings"></language>\n')
  for lang in eclipse_langs:
    out.write('    <language name="%s">\n' % lang)
    # FIX: dict.iterkeys() does not exist on Python 3; sorted(defines)
    # iterates keys in the same deterministic order on both Python 2 and 3.
    for key in sorted(defines):
      out.write('      <macro><name>%s</name><value>%s</value></macro>\n' %
                (escape(key), escape(defines[key])))
    out.write('    </language>\n')
  out.write('  </section>\n')
def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  """Write the CDT settings and classpath files for one configuration."""
  options = params['options']
  generator_flags = params.get('generator_flags', {})
  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
                           config_name)
  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
  # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
  # SHARED_INTERMEDIATE_DIR. Include both possible locations.
  shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
                              os.path.join(toplevel_build, 'gen')]
  GenerateCdtSettingsFile(target_list,
                          target_dicts,
                          data,
                          params,
                          config_name,
                          os.path.join(toplevel_build,
                                       'eclipse-cdt-settings.xml'),
                          options,
                          shared_intermediate_dirs)
  GenerateClasspathFile(target_list,
                        target_dicts,
                        options.toplevel_dir,
                        toplevel_build,
                        os.path.join(toplevel_build,
                                     'eclipse-classpath.xml'))
def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
                            config_name, out_name, options,
                            shared_intermediate_dirs):
  """Write a CDT settings-export XML file (includes + macros) to out_name."""
  gyp.common.EnsureDirExists(out_name)
  with open(out_name, 'w') as out:
    out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
    out.write('<cdtprojectproperties>\n')
    # Languages recognized by the CDT settings-import wizard.
    eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
                     'GNU C++', 'GNU C', 'Assembly']
    compiler_path = GetCompilerPath(target_list, data, options)
    include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
                                            shared_intermediate_dirs,
                                            config_name, params, compiler_path)
    WriteIncludePaths(out, eclipse_langs, include_dirs)
    defines = GetAllDefines(target_list, target_dicts, data, config_name,
                            params, compiler_path)
    WriteMacros(out, eclipse_langs, defines)
    out.write('</cdtprojectproperties>\n')
def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
                          toplevel_build, out_name):
  '''Generates a classpath file suitable for symbol navigation and code
  completion of Java code (such as in Android projects) by finding all
  .java and .jar files used as action inputs.'''
  gyp.common.EnsureDirExists(out_name)
  result = ET.Element('classpath')
  def AddElements(kind, paths):
    # Emit one <classpathentry kind=... path=...> per unique path.
    # First, we need to normalize the paths so they are all relative to the
    # toplevel dir.
    rel_paths = set()
    for path in paths:
      if os.path.isabs(path):
        rel_paths.add(os.path.relpath(path, toplevel_dir))
      else:
        rel_paths.add(path)
    for path in sorted(rel_paths):
      entry_element = ET.SubElement(result, 'classpathentry')
      entry_element.set('kind', kind)
      entry_element.set('path', path)
  AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
  AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
  # Include the standard JRE container and a dummy out folder
  AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
  # Include a dummy out folder so that Eclipse doesn't use the default /bin
  # folder in the root of the project.
  AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])
  ET.ElementTree(result).write(out_name)
def GetJavaJars(target_list, target_dicts, toplevel_dir):
  '''Generates a sequence of all .jars used as inputs.'''
  for target_name in target_list:
    actions = target_dicts[target_name].get('actions', [])
    for action in actions:
      for action_input in action['inputs']:
        # Skip generated inputs ('$...') and anything that is not a jar.
        is_jar = os.path.splitext(action_input)[1] == '.jar'
        if not is_jar or action_input.startswith('$'):
          continue
        if os.path.isabs(action_input):
          yield action_input
        else:
          # Relative inputs are relative to the .gyp file's directory.
          yield os.path.join(os.path.dirname(target_name), action_input)
def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
  '''Generates a sequence of all likely java package root directories.'''
  for target_name in target_list:
    target = target_dicts[target_name]
    for action in target.get('actions', []):
      for action_input in action['inputs']:
        # Only consider real (non-generated) .java action inputs.
        if (os.path.splitext(action_input)[1] != '.java' or
            action_input.startswith('$')):
          continue
        dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
                                            action_input))
        # If there is a parent 'src' or 'java' folder, navigate up to it -
        # these are canonical package root names in Chromium.  This will
        # break if 'src' or 'java' exists in the package structure.  This
        # could be further improved by inspecting the java file for the
        # package name if this proves to be too fragile in practice.
        candidate = dir_
        while os.path.basename(candidate) not in ['src', 'java']:
          candidate, _ = os.path.split(candidate)
          if not candidate or candidate == toplevel_dir:
            # Didn't find a known root, just return the original path
            yield dir_
            break
        else:
          yield candidate
def GenerateOutput(target_list, target_dicts, data, params):
  """Generate an XML settings file that can be imported into a CDT project."""
  if params['options'].generator_output:
    raise NotImplementedError("--generator_output not implemented for eclipse")
  user_config = params.get('generator_flags', {}).get('config', None)
  # Honor an explicit config from the generator flags; otherwise emit
  # settings for every configuration declared on the first target.
  if user_config:
    config_names = [user_config]
  else:
    config_names = target_dicts[target_list[0]]['configurations'].keys()
  for config_name in config_names:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name)
| mit |
lmazuel/ansible | lib/ansible/module_utils/dellos10.py | 51 | 5728 | #
# (c) 2015 Peter Sprygada, <psprygada@ansible.com>
# (c) 2017 Red Hat, Inc
#
# Copyright (c) 2016 Dell Inc.
#
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import re
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback, return_values
from ansible.module_utils.network_common import to_list, ComplexList
from ansible.module_utils.connection import exec_command
from ansible.module_utils.netcfg import NetworkConfig, ConfigLine
# Cache of device running-configs keyed by the exact command string, so
# repeated get_config() calls hit the device only once per flag set.
_DEVICE_CONFIGS = {}
# Confirmation prompts the device may present; matched so commands can
# answer them automatically.
WARNING_PROMPTS_RE = [
    r"[\r\n]?\[confirm yes/no\]:\s?$",
    r"[\r\n]?\[y/n\]:\s?$",
    r"[\r\n]?\[yes/no\]:\s?$"
]
# Common argument spec shared by dellos10 modules; credentials fall back to
# the ANSIBLE_NET_* environment variables.
dellos10_argument_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
    'timeout': dict(type='int'),
    'provider': dict(type='dict'),
}
def check_args(module, warnings):
    """Append deprecation warnings for top-level connection arguments and
    register provider credentials as no-log values."""
    provider = module.params['provider'] or {}
    deprecated = [key for key in dellos10_argument_spec
                  if key != 'provider' and module.params[key]]
    for key in deprecated:
        warnings.append('argument %s has been deprecated and will be '
                        'removed in a future version' % key)
    # Make sure secrets inside the provider dict never reach the logs.
    for param in ('auth_pass', 'password'):
        if provider.get(param):
            module.no_log_values.update(return_values(provider[param]))
def get_config(module, flags=None):
    """Return the device's running configuration, memoized per command.

    Args:
        module: the AnsibleModule executing the task.
        flags: optional list of extra tokens appended to
            'show running-config'.

    Results are cached in the module-level _DEVICE_CONFIGS dict keyed by the
    full command string, so the device is queried at most once per flag set.
    """
    # FIX: the original used a mutable default argument (flags=[]); a shared
    # list default is visible to — and mutable by — every caller.
    flags = [] if flags is None else flags
    cmd = 'show running-config '
    cmd += ' '.join(flags)
    cmd = cmd.strip()
    try:
        return _DEVICE_CONFIGS[cmd]
    except KeyError:
        rc, out, err = exec_command(module, cmd)
        if rc != 0:
            module.fail_json(msg='unable to retrieve current config', stderr=to_text(err, errors='surrogate_or_strict'))
        cfg = to_text(out, errors='surrogate_or_strict').strip()
        _DEVICE_CONFIGS[cmd] = cfg
        return cfg
def to_commands(module, commands):
    """Normalize commands into ComplexList entries.

    Each entry carries the command text (the identifying key) plus optional
    'prompt' and 'answer' fields for interactive confirmation handling.
    """
    spec = {
        'command': dict(key=True),
        'prompt': dict(),
        'answer': dict()
    }
    transform = ComplexList(spec, module)
    return transform(commands)
def run_commands(module, commands, check_rc=True):
    """Run one or more commands on the device and return the decoded
    output of each, in order.

    When check_rc is true (the default) a non-zero return code fails the
    task immediately.
    """
    responses = []
    for command in to_commands(module, to_list(commands)):
        rc, out, err = exec_command(module, module.jsonify(command))
        if check_rc and rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_or_strict'), rc=rc)
        responses.append(to_text(out, errors='surrogate_or_strict'))
    return responses
def load_config(module, commands):
    """Enter configuration mode, push *commands*, commit, and exit.

    Note: 'commit' is appended to the caller's list (the list is mutated),
    any confirmation prompt is answered with 'yes', and 'end' entries are
    skipped because configuration mode is left explicitly at the end.
    """
    rc, out, err = exec_command(module, 'configure terminal')
    if rc != 0:
        module.fail_json(msg='unable to enter configuration mode', err=to_text(err, errors='surrogate_or_strict'))
    commands.append('commit')
    for command in to_list(commands):
        if command == 'end':
            continue
        cmd = {'command': command, 'prompt': WARNING_PROMPTS_RE, 'answer': 'yes'}
        rc, out, err = exec_command(module, module.jsonify(cmd))
        if rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_or_strict'), command=command, rc=rc)
    exec_command(module, 'end')
def get_sublevel_config(running_config, module):
    """Extract the config block under module.params['parents'] from
    *running_config* and return it as newline-joined, indented text.
    """
    contents = list()
    current_config_contents = list()
    running_config = NetworkConfig(contents=running_config, indent=1)
    obj = running_config.get_object(module.params['parents'])
    if obj:
        contents = obj.children
    # Prepend the parent lines themselves so the block is self-contained.
    contents[:0] = module.params['parents']
    indent = 0
    for c in contents:
        if isinstance(c, str):
            # Parent lines are plain strings; pad them to the current indent.
            current_config_contents.append(c.rjust(len(c) + indent, ' '))
        if isinstance(c, ConfigLine):
            # Child lines keep their original (raw) indentation.
            current_config_contents.append(c.raw)
        # Everything after the first line is indented one space.
        indent = 1
    sublevel_config = '\n'.join(current_config_contents)
    return sublevel_config
| gpl-3.0 |
mancoast/CPythonPyc_test | fail/341_test_epoll.py | 79 | 8882 | # Copyright (c) 2001-2006 Twisted Matrix Laboratories.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Tests for epoll wrapper.
"""
import errno
import os
import select
import socket
import time
import unittest
from test import support
# epoll is Linux-specific; skip the whole module on other platforms.
if not hasattr(select, "epoll"):
    raise unittest.SkipTest("test works only on Linux 2.6")
try:
    select.epoll()
except OSError as e:
    # The binding may exist while the syscall is missing (old kernels).
    if e.errno == errno.ENOSYS:
        raise unittest.SkipTest("kernel doesn't support epoll()")
    raise
class TestEPoll(unittest.TestCase):
    """Tests for the select.epoll wrapper (ported from Twisted)."""
    def setUp(self):
        # Listening socket on an ephemeral localhost port; every socket
        # created during a test is tracked in self.connections for cleanup.
        self.serverSocket = socket.socket()
        self.serverSocket.bind(('127.0.0.1', 0))
        self.serverSocket.listen(1)
        self.connections = [self.serverSocket]
    def tearDown(self):
        for skt in self.connections:
            skt.close()
    def _connected_pair(self):
        # Build a connected, non-blocking client/server socket pair; the
        # non-blocking connect() is expected to raise EINPROGRESS.
        client = socket.socket()
        client.setblocking(False)
        try:
            client.connect(('127.0.0.1', self.serverSocket.getsockname()[1]))
        except OSError as e:
            self.assertEqual(e.args[0], errno.EINPROGRESS)
        else:
            raise AssertionError("Connect should have raised EINPROGRESS")
        server, addr = self.serverSocket.accept()
        self.connections.extend((client, server))
        return client, server
    def test_create(self):
        try:
            ep = select.epoll(16)
        except OSError as e:
            raise AssertionError(str(e))
        self.assertTrue(ep.fileno() > 0, ep.fileno())
        self.assertTrue(not ep.closed)
        ep.close()
        self.assertTrue(ep.closed)
        self.assertRaises(ValueError, ep.fileno)
        if hasattr(select, "EPOLL_CLOEXEC"):
            select.epoll(select.EPOLL_CLOEXEC).close()
            self.assertRaises(OSError, select.epoll, flags=12356)
    def test_badcreate(self):
        # Constructor rejects anything that is not a plain sizehint int.
        self.assertRaises(TypeError, select.epoll, 1, 2, 3)
        self.assertRaises(TypeError, select.epoll, 'foo')
        self.assertRaises(TypeError, select.epoll, None)
        self.assertRaises(TypeError, select.epoll, ())
        self.assertRaises(TypeError, select.epoll, ['foo'])
        self.assertRaises(TypeError, select.epoll, {})
    def test_context_manager(self):
        # Leaving the with-block must close the epoll object.
        with select.epoll(16) as ep:
            self.assertGreater(ep.fileno(), 0)
            self.assertFalse(ep.closed)
        self.assertTrue(ep.closed)
        self.assertRaises(ValueError, ep.fileno)
    def test_add(self):
        server, client = self._connected_pair()
        ep = select.epoll(2)
        try:
            ep.register(server.fileno(), select.EPOLLIN | select.EPOLLOUT)
            ep.register(client.fileno(), select.EPOLLIN | select.EPOLLOUT)
        finally:
            ep.close()
        # adding by object w/ fileno works, too.
        ep = select.epoll(2)
        try:
            ep.register(server, select.EPOLLIN | select.EPOLLOUT)
            ep.register(client, select.EPOLLIN | select.EPOLLOUT)
        finally:
            ep.close()
        ep = select.epoll(2)
        try:
            # TypeError: argument must be an int, or have a fileno() method.
            self.assertRaises(TypeError, ep.register, object(),
                              select.EPOLLIN | select.EPOLLOUT)
            self.assertRaises(TypeError, ep.register, None,
                              select.EPOLLIN | select.EPOLLOUT)
            # ValueError: file descriptor cannot be a negative integer (-1)
            self.assertRaises(ValueError, ep.register, -1,
                              select.EPOLLIN | select.EPOLLOUT)
            # OSError: [Errno 9] Bad file descriptor
            self.assertRaises(OSError, ep.register, 10000,
                              select.EPOLLIN | select.EPOLLOUT)
            # registering twice also raises an exception
            ep.register(server, select.EPOLLIN | select.EPOLLOUT)
            self.assertRaises(OSError, ep.register, server,
                              select.EPOLLIN | select.EPOLLOUT)
        finally:
            ep.close()
    def test_fromfd(self):
        # An epoll object created from another's fd shares its kernel state.
        server, client = self._connected_pair()
        ep = select.epoll(2)
        ep2 = select.epoll.fromfd(ep.fileno())
        ep2.register(server.fileno(), select.EPOLLIN | select.EPOLLOUT)
        ep2.register(client.fileno(), select.EPOLLIN | select.EPOLLOUT)
        events = ep.poll(1, 4)
        events2 = ep2.poll(0.9, 4)
        self.assertEqual(len(events), 2)
        self.assertEqual(len(events2), 2)
        ep.close()
        try:
            ep2.poll(1, 4)
        except OSError as e:
            self.assertEqual(e.args[0], errno.EBADF, e)
        else:
            self.fail("epoll on closed fd didn't raise EBADF")
    def test_control_and_wait(self):
        client, server = self._connected_pair()
        ep = select.epoll(16)
        ep.register(server.fileno(),
                    select.EPOLLIN | select.EPOLLOUT | select.EPOLLET)
        ep.register(client.fileno(),
                    select.EPOLLIN | select.EPOLLOUT | select.EPOLLET)
        # Both ends are immediately writable; poll must not block long.
        now = time.monotonic()
        events = ep.poll(1, 4)
        then = time.monotonic()
        self.assertFalse(then - now > 0.1, then - now)
        events.sort()
        expected = [(client.fileno(), select.EPOLLOUT),
                    (server.fileno(), select.EPOLLOUT)]
        expected.sort()
        self.assertEqual(events, expected)
        # Edge-triggered: no new events until new data arrives.
        events = ep.poll(timeout=2.1, maxevents=4)
        self.assertFalse(events)
        client.send(b"Hello!")
        server.send(b"world!!!")
        now = time.monotonic()
        events = ep.poll(1, 4)
        then = time.monotonic()
        self.assertFalse(then - now > 0.01)
        events.sort()
        expected = [(client.fileno(), select.EPOLLIN | select.EPOLLOUT),
                    (server.fileno(), select.EPOLLIN | select.EPOLLOUT)]
        expected.sort()
        self.assertEqual(events, expected)
        ep.unregister(client.fileno())
        ep.modify(server.fileno(), select.EPOLLOUT)
        now = time.monotonic()
        events = ep.poll(1, 4)
        then = time.monotonic()
        self.assertFalse(then - now > 0.01)
        expected = [(server.fileno(), select.EPOLLOUT)]
        self.assertEqual(events, expected)
    def test_errors(self):
        self.assertRaises(ValueError, select.epoll, -2)
        self.assertRaises(ValueError, select.epoll().register, -1,
                          select.EPOLLIN)
    def test_unregister_closed(self):
        # Unregistering an fd whose socket was closed must not raise.
        server, client = self._connected_pair()
        fd = server.fileno()
        ep = select.epoll(16)
        ep.register(server)
        now = time.monotonic()
        events = ep.poll(1, 4)
        then = time.monotonic()
        self.assertFalse(then - now > 0.01)
        server.close()
        ep.unregister(fd)
    def test_close(self):
        open_file = open(__file__, "rb")
        self.addCleanup(open_file.close)
        fd = open_file.fileno()
        epoll = select.epoll()
        # test fileno() method and closed attribute
        self.assertIsInstance(epoll.fileno(), int)
        self.assertFalse(epoll.closed)
        # test close()
        epoll.close()
        self.assertTrue(epoll.closed)
        self.assertRaises(ValueError, epoll.fileno)
        # close() can be called more than once
        epoll.close()
        # operations must fail with ValueError("I/O operation on closed ...")
        self.assertRaises(ValueError, epoll.modify, fd, select.EPOLLIN)
        self.assertRaises(ValueError, epoll.poll, 1.0)
        self.assertRaises(ValueError, epoll.register, fd, select.EPOLLIN)
        self.assertRaises(ValueError, epoll.unregister, fd)
    def test_fd_non_inheritable(self):
        # PEP 446: descriptors created by the stdlib are non-inheritable.
        epoll = select.epoll()
        self.addCleanup(epoll.close)
        self.assertEqual(os.get_inheritable(epoll.fileno()), False)
def test_main():
    # Entry point used by the regression-test machinery.
    support.run_unittest(TestEPoll)
if __name__ == "__main__":
    test_main()
| gpl-3.0 |
hchen1202/django-react | virtualenv/lib/python3.6/site-packages/django/db/migrations/operations/fields.py | 64 | 12272 | from __future__ import unicode_literals
from django.db.models.fields import NOT_PROVIDED
from django.utils.functional import cached_property
from .base import Operation
class FieldOperation(Operation):
    """Common base for migration operations that act on a single field."""

    def __init__(self, model_name, name):
        self.model_name = model_name
        self.name = name

    @cached_property
    def model_name_lower(self):
        # Cached: migrations compare model names case-insensitively.
        return self.model_name.lower()

    @cached_property
    def name_lower(self):
        # Cached: field names are compared case-insensitively, too.
        return self.name.lower()

    def is_same_model_operation(self, operation):
        """True if *operation* targets the same model as this one."""
        return operation.model_name_lower == self.model_name_lower

    def is_same_field_operation(self, operation):
        """True if *operation* targets the same model and the same field."""
        if not self.is_same_model_operation(operation):
            return False
        return operation.name_lower == self.name_lower

    def references_model(self, name, app_label=None):
        return self.model_name_lower == name.lower()

    def references_field(self, model_name, name, app_label=None):
        if not self.references_model(model_name):
            return False
        return self.name_lower == name.lower()

    def reduce(self, operation, in_between, app_label=None):
        reduced = super(FieldOperation, self).reduce(
            operation, in_between, app_label=app_label)
        # Operations that never touch this field can be optimized across.
        return reduced or not operation.references_field(
            self.model_name, self.name, app_label)
class AddField(FieldOperation):
    """
    Adds a field to a model.
    """
    def __init__(self, model_name, name, field, preserve_default=True):
        self.field = field
        # When False, self.field.default is applied only while populating
        # existing rows, then dropped from the field's state.
        self.preserve_default = preserve_default
        super(AddField, self).__init__(model_name, name)
    def deconstruct(self):
        # Return (name, args, kwargs) so the operation can be re-serialized
        # into a migration file.
        kwargs = {
            'model_name': self.model_name,
            'name': self.name,
            'field': self.field,
        }
        if self.preserve_default is not True:
            kwargs['preserve_default'] = self.preserve_default
        return (
            self.__class__.__name__,
            [],
            kwargs
        )
    def state_forwards(self, app_label, state):
        # If preserve default is off, don't use the default for future state
        if not self.preserve_default:
            field = self.field.clone()
            field.default = NOT_PROVIDED
        else:
            field = self.field
        state.models[app_label, self.model_name_lower].fields.append((self.name, field))
        state.reload_model(app_label, self.model_name_lower)
    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        to_model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, to_model):
            from_model = from_state.apps.get_model(app_label, self.model_name)
            field = to_model._meta.get_field(self.name)
            # Temporarily restore the one-off default so existing rows get a
            # value, then remove it again after the column is added.
            if not self.preserve_default:
                field.default = self.field.default
            schema_editor.add_field(
                from_model,
                field,
            )
            if not self.preserve_default:
                field.default = NOT_PROVIDED
    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Reverse of adding a field is dropping it.
        from_model = from_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, from_model):
            schema_editor.remove_field(from_model, from_model._meta.get_field(self.name))
    def describe(self):
        return "Add field %s to %s" % (self.name, self.model_name)
    def reduce(self, operation, in_between, app_label=None):
        # Fold a later operation on the same field into this AddField when
        # the combined effect is expressible as a single operation.
        if isinstance(operation, FieldOperation) and self.is_same_field_operation(operation):
            if isinstance(operation, AlterField):
                # Add-then-alter == add the altered field directly.
                return [
                    AddField(
                        model_name=self.model_name,
                        name=operation.name,
                        field=operation.field,
                    ),
                ]
            elif isinstance(operation, RemoveField):
                # Add-then-remove cancels out entirely.
                return []
            elif isinstance(operation, RenameField):
                # Add-then-rename == add under the new name.
                return [
                    AddField(
                        model_name=self.model_name,
                        name=operation.new_name,
                        field=self.field,
                    ),
                ]
        return super(AddField, self).reduce(operation, in_between, app_label=app_label)
class RemoveField(FieldOperation):
    """
    Removes a field from a model.
    """
    def deconstruct(self):
        # Return (name, args, kwargs) so the operation can be re-serialized
        # into a migration file.
        kwargs = {
            'model_name': self.model_name,
            'name': self.name,
        }
        return (
            self.__class__.__name__,
            [],
            kwargs
        )
    def state_forwards(self, app_label, state):
        # Rebuild the field list without the removed field.
        new_fields = []
        for name, instance in state.models[app_label, self.model_name_lower].fields:
            if name != self.name:
                new_fields.append((name, instance))
        state.models[app_label, self.model_name_lower].fields = new_fields
        state.reload_model(app_label, self.model_name_lower)
    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        from_model = from_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, from_model):
            schema_editor.remove_field(from_model, from_model._meta.get_field(self.name))
    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Reverse of removing a field is re-adding it from the target state.
        to_model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, to_model):
            from_model = from_state.apps.get_model(app_label, self.model_name)
            schema_editor.add_field(from_model, to_model._meta.get_field(self.name))
    def describe(self):
        return "Remove field %s from %s" % (self.name, self.model_name)
class AlterField(FieldOperation):
    """Alters a field's database column (e.g. null, max_length) to the provided new field."""

    def __init__(self, model_name, name, field, preserve_default=True):
        self.field = field
        self.preserve_default = preserve_default
        super(AlterField, self).__init__(model_name, name)

    def deconstruct(self):
        # (class name, args, kwargs) triple used to re-serialize the
        # operation into a migration file.
        kwargs = {
            'model_name': self.model_name,
            'name': self.name,
            'field': self.field,
        }
        if self.preserve_default is not True:
            kwargs['preserve_default'] = self.preserve_default
        return (self.__class__.__name__, [], kwargs)

    def state_forwards(self, app_label, state):
        # Swap the new field definition into the in-memory model state,
        # stripping the default when it is not meant to be preserved.
        if self.preserve_default:
            field = self.field
        else:
            field = self.field.clone()
            field.default = NOT_PROVIDED
        model_state = state.models[app_label, self.model_name_lower]
        model_state.fields = [
            (name, field if name == self.name else old_field)
            for name, old_field in model_state.fields
        ]
        state.reload_model(app_label, self.model_name_lower)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        """Alter the column from its old definition to the new one."""
        to_model = to_state.apps.get_model(app_label, self.model_name)
        if not self.allow_migrate_model(schema_editor.connection.alias, to_model):
            return
        from_model = from_state.apps.get_model(app_label, self.model_name)
        from_field = from_model._meta.get_field(self.name)
        to_field = to_model._meta.get_field(self.name)
        # Temporarily install the concrete default while altering, then put
        # NOT_PROVIDED back.
        if not self.preserve_default:
            to_field.default = self.field.default
        schema_editor.alter_field(from_model, from_field, to_field)
        if not self.preserve_default:
            to_field.default = NOT_PROVIDED

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Altering is symmetric: running forwards with the states swapped in
        # reverses the change.
        self.database_forwards(app_label, schema_editor, from_state, to_state)

    def describe(self):
        """Return a one-line human-readable summary of this operation."""
        return "Alter field {} on {}".format(self.name, self.model_name)

    def reduce(self, operation, in_between, app_label=None):
        """Fold a later operation on the same field into this AlterField."""
        if isinstance(operation, RemoveField) and self.is_same_field_operation(operation):
            # Altering then removing is just removing.
            return [operation]
        if isinstance(operation, RenameField) and self.is_same_field_operation(operation):
            # Rename first, then re-apply the alteration under the new name.
            return [
                operation,
                AlterField(
                    model_name=self.model_name,
                    name=operation.new_name,
                    field=self.field,
                ),
            ]
        return super(AlterField, self).reduce(operation, in_between, app_label=app_label)
class RenameField(FieldOperation):
    """
    Renames a field on the model. Might affect db_column too.
    """
    def __init__(self, model_name, old_name, new_name):
        # self.name (via FieldOperation) is the *old* name; the target name
        # is kept separately in new_name.
        self.old_name = old_name
        self.new_name = new_name
        super(RenameField, self).__init__(model_name, old_name)
    @cached_property
    def old_name_lower(self):
        # Cached lower-cased names: comparisons in reduce()/references_field()
        # are case-insensitive and repeated.
        return self.old_name.lower()
    @cached_property
    def new_name_lower(self):
        return self.new_name.lower()
    def deconstruct(self):
        # (class name, args, kwargs) triple used to re-serialize the
        # operation into a migration file.
        kwargs = {
            'model_name': self.model_name,
            'old_name': self.old_name,
            'new_name': self.new_name,
        }
        return (
            self.__class__.__name__,
            [],
            kwargs
        )
    def state_forwards(self, app_label, state):
        # Rename the field
        state.models[app_label, self.model_name_lower].fields = [
            (self.new_name if n == self.old_name else n, f)
            for n, f in state.models[app_label, self.model_name_lower].fields
        ]
        # Fix index/unique_together to refer to the new field
        options = state.models[app_label, self.model_name_lower].options
        for option in ('index_together', 'unique_together'):
            if option in options:
                options[option] = [
                    [self.new_name if n == self.old_name else n for n in together]
                    for together in options[option]
                ]
        state.reload_model(app_label, self.model_name_lower)
    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        # Forwards: alter the column from its old-named to its new-named form.
        to_model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, to_model):
            from_model = from_state.apps.get_model(app_label, self.model_name)
            schema_editor.alter_field(
                from_model,
                from_model._meta.get_field(self.old_name),
                to_model._meta.get_field(self.new_name),
            )
    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Backwards: same alteration with old/new names swapped.
        to_model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, to_model):
            from_model = from_state.apps.get_model(app_label, self.model_name)
            schema_editor.alter_field(
                from_model,
                from_model._meta.get_field(self.new_name),
                to_model._meta.get_field(self.old_name),
            )
    def describe(self):
        return "Rename field %s on %s to %s" % (self.old_name, self.model_name, self.new_name)
    def references_field(self, model_name, name, app_label=None):
        # A rename "references" both the old and the new field name.
        return self.references_model(model_name) and (
            name.lower() == self.old_name_lower or
            name.lower() == self.new_name_lower
        )
    def reduce(self, operation, in_between, app_label=None):
        # Collapse two consecutive renames of the same field (A->B then B->C)
        # into a single rename (A->C).
        if (isinstance(operation, RenameField) and
            self.is_same_model_operation(operation) and
            self.new_name_lower == operation.old_name_lower):
            return [
                RenameField(
                    self.model_name,
                    self.old_name,
                    operation.new_name,
                ),
            ]
        # Skip `FieldOperation.reduce` as we want to run `references_field`
        # against self.new_name.
        # NOTE: this deliberately returns a boolean (rather than a list) when
        # the grandparent reduce() yields nothing truthy: True means "the two
        # operations are unrelated and may be optimized across".
        return (
            super(FieldOperation, self).reduce(operation, in_between, app_label=app_label) or
            not operation.references_field(self.model_name, self.new_name, app_label)
        )
| mit |
jandersson/website | lib/flask/globals.py | 783 | 1137 | # -*- coding: utf-8 -*-
"""
flask.globals
~~~~~~~~~~~~~
Defines all the global objects that are proxies to the current
active context.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from functools import partial
from werkzeug.local import LocalStack, LocalProxy
def _lookup_req_object(name):
    """Fetch attribute *name* from the topmost request context."""
    ctx = _request_ctx_stack.top
    if ctx is None:
        raise RuntimeError('working outside of request context')
    return getattr(ctx, name)
def _lookup_app_object(name):
    """Fetch attribute *name* from the topmost application context."""
    ctx = _app_ctx_stack.top
    if ctx is None:
        raise RuntimeError('working outside of application context')
    return getattr(ctx, name)
def _find_app():
    """Return the application bound to the topmost application context."""
    ctx = _app_ctx_stack.top
    if ctx is None:
        raise RuntimeError('working outside of application context')
    return ctx.app
# context locals
_request_ctx_stack = LocalStack()
_app_ctx_stack = LocalStack()
# Module-level proxies: each resolves against the active context stack on
# every access, so they can be imported once and always point at the
# "current" object.
current_app = LocalProxy(_find_app)
request = LocalProxy(partial(_lookup_req_object, 'request'))
session = LocalProxy(partial(_lookup_req_object, 'session'))
g = LocalProxy(partial(_lookup_app_object, 'g'))
| apache-2.0 |
TRESCLOUD/odoo | addons/base_status/__init__.py | 61 | 1087 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP SA (<http://openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base_state
import base_stage
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
stackforge/tricircle | setup.py | 154 | 1030 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
    import multiprocessing  # noqa
except ImportError:
    pass
# All real packaging metadata lives in setup.cfg; pbr reads it at build time.
setuptools.setup(
    setup_requires=['pbr>=2.0.0'],
    pbr=True)
| apache-2.0 |
wjw12/emc | density.py | 1 | 3126 | from particle import *
@mlab.show
def show(a):
    """Render the 3-D array *a* as a mayavi volume."""
    field = mlab.pipeline.scalar_field(a)
    mlab.pipeline.volume(field)
@mlab.show
def drawp(points):
    """Scatter-plot an (N, 3) array of points with mayavi."""
    xs, ys, zs = points[:, 0], points[:, 1], points[:, 2]
    mlab.points3d(xs, ys, zs)
def test0(n=500):
    """Rotate the first Fibonacci-sphere point onto every other sample.

    For each sampled direction p1, builds the rotation matrix taking p0 to
    p1 (Rodrigues' rotation formula in matrix form), applies it to p0 and
    plots the reconstructed point set, which should coincide with the
    original samples.

    The original version also built a 17^3 volume (``p``/``m``/``new_m``)
    and an identity matrix ``eye`` that were never used; that dead code has
    been removed.  The hard-coded sample count is now the ``n`` parameter
    (default keeps the original behaviour).
    """
    points = fibonacciSphere(n)
    p0 = points[0]
    rot = []
    for p1 in points[1:]:
        v = np.cross(p0, p1)
        s = np.linalg.norm(v)  # sin of angle between the vectors
        c = np.dot(p0, p1)     # cos of angle between the vectors
        v = makeCrossMatrix(v)
        # NOTE(review): divides by s*s, so this degenerates when p1 is
        # (anti)parallel to p0 — same as the original code.
        vv = (1 - c) / (s * s) * np.dot(v, v)
        rot.append(np.eye(3) + v + vv)
    ro_points = np.zeros((n, 3))
    ro_points[0] = p0
    for i in range(1, n):
        ro_points[i] = np.dot(rot[i - 1], p0)
    drawp(ro_points)
def test():
    """Average a flat plane over ~500 random orientations and inspect the result.

    Builds a 17^3 volume containing the plane y == 8, rotates it onto each
    Fibonacci-sphere direction with an affine transform, sums the rotated
    copies, then plots the intensity profile along the central axis.

    The unused identity matrix ``eye`` and the dead loop counter ``c`` from
    the original version have been removed; behaviour is unchanged.
    """
    import matplotlib.pyplot as plt
    # rotate a plane, without rotating around an axis
    a = 17
    r = 8
    p = np.ones((a, a))
    m = np.zeros((a, a, a))
    m[:, r, :] = p
    new_m = np.zeros((a, a, a))
    points = fibonacciSphere(500)
    rot = []
    p0 = points[0]
    for p1 in points[1:]:
        v = np.cross(p0, p1)
        s = np.linalg.norm(v)  # sin of angle between the vectors
        c = np.dot(p0, p1)     # cos of angle between the vectors
        v = makeCrossMatrix(v)
        vv = (1 - c) / (s * s) * np.dot(v, v)
        rot.append(np.eye(3) + v + vv)
    show(m)
    # compress all planes of random directions by simply adding them together
    for rot_mat in rot:
        displace = np.array([r, r, r])
        # rotate about the cube centre: shift, rotate, shift back
        offset = -np.dot(rot_mat, displace) + displace
        new_m += affine_transform(m, rot_mat, offset)
    show(new_m)
    # profile along the z axis through the centre
    y = new_m[r, r, :]
    x = list(range(len(y)))
    plt.plot(x, y)
    plt.show()
def test__():
    """Plane-averaging experiment sweeping each direction through a full
    circle of in-plane angles (axis-angle / Rodrigues matrix form)."""
    size = 17
    mid = 8
    m = np.zeros((size, size, size))
    m[:, mid, :] = np.ones((size, size))
    new_m = np.zeros((size, size, size))
    n_angles = 50
    # store the sin/cos values to reuse
    angles = np.linspace(0, 2 * np.pi, n_angles)
    sines = np.sin(angles)
    cosines = np.cos(angles)
    identity = np.eye(3)
    rotations = []
    for direction in fibonacciSphere(200):
        k = makeCrossMatrix(direction)
        kk = np.dot(k, k)
        for j in range(n_angles):
            rotations.append(identity + k * sines[j] + kk * (1 - cosines[j]))
    # compress all planes of random directions by simply adding them together
    for rot in rotations:
        displace = np.array([mid, mid, mid])
        offset = -np.dot(rot, displace) + displace
        new_m += affine_transform(m, rot, offset, order=5)
    show(m)
    show(new_m)
def test2(n):
    """Average ``n`` randomly oriented copies of an axis-aligned plane."""
    from numpy.linalg import qr
    size = 17
    mid = 8
    m = np.zeros((size, size, size))
    m[:, mid, :] = np.ones((size, size))
    new_m = np.zeros((size, size, size))
    displace = np.array([mid, mid, mid])
    for _ in range(n):
        q, r = qr(np.random.randn(3, 3))
        # normalise the signs of q's columns via r's diagonal (presumably to
        # make the random rotation distribution uniform -- verify)
        d = np.diagonal(r)
        q = np.multiply(q, d / np.abs(d))
        offset = -np.dot(q, displace) + displace
        new_m += affine_transform(m, q, offset, order=5)
    show(new_m)
cwisecarver/osf.io | api_tests/registrations/views/test_withdrawn_registrations.py | 3 | 7970 | from urlparse import urlparse
from api_tests.nodes.views.test_node_contributors_list import NodeCRUDTestCase
from nose.tools import * # flake8: noqa
from api.base.settings.defaults import API_BASE
from framework.auth.core import Auth
from tests.base import fake
from osf_tests.factories import (
ProjectFactory,
CommentFactory,
RegistrationFactory,
WithdrawnRegistrationFactory,
)
class TestWithdrawnRegistrations(NodeCRUDTestCase):
    """API tests for withdrawn registrations: which endpoints stay readable
    (200), which are forbidden (403) or gone (404/405), and which attributes
    and relationships the serializer still exposes after withdrawal."""
    def setUp(self):
        # Create a registration of the public project, withdraw it, and add a
        # node-link pointer so the pointer endpoints can be exercised below.
        super(TestWithdrawnRegistrations, self).setUp()
        self.registration = RegistrationFactory(creator=self.user, project=self.public_project)
        self.withdrawn_registration = WithdrawnRegistrationFactory(registration=self.registration, user=self.registration.creator)
        self.public_pointer_project = ProjectFactory(is_public=True)
        self.public_pointer = self.public_project.add_pointer(self.public_pointer_project,
                                                              auth=Auth(self.user),
                                                              save=True)
        self.withdrawn_url = '/{}registrations/{}/?version=2.2'.format(API_BASE, self.registration._id)
        self.withdrawn_registration.justification = 'We made a major error.'
        self.withdrawn_registration.save()
    # Contributor endpoints remain visible after withdrawal.
    def test_can_access_withdrawn_contributors(self):
        url = '/{}registrations/{}/contributors/'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 200)
    # Most child/content endpoints are forbidden (403) once withdrawn.
    def test_cannot_access_withdrawn_children(self):
        url = '/{}registrations/{}/children/'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 403)
    def test_cannot_access_withdrawn_comments(self):
        self.public_project = ProjectFactory(is_public=True, creator=self.user)
        self.public_comment = CommentFactory(node=self.public_project, user=self.user)
        url = '/{}registrations/{}/comments/'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 403)
    def test_can_access_withdrawn_contributor_detail(self):
        url = '/{}registrations/{}/contributors/{}/'.format(API_BASE, self.registration._id, self.user._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 200)
    # A registration id is not addressable through the /nodes/ namespace.
    def test_cannot_return_a_withdrawn_registration_at_node_detail_endpoint(self):
        url = '/{}nodes/{}/'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 404)
    def test_cannot_delete_a_withdrawn_registration(self):
        url = '/{}registrations/{}/'.format(API_BASE, self.registration._id)
        res = self.app.delete_json_api(url, auth=self.user.auth, expect_errors=True)
        self.registration.reload()
        assert_equal(res.status_code, 405)
    def test_cannot_access_withdrawn_files_list(self):
        url = '/{}registrations/{}/files/'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 403)
    def test_cannot_access_withdrawn_node_links_detail(self):
        url = '/{}registrations/{}/node_links/{}/'.format(API_BASE, self.registration._id, self.public_pointer._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 403)
    def test_cannot_access_withdrawn_node_links_list(self):
        url = '/{}registrations/{}/node_links/'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 403)
    def test_cannot_access_withdrawn_node_logs(self):
        self.public_project = ProjectFactory(is_public=True, creator=self.user)
        url = '/{}registrations/{}/logs/'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 403)
    def test_cannot_access_withdrawn_registrations_list(self):
        self.registration.save()
        url = '/{}registrations/{}/registrations/'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 403)
    # The detail serializer nulls out most attributes and hides most
    # relationships; only the fields asserted below survive withdrawal.
    def test_withdrawn_registrations_display_limited_fields(self):
        registration = self.registration
        res = self.app.get(self.withdrawn_url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        attributes = res.json['data']['attributes']
        registration.reload()
        expected_attributes = {
            'title': registration.title,
            'description': registration.description,
            'date_created': registration.date_created.isoformat().replace('+00:00', 'Z'),
            'date_registered': registration.registered_date.isoformat().replace('+00:00', 'Z'),
            'date_modified': registration.date_modified.isoformat().replace('+00:00', 'Z'),
            'date_withdrawn': registration.retraction.date_retracted.isoformat().replace('+00:00', 'Z'),
            'withdrawal_justification': registration.retraction.justification,
            'public': None,
            'category': None,
            'registration': True,
            'fork': None,
            'collection': None,
            'tags': None,
            'withdrawn': True,
            'pending_withdrawal': None,
            'pending_registration_approval': None,
            'pending_embargo_approval': None,
            'embargo_end_date': None,
            'registered_meta': None,
            'current_user_permissions': None,
            'registration_supplement': registration.registered_schema.first().name
        }
        for attribute in expected_attributes:
            assert_equal(expected_attributes[attribute], attributes[attribute])
        contributors = urlparse(res.json['data']['relationships']['contributors']['links']['related']['href']).path
        assert_equal(contributors, '/{}registrations/{}/contributors/'.format(API_BASE, registration._id))
        assert_not_in('children', res.json['data']['relationships'])
        assert_not_in('comments', res.json['data']['relationships'])
        assert_not_in('node_links', res.json['data']['relationships'])
        assert_not_in('registrations', res.json['data']['relationships'])
        assert_not_in('parent', res.json['data']['relationships'])
        assert_not_in('forked_from', res.json['data']['relationships'])
        assert_not_in('files', res.json['data']['relationships'])
        assert_not_in('logs', res.json['data']['relationships'])
        assert_not_in('registered_by', res.json['data']['relationships'])
        assert_not_in('registered_from', res.json['data']['relationships'])
        assert_not_in('root', res.json['data']['relationships'])
    # related_counts requests for hidden relationships are ignored rather
    # than erroring; visible relationships still report counts.
    def test_field_specific_related_counts_ignored_if_hidden_field_on_withdrawn_registration(self):
        url = '/{}registrations/{}/?related_counts=children'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_not_in('children', res.json['data']['relationships'])
        assert_in('contributors', res.json['data']['relationships'])
    def test_field_specific_related_counts_retrieved_if_visible_field_on_withdrawn_registration(self):
        url = '/{}registrations/{}/?related_counts=contributors'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['relationships']['contributors']['links']['related']['meta']['count'], 1)
| apache-2.0 |
outboxafrica/pimaa | PiMaa/sensors/lib/GrovePi/grove_moisture_sensor.py | 2 | 2286 | #!/usr/bin/env python
#
# GrovePi Example for using the Grove Moisture Sensor (http://www.seeedstudio.com/wiki/Grove_-_Moisture_sensor)
#
# The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.dexterindustries.com/GrovePi
#
# Have a question about this example? Ask on the forums here: http://forum.dexterindustries.com/c/grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
# NOTE:
# The wiki suggests the following sensor values:
# Min Typ Max Condition
# 0 0 0 sensor in open air
# 0 20 300 sensor in dry soil
# 300 580 700 sensor in humid soil
# 700 940 950 sensor in water
# Sensor values observer:
# Val Condition
# 0 sensor in open air
# 18 sensor in dry soil
# 425 sensor in humid soil
# 690 sensor in water
import time
import grovepi
# Connect the Grove Moisture Sensor to analog port A0
# SIG,NC,VCC,GND
sensor = 0  # analog port number (A0)
# Poll the sensor twice a second until interrupted with Ctrl-C.
while True:
    try:
        print(grovepi.analogRead(sensor))
        time.sleep(.5)
    except KeyboardInterrupt:
        # Ctrl-C ends the polling loop cleanly.
        break
    except IOError:
        # Transient read failure on the GrovePi bus: report and keep polling.
        print ("Error")
| gpl-3.0 |
Rctue/nao-lib | gestures/Happy3.py | 1 | 11061 | # Choregraphe bezier export in Python.
from naoqi import ALProxy
names = list()
times = list()
keys = list()
names.append("HeadPitch")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.51487, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.51487, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.51487, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.38746, [ 3, -1.10000, 0.12741], [ 3, 0.06667, -0.00772]], [ 0.02793, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("HeadYaw")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ -0.00464, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.00464, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.00464, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ -0.00464, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.00464, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LAnklePitch")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.07052, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.07052, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.07052, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.07052, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.07052, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LAnkleRoll")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ -0.10734, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.10734, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.10734, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ -0.10734, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.10734, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LElbowRoll")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ -1.05418, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ -1.05418, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.52018, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ -0.52018, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.52018, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LElbowYaw")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ -1.23317, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ -1.23317, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ -1.20776, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ -1.20776, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ -1.20776, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LHand")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.00401, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.00405, [ 3, -0.06667, -0.00004], [ 3, 0.06667, 0.00004]], [ 0.00436, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.00436, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.00436, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LHipPitch")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.20406, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.20406, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.20406, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.20406, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.20406, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LHipRoll")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.11202, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.11202, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.11202, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.11202, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.11202, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LHipYawPitch")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ -0.16103, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.16103, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.16103, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ -0.16103, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.16103, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LKneePitch")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ -0.09055, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.09055, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.09055, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ -0.09055, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.09055, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LShoulderPitch")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 1.62490, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 1.22348, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 1.72000, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 1.72000, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 1.72000, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LShoulderRoll")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.36652, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.36652, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.27247, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.27247, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.27247, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("LWristYaw")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.59690, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.59690, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.09376, [ 3, -0.06667, 0.00568], [ 3, 1.10000, -0.09376]], [ 0.00000, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.00000, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RAnklePitch")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.06294, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.06294, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.06294, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.06294, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.06294, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RAnkleRoll")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.06600, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.06600, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.06600, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.06600, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.06600, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RElbowRoll")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.79936, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.79936, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.40653, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.40653, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.40653, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RElbowYaw")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.94771, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.94771, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.95059, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.95059, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.95059, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RHand")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.00716, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.00716, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.00716, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.00716, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.00716, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RHipPitch")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.18864, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.18864, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.18864, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.18864, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.18864, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RHipRoll")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ -0.06745, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.06745, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.06745, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ -0.06745, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.06745, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RHipYawPitch")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ -0.16103, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.16103, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.16103, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ -0.16103, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.16103, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RKneePitch")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ -0.07359, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.07359, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.07359, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ -0.07359, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.07359, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RShoulderPitch")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 1.42419, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 1.02102, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 1.49937, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 1.49937, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 1.49937, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RShoulderRoll")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ -0.50091, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.50091, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.18200, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ -0.18200, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ -0.18200, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
names.append("RWristYaw")
times.append([ 0.90000, 1.10000, 1.30000, 4.60000, 4.80000])
keys.append([ [ 0.23213, [ 3, -0.30000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.23213, [ 3, -0.06667, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.26529, [ 3, -0.06667, 0.00000], [ 3, 1.10000, 0.00000]], [ 0.26529, [ 3, -1.10000, 0.00000], [ 3, 0.06667, 0.00000]], [ 0.26529, [ 3, -0.06667, 0.00000], [ 3, 0.00000, 0.00000]]])
# Play back the keyframe animation defined above via NAOqi's ALMotion proxy.
# NOTE: Python 2 except syntax — this generated script targets the NAOqi SDK.
try:
    # uncomment the following line and modify the IP if you use this script outside Choregraphe.
    # motion = ALProxy("ALMotion", IP, 9559)
    motion = ALProxy("ALMotion")
    motion.angleInterpolationBezier(names, times, keys);
except BaseException, err:
    # Print any failure (e.g. no robot connected) instead of crashing.
    print err
| gpl-2.0 |
stefanv/scipy3 | scipy/weave/catalog.py | 2 | 29261 | """ Track relationships between compiled extension functions & code fragments
catalog keeps track of which compiled(or even standard) functions are
related to which code fragments. It also stores these relationships
to disk so they are remembered between Python sessions. When
a = 1
compiler.inline('printf("printed from C: %d",a);',['a'] )
is called, inline() first looks to see if it has seen the code
'printf("printed from C");' before. If not, it calls
catalog.get_functions('printf("printed from C: %d", a);')
which returns a list of all the function objects that have been compiled
for the code fragment. Multiple functions can occur because the code
could be compiled for different types for 'a' (although not likely in
this case). The catalog first looks in its cache and quickly returns
a list of the functions if possible. If the cache lookup fails, it then
looks through possibly multiple catalog files on disk and fills its
cache with all the functions that match the code fragment.
In case where the code fragment hasn't been compiled, inline() compiles
the code and then adds it to the catalog:
function = <code to compile function>
catalog.add_function('printf("printed from C: %d", a);',function)
add_function() adds function to the front of the cache. function,
along with the path information to its module, are also stored in a
persistent catalog for future use by python sessions.
"""
import os
import sys
import pickle
import socket
import tempfile
try:
import dbhash
import shelve
dumb = 0
except ImportError:
import scipy.io.dumb_shelve as shelve
dumb = 1
#For testing...
#import scipy.io.dumb_shelve as shelve
#dumb = 1
#import shelve
#dumb = 0
def getmodule(object):
    """ Discover the name of the module where object was defined.

    This is an augmented version of inspect.getmodule that can discover
    the parent module for extension functions, where inspect.getmodule
    returns None.  Returns a module object, or None if nothing matched.
    """
    import inspect
    value = inspect.getmodule(object)
    if value is None:
        # inspect failed (typical for extension/builtin functions) --
        # walk through all loaded modules looking for the object.
        for name,mod in sys.modules.items():
            # try/except used because of some comparison failures
            # in wxPoint code. Need to review this
            try:
                if mod and object in mod.__dict__.values():
                    value = mod
                    # if it is a built-in module, keep looking to see
                    # if a non-builtin also has it. Otherwise quit and
                    # consider the module found. (ain't perfect, but will
                    # have to do for now).
                    # NOTE(review): this substring test looks inverted --
                    # str(mod) can never be contained in '(built-in)', so
                    # the loop always breaks on the first match.  Confirm
                    # the intended test was "'(built-in)' not in str(mod)".
                    if str(mod) not in '(built-in)':
                        break
            except (TypeError, KeyError, ImportError):
                pass
    return value
def expr_to_filename(expr):
    """ Convert an arbitrary expr string to a valid file name.

    The name is the fixed prefix 'sc_' plus the MD5 hex digest of expr,
    so equal expressions always map to the same name and distinct
    expressions are effectively collision-free.  Something that was a
    little more human readable would be nice, but the computer doesn't
    seem to care.

    expr may be a byte string or, as a generalization, a unicode/text
    string (hashed as its UTF-8 encoding).
    """
    # hashlib is the standard-library home of MD5 (since Python 2.5) and
    # replaces the old scipy.weave.md5_load compatibility shim.
    import hashlib
    if not isinstance(expr, bytes):
        expr = expr.encode('utf-8')  # hashing requires bytes
    return 'sc_' + hashlib.md5(expr).hexdigest()
def unique_file(d,expr):
    """ Generate a unique file name based on expr in directory d.

    Intended for building extension modules: a stem is unique when none
    of '.cpp', '.o', '.so', 'module.so', '.py', or '.pyd' variants of it
    exist in d.  Returns the fully qualified path of the stem; append
    your own extension before creating files.
    """
    existing = os.listdir(d)
    #base = 'scipy_compile'
    stem = expr_to_filename(expr)
    blocked_suffixes = ('.cpp', '.o', '.so', 'module.so', '.py', '.pyd')
    for counter in range(1000000):
        candidate = stem + str(counter)
        # accept the first counter whose every suffixed variant is absent
        if not any(candidate + suffix in existing
                   for suffix in blocked_suffixes):
            break
    return os.path.join(d, candidate)
def create_dir(p):
    """ Create directory p, making any missing intermediate directories.

    Already-existing directories are left untouched.
    """
    if os.path.exists(p):
        return
    try:
        os.mkdir(p)
    except OSError:
        # A parent component is probably missing: build the parent chain
        # recursively, then retry.  The retry is deliberately outside any
        # try/except so that real failures reach the caller.
        parent, _unused = os.path.split(p)
        create_dir(parent)
        os.mkdir(p)
def is_writable(dir):
    """Portably determine whether a given directory is writable.

    :Parameters:
     - dir: string
       A string representing a path to a directory on the filesystem.

    :Returns:
      True if a temporary file could be created there, False otherwise.
    """
    # Do NOT use a hardcoded name here due to the danger from race
    # conditions on NFS when multiple processes access the same base
    # directory in parallel.  Hostname + pid in the prefix (plus the six
    # random chars tempfile appends) makes collisions effectively
    # impossible.
    probe_prefix = 'dummy_%s_%s_' % (socket.gethostname(), os.getpid())
    try:
        probe = tempfile.TemporaryFile(prefix=probe_prefix, dir=dir)
    except OSError:
        return False
    # Closing destroys the underlying file (already unlinked on *nix).
    probe.close()
    return True
def whoami():
    """Return a short string identifying the current user.

    Checks $USER (Unix) then $USERNAME (Windows); falls back to
    the literal "unknown" when neither is set (or both are empty).
    """
    for env_var in ("USER", "USERNAME"):
        user = os.environ.get(env_var)
        if user:
            return user
    return "unknown"
def default_dir():
    """ Return a default location to store compiled files and catalogs.

    XX is the Python version number in all paths listed below.

    On windows, the default location is the temporary directory
    returned by gettempdir()/<user>/pythonXX_compiled.

    On Unix, ~/.pythonXX_compiled is the default location. If it doesn't
    exist, it is created. The directory is marked rwx------.

    If for some reason it isn't possible to build a default directory
    in the user's home, /tmp/<uid>_pythonXX_compiled is used. If it
    doesn't exist, it is created. The directory is marked rwx------
    to try and keep people from being able to sneak a bad module
    in on you.
    """
    # Use a cached value for fast return if possible
    if hasattr(default_dir,"cached_path") and \
       os.path.exists(default_dir.cached_path) and \
       os.access(default_dir.cached_path, os.W_OK):
        return default_dir.cached_path
    python_name = "python%d%d_compiled" % tuple(sys.version_info[:2])
    path_candidates = []
    if sys.platform != 'win32':
        # Preferred: hidden directory under $HOME (may be unset, e.g. daemons).
        try:
            path_candidates.append(os.path.join(os.environ['HOME'],
                                                '.' + python_name))
        except KeyError:
            pass
        # Fallback: per-uid directory in the system temp dir.
        temp_dir = `os.getuid()` + '_' + python_name
        path_candidates.append(os.path.join(tempfile.gettempdir(), temp_dir))
    else:
        path_candidates.append(os.path.join(tempfile.gettempdir(),
                                            "%s" % whoami(), python_name))
    writable = False
    for path in path_candidates:
        if not os.path.exists(path):
            create_dir(path)
            os.chmod(path, 0700) # make it only accessible by this user.
        if is_writable(path):
            writable = True
            break
    if not writable:
        # Best-effort warning; 'path' is the last candidate tried.
        print 'warning: default directory is not write accessible.'
        print 'default:', path
    # Cache the default dir path so that this function returns quickly after
    # being called once (nothing in it should change after the first call)
    default_dir.cached_path = path
    return path
def intermediate_dir():
    """ Location in the temp dir for storing .cpp and .o files during
    builds.  Created on first use; per-user and per-Python-version.
    """
    version_tag = "python%d%d_intermediate" % tuple(sys.version_info[:2])
    build_dir = os.path.join(tempfile.gettempdir(), "%s" % whoami(),
                             version_tag)
    if not os.path.exists(build_dir):
        create_dir(build_dir)
    return build_dir
def default_temp_dir():
    """ Return (creating it on demand) the 'temp' subdirectory of
    default_dir(), restricted to the current user, warning (but still
    returning the path) if it is not writable.
    """
    path = os.path.join(default_dir(),'temp')
    if not os.path.exists(path):
        create_dir(path)
        os.chmod(path,0700) # make it only accessible by this user.
    if not is_writable(path):
        print 'warning: default directory is not write accessible.'
        print 'default:', path
    return path
def os_dependent_catalog_name():
    """ Generate a catalog name dependent on the OS and Python version.

    This allows multiple platforms to keep catalog files in the same
    directory (e.g. NFS mounts) without stepping on each other.  The
    name combines sys.platform and the running Python's major/minor
    version; if that ever proves insufficient to discriminate on some
    platform, more info (chip type, ...) could be appended.
    """
    py_tag = '%d%d' % sys.version_info[:2]
    return '%s%s%s' % (sys.platform, py_tag, 'compiled_catalog')
def catalog_path(module_path):
    """ Return the full path name for the catalog file for module_path.

    module_path can be a file name or a directory name ('~', shell
    variables and relative '.'/'..' components are all expanded).  For a
    file name, the catalog file of its parent directory is returned; for
    a directory, the catalog file in that directory.  Returns None when
    module_path does not exist -- though the catalog file itself does not
    have to exist, only its parent.

    Catalog file names are os dependent (see os_dependent_catalog_name())
    so multiple platforms can share the same disk space (NFS mounts).
    """
    resolved = os.path.abspath(
        os.path.expandvars(os.path.expanduser(module_path)))
    if not os.path.exists(resolved):
        return None
    if not os.path.isdir(resolved):
        # A file was given: the catalog lives next to it.
        resolved = os.path.dirname(resolved)
    return os.path.join(resolved, os_dependent_catalog_name())
def get_catalog(module_path,mode='r'):
    """ Return a function catalog (shelve object) from the path module_path.

    If module_path is a directory, the function catalog returned is
    from that directory.  If module_path is an actual module_name,
    then the function catalog returned is from its parent directory.

    mode uses the standard 'c' = create, 'n' = new, 'r' = read,
    'w' = write file open modes available for anydbm databases.
    Well... it should be. Stuck with dumbdbm for now and the modes
    almost don't matter. We do some checking for 'r' mode, but that
    is about it.

    Returns None in 'r' mode when no catalog file exists yet.
    See catalog_path() for more information on module_path.
    """
    if mode not in ['c','r','w','n']:
        msg = " mode must be 'c', 'n', 'r', or 'w'. See anydbm for more info"
        raise ValueError, msg
    catalog_file = catalog_path(module_path)
    # dumb_shelve stores its data in '<name>.dat'; the dbhash backend uses
    # the name as-is, hence the two existence tests.
    if (catalog_file is not None) \
           and ((dumb and os.path.exists(catalog_file+'.dat')) \
                or os.path.exists(catalog_file)):
        sh = shelve.open(catalog_file,mode)
    else:
        if mode=='r':
            # reading a non-existent catalog: signal "no catalog" rather
            # than creating an empty one.
            sh = None
        else:
            sh = shelve.open(catalog_file,mode)
    return sh
class catalog(object):
    """ Stores information about compiled functions both in cache and on disk.

    catalog stores (code, list_of_function) pairs so that all the functions
    that have been compiled for code are available for calling (usually in
    inline or blitz).

    catalog keeps a dictionary of previously accessed code values cached
    for quick access.  It also handles the looking up of functions compiled
    in previously called Python sessions on disk in function catalogs.

    catalog searches the directories in the PYTHONCOMPILED environment
    variable in order, loading functions that correspond to the given code
    fragment.  A default directory is also searched for catalog functions.
    On unix, the default directory is usually '~/.pythonxx_compiled' where
    xx is the version of Python used.  On windows, it is the directory
    returned by tempfile.gettempdir().  Functions closer to the front of
    the search path are guaranteed to be closer to the front of the
    function list so that they will be called first.  See
    get_cataloged_functions() for more info on how the search order is
    traversed.

    Catalog also handles storing information about compiled functions to
    a catalog.  When writing this information, the first writable catalog
    file in the PYTHONCOMPILED path is used.  If a writable catalog is not
    found, it is written to the catalog in the default directory.  This
    directory should always be writable.
    """
def __init__(self,user_path_list=None):
    """ Create a catalog for storing/searching for compiled functions.

    user_path_list contains directories that should be searched
    first for function catalogs.  They will come before the path
    entries in the PYTHONCOMPILED environment variable.  It may be
    a single directory string or a list of directories.
    """
    if isinstance(user_path_list, str):
        self.user_path_list = [user_path_list]
    elif user_path_list:
        self.user_path_list = user_path_list
    else:
        self.user_path_list = []
    # code fragment -> list of compiled functions, most recent first
    self.cache = {}
    # directory substituted for the magic 'MODULE' search-path entry
    self.module_dir = None
    # number of entries configure_path() prepended to sys.path
    self.paths_added = 0
    # unconditionally append the default dir for auto-generated compiled
    # extension modules, so that pickle.load()s don't fail.
    sys.path.append(default_dir())
def set_module_directory(self,module_dir):
    """ Set the path that will replace 'MODULE' in catalog searches.

    You should call clear_module_directory() when you're finished
    working with it.
    """
    self.module_dir = module_dir
def get_module_directory(self):
    """ Return the path used to replace the 'MODULE' in searches.

    None when no module directory has been set.
    """
    return self.module_dir
def clear_module_directory(self):
    """ Reset 'MODULE' path to None so that it is ignored in searches.
    """
    self.module_dir = None
def get_environ_path(self):
    """ Return the list of paths from the 'PYTHONCOMPILED' environment
    variable.

    The variable holds an os.pathsep separated list of directories
    (':' on Unix, ';' on Windows).  An unset variable yields an empty
    list.
    """
    raw = os.environ.get('PYTHONCOMPILED')
    if raw is None:
        return []
    return raw.split(os.path.pathsep)
def build_search_order(self):
    """ Return the ordered list of directories searched for catalogs.

    Order: paths from the catalog constructor first, then entries from
    the PYTHONCOMPILED environment variable, and default_dir() always
    last.  The 'magic' entry 'MODULE' is replaced by the directory set
    via set_module_directory(); when no module directory is set,
    'MODULE' entries are dropped.
    """
    candidates = self.user_path_list + self.get_environ_path()
    search_order = []
    for entry in candidates:
        if entry != 'MODULE':
            search_order.append(entry)
        elif self.module_dir:
            search_order.append(self.module_dir)
    search_order.append(default_dir())
    return search_order
def get_catalog_files(self):
    """ Return the catalog file list in correct search order.

    Candidates whose directory does not exist (catalog_path() returned
    None) are dropped.  The remaining files need not currently exist --
    each is a valid location where a catalog could be created, given
    write permission.
    """
    candidates = [catalog_path(d) for d in self.build_search_order()]
    return [f for f in candidates if f is not None]
def get_existing_files(self):
    """ Return only the catalog files that already exist, in search order.

    anydbm backends don't use a consistent file-naming convention across
    platforms, so instead of a plain os.path.exists() test each candidate
    is opened read-only: a successful open proves existence.
    (The non-portable -- and much faster -- old test was:
    existing_files = filter(os.path.exists, files).)
    """
    present = []
    for candidate in self.get_catalog_files():
        handle = get_catalog(os.path.dirname(candidate), 'r')
        if handle is None:
            continue
        handle.close()
        present.append(candidate)
    return present
def get_writable_file(self,existing_only=0):
    """ Return the name of the first writable catalog file, or None.

    A candidate qualifies when the file itself exists and is writable,
    or when its parent directory is writable (so the file could be
    created there -- the parent must be writable anyway so compiled
    modules can be placed beside the catalog).  With existing_only set,
    only catalogs that already exist are considered.
    """
    if existing_only:
        candidates = self.get_existing_files()
    else:
        candidates = self.get_catalog_files()
    for candidate in candidates:
        if ((os.access(candidate, os.F_OK) and os.access(candidate, os.W_OK))
                or os.access(os.path.dirname(candidate), os.W_OK)):
            return candidate
    return None
def get_writable_dir(self):
    """ Return the parent directory of the first writable catalog file.

    The returned directory has write access.
    NOTE(review): if no catalog location is writable at all,
    get_writable_file() returns None and os.path.dirname(None) raises --
    confirm callers can rely on the default directory always being
    writable.
    """
    return os.path.dirname(self.get_writable_file())
def unique_module_name(self,code,module_dir=None):
    """ Return full path to a unique file name in a writable location.

    The directory for the file is the first writable directory in
    the catalog search path.  The unique file name is derived from
    the code fragment.  If module_dir is specified, it is used
    to replace 'MODULE' in the search path for the duration of the
    lookup (restored afterwards via try/finally).
    """
    if module_dir is not None:
        self.set_module_directory(module_dir)
    try:
        d = self.get_writable_dir()
    finally:
        if module_dir is not None:
            self.clear_module_directory()
    return unique_file(d, code)
def path_key(self,code):
    """ Return the catalog key holding path information for the
    functions associated with code (the code key prefixed with
    '__path__').
    """
    return '__path__' + code
def configure_path(self,cat,code):
    """ Prepend the python paths recorded for code in catalog cat to
    sys.path.

    unconfigure_path() should be called as soon as possible after
    imports associated with code are finished so that sys.path
    is restored to normal.
    """
    try:
        paths = cat[self.path_key(code)]
        self.paths_added = len(paths)
        sys.path = paths + sys.path
    except:
        # Bare except is deliberate best-effort: missing/corrupt path
        # entries simply mean nothing gets prepended.
        self.paths_added = 0
def unconfigure_path(self):
    """ Restore sys.path to normal after a call to configure_path().

    Removes the previously prepended entries by slicing them off the
    front of sys.path.
    """
    sys.path = sys.path[self.paths_added:]
    self.paths_added = 0
def get_cataloged_functions(self,code):
    """ Load all functions associated with code from the catalog search
    path, in search order.

    Sometimes there can be trouble loading a function listed in a
    catalog file because the actual module that holds the function
    has been moved or deleted.  When this happens, that catalog file
    is "repaired", meaning the entire entry for this function is
    removed from the file.  This only affects the catalog file that
    has problems -- not the others in the search path.

    The "repair" behavior may not be needed, but I'll keep it for now.
    """
    mode = 'r'
    cat = None
    function_list = []
    for path in self.build_search_order():
        cat = get_catalog(path,mode)
        if cat is not None and code in cat:
            # set up the python path so that modules for this
            # function can be loaded.
            self.configure_path(cat,code)
            try:
                function_list += cat[code]
            except: #SystemError and ImportError so far seen
                # problems loading a function from the catalog. Try to
                # repair the cause.
                cat.close()
                self.repair_catalog(path,code)
            self.unconfigure_path()
        if cat is not None:
            # ensure that the catalog is properly closed
            cat.close()
    return function_list
def repair_catalog(self,catalog_path,code):
    """ Remove the entry for code from the catalog at catalog_path.

    Occasionally catalog entries can get corrupted.  An example
    would be when a module that had functions in the catalog was
    deleted or moved on the disk.  The best current repair method is
    just to trash the entire catalog entry for this piece of code.
    This may lose function entries that are valid, but that's life.

    catalog_path must be writable for repair.  If it isn't, the
    function exits with a warning.
    """
    writable_cat = None
    if (catalog_path is not None) and (not os.path.exists(catalog_path)):
        return
    try:
        writable_cat = get_catalog(catalog_path,'w')
    except:
        print 'warning: unable to repair catalog entry\n %s\n in\n %s' % \
              (code,catalog_path)
        # shelve doesn't guarantee flushing, so it's safest to explicitly
        # close the catalog
        # NOTE(review): writable_cat is still None when get_catalog()
        # itself raised, so this close() would raise AttributeError --
        # confirm whether that code path can actually be reached.
        writable_cat.close()
        return
    if code in writable_cat:
        print 'repairing catalog by removing key'
        del writable_cat[code]
    # it is possible that the path key doesn't exist (if the function
    # registered was a built-in function), so we have to check if the path
    # exists before arbitrarily deleting it.
    path_key = self.path_key(code)
    if path_key in writable_cat:
        del writable_cat[path_key]
    writable_cat.close()
def get_functions_fast(self,code):
    """ Return the list of functions for code from the in-memory cache.

    Returns an empty list if the code entry is not found; never touches
    the on-disk catalogs.
    """
    return self.cache.get(code,[])
def get_functions(self,code,module_dir=None):
    """ Return the list of functions associated with this code fragment.

    The cache is first searched for the function.  If an entry
    in the cache is not found, then catalog files on disk are
    searched for the entry.  This is slooooow, but only happens
    once per code object.  All the functions found in catalog files
    on a cache miss are loaded into the cache to speed up future calls.
    The search order is as follows:

        1. user specified path (from catalog initialization)
        2. directories from the PYTHONCOMPILED environment variable
        3. the temporary directory on your platform.

    The path specified by module_dir will replace the 'MODULE'
    place holder in the catalog search path. See build_search_order()
    for more info on the search path.
    """
    # Fast!! try cache first.
    if code in self.cache:
        return self.cache[code]
    # 2. Slow!! read previously compiled functions from disk.
    try:
        self.set_module_directory(module_dir)
        function_list = self.get_cataloged_functions(code)
        # put function_list in cache to save future lookups.
        if function_list:
            self.cache[code] = function_list
        # return function_list, empty or otherwise.
    finally:
        # always restore the module directory, even on error.
        self.clear_module_directory()
    return function_list
def add_function(self,code,function,module_dir=None):
    """ Add a function to the catalog.

    The function is added to the cache as well as the first
    writable file catalog found in the search path.  If no
    code entry exists in the cache, the on-disk catalogs
    are loaded into the cache and function is added to the
    beginning of the function list.

    The path specified by module_dir will replace the 'MODULE'
    place holder in the catalog search path.  See build_search_order()
    for more info on the search path.
    """
    # 1. put it in the cache.
    if code in self.cache:
        if function not in self.cache[code]:
            self.cache[code].insert(0,function)
        else:
            # if it is in the cache, then it has also
            # been persisted -- nothing more to do.
            return
    else:
        # Load functions and put this one up front
        self.cache[code] = self.get_functions(code)
        self.fast_cache(code,function)
    # 2. Store the function entry to disk.
    try:
        self.set_module_directory(module_dir)
        self.add_function_persistent(code,function)
    finally:
        self.clear_module_directory()
def add_function_persistent(self,code,function):
    """ Store the code->function relationship to disk.

    Two pieces of information are needed for loading functions
    from disk -- the function pickle (which conveniently stores
    the module name, etc.) and the path to its module's directory.
    The latter is needed so that the function can be loaded no
    matter what the user's Python path is.
    """
    # add function to data in first writable catalog
    mode = 'c' # create if doesn't exist, otherwise, use existing
    cat_dir = self.get_writable_dir()
    cat = get_catalog(cat_dir,mode)
    if cat is None:
        # fall back to the default directory, which should be writable.
        cat_dir = default_dir()
        cat = get_catalog(cat_dir,mode)
    if cat is None:
        # still failing: assume the default catalog is corrupt, remove
        # its files and retry once.
        cat_dir = default_dir()
        cat_file = catalog_path(cat_dir)
        print 'problems with default catalog -- removing'
        import glob
        files = glob.glob(cat_file+'*')
        for f in files:
            os.remove(f)
        cat = get_catalog(cat_dir,mode)
    if cat is None:
        raise ValueError, 'Failed to access a catalog for storing functions'
    # Prabhu was getting some corrupt catalog errors. I'll put a try/except
    # to protect against this, but should really try and track down the issue.
    function_list = [function]
    try:
        function_list = function_list + cat.get(code,[])
    except pickle.UnpicklingError:
        pass
    cat[code] = function_list
    # now add needed path information for loading function
    module = getmodule(function)
    try:
        # built in modules don't have the __file__ extension, so this
        # will fail. Just pass in this case since path additions aren't
        # needed for built-in modules.
        mod_path,f=os.path.split(os.path.abspath(module.__file__))
        pkey = self.path_key(code)
        cat[pkey] = [mod_path] + cat.get(pkey,[])
    except:
        pass
    cat.close()
def fast_cache(self,code,function):
    """ Move function to the front of the cache entry for code.

    If future calls to the function have the same type signature,
    this will speed up access significantly because the first
    function call is correct.

    Note:  The cache added to the inline_tools module is significantly
           faster than always calling get_functions, so this isn't
           as necessary as it used to be.  Still, it's probably worth
           doing.
    """
    try:
        if self.cache[code][0] == function:
            return
    except: # KeyError, IndexError
        pass
    try:
        # NOTE(review): if self.cache[code] doesn't exist this raises
        # KeyError, which 'except ValueError' does not catch -- callers
        # appear to guarantee the entry exists (see add_function).
        self.cache[code].remove(function)
    except ValueError:
        pass
    # put new function at the beginning of the list to search.
    self.cache[code].insert(0,function)
| bsd-3-clause |
willingc/zulip | zerver/management/commands/rate_limit.py | 115 | 2756 | from __future__ import absolute_import
from zerver.models import UserProfile, get_user_profile_by_email
from zerver.lib.rate_limiter import block_user, unblock_user
from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
    """Management command to manually block or unblock a user (and
    optionally all of the user's bots) from accessing the API via the
    rate limiter."""

    help = """Manually block or unblock a user from accessing the API"""

    def add_arguments(self, parser):
        # Exactly one of --email / --api-key must be given (validated in
        # handle()).
        parser.add_argument('-e', '--email',
                            dest='email',
                            help="Email account of user.")
        parser.add_argument('-a', '--api-key',
                            dest='api_key',
                            help="API key of user.")
        parser.add_argument('-s', '--seconds',
                            dest='seconds',
                            default=60,
                            type=int,
                            help="Seconds to block for.")
        parser.add_argument('-d', '--domain',
                            dest='domain',
                            default='all',
                            help="Rate-limiting domain. Defaults to 'all'.")
        parser.add_argument('-b', '--all-bots',
                            dest='bots',
                            action='store_true',
                            default=False,
                            help="Whether or not to also block all bots for this user.")
        parser.add_argument('operation', metavar='<operation>', type=str, choices=['block', 'unblock'],
                            help="operation to perform (block or unblock)")

    def handle(self, *args, **options):
        # XOR check: exactly one of email / api_key must be supplied.
        if (not options['api_key'] and not options['email']) or \
           (options['api_key'] and options['email']):
            print "Please enter either an email or API key to manage"
            exit(1)
        if options['email']:
            user_profile = get_user_profile_by_email(options['email'])
        else:
            try:
                user_profile = UserProfile.objects.get(api_key=options['api_key'])
            except:
                # bare except also hides MultipleObjectsReturned etc.;
                # treated uniformly as "no usable profile".
                print "Unable to get user profile for api key %s" % (options['api_key'], )
                exit(1)
        users = [user_profile]
        if options['bots']:
            # also operate on every bot owned by this user.
            users.extend(bot for bot in UserProfile.objects.filter(is_bot=True,
                                                                   bot_owner=user_profile))
        operation = options['operation']
        for user in users:
            print "Applying operation to User ID: %s: %s" % (user.id, operation)
            if operation == 'block':
                block_user(user, options['seconds'], options['domain'])
            elif operation == 'unblock':
                unblock_user(user, options['domain'])
| apache-2.0 |
liuzzfnst/tp-libvirt | libvirt/tests/src/virsh_cmd/filter/virsh_nwfilter_dumpxml.py | 7 | 3551 | import logging
from autotest.client.shared import error
from virttest import virsh, libvirt_xml
from provider import libvirt_version
def check_list(uuid, name):
    """
    Check whether the filter appears in ``virsh nwfilter-list`` output.

    :param uuid: filter uuid
    :param name: filter name
    :return: True if found, False if not found
    """
    result = virsh.nwfilter_list(options="",
                                 ignore_status=True, debug=True)
    rows = result.stdout.strip().split('\n')
    # Skip the two header rows (column titles + separator line); a match
    # is a row whose whitespace-split fields are exactly [uuid, name].
    return any(row.split() == [uuid, name] for row in rows[2:])
def run(test, params, env):
    """
    Test command: virsh nwfilter-dumpxml.

    1) Prepare parameters.
    2) Run dumpxml command (by name, then again by uuid).
    3) Check result (status vs. status_error; dumpxml vs. nwfilter-list;
       name-based vs. uuid-based output must be identical).
    """
    # Prepare parameters
    filter_name = params.get("dumpxml_filter_name", "")
    options_ref = params.get("dumpxml_options_ref", "")
    status_error = params.get("status_error", "no")

    # acl polkit params
    uri = params.get("virsh_uri")
    unprivileged_user = params.get('unprivileged_user')
    if unprivileged_user:
        if unprivileged_user.count('EXAMPLE'):
            # placeholder value from the cfg -- substitute a real account.
            unprivileged_user = 'testacl'

    if not libvirt_version.version_compare(1, 1, 1):
        if params.get('setup_libvirt_polkit') == 'yes':
            raise error.TestNAError("API acl test not supported in current"
                                    " libvirt version.")

    virsh_dargs = {'ignore_status': True, 'debug': True}
    if params.get('setup_libvirt_polkit') == 'yes':
        virsh_dargs['unprivileged_user'] = unprivileged_user
        virsh_dargs['uri'] = uri

    # Run command
    cmd_result = virsh.nwfilter_dumpxml(filter_name, options=options_ref,
                                        **virsh_dargs)
    output = cmd_result.stdout.strip()
    status = cmd_result.exit_status

    # Check result
    if status_error == "yes":
        if status == 0:
            raise error.TestFail("Run successfully with wrong command.")
    elif status_error == "no":
        if status:
            raise error.TestFail("Run failed with right command.")

        # Get uuid and name from output xml and compare with nwfilter-list
        # output
        new_filter = libvirt_xml.NwfilterXML()
        new_filter['xml'] = output
        uuid = new_filter.uuid
        name = new_filter.filter_name
        if check_list(uuid, name):
            logging.debug("The filter with uuid %s and name %s" % (uuid, name) +
                          " from nwfilter-dumpxml was found in"
                          " nwfilter-list output")
        else:
            raise error.TestFail("The uuid %s with name %s from" % (uuid, name) +
                                 " nwfilter-dumpxml did not match with"
                                 " nwfilter-list output")

        # Run command second time with uuid
        cmd_result = virsh.nwfilter_dumpxml(uuid, options=options_ref,
                                            **virsh_dargs)
        output1 = cmd_result.stdout.strip()
        status1 = cmd_result.exit_status
        if status_error == "yes":
            if status1 == 0:
                raise error.TestFail("Run successfully with wrong command.")
        elif status_error == "no":
            if status1:
                raise error.TestFail("Run failed with right command.")
            # dumpxml-by-uuid must agree with dumpxml-by-name.
            if output1 != output:
                raise error.TestFail("nwfilter dumpxml output was different" +
                                     " between using filter uuid and name")
| gpl-2.0 |
mrbox/django | django/contrib/gis/db/backends/postgis/adapter.py | 373 | 1695 | """
This object provides quoting for GEOS geometries into PostgreSQL/PostGIS.
"""
from __future__ import unicode_literals
from psycopg2 import Binary
from psycopg2.extensions import ISQLQuote
class PostGISAdapter(object):
    """Psycopg2 adapter that quotes GEOS geometries for PostGIS SQL."""

    def __init__(self, geom, geography=False):
        "Initializes on the geometry."
        # Keep the EWKB as a plain byte string (so the adapter pickles
        # easily) along with the geometry's SRID.
        self.ewkb = bytes(geom.ewkb)
        self.srid = geom.srid
        self.geography = geography
        self._adapter = Binary(self.ewkb)

    def __conform__(self, proto):
        "Answer psycopg2's adaptation-protocol query."
        if proto != ISQLQuote:
            raise Exception('Error implementing psycopg2 protocol. Is psycopg2 installed?')
        return self

    def __eq__(self, other):
        # Equal iff both are adapters over the same EWKB payload and SRID.
        return (isinstance(other, PostGISAdapter)
                and self.ewkb == other.ewkb
                and self.srid == other.srid)

    def __hash__(self):
        # Consistent with __eq__: hash on (payload, srid).
        return hash((self.ewkb, self.srid))

    def __str__(self):
        return self.getquoted()

    def prepare(self, conn):
        """
        This method allows escaping the binary in the style required by the
        server's `standard_conforming_string` setting.
        """
        self._adapter.prepare(conn)

    def getquoted(self):
        "Returns a properly quoted string for use in PostgreSQL/PostGIS."
        # psycopg will figure out whether to use E'\\000' or '\000'
        constructor = 'ST_GeogFromWKB' if self.geography else 'ST_GeomFromEWKB'
        quoted_wkb = self._adapter.getquoted().decode()
        return str('%s(%s)' % (constructor, quoted_wkb))
| bsd-3-clause |
fnkr/POSS | config.dist.py | 1 | 1144 | import sys
import os
# Debug mode
DEBUG = False

# Enable debug mode if --debug was passed on the command line
for argument in sys.argv:
    if argument == '--debug':
        DEBUG = True

# Host, Port the development server binds to
HOST = '127.0.0.1'
PORT = 8080

# Domain (Flask SERVER_NAME: host[:port] used for URL generation)
SERVER_NAME = 'localhost:8080'

# Database
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://poss:poss@localhost/poss'
# echo SQL statements only when debugging
SQLALCHEMY_ECHO = DEBUG
DATABASE_CONNECT_OPTIONS = {}

# Login, Cookie and Session settings
CSRF_ENABLED = True
# Random strings for cookie generation, 40 chars should be enough,
# https://api.fnkr.net/random/?length=40&count=2
CSRF_SESSION_KEY = '--- CHANGE THIS TO SOME RANDOM VALUE ---'
SECRET_KEY = '--- CHANGE THIS TO SOME RANDOM VALUE ---'
SESSION_COOKIE_NAME = 'poss'
# session lifetime in seconds (2678400 s = 31 days)
PERMANENT_SESSION_LIFETIME = 2678400
# set to True when serving over HTTPS so cookies aren't sent in clear
SESSION_COOKIE_SECURE = False

# URL scheme that should be used for URL generation
# if no URL scheme is available
PREFERRED_URL_SCHEME = 'http'

# Data storage, will be created if it does not exist
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')

# Define the application directory
BASE_DIR = os.path.abspath(os.path.dirname(__file__))

# Config file version
CONFIG_VERSION = 1
| mit |
commshare/testLiveSRS | trunk/objs/CherryPy-3.2.4/cherrypy/wsgiserver/ssl_pyopenssl.py | 51 | 9377 | """A library for integrating pyOpenSSL with CherryPy.
The OpenSSL module must be importable for SSL functionality.
You can obtain it from http://pyopenssl.sourceforge.net/
To use this module, set CherryPyWSGIServer.ssl_adapter to an instance of
SSLAdapter. There are two ways to use SSL:
Method One
----------
* ``ssl_adapter.context``: an instance of SSL.Context.
If this is not None, it is assumed to be an SSL.Context instance,
and will be passed to SSL.Connection on bind(). The developer is
responsible for forming a valid Context object. This approach is
to be preferred for more flexibility, e.g. if the cert and key are
streams instead of files, or need decryption, or SSL.SSLv3_METHOD
is desired instead of the default SSL.SSLv23_METHOD, etc. Consult
the pyOpenSSL documentation for complete options.
Method Two (shortcut)
---------------------
* ``ssl_adapter.certificate``: the filename of the server SSL certificate.
* ``ssl_adapter.private_key``: the filename of the server's private key file.
Both are None by default. If ssl_adapter.context is None, but .private_key
and .certificate are both given and valid, they will be read, and the
context will be automatically created from them.
"""
import socket
import threading
import time
from cherrypy import wsgiserver
try:
from OpenSSL import SSL
from OpenSSL import crypto
except ImportError:
SSL = None
class SSL_fileobject(wsgiserver.CP_fileobject):
    """SSL file object attached to a socket object."""

    # give up on a blocked SSL call after this many seconds
    ssl_timeout = 3
    # delay between retries while OpenSSL reports WantRead/WantWrite
    ssl_retry = .01

    def _safe_call(self, is_reader, call, *args, **kwargs):
        """Wrap the given call with SSL error-trapping.

        is_reader: if False EOF errors will be raised. If True, EOF errors
        will return "" (to emulate normal sockets).
        """
        start = time.time()
        while True:
            try:
                return call(*args, **kwargs)
            except SSL.WantReadError:
                # Sleep and try again. This is dangerous, because it means
                # the rest of the stack has no way of differentiating
                # between a "new handshake" error and "client dropped".
                # Note this isn't an endless loop: there's a timeout below.
                time.sleep(self.ssl_retry)
            except SSL.WantWriteError:
                time.sleep(self.ssl_retry)
            except SSL.SysCallError, e:
                if is_reader and e.args == (-1, 'Unexpected EOF'):
                    return ""
                errnum = e.args[0]
                if is_reader and errnum in wsgiserver.socket_errors_to_ignore:
                    return ""
                # re-raise as a plain socket error for the upper layers
                raise socket.error(errnum)
            except SSL.Error, e:
                if is_reader and e.args == (-1, 'Unexpected EOF'):
                    return ""
                thirdarg = None
                try:
                    thirdarg = e.args[0][0][2]
                except IndexError:
                    pass
                if thirdarg == 'http request':
                    # The client is talking HTTP to an HTTPS server.
                    raise wsgiserver.NoSSLError()
                raise wsgiserver.FatalSSLAlert(*e.args)
            except:
                # anything else propagates untouched
                raise
            if time.time() - start > self.ssl_timeout:
                raise socket.timeout("timed out")

    def recv(self, *args, **kwargs):
        # Drain the SSL layer: keep reading while OpenSSL still has
        # decrypted bytes pending, then hand back one joined chunk.
        buf = []
        r = super(SSL_fileobject, self).recv
        while True:
            data = self._safe_call(True, r, *args, **kwargs)
            buf.append(data)
            p = self._sock.pending()
            if not p:
                return "".join(buf)

    def sendall(self, *args, **kwargs):
        return self._safe_call(False, super(SSL_fileobject, self).sendall,
                               *args, **kwargs)

    def send(self, *args, **kwargs):
        return self._safe_call(False, super(SSL_fileobject, self).send,
                               *args, **kwargs)
class SSLConnection:

    """A thread-safe wrapper for an SSL.Connection.

    ``*args``: the arguments to create the wrapped ``SSL.Connection(*args)``.
    """

    def __init__(self, *args):
        self._ssl_conn = SSL.Connection(*args)
        # Serializes every proxied call below; SSL.Connection itself is not
        # safe for concurrent use from multiple threads.
        self._lock = threading.RLock()

    # Generate a lock-guarded proxy for each delegated SSL.Connection
    # method. exec is used (at class-body level) so that each proxy is
    # defined in the class namespace under its real name.
    for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read',
              'renegotiate', 'bind', 'listen', 'connect', 'accept',
              'setblocking', 'fileno', 'close', 'get_cipher_list',
              'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
              'makefile', 'get_app_data', 'set_app_data', 'state_string',
              'sock_shutdown', 'get_peer_certificate', 'want_read',
              'want_write', 'set_connect_state', 'set_accept_state',
              'connect_ex', 'sendall', 'settimeout', 'gettimeout'):
        exec("""def %s(self, *args):
    self._lock.acquire()
    try:
        return self._ssl_conn.%s(*args)
    finally:
        self._lock.release()
""" % (f, f))

    def shutdown(self, *args):
        # Defined by hand (not via the loop above) because the wrapped
        # method ignores its arguments.
        self._lock.acquire()
        try:
            # pyOpenSSL.socket.shutdown takes no args
            return self._ssl_conn.shutdown()
        finally:
            self._lock.release()
class pyOpenSSLAdapter(wsgiserver.SSLAdapter):

    """A wrapper for integrating pyOpenSSL with CherryPy."""

    context = None
    """An instance of SSL.Context."""

    certificate = None
    """The filename of the server SSL certificate."""

    private_key = None
    """The filename of the server's private key file."""

    certificate_chain = None
    """Optional. The filename of CA's intermediate certificate bundle.

    This is needed for cheaper "chained root" SSL certificates, and should be
    left as None if not required."""

    def __init__(self, certificate, private_key, certificate_chain=None):
        if SSL is None:
            raise ImportError("You must install pyOpenSSL to use HTTPS.")

        self.context = None
        self.certificate = certificate
        self.private_key = private_key
        self.certificate_chain = certificate_chain
        self._environ = None

    def bind(self, sock):
        """Wrap and return the given socket."""
        if self.context is None:
            self.context = self.get_context()
        conn = SSLConnection(self.context, sock)
        self._environ = self.get_environ()
        return conn

    def wrap(self, sock):
        """Wrap and return the given socket, plus WSGI environ entries."""
        return sock, self._environ.copy()

    def get_context(self):
        """Return an SSL.Context from self attributes."""
        # See http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442473
        c = SSL.Context(SSL.SSLv23_METHOD)
        c.use_privatekey_file(self.private_key)
        if self.certificate_chain:
            c.load_verify_locations(self.certificate_chain)
        c.use_certificate_file(self.certificate)
        return c

    def get_environ(self):
        """Return WSGI environ entries to be merged into each request."""
        ssl_environ = {
            "HTTPS": "on",
            # pyOpenSSL doesn't provide access to any of these AFAICT
            ## 'SSL_PROTOCOL': 'SSLv2',
            ## SSL_CIPHER string The cipher specification name
            ## SSL_VERSION_INTERFACE string The mod_ssl program version
            ## SSL_VERSION_LIBRARY string The OpenSSL program version
        }

        if self.certificate:
            # Server certificate attributes.
            # Close the certificate file explicitly instead of leaking the
            # handle until the garbage collector finalizes it.
            cert_file = open(self.certificate, 'rb')
            try:
                cert = cert_file.read()
            finally:
                cert_file.close()
            cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
            ssl_environ.update({
                'SSL_SERVER_M_VERSION': cert.get_version(),
                'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
                ## 'SSL_SERVER_V_START': Validity of server's certificate (start time),
                ## 'SSL_SERVER_V_END': Validity of server's certificate (end time),
            })

            for prefix, dn in [("I", cert.get_issuer()),
                               ("S", cert.get_subject())]:
                # X509Name objects don't seem to have a way to get the
                # complete DN string. Use str() and slice it instead,
                # because str(dn) == "<X509Name object '/C=US/ST=...'>"
                dnstr = str(dn)[18:-2]

                wsgikey = 'SSL_SERVER_%s_DN' % prefix
                ssl_environ[wsgikey] = dnstr

                # The DN should be of the form: /k1=v1/k2=v2, but we must allow
                # for any value to contain slashes itself (in a URL).
                while dnstr:
                    pos = dnstr.rfind("=")
                    dnstr, value = dnstr[:pos], dnstr[pos + 1:]
                    pos = dnstr.rfind("/")
                    dnstr, key = dnstr[:pos], dnstr[pos + 1:]
                    if key and value:
                        wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
                        ssl_environ[wsgikey] = value

        return ssl_environ

    def makefile(self, sock, mode='r', bufsize=-1):
        # Hand SSL connections to the SSL-aware file object; plain sockets
        # fall back to the standard CherryPy file object.
        if SSL and isinstance(sock, SSL.ConnectionType):
            timeout = sock.gettimeout()
            f = SSL_fileobject(sock, mode, bufsize)
            f.ssl_timeout = timeout
            return f
        else:
            return wsgiserver.CP_fileobject(sock, mode, bufsize)
| mit |
cxhernandez/osprey | osprey/tests/test_cli_worker_and_dump.py | 1 | 2065 | from __future__ import print_function, absolute_import, division
import os
import sys
import json
import shutil
import subprocess
import tempfile
from distutils.spawn import find_executable
from numpy.testing.decorators import skipif
# Best-effort probe: record whether MSMBuilder is importable so tests that
# need it can be skipped. `except Exception` (not a bare `except:`) keeps
# the best-effort behavior while no longer swallowing SystemExit or
# KeyboardInterrupt raised during the import.
try:
    __import__('msmbuilder')
    HAVE_MSMBUILDER = True
except Exception:
    HAVE_MSMBUILDER = False
OSPREY_BIN = find_executable('osprey')
@skipif(not HAVE_MSMBUILDER, 'this test requires MSMBuilder')
def test_1():
    """Run the msmbuilder skeleton + one worker iteration end to end."""
    from msmbuilder.example_datasets import FsPeptide

    assert OSPREY_BIN is not None
    start_dir = os.path.abspath(os.curdir)
    work_dir = tempfile.mkdtemp()
    FsPeptide(work_dir).get()

    try:
        os.chdir(work_dir)
        subprocess.check_call([OSPREY_BIN, 'skeleton', '-t', 'msmbuilder',
                               '-f', 'config.yaml'])
        subprocess.check_call([OSPREY_BIN, 'worker', 'config.yaml', '-n', '1'])
        assert os.path.exists('osprey-trials.db')

        # nose-style generator test: each yielded callable runs as a subtest
        # inside the temporary working directory.
        yield _test_dump_1
        yield _test_plot_1
    finally:
        os.chdir(start_dir)
        shutil.rmtree(work_dir)
def test_2():
    """Run the sklearn skeleton + one worker iteration end to end."""
    assert OSPREY_BIN is not None
    start_dir = os.path.abspath(os.curdir)
    work_dir = tempfile.mkdtemp()

    try:
        os.chdir(work_dir)
        subprocess.check_call([OSPREY_BIN, 'skeleton', '-t', 'sklearn',
                               '-f', 'config.yaml'])
        subprocess.check_call([OSPREY_BIN, 'worker', 'config.yaml', '-n', '1'])
        assert os.path.exists('osprey-trials.db')

        subprocess.check_call([OSPREY_BIN, 'current_best', 'config.yaml'])

        # nose-style generator test: yielded callables run as subtests.
        yield _test_dump_1
        yield _test_plot_1
    finally:
        os.chdir(start_dir)
        shutil.rmtree(work_dir)
def _test_dump_1():
    """Dump the trials database as JSON and verify the output parses."""
    raw = subprocess.check_output(
        [OSPREY_BIN, 'dump', 'config.yaml', '-o', 'json'])
    text = raw.decode() if sys.version_info >= (3, 0) else raw
    json.loads(text)
def _test_plot_1():
    """Render the plot for the trials database and check the HTML exists.

    The captured output is not inspected (the original bound it to an
    unused variable); the command must simply succeed and produce
    ``plot.html`` in the current working directory.
    """
    subprocess.check_output(
        [OSPREY_BIN, 'plot', 'config.yaml', '--no-browser'])
    if not os.path.isfile('./plot.html'):
        raise ValueError('Plot not created')
| apache-2.0 |
bopo/django-allauth | allauth/socialaccount/providers/angellist/views.py | 80 | 1056 | import requests
from allauth.socialaccount.providers.oauth2.views import (OAuth2Adapter,
OAuth2LoginView,
OAuth2CallbackView)
from .provider import AngelListProvider
class AngelListOAuth2Adapter(OAuth2Adapter):
    """OAuth2 adapter for the AngelList API."""

    provider_id = AngelListProvider.id
    access_token_url = 'https://angel.co/api/oauth/token/'
    authorize_url = 'https://angel.co/api/oauth/authorize/'
    profile_url = 'https://api.angel.co/1/me/'
    supports_state = False

    def complete_login(self, request, app, token, **kwargs):
        """Fetch the AngelList profile and build a SocialLogin from it."""
        response = requests.get(
            self.profile_url, params={'access_token': token.token})
        profile_data = response.json()
        provider = self.get_provider()
        return provider.sociallogin_from_response(request, profile_data)


oauth2_login = OAuth2LoginView.adapter_view(AngelListOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(AngelListOAuth2Adapter)
| mit |
myriadteam/myriadcoin | test/functional/tool_wallet.py | 6 | 3831 | #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoin-wallet."""
import subprocess
import textwrap
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class ToolWalletTest(BitcoinTestFramework):

    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def bitcoin_wallet_process(self, *args):
        """Launch bitcoin-wallet with *args* against node 0's datadir."""
        binary = self.config["environment"]["BUILDDIR"] + '/src/bitcoin-wallet' + self.config["environment"]["EXEEXT"]
        args = ['-datadir={}'.format(self.nodes[0].datadir), '-regtest'] + list(args)
        return subprocess.Popen([binary] + args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)

    def assert_raises_tool_error(self, error, *args):
        """Assert the tool exits 1 with *error* on stderr and no stdout."""
        p = self.bitcoin_wallet_process(*args)
        stdout, stderr = p.communicate()
        assert_equal(p.poll(), 1)
        assert_equal(stdout, '')
        assert_equal(stderr.strip(), error)

    def assert_tool_output(self, output, *args):
        """Assert the tool exits 0 with exactly *output* on stdout."""
        p = self.bitcoin_wallet_process(*args)
        stdout, stderr = p.communicate()
        assert_equal(p.poll(), 0)
        assert_equal(stderr, '')
        assert_equal(stdout, output)

    def run_test(self):
        # Invalid-usage cases first.
        self.assert_raises_tool_error('Invalid command: foo', 'foo')
        # `bitcoin-wallet help` is an error. Use `bitcoin-wallet -help`
        self.assert_raises_tool_error('Invalid command: help', 'help')
        self.assert_raises_tool_error('Error: two methods provided (info and create). Only one method should be provided.', 'info', 'create')
        self.assert_raises_tool_error('Error parsing command line arguments: Invalid parameter -foo', '-foo')
        self.assert_raises_tool_error('Error loading wallet.dat. Is wallet being used by other process?', '-wallet=wallet.dat', 'info')
        self.assert_raises_tool_error('Error: no wallet file at nonexistent.dat', '-wallet=nonexistent.dat', 'info')
        # stop the node to close the wallet to call info command
        self.stop_node(0)

        out = textwrap.dedent('''\
            Wallet info
            ===========
            Encrypted: no
            HD (hd seed available): yes
            Keypool Size: 2
            Transactions: 0
            Address Book: 3
        ''')
        self.assert_tool_output(out, '-wallet=wallet.dat', 'info')

        # mutate the wallet to check the info command output changes accordingly
        self.start_node(0)
        self.nodes[0].generate(1)
        self.stop_node(0)

        out = textwrap.dedent('''\
            Wallet info
            ===========
            Encrypted: no
            HD (hd seed available): yes
            Keypool Size: 2
            Transactions: 1
            Address Book: 3
        ''')
        self.assert_tool_output(out, '-wallet=wallet.dat', 'info')

        # `create` makes a fresh wallet and tops up its keypool.
        out = textwrap.dedent('''\
            Topping up keypool...
            Wallet info
            ===========
            Encrypted: no
            HD (hd seed available): yes
            Keypool Size: 2000
            Transactions: 0
            Address Book: 0
        ''')
        self.assert_tool_output(out, '-wallet=foo', 'create')

        # Cross-check the created wallet through the node's RPC.
        self.start_node(0, ['-wallet=foo'])
        out = self.nodes[0].getwalletinfo()
        self.stop_node(0)

        assert_equal(0, out['txcount'])
        assert_equal(1000, out['keypoolsize'])
        assert_equal(1000, out['keypoolsize_hd_internal'])
        assert_equal(True, 'hdseedid' in out)
if __name__ == '__main__':
ToolWalletTest().main()
| mit |
jsirois/pex | pex/vendor/_vendored/pip/pip/_vendor/six.py | 59 | 34159 | # Copyright (c) 2010-2020 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Utilities for writing code that runs on Python 2 and 3"""
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.15.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)

if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes

    MAXSIZE = sys.maxsize
else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str

    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        # Probe the platform: len() raises OverflowError when __len__
        # returns a value larger than Py_ssize_t can hold.
        class X(object):

            def __len__(self):
                return 1 << 31

        try:
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):

    """Descriptor that resolves its value lazily on first access.

    Subclasses must provide ``_resolve()`` returning the real object.
    """

    def __init__(self, name):
        self.name = name

    def __get__(self, obj, tp):
        result = self._resolve()
        setattr(obj, self.name, result)  # Invokes __set__.
        try:
            # This is a bit ugly, but it avoids running this again by
            # removing this descriptor.
            delattr(obj.__class__, self.name)
        except AttributeError:
            pass
        return result
class MovedModule(_LazyDescr):

    """Lazy reference to a module that was renamed between Python 2 and 3."""

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            # On Python 3 the new name defaults to the attribute name.
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old

    def _resolve(self):
        return _import_module(self.mod)

    def __getattr__(self, attr):
        _module = self._resolve()
        value = getattr(_module, attr)
        # Cache on self so __getattr__ is not hit again for this attribute.
        setattr(self, attr, value)
        return value
class _LazyModule(types.ModuleType):

    """Module whose attributes are _LazyDescr instances resolved on demand."""

    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__

    def __dir__(self):
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs

    # Subclasses should override this
    _moved_attributes = []
class MovedAttribute(_LazyDescr):

    """Lazy reference to an attribute that moved between Python 2 and 3."""

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            # On Python 3 the module and attribute names default to *name*.
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr

    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)
class _SixMetaPathImporter(object):

    """
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """

    def __init__(self, six_module_name):
        self.name = six_module_name
        # Maps fully qualified names to module (or MovedModule) objects.
        self.known_modules = {}

    def _add_module(self, mod, *fullnames):
        """Register *mod* under one or more names below this six package."""
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod

    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]

    def find_module(self, fullname, path=None):
        # PEP 302 finder: only claim modules we registered.
        if fullname in self.known_modules:
            return self
        return None

    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)

    def load_module(self, fullname):
        """PEP 302 loader: resolve lazy modules and cache in sys.modules."""
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod

    def is_package(self, fullname):
        """
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")

    def get_code(self, fullname):
        """Return None

        Required, if is_package is implemented"""
        self.__get_module(fullname)  # eventually raises ImportError
        return None
    get_source = get_code  # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):

    """Lazy loading of moved objects"""
    # Attributes are attached after the class body from the
    # _moved_attributes table defined below in this file.
    __path__ = []  # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("getoutput", "commands", "subprocess"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("splitvalue", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):

    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = []  # mark as package
    # Submodules were registered with _importer earlier in this file.
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
    """Add an item to six.moves."""
    # *move* is a MovedAttribute/MovedModule; its .name becomes the
    # attribute name on the six.moves pseudo-module.
    setattr(_MovedItems, move.name, move)
def remove_move(name):
    """Remove item from six.moves."""
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        # Not a lazy descriptor on the class; it may have been resolved and
        # cached on the module instance itself.
        if name not in moves.__dict__:
            raise AttributeError("no such move, %r" % (name,))
        del moves.__dict__[name]
# Names of the method/function introspection attributes, which were
# renamed between Python 2 and 3.
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"


# Python 2.5 lacks the next() builtin; fall back to calling it.next().
try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator


# callable() was removed in Python 3.0-3.1; emulate it via __mro__.
try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)


if PY3:
    def get_unbound_function(unbound):
        return unbound

    create_bound_method = types.MethodType

    def create_unbound_method(func, cls):
        return func

    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    def create_unbound_method(func, cls):
        return types.MethodType(func, None, cls)

    class Iterator(object):

        # Bridge the Python 2 next() protocol to __next__.
        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")


# Version-neutral accessors built from the attribute names chosen above.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
# Version-neutral dictionary iteration helpers.
if PY3:
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        # For multi-dicts (e.g. Django QueryDict) exposing .lists().
        return iter(d.lists(**kw))

    viewkeys = operator.methodcaller("keys")
    viewvalues = operator.methodcaller("values")
    viewitems = operator.methodcaller("items")
else:
    def iterkeys(d, **kw):
        return d.iterkeys(**kw)

    def itervalues(d, **kw):
        return d.itervalues(**kw)

    def iteritems(d, **kw):
        return d.iteritems(**kw)

    def iterlists(d, **kw):
        return d.iterlists(**kw)

    viewkeys = operator.methodcaller("viewkeys")
    viewvalues = operator.methodcaller("viewvalues")
    viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")
# Byte/text literal helpers and byte-indexing shims.
if PY3:
    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s
    unichr = chr
    import struct
    int2byte = struct.Struct(">B").pack
    del struct
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    del io
    _assertCountEqual = "assertCountEqual"
    # unittest method names differ on Python 3.0/3.1.
    if sys.version_info[1] <= 1:
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
        _assertNotRegex = "assertNotRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
        _assertNotRegex = "assertNotRegex"
else:
    def b(s):
        return s

    # Workaround for standalone backslash
    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
    _assertNotRegex = "assertNotRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
    """Dispatch to the version-appropriate assertCountEqual on *self*."""
    bound = getattr(self, _assertCountEqual)
    return bound(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
    """Dispatch to the version-appropriate assertRaisesRegex on *self*."""
    bound = getattr(self, _assertRaisesRegex)
    return bound(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
    """Dispatch to the version-appropriate assertRegex on *self*."""
    bound = getattr(self, _assertRegex)
    return bound(*args, **kwargs)
def assertNotRegex(self, *args, **kwargs):
    """Dispatch to the version-appropriate assertNotRegex on *self*."""
    bound = getattr(self, _assertNotRegex)
    return bound(*args, **kwargs)
if PY3:
    # exec is an ordinary builtin function on Python 3.
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        try:
            if value is None:
                value = tp()
            if value.__traceback__ is not tb:
                raise value.with_traceback(tb)
            raise value
        finally:
            # Drop local references so the traceback's frames can be
            # garbage collected promptly.
            value = None
            tb = None
else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # Default to the caller's globals/locals.
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")

    # The three-argument raise is a SyntaxError on Python 3, so it must be
    # compiled at runtime via exec_ to keep this module importable there.
    exec_("""def reraise(tp, value, tb=None):
    try:
        raise tp, value, tb
    finally:
        tb = None
""")
if sys.version_info[:2] > (3,):
    # "raise ... from ..." is Python-3-only syntax, so compile it via exec_
    # to keep the module importable on Python 2.
    exec_("""def raise_from(value, from_value):
    try:
        raise value from from_value
    finally:
        value = None
""")
else:
    def raise_from(value, from_value):
        # Python 2 has no exception chaining; just raise the new value.
        raise value
# Use the real print function where available (Python 2.6+ exposes it via
# __builtin__ even though "print" is a statement).
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            # Matches print() behaviour: file=None means discard output.
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)

        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            # Promote to unicode output if any positional argument is unicode.
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    # Python 3.3 added the flush keyword; emulate it for older versions.
    _print = print_

    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()

_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
    # This does exactly the same what the :func:`py3:functools.update_wrapper`
    # function does on Python versions after 3.2. It sets the ``__wrapped__``
    # attribute on ``wrapper`` object and it doesn't raise an error if any of
    # the attributes mentioned in ``assigned`` and ``updated`` are missing on
    # ``wrapped`` object.
    def _update_wrapper(wrapper, wrapped,
                        assigned=functools.WRAPPER_ASSIGNMENTS,
                        updated=functools.WRAPPER_UPDATES):
        for attr in assigned:
            try:
                value = getattr(wrapped, attr)
            except AttributeError:
                # Tolerate callables (e.g. functools.partial objects) that
                # lack __name__/__doc__ instead of raising like older
                # functools.update_wrapper did.
                continue
            else:
                setattr(wrapper, attr, value)
        for attr in updated:
            getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
        wrapper.__wrapped__ = wrapped
        return wrapper
    _update_wrapper.__doc__ = functools.update_wrapper.__doc__

    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        return functools.partial(_update_wrapper, wrapped=wrapped,
                                 assigned=assigned, updated=updated)
    wraps.__doc__ = functools.wraps.__doc__
else:
    wraps = functools.wraps
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(type):

        def __new__(cls, name, this_bases, d):
            if sys.version_info[:2] >= (3, 7):
                # This version introduced PEP 560 that requires a bit
                # of extra care (we mimic what is done by __build_class__).
                resolved_bases = types.resolve_bases(bases)
                if resolved_bases is not bases:
                    d['__orig_bases__'] = bases
            else:
                resolved_bases = bases
            # Build the real class with the requested metaclass and the
            # originally requested bases, discarding the temporary class.
            return meta(name, resolved_bases, d)

        @classmethod
        def __prepare__(cls, name, this_bases):
            # Delegate namespace preparation to the real metaclass so that
            # features like __prepare__-provided ordered dicts still work.
            return meta.__prepare__(name, bases)
    return type.__new__(metaclass, 'temporary_class', (), {})
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass.

    Rebuilds the decorated class through *metaclass*, carrying over its
    name, bases, and body while dropping per-instance artifacts
    (``__dict__``/``__weakref__`` descriptors and ``__slots__`` members)
    that would conflict with the rebuilt class.
    """
    def wrapper(cls):
        body = dict(cls.__dict__)
        slots = body.get('__slots__')
        if slots is not None:
            # __slots__ may be a single string; normalize to a list and
            # strip the slot descriptors so they are recreated cleanly.
            if isinstance(slots, str):
                slots = [slots]
            for slot_name in slots:
                body.pop(slot_name)
        body.pop('__dict__', None)
        body.pop('__weakref__', None)
        if hasattr(cls, '__qualname__'):
            body['__qualname__'] = cls.__qualname__
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper
def ensure_binary(s, encoding='utf-8', errors='strict'):
    """Coerce **s** to six.binary_type.

    For Python 2:
      - `unicode` -> encoded to `str`
      - `str` -> `str`

    For Python 3:
      - `str` -> encoded to `bytes`
      - `bytes` -> `bytes`
    """
    # text_type and binary_type are disjoint, so check order is immaterial.
    if isinstance(s, text_type):
        return s.encode(encoding, errors)
    if isinstance(s, binary_type):
        return s
    raise TypeError("not expecting type '%s'" % type(s))
def ensure_str(s, encoding='utf-8', errors='strict'):
    """Coerce *s* to `str`.

    For Python 2:
      - `unicode` -> encoded to `str`
      - `str` -> `str`

    For Python 3:
      - `str` -> `str`
      - `bytes` -> decoded to `str`
    """
    # Optimization: Fast return for the common case.
    if type(s) is str:
        return s
    if PY2 and isinstance(s, text_type):
        # Python 2: unicode -> native str via encoding.
        return s.encode(encoding, errors)
    elif PY3 and isinstance(s, binary_type):
        # Python 3: bytes -> native str via decoding.
        return s.decode(encoding, errors)
    elif not isinstance(s, (text_type, binary_type)):
        raise TypeError("not expecting type '%s'" % type(s))
    # Already the native str type (or a subclass of it) for this version.
    return s
def ensure_text(s, encoding='utf-8', errors='strict'):
    """Coerce *s* to six.text_type.

    For Python 2:
      - `unicode` -> `unicode`
      - `str` -> `unicode`

    For Python 3:
      - `str` -> `str`
      - `bytes` -> decoded to `str`
    """
    # text_type and binary_type are disjoint, so check order is immaterial.
    if isinstance(s, text_type):
        return s
    if isinstance(s, binary_type):
        return s.decode(encoding, errors)
    raise TypeError("not expecting type '%s'" % type(s))
def python_2_unicode_compatible(klass):
    """
    A class decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if not PY2:
        # Nothing to do on Python 3, where __str__ already returns text.
        return klass
    if '__str__' not in klass.__dict__:
        raise ValueError("@python_2_unicode_compatible cannot be applied "
                         "to %s because it doesn't define __str__()." %
                         klass.__name__)
    # Reuse the text-returning __str__ as __unicode__, and make __str__
    # return UTF-8 encoded bytes as Python 2 expects.
    klass.__unicode__ = klass.__str__
    klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable

# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    # Clean up the loop variables leaked into the module namespace.
    del i, importer

# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| apache-2.0 |
Ernesto99/odoo | addons/hr_recruitment/__init__.py | 433 | 1145 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_recruitment
import report
import wizard
import res_config
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Zyell/home-assistant | tests/components/thermostat/test_heat_control.py | 8 | 7976 | """The tests for the heat control thermostat."""
import unittest
from homeassistant.bootstrap import _setup_component
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
STATE_OFF,
TEMP_CELSIUS,
)
from homeassistant.components import thermostat
from tests.common import get_test_home_assistant
ENTITY = 'thermostat.test'
ENT_SENSOR = 'sensor.test'
ENT_SWITCH = 'switch.test'
MIN_TEMP = 3.0
MAX_TEMP = 65.0
TARGET_TEMP = 42.0
class TestSetupThermostatHeatControl(unittest.TestCase):
    """Test the Heat Control thermostat with custom config."""

    def setUp(self):  # pylint: disable=invalid-name
        """Setup things to be run when tests are started."""
        self.hass = get_test_home_assistant()

    def tearDown(self):  # pylint: disable=invalid-name
        """Stop down everything that was started."""
        self.hass.stop()

    def test_setup_missing_conf(self):
        """Test set up heat_control with missing config values."""
        # 'platform' and 'heater' are omitted, so component setup must fail.
        config = {
            'name': 'test',
            'target_sensor': ENT_SENSOR
        }
        self.assertFalse(_setup_component(self.hass, 'thermostat', {
            'thermostat': config}))

    def test_valid_conf(self):
        """Test set up heat_control with valid config values."""
        self.assertTrue(_setup_component(self.hass, 'thermostat',
                        {'thermostat': {
                            'platform': 'heat_control',
                            'name': 'test',
                            'heater': ENT_SWITCH,
                            'target_sensor': ENT_SENSOR}}))

    def test_setup_with_sensor(self):
        """Test set up heat_control with sensor to trigger update at init."""
        # Seed the sensor state before setup so the thermostat picks up the
        # current temperature during initialization.
        self.hass.states.set(ENT_SENSOR, 22.0, {
            ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS
        })
        thermostat.setup(self.hass, {'thermostat': {
            'platform': 'heat_control',
            'name': 'test',
            'heater': ENT_SWITCH,
            'target_sensor': ENT_SENSOR
        }})
        state = self.hass.states.get(ENTITY)
        self.assertEqual(
            TEMP_CELSIUS, state.attributes.get('unit_of_measurement'))
        self.assertEqual(22.0, state.attributes.get('current_temperature'))
class TestThermostatHeatControl(unittest.TestCase):
    """Test the Heat Control thermostat."""

    def setUp(self):  # pylint: disable=invalid-name
        """Setup things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        self.hass.config.temperature_unit = TEMP_CELSIUS
        thermostat.setup(self.hass, {'thermostat': {
            'platform': 'heat_control',
            'name': 'test',
            'heater': ENT_SWITCH,
            'target_sensor': ENT_SENSOR
        }})

    def tearDown(self):  # pylint: disable=invalid-name
        """Stop down everything that was started."""
        self.hass.stop()

    def test_setup_defaults_to_unknown(self):
        """Test the setting of defaults to unknown."""
        self.assertEqual('unknown', self.hass.states.get(ENTITY).state)

    def test_default_setup_params(self):
        """Test the setup with default parameters."""
        state = self.hass.states.get(ENTITY)
        self.assertEqual(7, state.attributes.get('min_temp'))
        self.assertEqual(35, state.attributes.get('max_temp'))
        self.assertEqual(None, state.attributes.get('temperature'))

    def test_custom_setup_params(self):
        """Test the setup with custom parameters."""
        # Re-run setup with explicit min/max/target temperatures overriding
        # the defaults established in setUp().
        thermostat.setup(self.hass, {'thermostat': {
            'platform': 'heat_control',
            'name': 'test',
            'heater': ENT_SWITCH,
            'target_sensor': ENT_SENSOR,
            'min_temp': MIN_TEMP,
            'max_temp': MAX_TEMP,
            'target_temp': TARGET_TEMP
        }})
        state = self.hass.states.get(ENTITY)
        self.assertEqual(MIN_TEMP, state.attributes.get('min_temp'))
        self.assertEqual(MAX_TEMP, state.attributes.get('max_temp'))
        self.assertEqual(TARGET_TEMP, state.attributes.get('temperature'))
        self.assertEqual(str(TARGET_TEMP), self.hass.states.get(ENTITY).state)

    def test_set_target_temp(self):
        """Test the setting of the target temperature."""
        thermostat.set_temperature(self.hass, 30)
        self.hass.pool.block_till_done()
        self.assertEqual('30.0', self.hass.states.get(ENTITY).state)

    def test_sensor_bad_unit(self):
        """Test sensor that have bad unit."""
        self._setup_sensor(22.0, unit='bad_unit')
        self.hass.pool.block_till_done()
        state = self.hass.states.get(ENTITY)
        self.assertEqual(None, state.attributes.get('unit_of_measurement'))
        self.assertEqual(None, state.attributes.get('current_temperature'))

    def test_sensor_bad_value(self):
        """Test sensor that have None as state."""
        self._setup_sensor(None)
        self.hass.pool.block_till_done()
        state = self.hass.states.get(ENTITY)
        self.assertEqual(None, state.attributes.get('unit_of_measurement'))
        self.assertEqual(None, state.attributes.get('current_temperature'))

    def test_set_target_temp_heater_on(self):
        """Test if target temperature turn heater on."""
        self._setup_switch(False)
        self._setup_sensor(25)
        self.hass.pool.block_till_done()
        thermostat.set_temperature(self.hass, 30)
        self.hass.pool.block_till_done()
        # Exactly one turn_on service call is expected on the switch domain.
        self.assertEqual(1, len(self.calls))
        call = self.calls[0]
        self.assertEqual('switch', call.domain)
        self.assertEqual(SERVICE_TURN_ON, call.service)
        self.assertEqual(ENT_SWITCH, call.data['entity_id'])

    def test_set_target_temp_heater_off(self):
        """Test if target temperature turn heater off."""
        self._setup_switch(True)
        self._setup_sensor(30)
        self.hass.pool.block_till_done()
        thermostat.set_temperature(self.hass, 25)
        self.hass.pool.block_till_done()
        self.assertEqual(1, len(self.calls))
        call = self.calls[0]
        self.assertEqual('switch', call.domain)
        self.assertEqual(SERVICE_TURN_OFF, call.service)
        self.assertEqual(ENT_SWITCH, call.data['entity_id'])

    def test_set_temp_change_heater_on(self):
        """Test if temperature change turn heater on."""
        self._setup_switch(False)
        thermostat.set_temperature(self.hass, 30)
        self.hass.pool.block_till_done()
        self._setup_sensor(25)
        self.hass.pool.block_till_done()
        self.assertEqual(1, len(self.calls))
        call = self.calls[0]
        self.assertEqual('switch', call.domain)
        self.assertEqual(SERVICE_TURN_ON, call.service)
        self.assertEqual(ENT_SWITCH, call.data['entity_id'])

    def test_temp_change_heater_off(self):
        """Test if temperature change turn heater off."""
        self._setup_switch(True)
        thermostat.set_temperature(self.hass, 25)
        self.hass.pool.block_till_done()
        self._setup_sensor(30)
        self.hass.pool.block_till_done()
        self.assertEqual(1, len(self.calls))
        call = self.calls[0]
        self.assertEqual('switch', call.domain)
        self.assertEqual(SERVICE_TURN_OFF, call.service)
        self.assertEqual(ENT_SWITCH, call.data['entity_id'])

    def _setup_sensor(self, temp, unit=TEMP_CELSIUS):
        """Setup the test sensor."""
        self.hass.states.set(ENT_SENSOR, temp, {
            ATTR_UNIT_OF_MEASUREMENT: unit
        })

    def _setup_switch(self, is_on):
        """Setup the test switch."""
        # NOTE: self.calls is (re)initialized here, so tests must call
        # _setup_switch before asserting on recorded service calls.
        self.hass.states.set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF)
        self.calls = []

        def log_call(call):
            """Log service calls."""
            self.calls.append(call)

        self.hass.services.register('switch', SERVICE_TURN_ON, log_call)
        self.hass.services.register('switch', SERVICE_TURN_OFF, log_call)
| mit |
dostavro/dotfiles | sublime2/Packages/Package Control/package_control/clients/readme_client.py | 9 | 2434 | import re
import os
import base64
try:
# Python 3
from urllib.parse import urlencode
except (ImportError):
# Python 2
from urllib import urlencode
from .json_api_client import JSONApiClient
from ..downloaders.downloader_exception import DownloaderException
# Used to map file extensions to formats
_readme_formats = {
'.md': 'markdown',
'.mkd': 'markdown',
'.mdown': 'markdown',
'.markdown': 'markdown',
'.textile': 'textile',
'.creole': 'creole',
'.rst': 'rst'
}
class ReadmeClient(JSONApiClient):

    """Fetches package readme files, using the GitHub API cache when the
    URL points at a raw GitHub readme."""

    def readme_info(self, url):
        """
        Retrieve the readme and info about it

        :param url:
            The URL of the readme file

        :raises:
            DownloaderException: if there is an error downloading the readme
            ClientException: if there is an error parsing the response

        :return:
            A dict with the following keys:
              `filename`
              `format` - `markdown`, `textile`, `creole`, `rst` or `txt`
              `contents` - contents of the readme as str/unicode
        """

        contents = None

        # Try to grab the contents of a GitHub-based readme by grabbing the
        # cached content of the readme API call.
        # Raw strings avoid the invalid "\." escape in the previous literal.
        github_match = re.match(
            r'https://raw.github.com/([^/]+/[^/]+)/([^/]+)/'
            r'readme(\.(md|mkd|mdown|markdown|textile|creole|rst|txt))?$',
            url, re.I)
        if github_match:
            user_repo = github_match.group(1)
            branch = github_match.group(2)
            query_string = urlencode({'ref': branch})
            readme_json_url = 'https://api.github.com/repos/%s/readme?%s' % (
                user_repo, query_string)
            try:
                info = self.fetch_json(readme_json_url, prefer_cached=True)
                contents = base64.b64decode(info['content'])
            except ValueError:
                # API response was malformed; fall back to the raw URL below
                pass

        if not contents:
            contents = self.fetch(url)

        # Map the file extension to a markup format, defaulting to plain text
        _, ext = os.path.splitext(url)
        readme_format = 'txt'
        ext = ext.lower()
        if ext in _readme_formats:
            readme_format = _readme_formats[ext]

        try:
            contents = contents.decode('utf-8')
        except UnicodeDecodeError:
            # Not valid UTF-8; assume Windows-1252 and replace bad bytes
            contents = contents.decode('cp1252', errors='replace')

        return {
            'filename': os.path.basename(url),
            'format': readme_format,
            'contents': contents
        }
| mit |
oaeproject/oae-fabric | fabfile/cluster/search/__init__.py | 1 | 4757 | from fabric.api import env, runs_once, settings, task
from fabric.tasks import execute
from .. import hosts as cluster_hosts, util as cluster_util
from ... import apt, puppet, search, hilary
__all__ = ["upgrade", "upgrade_host"]
@runs_once
@task
def upgrade(delete_index=False, rebuild_index=False, uninstall=True):
    """Runs through a general upgrade procedure for all known search nodes.

    This will:

        1. Ask for a password with which to sudo. All servers must have
           the same sudo password
        2. Stop puppet on the search nodes
        3. Perform a git pull on the puppet node to get the latest
           configuration
        4. If `delete_index` was `True`, delete the search index
        5. Unless `uninstall` was `False`, uninstall ElasticSearch
        6. Do a full search cluster shut down
        7. Run puppet on each search node
        8. Bring the cluster back up
        9. If `rebuild_index` was `True`, restart an app node to recreate the
           search index. This will not reindex all data, though
    """
    cluster_util.ensure_sudo_pass()

    # Stop puppet on the search nodes
    with settings(hosts=cluster_hosts.search(), parallel=True):
        execute(puppet.stop, force=True)

    # Pull the updated puppet configuration
    with settings(hosts=[cluster_hosts.puppet()], parallel=True):
        execute(puppet.git_update)

    # If we're deleting the data, clear index
    if delete_index:
        with settings(hosts=[cluster_hosts.search()[0]]):
            execute(search.delete_index, index_name=search_index_name())

    # Uninstall ElasticSearch if the option has not been disabled
    if uninstall:
        with settings(hosts=cluster_hosts.search(), parallel=True):
            execute(search.uninstall)
            execute(apt.update)

    # Bring the full cluster down. We bring the full cluster down as a rule
    # since ElasticSearch has gossip, sometimes upgrades can require that all
    # nodes come back gossiping on the same version
    with settings(hosts=cluster_hosts.search(), parallel=True):
        execute(search.stop)

    # Run puppet on the search nodes and ensure they come back up
    with settings(hosts=cluster_hosts.search(), parallel=True):
        execute(puppet.run, force=False)
        execute(search.start)

    # If we deleted the search index, bounce an app server so it will recreate
    # the index and its mappings
    if rebuild_index:
        with settings(hosts=[cluster_hosts.app()[0]]):
            execute(hilary.stop)
            execute(hilary.start)
            execute(hilary.wait_until_ready)

    # Start puppet on the search nodes
    with settings(hosts=cluster_hosts.search(), parallel=True):
        execute(puppet.start)
@runs_once
@task
def upgrade_host(delete_index=False, rebuild_index=False, uninstall=True,
                 hilary_reboot_host="pp0"):
    """Run through the general upgrade procedure for a search node, assuming
    puppet has already been updated.

    This will:

        1. Ask for a password with which to sudo
        2. Forcefully stop any current puppet runs
        3. If `delete_index` was enabled, the search index will be deleted
        4. If `uninstall` was not disabled, uninstall ElasticSearch
        5. Ensure the search service is stopped
        6. Run puppet to re-install and start the search service
        7. Ensure the search service is started
        8. If `rebuild_index` was enabled, the Hilary node as set by
           `hilary_reboot_host` will have its Hilary service restarted to
           let it rebuild the search index
        9. Start the puppet service
    """
    cluster_util.ensure_sudo_pass()

    # Stop puppet on the search node
    execute(puppet.stop, force=True)

    # If we're deleting the data, clear the index
    if delete_index:
        execute(search.delete_index, index_name=search_index_name())

    # Uninstall ElasticSearch if the option has not been disabled
    if uninstall:
        execute(search.uninstall)

    # Bring the search node down
    execute(search.stop)

    # Run puppet on the search node
    execute(puppet.run, force=False)

    # Ensure the node is running again
    execute(search.start)

    # If we refreshed the data, reboot a hilary node so it can
    # create the schema again
    if rebuild_index:
        with settings(hosts=[hilary_reboot_host]):
            execute(hilary.stop)
            execute(hilary.start)
            execute(hilary.wait_until_ready)

    # Start puppet on the search node again
    execute(puppet.start)
def search_index_name():
    """Return the configured search index name, falling back to 'oae'."""
    index_name = getattr(env, 'search_index_name', 'oae')
    return index_name
| apache-2.0 |
yarikoptic/pystatsmodels | tools/examples_rst.py | 30 | 5894 | #! /usr/bin/env python
import os
import sys
import re
import subprocess
import pickle
from StringIO import StringIO
# 3rd party
from matplotlib import pyplot as plt
# Ours
import hash_funcs
#----------------------------------------------------
# Globals
#----------------------------------------------------
# these files do not get made into .rst files because of
# some problems, they may need a simple cleaning up
exclude_list = ['run_all.py',
# these need to be cleaned up
'example_ols_tftest.py',
'example_glsar.py',
'example_ols_table.py',
#not finished yet
'example_arima.py',
'try_wls.py']
file_path = os.path.dirname(__file__)
docs_rst_dir = os.path.realpath(os.path.join(file_path,
'../docs/source/examples/generated/'))
example_dir = os.path.realpath(os.path.join(file_path,
'../examples/'))
def check_script(filename):
    """
    Run the example script `filename` (relative to example_dir) in a
    subprocess with warnings silenced and the Agg matplotlib backend.

    Returns True if the script exited cleanly; otherwise prints the captured
    stderr and returns False so the caller can skip reST generation.
    """
    file_to_run = "python -c\"import warnings; "
    file_to_run += "warnings.simplefilter('ignore'); "
    file_to_run += "from matplotlib import use; use('Agg'); "
    file_to_run += "execfile(r'%s')\"" % os.path.join(example_dir, filename)
    proc = subprocess.Popen(file_to_run, shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    # NOTE: use communicate to wait for process termination
    stdout, stderr = proc.communicate()
    result = proc.returncode
    if result != 0:  # raised an error
        msg = "Not generating reST from %s. An error occurred.\n" % filename
        msg += stderr
        # Single-argument print() call works identically on Python 2 and 3,
        # unlike the "print msg" statement this replaces.
        print(msg)
        return False
    return True
def parse_docstring(block):
    """
    Strip the module docstring from `block`, a string containing a whole
    source file.

    Returns (docstring, remainder) where `docstring` is the stripped text
    between the first pair of triple quotes and `remainder` is everything
    after the closing quotes.

    Raises IOError with an unformatted "%s" placeholder when no docstring
    delimiters are found; the caller (restify) fills in the filename.
    """
    ds = "\"{3}|'{3}"
    try:
        start = re.search(ds, block).end()
        end = re.search(ds, block[start:]).start()
    except AttributeError:
        # re.search returned None: no opening/closing triple quotes found.
        # (Narrowed from a bare except, which hid unrelated errors.)
        raise IOError("File %s does not have a docstring?")
    docstring = block[start:start + end]
    block = block[start + end + 3:]
    return docstring.strip(), block
def parse_file(block):
    """
    Block is a raw string file.

    Converts the example source into reST: the docstring's first line
    becomes the page title, comment lines become reST text, and code lines
    are wrapped in ``.. ipython:: python`` directives.
    """
    docstring, block = parse_docstring(block)
    # just get the first line from the docstring
    docstring = docstring.split('\n')[0] or docstring.split('\n')[1]
    # Title, an '=' underline of matching length, and a blank line.
    outfile = [docstring, '='*len(docstring), '']
    block = block.split('\n')
    # iterate through the rest of block, anything in comments is stripped of #
    # anything else is fair game to go in an ipython directive
    code_snippet = False
    for line in block:
        # if not len(line):
        #     continue
        # preserve blank lines
        if line.startswith('#') and not (line.startswith('#%') or
                                         line.startswith('#@')):
            # on some ReST text
            if code_snippet:  # were on a code snippet
                # Close the preceding ipython block with a blank line.
                outfile.append('')
                code_snippet = False
            line = line.strip()
            # try to remove lines like # hello -> #hello
            line = re.sub("(?<=#) (?!\s)", "", line)
            # make sure commented out things have a space
            line = re.sub("#\.\.(?!\s)", "#.. ", line)
            line = re.sub("^#+", "", line)  # strip multiple hashes
            outfile.append(line)
        else:
            if not code_snippet:  # new code block
                outfile.append('\n.. ipython:: python\n')
                code_snippet = True
            # handle decorators and magic functions
            if line.startswith('#%') or line.startswith('#@'):
                # Drop the leading '#' so %magics/@decorators run in ipython.
                line = line[1:]
            # Indent the line so it sits inside the ipython directive.
            outfile.append(' '+line.strip('\n'))
    return '\n'.join(outfile)
def write_file(outfile, rst_file_pth):
    """
    Write `outfile` (a string or an iterable of lines) to `rst_file_pth`.
    """
    # Single-argument print() works on both Python 2 and 3, unlike the
    # old "print ..." statement this replaces.
    print("Writing " + os.path.basename(rst_file_pth))
    # Context manager closes the handle even if writelines() raises; the
    # previous version leaked the handle on error and also shadowed this
    # function's own name with the local file object.
    with open(rst_file_pth, 'w') as fobj:
        fobj.writelines(outfile)
def restify(example_file, filehash, fname):
    """
    Takes a whole file ie., the result of file.read(), its md5 hash, and
    the filename

    Parse the file
    Write the new .rst
    Update the hash_dict
    """
    # fname[:-2] drops the trailing "py", leaving "name." + "rst".
    write_filename = os.path.join(docs_rst_dir, fname[:-2] + 'rst')
    try:
        rst_file = parse_file(example_file)
    except IOError as err:
        # parse_docstring raises IOError with an unformatted "%s" message;
        # fill in the offending filename here before re-raising.
        # NOTE(review): err.message is Python-2-only.
        raise IOError(err.message % fname)
    write_file(rst_file, write_filename)
    if filehash is not None:
        hash_funcs.update_hash_dict(filehash, fname)
if __name__ == "__main__":
    # Temporarily put the examples directory on sys.path so run_all's
    # filelist can be imported, then restore the path.
    sys.path.insert(0, example_dir)
    from run_all import filelist
    sys.path.remove(example_dir)

    if not os.path.exists(docs_rst_dir):
        os.makedirs(docs_rst_dir)

    if len(sys.argv) > 1:  # given a file,files to process, no help flag yet
        for example_file in sys.argv[1:]:
            whole_file = open(example_file, 'r').read()
            # No hash given: always regenerate the requested files.
            restify(whole_file, None, example_file)
    else:  # process the whole directory
        for root, dirnames, filenames in os.walk(example_dir):
            if 'notebooks' in root:
                continue
            for example in filenames:
                example_file = os.path.join(root, example)
                whole_file = open(example_file, 'r').read()
                # Skip files whose contents have not changed since the last
                # run, as recorded by the stored hash.
                to_write, filehash = hash_funcs.check_hash(whole_file,
                                                           example)
                if not to_write:
                    print "Hash has not changed for file %s" % example
                    continue
                elif (not example.endswith('.py') or example in exclude_list or
                        not check_script(example_file)):
                    continue
                restify(whole_file, filehash, example)
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.