repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
csnake-org/CSnake | refs/heads/master | tests/data/thirdParty/Four/csnFour.py | 4 | # Csnake project configuration
import csnCilab
from csnAll import three
four = csnCilab.CilabModuleProject("Four", "third party")
four.pathsManager.useFilePath = "%s/Four/UseFour.cmake" % four.GetBuildFolder()
four.pathsManager.configFilePath = "%s/Four/FourConfig.cmake" % four.GetBuildFolder()
four.AddProjects([three])
|
zhukite/nikola-install | refs/heads/master | tests/test_command_import_wordpress.py | 2 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from context import nikola
import os
import unittest
import mock
import nikola.plugins.command.import_wordpress
class BasicCommandImportWordpress(unittest.TestCase):
    """Base fixture: a fresh import command plus the sample WordPress export.

    Subclasses get ``self.import_command`` (a new ``CommandImportWordpress``)
    and ``self.import_filename`` (absolute path to the example export XML
    shipped next to the tests).
    """

    def setUp(self):
        self.module = nikola.plugins.command.import_wordpress
        self.import_command = self.module.CommandImportWordpress()
        # Absolute path so tests work regardless of the current directory.
        self.import_filename = os.path.abspath(os.path.join(
            os.path.dirname(__file__), 'wordpress_export_example.xml'))

    def tearDown(self):
        # Drop references so each test starts from a pristine fixture.
        del self.import_command
        del self.import_filename
class TestXMLGlueing(BasicCommandImportWordpress):
    """Checks re-joining of XML byte lines read from the WordPress export."""

    def test_making_correct_newlines(self):
        # Input: the export file content as a list of byte lines, each
        # terminated by '\n'.  Gluing must reproduce the original bytes.
        xml = [b"Some information about how to (un)subscripe to a google group with a normal mail client.\n",
               b"<ul>\n",
               b" <li>to post: <strong>groupname@googlegroups.com</strong></li>\n",
               b" <li>to <em>subscribe</em>: <strong>groupname+subscribe@googlegroups.com</strong></li>\n",
               b" <li>to <em>unsubscribe</em>: <strong>groupname+unsubscribe@googlegroups.com</strong></li>\n",
               b"</ul>\n",
               b"Easy.\n"]
        expected_xml = b"""Some information about how to (un)subscripe to a google group with a normal mail client.
<ul>
 <li>to post: <strong>groupname@googlegroups.com</strong></li>
 <li>to <em>subscribe</em>: <strong>groupname+subscribe@googlegroups.com</strong></li>
 <li>to <em>unsubscribe</em>: <strong>groupname+unsubscribe@googlegroups.com</strong></li>
</ul>
Easy.
"""
        self.assertEqual(expected_xml, self.import_command._glue_xml_lines(xml))
class CommandImportWordpressRunTest(BasicCommandImportWordpress):
    """Runs the import command end-to-end with every side effect mocked out.

    ``os.system`` (site generation), the data import and the two output
    writers are patched so ``execute()`` can run without touching the
    filesystem or building a site.
    """

    def setUp(self):
        # BUGFIX: name the class explicitly in super().  The original
        # ``super(self.__class__, self)`` recurses infinitely as soon as a
        # subclass inherits this method, because self.__class__ is then the
        # subclass itself.
        super(CommandImportWordpressRunTest, self).setUp()
        self.data_import = mock.MagicMock()
        self.site_generation = mock.MagicMock()
        self.write_urlmap = mock.MagicMock()
        self.write_configuration = mock.MagicMock()
        # Patch everything with observable side effects.
        site_generation_patch = mock.patch('os.system', self.site_generation)
        data_import_patch = mock.patch(
            'nikola.plugins.command.import_wordpress.CommandImportWordpress.import_posts', self.data_import)
        write_urlmap_patch = mock.patch(
            'nikola.plugins.command.import_wordpress.CommandImportWordpress.write_urlmap_csv', self.write_urlmap)
        write_configuration_patch = mock.patch(
            'nikola.plugins.command.import_wordpress.CommandImportWordpress.write_configuration', self.write_configuration)
        self.patches = [site_generation_patch, data_import_patch,
                        write_urlmap_patch, write_configuration_patch]
        for patch in self.patches:
            patch.start()

    def tearDown(self):
        del self.data_import
        del self.site_generation
        del self.write_urlmap
        del self.write_configuration

        # Stop the patches before discarding them, otherwise the mocks leak
        # into other test classes.
        for patch in self.patches:
            patch.stop()
        del self.patches

        # Same fix as in setUp(): explicit class, not self.__class__.
        super(CommandImportWordpressRunTest, self).tearDown()

    def test_create_import(self):
        """All valid argument combinations must trigger the full pipeline."""
        valid_import_arguments = (
            dict(options={'output_folder': 'some_folder'},
                 args=[self.import_filename]),
            dict(args=[self.import_filename]),
            dict(args=[self.import_filename, 'folder_argument']),
        )

        for arguments in valid_import_arguments:
            self.import_command.execute(**arguments)

            self.assertTrue(self.site_generation.called)
            self.assertTrue(self.data_import.called)
            self.assertTrue(self.write_urlmap.called)
            self.assertTrue(self.write_configuration.called)
            # Drafts are included by default.
            self.assertFalse(self.import_command.exclude_drafts)

    def test_ignoring_drafts(self):
        """The exclude_drafts option must be honoured with or without output_folder."""
        valid_import_arguments = (
            dict(options={'exclude_drafts': True}, args=[
                 self.import_filename]),
            dict(
                options={'exclude_drafts': True,
                         'output_folder': 'some_folder'},
                args=[self.import_filename]),
        )

        for arguments in valid_import_arguments:
            self.import_command.execute(**arguments)
            self.assertTrue(self.import_command.exclude_drafts)
class CommandImportWordpressTest(BasicCommandImportWordpress):
    """Unit tests for the individual steps of the WordPress importer."""

    def test_create_import_work_without_argument(self):
        # Running this without an argument must not fail.
        # It should show the proper usage of the command.
        self.import_command.execute()

    def test_populate_context(self):
        # The context built from the sample export must carry the blog
        # metadata over into Nikola's configuration keys.
        channel = self.import_command.get_channel_from_file(
            self.import_filename)
        context = self.import_command.populate_context(channel)

        for required_key in ('POSTS', 'PAGES', 'COMPILERS'):
            self.assertTrue(required_key in context)

        self.assertEqual('de', context['DEFAULT_LANG'])
        self.assertEqual('Wordpress blog title', context['BLOG_TITLE'])
        self.assertEqual('Nikola test blog ;) - with moré Ümläüts',
                         context['BLOG_DESCRIPTION'])
        self.assertEqual('http://some.blog', context['SITE_URL'])
        self.assertEqual('mail@some.blog', context['BLOG_EMAIL'])
        self.assertEqual('Niko', context['BLOG_AUTHOR'])

    def test_importing_posts_and_attachments(self):
        """Import the sample channel and verify downloads, metadata, content
        and URL map -- with all writers and downloads mocked out."""
        channel = self.import_command.get_channel_from_file(
            self.import_filename)
        self.import_command.context = self.import_command.populate_context(
            channel)
        self.import_command.output_folder = 'new_site'
        self.import_command.squash_newlines = True
        self.import_command.no_downloads = False

        # Ensuring clean results
        self.import_command.url_map = {}
        self.module.links = {}

        write_metadata = mock.MagicMock()
        write_content = mock.MagicMock()
        download_mock = mock.MagicMock()

        with mock.patch('nikola.plugins.command.import_wordpress.CommandImportWordpress.write_content', write_content):
            with mock.patch('nikola.plugins.command.import_wordpress.CommandImportWordpress.write_metadata', write_metadata):
                with mock.patch('nikola.plugins.command.import_wordpress.CommandImportWordpress.download_url_content_to_file', download_mock):
                    with mock.patch('nikola.plugins.command.import_wordpress.os.makedirs'):
                        self.import_command.import_posts(channel)

        # Attachments from the export must be scheduled for download into
        # the new site's files folder.
        self.assertTrue(download_mock.called)
        download_mock.assert_any_call(
            'http://some.blog/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover.png',
            'new_site/files/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover.png')

        self.assertTrue(write_metadata.called)
        write_metadata.assert_any_call(
            'new_site/stories/kontakt.meta', 'Kontakt',
            'kontakt', '2009-07-16 20:20:32', None, [])

        self.assertTrue(write_content.called)
        write_content.assert_any_call('new_site/posts/200704hoert.wp',
                                      """An image.
<img class="size-full wp-image-16" title="caption test" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="caption test" width="739" height="517" />
Some source code.
~~~~~~~~~~~~{.Python}
import sys
print sys.version
~~~~~~~~~~~~
The end.
""")
        write_content.assert_any_call(
            'new_site/posts/200807arzt-und-pfusch-s-i-c-k.wp',
            '''<img class="size-full wp-image-10 alignright" title="Arzt+Pfusch - S.I.C.K." src="http://some.blog/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover.png" alt="Arzt+Pfusch - S.I.C.K." width="210" height="209" />Arzt+Pfusch - S.I.C.K.Gerade bin ich \xfcber das Album <em>S.I.C.K</em> von <a title="Arzt+Pfusch" href="http://www.arztpfusch.com/" target="_blank">Arzt+Pfusch</a> gestolpert, welches Arzt+Pfusch zum Download f\xfcr lau anbieten. Das Album steht unter einer Creative Commons <a href="http://creativecommons.org/licenses/by-nc-nd/3.0/de/">BY-NC-ND</a>-Lizenz.
Die Ladung <em>noisebmstupidevildustrial</em> gibts als MP3s mit <a href="http://www.archive.org/download/dmp005/dmp005_64kb_mp3.zip">64kbps</a> und <a href="http://www.archive.org/download/dmp005/dmp005_vbr_mp3.zip">VBR</a>, als Ogg Vorbis und als FLAC (letztere <a href="http://www.archive.org/details/dmp005">hier</a>). <a href="http://www.archive.org/download/dmp005/dmp005-artwork.zip">Artwork</a> und <a href="http://www.archive.org/download/dmp005/dmp005-lyrics.txt">Lyrics</a> gibts nochmal einzeln zum Download.''')
        write_content.assert_any_call(
            'new_site/stories/kontakt.wp', """<h1>Datenschutz</h1>
Ich erhebe und speichere automatisch in meine Server Log Files Informationen, die dein Browser an mich \xfcbermittelt. Dies sind:
<ul>
<li>Browsertyp und -version</li>
<li>verwendetes Betriebssystem</li>
<li>Referrer URL (die zuvor besuchte Seite)</li>
<li>IP Adresse des zugreifenden Rechners</li>
<li>Uhrzeit der Serveranfrage.</li>
</ul>
Diese Daten sind f\xfcr mich nicht bestimmten Personen zuordenbar. Eine Zusammenf\xfchrung dieser Daten mit anderen Datenquellen wird nicht vorgenommen, die Daten werden einzig zu statistischen Zwecken erhoben.""")

        # Old WordPress URLs must be mapped onto the new Nikola URLs.
        self.assertTrue(len(self.import_command.url_map) > 0)
        self.assertEqual(
            self.import_command.url_map['http://some.blog/2007/04/hoert/'],
            'http://some.blog/posts/200704hoert.html')
        self.assertEqual(
            self.import_command.url_map[
                'http://some.blog/2008/07/arzt-und-pfusch-s-i-c-k/'],
            'http://some.blog/posts/200807arzt-und-pfusch-s-i-c-k.html')
        self.assertEqual(
            self.import_command.url_map['http://some.blog/kontakt/'],
            'http://some.blog/stories/kontakt.html')

        # Every generated thumbnail size must be registered in the module's
        # link map.
        image_thumbnails = [
            'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-64x64.png',
            'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-300x175.png',
            'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-36x36.png',
            'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-24x24.png',
            'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-96x96.png',
            'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-96x96.png',
            'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-48x48.png',
            'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-96x96.png',
            'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-150x150.png'
        ]
        for link in image_thumbnails:
            self.assertTrue(
                link in self.module.links,
                'No link to "{0}" found in {map}.'.format(
                    link,
                    map=self.module.links
                )
            )

    def test_transforming_content(self):
        """Applying markup conversions to content."""
        transform_sourcecode = mock.MagicMock()
        transform_caption = mock.MagicMock()
        transform_newlines = mock.MagicMock()

        with mock.patch('nikola.plugins.command.import_wordpress.CommandImportWordpress.transform_sourcecode', transform_sourcecode):
            with mock.patch('nikola.plugins.command.import_wordpress.CommandImportWordpress.transform_caption', transform_caption):
                with mock.patch('nikola.plugins.command.import_wordpress.CommandImportWordpress.transform_multiple_newlines', transform_newlines):
                    self.import_command.transform_content("random content")

        # transform_content must delegate to all three transformations.
        self.assertTrue(transform_sourcecode.called)
        self.assertTrue(transform_caption.called)
        self.assertTrue(transform_newlines.called)

    def test_transforming_source_code(self):
        """
        Tests the handling of sourcecode tags.
        """
        content = """Hello World.
[sourcecode language="Python"]
import sys
print sys.version
[/sourcecode]"""

        content = self.import_command.transform_sourcecode(content)

        self.assertFalse('[/sourcecode]' in content)
        self.assertFalse('[sourcecode language=' in content)

        replaced_content = """Hello World.
~~~~~~~~~~~~{.Python}
import sys
print sys.version
~~~~~~~~~~~~
"""
        self.assertEqual(content, replaced_content)

    def test_transform_caption(self):
        # [caption] shortcodes must be unwrapped to the bare <img> tag.
        caption = '[caption id="attachment_16" align="alignnone" width="739" caption="beautiful picture"]<img class="size-full wp-image-16" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />[/caption]'

        transformed_content = self.import_command.transform_caption(caption)

        expected_content = '<img class="size-full wp-image-16" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />'

        self.assertEqual(transformed_content, expected_content)

    def test_transform_multiple_captions_in_a_post(self):
        content = """asdasdas
[caption id="attachment_16" align="alignnone" width="739" caption="beautiful picture"]<img class="size-full wp-image-16" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />[/caption]
asdasdas
asdasdas
[caption id="attachment_16" align="alignnone" width="739" caption="beautiful picture"]<img class="size-full wp-image-16" title="pretty" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />[/caption]
asdasdas"""

        expected_content = """asdasdas
<img class="size-full wp-image-16" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />
asdasdas
asdasdas
<img class="size-full wp-image-16" title="pretty" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />
asdasdas"""

        self.assertEqual(
            expected_content, self.import_command.transform_caption(content))

    def test_transform_multiple_newlines(self):
        # NOTE(review): blank lines inside these literals appear to have been
        # lost in transit -- 'content' should contain runs of multiple
        # newlines for the squashing branch to be meaningful; verify against
        # the upstream test file.
        content = """This
has
way to many
newlines.
"""
        expected_content = """This
has
way to many
newlines.
"""
        # With squashing disabled the content must pass through unchanged.
        self.import_command.squash_newlines = False
        self.assertEqual(content,
                         self.import_command.transform_multiple_newlines(content))

        self.import_command.squash_newlines = True
        self.assertEqual(expected_content,
                         self.import_command.transform_multiple_newlines(content))

    def test_transform_caption_with_link_inside(self):
        # A caption wrapping a linked image must keep the <a> element intact.
        content = """[caption caption="Fehlermeldung"]<a href="http://some.blog/openttd-missing_sound.png"><img class="size-thumbnail wp-image-551" title="openttd-missing_sound" src="http://some.blog/openttd-missing_sound-150x150.png" alt="Fehlermeldung" /></a>[/caption]"""
        transformed_content = self.import_command.transform_caption(content)

        expected_content = """<a href="http://some.blog/openttd-missing_sound.png"><img class="size-thumbnail wp-image-551" title="openttd-missing_sound" src="http://some.blog/openttd-missing_sound-150x150.png" alt="Fehlermeldung" /></a>"""
        self.assertEqual(expected_content, transformed_content)

    def test_get_configuration_output_path(self):
        self.import_command.output_folder = 'new_site'
        default_config_path = os.path.join('new_site', 'conf.py')

        # Fresh site: the default conf.py path is used.
        self.import_command.import_into_existing_site = False
        self.assertEqual(default_config_path,
                         self.import_command.get_configuration_output_path())

        # Existing site: a distinct (timestamped) path is generated so the
        # current configuration is not overwritten.
        self.import_command.import_into_existing_site = True
        config_path_with_timestamp = self.import_command.get_configuration_output_path(
        )
        self.assertNotEqual(default_config_path, config_path_with_timestamp)
        self.assertTrue(self.import_command.name in config_path_with_timestamp)

    def test_write_content_does_not_detroy_text(self):
        # Byte content with umlaut escapes and markup must survive the
        # write untouched (apart from the html/body wrapper).
        content = b"""<h1>Installation</h1>
Follow the instructions <a title="Installing Jenkins" href="https://wiki.jenkins-ci.org/display/JENKINS/Installing+Jenkins">described here</a>.
<h1>Plugins</h1>
There are many plugins.
<h2>Violations</h2>
You can use the <a title="Jenkins Plugin: Violations" href="https://wiki.jenkins-ci.org/display/JENKINS/Violations">Violations</a> plugin."""

        open_mock = mock.mock_open()
        with mock.patch('nikola.plugins.basic_import.open', open_mock, create=True):
            self.import_command.write_content('some_file', content)

        open_mock.assert_called_once_with('some_file', 'wb+')
        call_context = open_mock()
        call_context.write.assert_called_once_with(
            content.join([b'<html><body>', b'</body></html>']))

    def test_configure_redirections(self):
        """
        Testing the configuration of the redirections.

        We need to make sure that we have valid sources and target links.
        """
        url_map = {
            '/somewhere/else': 'http://foo.bar/posts/somewhereelse.html'
        }

        redirections = self.import_command.configure_redirections(url_map)

        self.assertEqual(1, len(redirections))
        self.assertTrue(('somewhere/else/index.html', '/posts/somewhereelse.html') in redirections)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
wilebeast/FireFox-OS | refs/heads/master | B2G/external/gtest/test/gtest_shuffle_test.py | 3023 | #!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')

# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'

# Filter passed via --gtest_filter in the filtering scenarios.
TEST_FILTER = 'A*.A:A*.B:C*'

# Module-level caches of the test lists observed under each flag
# combination; populated lazily by CalculateTestLists().
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []

SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
  """Returns the command-line flag that also runs disabled tests."""
  return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
  """Returns the --gtest_filter flag for the given test filter string."""
  return '--gtest_filter={0}'.format(test_filter)
def RepeatFlag(n):
  """Returns the --gtest_repeat flag repeating the tests n times."""
  return '--gtest_repeat={0}'.format(n)
def ShuffleFlag():
  """Returns the command-line flag that enables test shuffling."""
  return '--gtest_shuffle'
def RandomSeedFlag(n):
  """Returns the --gtest_random_seed flag selecting shuffle seed n."""
  return '--gtest_random_seed={0}'.format(n)
def RunAndReturnOutput(extra_env, args):
  """Runs the test program and returns its output.

  Args:
    extra_env: a map from environment variables to their values; merged on
        top of a copy of the current environment (os.environ is not mutated)
    args: command line flags to pass to gtest_shuffle_test_

  Returns:
    The output captured from the subprocess.
  """
  environ_copy = os.environ.copy()
  environ_copy.update(extra_env)
  return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
def GetTestsForAllIterations(extra_env, args):
  """Runs the test program and returns a list of test lists.

  Args:
    extra_env: a map from environment variables to their values
    args: command line flags to pass to gtest_shuffle_test_

  Returns:
    A list where the i-th element is the list of tests run in the i-th
    test iteration.
  """
  # A line starting with '----' marks the beginning of a new iteration;
  # every other non-blank line is a full test name.
  # NOTE(review): assumes the first non-blank output line is preceded by a
  # '----' separator; otherwise 'tests' would be unbound here.
  test_iterations = []
  for line in RunAndReturnOutput(extra_env, args).split('\n'):
    if line.startswith('----'):
      tests = []
      test_iterations.append(tests)
    elif line.strip():
      tests.append(line.strip())  # 'TestCaseName.TestName'

  return test_iterations
def GetTestCases(tests):
  """Returns the distinct test case names in the given full test names.

  Args:
    tests: a list of full test names ('TestCase.TestName')

  Returns:
    A list of the test case names from 'tests', in order of first
    appearance (duplicates removed).
  """
  seen_cases = []
  for full_name in tests:
    case_name = full_name.split('.')[0]
    if case_name not in seen_cases:
      seen_cases.append(case_name)
  return seen_cases
def CalculateTestLists():
  """Calculates the list of tests run under different flags.

  Populates the module-level caches on first call; subsequent calls (from
  every test's setUp) are no-ops, so each subprocess run happens only once.
  """
  if not ALL_TESTS:
    ALL_TESTS.extend(
        GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])

  if not ACTIVE_TESTS:
    ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])

  if not FILTERED_TESTS:
    FILTERED_TESTS.extend(
        GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])

  if not SHARDED_TESTS:
    SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [])[0])

  if not SHUFFLED_ALL_TESTS:
    SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
        {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])

  if not SHUFFLED_ACTIVE_TESTS:
    SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])[0])

  if not SHUFFLED_FILTERED_TESTS:
    SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])

  if not SHUFFLED_SHARDED_TESTS:
    SHUFFLED_SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
  """Tests test shuffling."""

  def setUp(self):
    # Lazily computes the cached test lists the assertions below compare.
    CalculateTestLists()

  def testShufflePreservesNumberOfTests(self):
    self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
    self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
    self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
    self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))

  def testShuffleChangesTestOrder(self):
    self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
    self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
    self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
                 SHUFFLED_FILTERED_TESTS)
    self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
                 SHUFFLED_SHARDED_TESTS)

  def testShuffleChangesTestCaseOrder(self):
    self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
                 GetTestCases(SHUFFLED_ALL_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
        GetTestCases(SHUFFLED_ACTIVE_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
        GetTestCases(SHUFFLED_FILTERED_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
        GetTestCases(SHUFFLED_SHARDED_TESTS))

  def testShuffleDoesNotRepeatTest(self):
    for test in SHUFFLED_ALL_TESTS:
      self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
                       '%s appears more than once' % (test,))

  def testShuffleDoesNotCreateNewTest(self):
    for test in SHUFFLED_ALL_TESTS:
      self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))

  def testShuffleIncludesAllTests(self):
    for test in ALL_TESTS:
      self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
    for test in ACTIVE_TESTS:
      self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
    for test in FILTERED_TESTS:
      self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
    for test in SHARDED_TESTS:
      self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))

  def testShuffleLeavesDeathTestsAtFront(self):
    # All death tests must precede the first non-death test in the
    # shuffled order.
    non_death_test_found = False
    for test in SHUFFLED_ACTIVE_TESTS:
      if 'DeathTest.' in test:
        self.assert_(not non_death_test_found,
                     '%s appears after a non-death test' % (test,))
      else:
        non_death_test_found = True

  def _VerifyTestCasesDoNotInterleave(self, tests):
    # A test case may only appear as one contiguous run in 'tests'.
    test_cases = []
    for test in tests:
      [test_case, _] = test.split('.')
      if test_cases and test_cases[-1] != test_case:
        test_cases.append(test_case)
        self.assertEqual(1, test_cases.count(test_case),
                         'Test case %s is not grouped together in %s' %
                         (test_case, tests))

  def testShuffleDoesNotInterleaveTestCases(self):
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)

  def testShuffleRestoresOrderAfterEachIteration(self):
    # Get the test lists in all 3 iterations, using random seed 1, 2,
    # and 3 respectively.  Google Test picks a different seed in each
    # iteration, and this test depends on the current implementation
    # picking successive numbers.  This dependency is not ideal, but
    # makes the test much easier to write.
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))

    # Make sure running the tests with random seed 1 gets the same
    # order as in iteration 1 above.
    [tests_with_seed1] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])
    self.assertEqual(tests_in_iteration1, tests_with_seed1)

    # Make sure running the tests with random seed 2 gets the same
    # order as in iteration 2 above.  Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 2.
    [tests_with_seed2] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(2)])
    self.assertEqual(tests_in_iteration2, tests_with_seed2)

    # Make sure running the tests with random seed 3 gets the same
    # order as in iteration 3 above.  Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 3.
    [tests_with_seed3] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(3)])
    self.assertEqual(tests_in_iteration3, tests_with_seed3)

  def testShuffleGeneratesNewOrderInEachIteration(self):
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))

    self.assert_(tests_in_iteration1 != tests_in_iteration2,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration1 != tests_in_iteration3,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration2 != tests_in_iteration3,
                 tests_in_iteration2)

  def testShuffleShardedTestsPreservesPartition(self):
    # If we run M tests on N shards, the same M tests should be run in
    # total, regardless of the random seeds used by the shards.
    [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '0'},
                                        [ShuffleFlag(), RandomSeedFlag(1)])
    [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '1'},
                                        [ShuffleFlag(), RandomSeedFlag(20)])
    [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '2'},
                                        [ShuffleFlag(), RandomSeedFlag(25)])
    sorted_sharded_tests = tests1 + tests2 + tests3
    sorted_sharded_tests.sort()
    sorted_active_tests = []
    sorted_active_tests.extend(ACTIVE_TESTS)
    sorted_active_tests.sort()
    self.assertEqual(sorted_active_tests, sorted_sharded_tests)
# Allow running this test script directly.
if __name__ == '__main__':
  gtest_test_utils.Main()
|
quarkslab/irma | refs/heads/master | frontend/extras/migration/versions/1ea7100d95d0_version_2_0_5.py | 1 | """version 2.0.5
Revision ID: 1ea7100d95d0
Revises: 17cafa6e8016
Create Date: 2018-01-17 16:07:54.181133
"""
# revision identifiers, used by Alembic.
revision = '1ea7100d95d0'
down_revision = '17cafa6e8016'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa  # NOTE: kept for Alembic's autogenerate convention even if unused
from sqlalchemy.sql import text
def upgrade():
    """Schema upgrade to 2.0.5: lookup indexes plus a sha256 uniqueness
    constraint."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_index(op.f('ix_irma_fileExt_id_file'),
                    'irma_fileExt',
                    ['id_file'],
                    unique=False)
    op.create_index(op.f('ix_irma_fileExt_id_scan'),
                    'irma_fileExt',
                    ['id_scan'],
                    unique=False)
    op.create_index(op.f('ix_irma_probeResult_id_file'),
                    'irma_probeResult',
                    ['id_file'],
                    unique=False)
    op.create_index(op.f('ix_irma_probeResult_fileExt_id_fe'),
                    'irma_probeResult_fileExt',
                    ['id_fe'],
                    unique=False)
    # Partial index (PostgreSQL only): speeds up queries for files that
    # still have an on-disk path.
    op.create_index(op.f('ix_irma_file_ts_pathnotnull'),
                    'irma_file',
                    ['timestamp_last_scan'],
                    postgresql_where=text("path is not Null"),
                    unique=False)
    op.create_unique_constraint('u_file_sha256',
                                'irma_file',
                                ['sha256'])
    # ### end Alembic commands ###
def downgrade():
    """Reverts upgrade(): drops the indexes and the sha256 uniqueness
    constraint."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_irma_probeResult_fileExt_id_fe'),
                  table_name='irma_probeResult_fileExt')
    op.drop_index(op.f('ix_irma_probeResult_id_file'),
                  table_name='irma_probeResult')
    op.drop_index(op.f('ix_irma_fileExt_id_scan'),
                  table_name='irma_fileExt')
    op.drop_index(op.f('ix_irma_fileExt_id_file'),
                  table_name='irma_fileExt')
    op.drop_index(op.f('ix_irma_file_ts_pathnotnull'),
                  table_name='irma_file')
    op.drop_constraint('u_file_sha256', 'irma_file')
    # ### end Alembic commands ###
|
jeffreylu9/django-cms | refs/heads/wlsite | cms/south_migrations/0005_mptt_added_to_plugins.py | 1680 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models

# Resolve the active user model: supports custom user models on
# Django >= 1.5 and falls back to auth.User on older versions.
try:
    from django.contrib.auth import get_user_model
except ImportError:  # django < 1.5
    from django.contrib.auth.models import User
else:
    User = get_user_model()

# Labels derived from the user model, used in the frozen ORM definitions.
user_orm_label = '%s.%s' % (User._meta.app_label, User._meta.object_name)
user_model_label = '%s.%s' % (User._meta.app_label, User._meta.model_name)
user_ptr_name = '%s_ptr' % User._meta.object_name.lower()
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration.

        Intentionally a no-op: the schema change was folded elsewhere and
        this revision only keeps the migration history consistent.
        """
        # Dummy migration
        pass
def backwards(self, orm):
# Dummy migration
pass
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [],
{'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Permission']", 'symmetrical': 'False',
'blank': 'True'})
},
'auth.permission': {
'Meta': {
'ordering': "('content_type__app_label', 'content_type__model', 'codename')",
'unique_together': "(('content_type', 'codename'),)",
'object_name': 'Permission'},
'codename': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['contenttypes.ContentType']"}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': User.__name__, 'db_table': "'%s'" % User._meta.db_table},
'date_joined': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [],
{'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [],
{'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Group']", 'symmetrical': 'False',
'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [],
{'max_length': '30', 'blank': 'True'}),
'password': (
'django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': (
'django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Permission']", 'symmetrical': 'False',
'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [],
{'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [],
{'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.CMSPlugin']", 'null': 'True',
'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [],
{'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [],
{'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_moderate': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [],
{'symmetrical': 'False', 'to': "orm['sites.Site']",
'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('site', 'tree_id', 'lft')",
'object_name': 'Page'},
'changed_by': (
'django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'blank': 'True'}),
'created_by': (
'django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now_add': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [],
{'default': 'True', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'limit_visibility_in_menu': (
'django.db.models.fields.SmallIntegerField', [],
{'default': 'None', 'null': 'True', 'db_index': 'True',
'blank': 'True'}),
'login_required': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderator_state': ('django.db.models.fields.SmallIntegerField', [],
{'default': '1', 'blank': 'True'}),
'navigation_extenders': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '80',
'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [],
{'blank': 'True', 'related_name': "'children'",
'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['cms.Placeholder']",
'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [],
{'db_index': 'True', 'null': 'True',
'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [],
{'db_index': 'True', 'null': 'True',
'blank': 'True'}),
'published': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [],
{'default': 'True', 'db_index': 'True'}),
'publisher_public': (
'django.db.models.fields.related.OneToOneField', [],
{'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True',
'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [],
{'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '40', 'null': 'True',
'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [],
{'default': 'False', 'db_index': 'True'}),
'template': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'})
},
'cms.pagemoderator': {
'Meta': {'object_name': 'PageModerator'},
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderate_children': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderate_descendants': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderate_page': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['%s']" % user_orm_label})
},
'cms.pagemoderatorstate': {
'Meta': {'ordering': "('page', 'action', '-created')",
'object_name': 'PageModeratorState'},
'action': ('django.db.models.fields.CharField', [],
{'max_length': '3', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [],
{'auto_now_add': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [],
{'default': "''", 'max_length': '1000', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['%s']" % user_orm_label, 'null': 'True'})
},
'cms.pagepermission': {
'Meta': {'object_name': 'PagePermission'},
'can_add': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_moderate': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grant_on': (
'django.db.models.fields.IntegerField', [], {'default': '5'}),
'group': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'})
},
'cms.pageuser': {
'Meta': {'object_name': 'PageUser', '_ormbases': [user_orm_label]},
'created_by': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'created_users'",
'to': "orm['%s']" % user_orm_label}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [],
{'to': "orm['%s']" % user_orm_label, 'unique': 'True',
'primary_key': 'True'})
},
'cms.pageusergroup': {
'Meta': {'object_name': 'PageUserGroup', '_ormbases': ['auth.Group']},
'created_by': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'created_usergroups'",
'to': "orm['%s']" % user_orm_label}),
'group_ptr': ('django.db.models.fields.related.OneToOneField', [],
{'to': "orm['auth.Group']", 'unique': 'True',
'primary_key': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': (
'django.db.models.fields.PositiveSmallIntegerField', [],
{'null': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [],
{'max_length': '50', 'db_index': 'True'})
},
'cms.title': {
'Meta': {'unique_together': "(('language', 'page'),)",
'object_name': 'Title'},
'application_urls': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '200',
'null': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'has_url_overwrite': ('django.db.models.fields.BooleanField', [],
{'default': 'False', 'db_index': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [],
{'max_length': '15', 'db_index': 'True'}),
'menu_title': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [],
{'max_length': '255', 'null': 'True',
'blank': 'True'}),
'meta_keywords': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True',
'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'title_set'", 'to': "orm['cms.Page']"}),
'page_title': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'db_index': 'True'}),
'redirect': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': (
'django.db.models.fields.SlugField', [], {'max_length': '255'}),
'title': (
'django.db.models.fields.CharField', [], {'max_length': '255'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)",
'unique_together': "(('app_label', 'model'),)",
'object_name': 'ContentType',
'db_table': "'django_content_type'"},
'app_label': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site',
'db_table': "'django_site'"},
'domain': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cms']
|
aatchison/mycroft-core | refs/heads/master | mycroft/util/log.py | 1 | # Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import inspect
import logging
import sys
from os.path import isfile
from mycroft.util.json_helper import load_commented_json
def getLogger(name="MYCROFT"):
    """Deprecated. Use LOG instead.

    Args:
        name (str): logger name, defaults to "MYCROFT"

    Returns:
        logging.Logger: standard library logger with the given name
    """
    return logging.getLogger(name)
def _make_log_method(fn):
@classmethod
def method(cls, *args, **kwargs):
cls._log(fn, *args, **kwargs)
method.__func__.__doc__ = fn.__doc__
return method
class LOG:
    """
    Custom logger class that acts like logging.Logger

    The logger name is automatically generated by the module of the caller

    Usage:
        LOG.debug('My message: %s', debug_str)
        LOG('custom_name').debug('Another message')
    """

    # One-shot name override set by LOG('name'); consumed by the next _log.
    _custom_name = None
    # Shared stdout handler and level, populated by init().
    handler = None
    level = None

    # Copy actual logging methods from logging.Logger
    # Usage: LOG.debug(message)
    debug = _make_log_method(logging.Logger.debug)
    info = _make_log_method(logging.Logger.info)
    warning = _make_log_method(logging.Logger.warning)
    error = _make_log_method(logging.Logger.error)
    exception = _make_log_method(logging.Logger.exception)

    @classmethod
    def init(cls):
        """Read the log level from the system config and build the handler."""
        sys_config = '/etc/mycroft/mycroft.conf'
        config = load_commented_json(sys_config) if isfile(sys_config) else {}
        cls.level = logging.getLevelName(config.get('log_level', 'DEBUG'))
        fmt = '%(asctime)s.%(msecs)03d - ' \
              '%(name)s - %(levelname)s - %(message)s'
        datefmt = '%H:%M:%S'
        formatter = logging.Formatter(fmt, datefmt)
        cls.handler = logging.StreamHandler(sys.stdout)
        cls.handler.setFormatter(formatter)
        cls.create_logger('')  # Enables logging in external modules

    @classmethod
    def create_logger(cls, name):
        """Return a logging.Logger wired to the shared stdout handler.

        ``logging.getLogger(name)`` returns a process-wide singleton, so the
        handler is attached only if it is not already present; previously it
        was re-added on every call, which made repeated logging from the same
        call site emit each record multiple times.
        """
        logger = logging.getLogger(name)
        logger.propagate = False
        if cls.handler is not None and cls.handler not in logger.handlers:
            logger.addHandler(cls.handler)
        logger.setLevel(cls.level)
        return logger

    def __init__(self, name):
        # LOG('name') stashes the override on the class itself; the next
        # _log() call consumes it.  NOTE(review): not thread-safe.
        LOG._custom_name = name

    @classmethod
    def _log(cls, func, *args, **kwargs):
        """Emit a record via *func* under an auto-generated logger name."""
        if cls._custom_name is not None:
            name = cls._custom_name
            cls._custom_name = None
        else:
            # Stack:
            # [0] - _log()
            # [1] - debug(), info(), warning(), or error()
            # [2] - caller
            stack = inspect.stack()

            # Record:
            # [0] - frame object
            # [1] - filename
            # [2] - line number
            # [3] - function
            # ...
            record = stack[2]
            mod = inspect.getmodule(record[0])
            module_name = mod.__name__ if mod else ''
            name = module_name + ':' + record[3] + ':' + str(record[2])
        func(cls.create_logger(name), *args, **kwargs)
# Configure the shared handler and level once at import time so modules
# importing this file can log immediately.
LOG.init()
|
yuananzhang/myscrapy | refs/heads/master | google_translate/google_translate/items.py | 1 | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class GoogleTranslateItem(scrapy.Item):
    """Item container for data scraped by the google_translate project.

    No fields are declared yet; add them as ``scrapy.Field()`` class
    attributes once the spider starts extracting data.
    """
    # define the fields for your item here like:
    # name = scrapy.Field()
    pass
|
google/grr | refs/heads/master | grr/client/grr_response_client/unprivileged/memory/server.py | 1 | #!/usr/bin/env python
"""Functionality to create an unprivileged memory server."""
from typing import List
from grr_response_client.unprivileged import communication
from grr_response_client.unprivileged import interface_registry
from grr_response_client.unprivileged import server
def CreateMemoryServer(
    process_file_descriptors: List[communication.FileDescriptor]
) -> communication.Server:
  """Creates an unprivileged server speaking the MEMORY interface.

  Args:
    process_file_descriptors: File descriptors to hand to the server process.

  Returns:
    A communication.Server for the memory interface.
  """
  interface = interface_registry.Interface.MEMORY
  return server.CreateServer(process_file_descriptors, interface)
|
stemount/taylettings | refs/heads/master | node_modules/npm-shrinkwrap/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py | 395 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the MSVSSettings.py file."""
import StringIO
import unittest
import gyp.MSVSSettings as MSVSSettings
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def _ExpectedWarnings(self, expected):
"""Compares recorded lines to expected warnings."""
self.stderr.seek(0)
actual = self.stderr.read().split('\n')
actual = [line for line in actual if line]
self.assertEqual(sorted(expected), sorted(actual))
def testValidateMSVSSettings_tool_names(self):
"""Tests that only MSVS tool names are allowed."""
MSVSSettings.ValidateMSVSSettings(
{'VCCLCompilerTool': {},
'VCLinkerTool': {},
'VCMIDLTool': {},
'foo': {},
'VCResourceCompilerTool': {},
'VCLibrarianTool': {},
'VCManifestTool': {},
'ClCompile': {}},
self.stderr)
self._ExpectedWarnings([
'Warning: unrecognized tool foo',
'Warning: unrecognized tool ClCompile'])
def testValidateMSVSSettings_settings(self):
"""Tests that for invalid MSVS settings."""
MSVSSettings.ValidateMSVSSettings(
{'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': ['string1', 'string2'],
'AdditionalUsingDirectories': 'folder1;folder2',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': '0',
'BasicRuntimeChecks': '5',
'BrowseInformation': 'fdkslj',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': '-1',
'CompileAs': '1',
'DebugInformationFormat': '2',
'DefaultCharIsUnsigned': 'true',
'Detect64BitPortabilityProblems': 'true',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'string1;string2',
'EnableEnhancedInstructionSet': '1',
'EnableFiberSafeOptimizations': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'EnablePREfast': 'true',
'Enableprefast': 'bogus',
'ErrorReporting': '1',
'ExceptionHandling': '1',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': '1',
'FloatingPointExceptions': 'true',
'FloatingPointModel': '1',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2',
'ForcedUsingFiles': 'file1;file2',
'GeneratePreprocessedFile': '1',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': '1',
'KeepComments': 'true',
'MinimalRebuild': 'true',
'ObjectFile': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMP': 'true',
'Optimization': '1',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderThrough': 'a_file_name',
'PreprocessorDefinitions': 'string1;string2',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': '1',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2',
'UseFullPaths': 'true',
'UsePrecompiledHeader': '1',
'UseUnicodeResponseFiles': 'true',
'WarnAsError': 'true',
'WarningLevel': '1',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name',
'ZZXYZ': 'bogus'},
'VCLinkerTool': {
'AdditionalDependencies': 'file1;file2',
'AdditionalDependencies_excluded': 'file3',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalManifestDependencies': 'file1;file2',
'AdditionalOptions': 'a string1',
'AddModuleNamesToAssembly': 'file1;file2',
'AllowIsolation': 'true',
'AssemblyDebug': '2',
'AssemblyLinkResource': 'file1;file2',
'BaseAddress': 'a string1',
'CLRImageType': '2',
'CLRThreadAttribute': '2',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '2',
'DelayLoadDLLs': 'file1;file2',
'DelaySign': 'true',
'Driver': '2',
'EmbedManagedResourceFile': 'file1;file2',
'EnableCOMDATFolding': '2',
'EnableUAC': 'true',
'EntryPointSymbol': 'a string1',
'ErrorReporting': '2',
'FixedBaseAddress': '2',
'ForceSymbolReferences': 'file1;file2',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateManifest': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a string1',
'HeapReserveSize': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreDefaultLibraryNames': 'file1;file2',
'IgnoreEmbeddedIDL': 'true',
'IgnoreImportLibrary': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': '2',
'LinkIncremental': '2',
'LinkLibraryDependencies': 'true',
'LinkTimeCodeGeneration': '2',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a string1',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'OptimizeForWindows98': '1',
'OptimizeReferences': '2',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': '2',
'RegisterOutput': 'true',
'ResourceOnlyDLL': 'true',
'SetChecksum': 'true',
'ShowProgress': '2',
'StackCommitSize': 'a string1',
'StackReserveSize': 'a string1',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': '2',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNet': 'true',
'TargetMachine': '2',
'TerminalServerAware': '2',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': '2',
'UACUIAccess': 'true',
'UseLibraryDependencyInputs': 'true',
'UseUnicodeResponseFiles': 'true',
'Version': 'a string1'},
'VCMIDLTool': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'CPreprocessOptions': 'a string1',
'DefaultCharType': '1',
'DLLDataFileName': 'a_file_name',
'EnableErrorChecks': '1',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'notgood': 'bogus',
'OutputDirectory': 'a string1',
'PreprocessorDefinitions': 'string1;string2',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TargetEnvironment': '1',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'string1;string2',
'ValidateParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '1'},
'VCResourceCompilerTool': {
'AdditionalOptions': 'a string1',
'AdditionalIncludeDirectories': 'folder1;folder2',
'Culture': '1003',
'IgnoreStandardIncludePath': 'true',
'notgood2': 'bogus',
'PreprocessorDefinitions': 'string1;string2',
'ResourceOutputFileName': 'a string1',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2'},
'VCLibrarianTool': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'ExportNamedFunctions': 'string1;string2',
'ForceSymbolReferences': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2',
'LinkLibraryDependencies': 'true',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'VCManifestTool': {
'AdditionalManifestFiles': 'file1;file2',
'AdditionalOptions': 'a string1',
'AssemblyIdentity': 'a string1',
'ComponentFileName': 'a_file_name',
'DependencyInformationFile': 'a_file_name',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a string1',
'ManifestResourceFile': 'a_file_name',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'truel',
'UpdateFileHashesSearchPath': 'a_file_name',
'UseFAT32Workaround': 'true',
'UseUnicodeResponseFiles': 'true',
'VerboseOutput': 'true'}},
self.stderr)
self._ExpectedWarnings([
'Warning: for VCCLCompilerTool/BasicRuntimeChecks, '
'index value (5) not in expected range [0, 4)',
'Warning: for VCCLCompilerTool/BrowseInformation, '
"invalid literal for int() with base 10: 'fdkslj'",
'Warning: for VCCLCompilerTool/CallingConvention, '
'index value (-1) not in expected range [0, 4)',
'Warning: for VCCLCompilerTool/DebugInformationFormat, '
'converted value for 2 not specified.',
'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
'Warning: for VCLinkerTool/TargetMachine, '
'converted value for 2 not specified.',
'Warning: unrecognized setting VCMIDLTool/notgood',
'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
'Warning: for VCManifestTool/UpdateFileHashes, '
"expected bool; got 'truel'"
''])
def testValidateMSBuildSettings_settings(self):
"""Tests that for invalid MSBuild settings."""
MSVSSettings.ValidateMSBuildSettings(
{'ClCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': ['string1', 'string2'],
'AdditionalUsingDirectories': 'folder1;folder2',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': 'NoListing',
'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
'BrowseInformation': 'false',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'BuildingInIDE': 'true',
'CallingConvention': 'Cdecl',
'CompileAs': 'CompileAsC',
'CompileAsManaged': 'Pure',
'CreateHotpatchableImage': 'true',
'DebugInformationFormat': 'ProgramDatabase',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'string1;string2',
'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
'EnableFiberSafeOptimizations': 'true',
'EnablePREfast': 'true',
'Enableprefast': 'bogus',
'ErrorReporting': 'Prompt',
'ExceptionHandling': 'SyncCThrow',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': 'Neither',
'FloatingPointExceptions': 'true',
'FloatingPointModel': 'Precise',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2',
'ForcedUsingFiles': 'file1;file2',
'FunctionLevelLinking': 'false',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': 'OnlyExplicitInline',
'IntrinsicFunctions': 'false',
'MinimalRebuild': 'true',
'MultiProcessorCompilation': 'true',
'ObjectFileName': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMPSupport': 'true',
'Optimization': 'Disabled',
'PrecompiledHeader': 'NotUsing',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderOutputFile': 'a_file_name',
'PreprocessKeepComments': 'true',
'PreprocessorDefinitions': 'string1;string2',
'PreprocessOutputPath': 'a string1',
'PreprocessSuppressLineNumbers': 'false',
'PreprocessToFile': 'false',
'ProcessorNumber': '33',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': 'MultiThreaded',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1Byte',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'TreatSpecificWarningsAsErrors': 'string1;string2',
'TreatWarningAsError': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2',
'UseFullPaths': 'true',
'UseUnicodeForAssemblerListing': 'true',
'WarningLevel': 'TurnOffAllWarnings',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name',
'ZZXYZ': 'bogus'},
'Link': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalManifestDependencies': 'file1;file2',
'AdditionalOptions': 'a string1',
'AddModuleNamesToAssembly': 'file1;file2',
'AllowIsolation': 'true',
'AssemblyDebug': '',
'AssemblyLinkResource': 'file1;file2',
'BaseAddress': 'a string1',
'BuildingInIDE': 'true',
'CLRImageType': 'ForceIJWImage',
'CLRSupportLastError': 'Enabled',
'CLRThreadAttribute': 'MTAThreadingAttribute',
'CLRUnmanagedCodeCheck': 'true',
'CreateHotPatchableImage': 'X86Image',
'DataExecutionPrevention': 'false',
'DelayLoadDLLs': 'file1;file2',
'DelaySign': 'true',
'Driver': 'NotSet',
'EmbedManagedResourceFile': 'file1;file2',
'EnableCOMDATFolding': 'false',
'EnableUAC': 'true',
'EntryPointSymbol': 'a string1',
'FixedBaseAddress': 'false',
'ForceFileOutput': 'Enabled',
'ForceSymbolReferences': 'file1;file2',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a string1',
'HeapReserveSize': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreEmbeddedIDL': 'true',
'IgnoreSpecificDefaultLibraries': 'a_file_list',
'ImageHasSafeExceptionHandlers': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': 'false',
'LinkDLL': 'true',
'LinkErrorReporting': 'SendErrorReport',
'LinkStatus': 'true',
'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a string1',
'MidlCommandFile': 'a_file_name',
'MinimumRequiredVersion': 'a string1',
'ModuleDefinitionFile': 'a_file_name',
'MSDOSStubFileName': 'a_file_name',
'NoEntryPoint': 'true',
'OptimizeReferences': 'false',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'PreventDllBinding': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': 'false',
'RegisterOutput': 'true',
'SectionAlignment': '33',
'SetChecksum': 'true',
'ShowProgress': 'LinkVerboseREF',
'SpecifySectionAttributes': 'a string1',
'StackCommitSize': 'a string1',
'StackReserveSize': 'a string1',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': 'Console',
'SupportNobindOfDelayLoadedDLL': 'true',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNET': 'true',
'TargetMachine': 'MachineX86',
'TerminalServerAware': 'false',
'TrackerLogDirectory': 'a_folder',
'TreatLinkerWarningAsErrors': 'true',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': 'AsInvoker',
'UACUIAccess': 'true',
'Version': 'a string1'},
'ResourceCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'Culture': '0x236',
'IgnoreStandardIncludePath': 'true',
'NullTerminateStrings': 'true',
'PreprocessorDefinitions': 'string1;string2',
'ResourceOutputFileName': 'a string1',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'UndefinePreprocessorDefinitions': 'string1;string2'},
'Midl': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'ApplicationConfigurationMode': 'true',
'ClientStubFile': 'a_file_name',
'CPreprocessOptions': 'a string1',
'DefaultCharType': 'Signed',
'DllDataFileName': 'a_file_name',
'EnableErrorChecks': 'EnableCustom',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateClientFiles': 'Stub',
'GenerateServerFiles': 'None',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'LocaleID': '33',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a string1',
'PreprocessorDefinitions': 'string1;string2',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'ServerStubFile': 'a_file_name',
'StructMemberAlignment': 'NotSet',
'SuppressCompilerWarnings': 'true',
'SuppressStartupBanner': 'true',
'TargetEnvironment': 'Itanium',
'TrackerLogDirectory': 'a_folder',
'TypeLibFormat': 'NewFormat',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'string1;string2',
'ValidateAllParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '1'},
'Lib': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'DisplayLibrary': 'a string1',
'ErrorReporting': 'PromptImmediately',
'ExportNamedFunctions': 'string1;string2',
'ForceSymbolReferences': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2',
'LinkTimeCodeGeneration': 'true',
'MinimumRequiredVersion': 'a string1',
'ModuleDefinitionFile': 'a_file_name',
'Name': 'a_file_name',
'OutputFile': 'a_file_name',
'RemoveObjects': 'file1;file2',
'SubSystem': 'Console',
'SuppressStartupBanner': 'true',
'TargetMachine': 'MachineX86i',
'TrackerLogDirectory': 'a_folder',
'TreatLibWarningAsErrors': 'true',
'UseUnicodeResponseFiles': 'true',
'Verbose': 'true'},
'Manifest': {
'AdditionalManifestFiles': 'file1;file2',
'AdditionalOptions': 'a string1',
'AssemblyIdentity': 'a string1',
'ComponentFileName': 'a_file_name',
'EnableDPIAwareness': 'fal',
'GenerateCatalogFiles': 'truel',
'GenerateCategoryTags': 'true',
'InputResourceManifests': 'a string1',
'ManifestFromManagedAssembly': 'a_file_name',
'notgood3': 'bogus',
'OutputManifestFile': 'a_file_name',
'OutputResourceManifests': 'a string1',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressDependencyElement': 'true',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'VerboseOutput': 'true'},
'ProjectReference': {
'LinkLibraryDependencies': 'true',
'UseLibraryDependencyInputs': 'true'},
'ManifestResourceCompile': {
'ResourceOutputFileName': 'a_file_name'},
'': {
'EmbedManifest': 'true',
'GenerateManifest': 'true',
'IgnoreImportLibrary': 'true',
'LinkIncremental': 'false'}},
self.stderr)
self._ExpectedWarnings([
'Warning: unrecognized setting ClCompile/Enableprefast',
'Warning: unrecognized setting ClCompile/ZZXYZ',
'Warning: unrecognized setting Manifest/notgood3',
'Warning: for Manifest/GenerateCatalogFiles, '
"expected bool; got 'truel'",
'Warning: for Lib/TargetMachine, unrecognized enumerated value '
'MachineX86i',
"Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"])
def testConvertToMSBuildSettings_empty(self):
"""Tests an empty conversion."""
msvs_settings = {}
expected_msbuild_settings = {}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
def testConvertToMSBuildSettings_minimal(self):
"""Tests a minimal conversion."""
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/foo',
'BasicRuntimeChecks': '0',
},
'VCLinkerTool': {
'LinkTimeCodeGeneration': '1',
'ErrorReporting': '1',
'DataExecutionPrevention': '2',
},
}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/foo',
'BasicRuntimeChecks': 'Default',
},
'Link': {
'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
'LinkErrorReporting': 'PromptImmediately',
'DataExecutionPrevention': 'true',
},
}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
  def testConvertToMSBuildSettings_warnings(self):
    """Tests conversion that generates warnings."""
    # Each tool dict mixes valid settings with deliberately out-of-range
    # enum index values; the converter is expected to keep the valid
    # settings, drop the bad ones, and report one warning per bad value.
    msvs_settings = {
        'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': '1',
            'AdditionalOptions': '2',
            # These are incorrect values:
            'BasicRuntimeChecks': '12',
            'BrowseInformation': '21',
            'UsePrecompiledHeader': '13',
            'GeneratePreprocessedFile': '14'},
        'VCLinkerTool': {
            # These are incorrect values:
            'Driver': '10',
            'LinkTimeCodeGeneration': '31',
            'ErrorReporting': '21',
            'FixedBaseAddress': '6'},
        'VCResourceCompilerTool': {
            # Custom
            'Culture': '1003'}}
    # Only the valid settings survive in the output; note that Culture
    # ('1003') is rewritten to its hexadecimal form ('0x03eb').
    expected_msbuild_settings = {
        'ClCompile': {
            'AdditionalIncludeDirectories': '1',
            'AdditionalOptions': '2'},
        'Link': {},
        'ResourceCompile': {
            # Custom
            'Culture': '0x03eb'}}
    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
        msvs_settings,
        self.stderr)
    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
    # Exactly one warning per invalid value, in conversion order.
    self._ExpectedWarnings([
        'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to '
        'MSBuild, index value (12) not in expected range [0, 4)',
        'Warning: while converting VCCLCompilerTool/BrowseInformation to '
        'MSBuild, index value (21) not in expected range [0, 3)',
        'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to '
        'MSBuild, index value (13) not in expected range [0, 3)',
        'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to '
        'MSBuild, value must be one of [0, 1, 2]; got 14',
        'Warning: while converting VCLinkerTool/Driver to '
        'MSBuild, index value (10) not in expected range [0, 4)',
        'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to '
        'MSBuild, index value (31) not in expected range [0, 5)',
        'Warning: while converting VCLinkerTool/ErrorReporting to '
        'MSBuild, index value (21) not in expected range [0, 3)',
        'Warning: while converting VCLinkerTool/FixedBaseAddress to '
        'MSBuild, index value (6) not in expected range [0, 3)',
        ])
  def testConvertToMSBuildSettings_full_synthetic(self):
    """Tests conversion of all the MSBuild settings."""
    # Input: one entry for (nearly) every supported setting of every MSVS
    # tool, using the string / numeric-enum-index encodings found in
    # .vcproj files.
    msvs_settings = {
        'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'AdditionalUsingDirectories': 'folder1;folder2;folder3',
            'AssemblerListingLocation': 'a_file_name',
            'AssemblerOutput': '0',
            'BasicRuntimeChecks': '1',
            'BrowseInformation': '2',
            'BrowseInformationFile': 'a_file_name',
            'BufferSecurityCheck': 'true',
            'CallingConvention': '0',
            'CompileAs': '1',
            'DebugInformationFormat': '4',
            'DefaultCharIsUnsigned': 'true',
            'Detect64BitPortabilityProblems': 'true',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'd1;d2;d3',
            'EnableEnhancedInstructionSet': '0',
            'EnableFiberSafeOptimizations': 'true',
            'EnableFunctionLevelLinking': 'true',
            'EnableIntrinsicFunctions': 'true',
            'EnablePREfast': 'true',
            'ErrorReporting': '1',
            'ExceptionHandling': '2',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': '0',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': '1',
            'ForceConformanceInForLoopScope': 'true',
            'ForcedIncludeFiles': 'file1;file2;file3',
            'ForcedUsingFiles': 'file1;file2;file3',
            'GeneratePreprocessedFile': '1',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': '2',
            'KeepComments': 'true',
            'MinimalRebuild': 'true',
            'ObjectFile': 'a_file_name',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMP': 'true',
            'Optimization': '3',
            'PrecompiledHeaderFile': 'a_file_name',
            'PrecompiledHeaderThrough': 'a_file_name',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'ProgramDataBaseFileName': 'a_file_name',
            'RuntimeLibrary': '0',
            'RuntimeTypeInfo': 'true',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '1',
            'SuppressStartupBanner': 'true',
            'TreatWChar_tAsBuiltInType': 'true',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
            'UseFullPaths': 'true',
            'UsePrecompiledHeader': '1',
            'UseUnicodeResponseFiles': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '2',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': 'a_file_name'},
        'VCLinkerTool': {
            'AdditionalDependencies': 'file1;file2;file3',
            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
            'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
            'AdditionalManifestDependencies': 'file1;file2;file3',
            'AdditionalOptions': 'a_string',
            'AddModuleNamesToAssembly': 'file1;file2;file3',
            'AllowIsolation': 'true',
            'AssemblyDebug': '0',
            'AssemblyLinkResource': 'file1;file2;file3',
            'BaseAddress': 'a_string',
            'CLRImageType': '1',
            'CLRThreadAttribute': '2',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '0',
            'DelayLoadDLLs': 'file1;file2;file3',
            'DelaySign': 'true',
            'Driver': '1',
            'EmbedManagedResourceFile': 'file1;file2;file3',
            'EnableCOMDATFolding': '0',
            'EnableUAC': 'true',
            'EntryPointSymbol': 'a_string',
            'ErrorReporting': '0',
            'FixedBaseAddress': '1',
            'ForceSymbolReferences': 'file1;file2;file3',
            'FunctionOrder': 'a_file_name',
            'GenerateDebugInformation': 'true',
            'GenerateManifest': 'true',
            'GenerateMapFile': 'true',
            'HeapCommitSize': 'a_string',
            'HeapReserveSize': 'a_string',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreDefaultLibraryNames': 'file1;file2;file3',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreImportLibrary': 'true',
            'ImportLibrary': 'a_file_name',
            'KeyContainer': 'a_file_name',
            'KeyFile': 'a_file_name',
            'LargeAddressAware': '2',
            'LinkIncremental': '1',
            'LinkLibraryDependencies': 'true',
            'LinkTimeCodeGeneration': '2',
            'ManifestFile': 'a_file_name',
            'MapExports': 'true',
            'MapFileName': 'a_file_name',
            'MergedIDLBaseFileName': 'a_file_name',
            'MergeSections': 'a_string',
            'MidlCommandFile': 'a_file_name',
            'ModuleDefinitionFile': 'a_file_name',
            'OptimizeForWindows98': '1',
            'OptimizeReferences': '0',
            'OutputFile': 'a_file_name',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': 'a_file_name',
            'ProgramDatabaseFile': 'a_file_name',
            'RandomizedBaseAddress': '1',
            'RegisterOutput': 'true',
            'ResourceOnlyDLL': 'true',
            'SetChecksum': 'true',
            'ShowProgress': '0',
            'StackCommitSize': 'a_string',
            'StackReserveSize': 'a_string',
            'StripPrivateSymbols': 'a_file_name',
            'SubSystem': '2',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'true',
            'SwapRunFromCD': 'true',
            'SwapRunFromNet': 'true',
            'TargetMachine': '3',
            'TerminalServerAware': '2',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'a_file_name',
            'TypeLibraryResourceID': '33',
            'UACExecutionLevel': '1',
            'UACUIAccess': 'true',
            'UseLibraryDependencyInputs': 'false',
            'UseUnicodeResponseFiles': 'true',
            'Version': 'a_string'},
        'VCResourceCompilerTool': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'Culture': '1003',
            'IgnoreStandardIncludePath': 'true',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'ResourceOutputFileName': 'a_string',
            'ShowProgress': 'true',
            'SuppressStartupBanner': 'true',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
        'VCMIDLTool': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'CPreprocessOptions': 'a_string',
            'DefaultCharType': '0',
            'DLLDataFileName': 'a_file_name',
            'EnableErrorChecks': '2',
            'ErrorCheckAllocations': 'true',
            'ErrorCheckBounds': 'true',
            'ErrorCheckEnumRange': 'true',
            'ErrorCheckRefPointers': 'true',
            'ErrorCheckStubData': 'true',
            'GenerateStublessProxies': 'true',
            'GenerateTypeLibrary': 'true',
            'HeaderFileName': 'a_file_name',
            'IgnoreStandardIncludePath': 'true',
            'InterfaceIdentifierFileName': 'a_file_name',
            'MkTypLibCompatible': 'true',
            'OutputDirectory': 'a_string',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'ProxyFileName': 'a_file_name',
            'RedirectOutputAndErrors': 'a_file_name',
            'StructMemberAlignment': '3',
            'SuppressStartupBanner': 'true',
            'TargetEnvironment': '1',
            'TypeLibraryName': 'a_file_name',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
            'ValidateParameters': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '4'},
        'VCLibrarianTool': {
            'AdditionalDependencies': 'file1;file2;file3',
            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
            'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'ExportNamedFunctions': 'd1;d2;d3',
            'ForceSymbolReferences': 'a_string',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
            'LinkLibraryDependencies': 'true',
            'ModuleDefinitionFile': 'a_file_name',
            'OutputFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'UseUnicodeResponseFiles': 'true'},
        'VCManifestTool': {
            'AdditionalManifestFiles': 'file1;file2;file3',
            'AdditionalOptions': 'a_string',
            'AssemblyIdentity': 'a_string',
            'ComponentFileName': 'a_file_name',
            'DependencyInformationFile': 'a_file_name',
            'EmbedManifest': 'true',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'a_string',
            'ManifestResourceFile': 'my_name',
            'OutputManifestFile': 'a_file_name',
            'RegistrarScriptFile': 'a_file_name',
            'ReplacementsFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'TypeLibraryFile': 'a_file_name',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'a_file_name',
            'UseFAT32Workaround': 'true',
            'UseUnicodeResponseFiles': 'true',
            'VerboseOutput': 'true'}}
    # Expected output: the MSBuild (.vcxproj) spelling of each setting.
    # Enum indexes become symbolic names, several keys are renamed, some
    # settings move to other tools (e.g. the '' section for project-level
    # flags), and '_excluded' entries from the input are dropped.
    expected_msbuild_settings = {
        'ClCompile': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string /J',
            'AdditionalUsingDirectories': 'folder1;folder2;folder3',
            'AssemblerListingLocation': 'a_file_name',
            'AssemblerOutput': 'NoListing',
            'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
            'BrowseInformation': 'true',
            'BrowseInformationFile': 'a_file_name',
            'BufferSecurityCheck': 'true',
            'CallingConvention': 'Cdecl',
            'CompileAs': 'CompileAsC',
            'DebugInformationFormat': 'EditAndContinue',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'd1;d2;d3',
            'EnableEnhancedInstructionSet': 'NotSet',
            'EnableFiberSafeOptimizations': 'true',
            'EnablePREfast': 'true',
            'ErrorReporting': 'Prompt',
            'ExceptionHandling': 'Async',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': 'Neither',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': 'Strict',
            'ForceConformanceInForLoopScope': 'true',
            'ForcedIncludeFiles': 'file1;file2;file3',
            'ForcedUsingFiles': 'file1;file2;file3',
            'FunctionLevelLinking': 'true',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': 'AnySuitable',
            'IntrinsicFunctions': 'true',
            'MinimalRebuild': 'true',
            'ObjectFileName': 'a_file_name',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMPSupport': 'true',
            'Optimization': 'Full',
            'PrecompiledHeader': 'Create',
            'PrecompiledHeaderFile': 'a_file_name',
            'PrecompiledHeaderOutputFile': 'a_file_name',
            'PreprocessKeepComments': 'true',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'PreprocessSuppressLineNumbers': 'false',
            'PreprocessToFile': 'true',
            'ProgramDataBaseFileName': 'a_file_name',
            'RuntimeLibrary': 'MultiThreaded',
            'RuntimeTypeInfo': 'true',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '1Byte',
            'SuppressStartupBanner': 'true',
            'TreatWarningAsError': 'true',
            'TreatWChar_tAsBuiltInType': 'true',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
            'UseFullPaths': 'true',
            'WarningLevel': 'Level2',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': 'a_file_name'},
        'Link': {
            'AdditionalDependencies': 'file1;file2;file3',
            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
            'AdditionalManifestDependencies': 'file1;file2;file3',
            'AdditionalOptions': 'a_string',
            'AddModuleNamesToAssembly': 'file1;file2;file3',
            'AllowIsolation': 'true',
            'AssemblyDebug': '',
            'AssemblyLinkResource': 'file1;file2;file3',
            'BaseAddress': 'a_string',
            'CLRImageType': 'ForceIJWImage',
            'CLRThreadAttribute': 'STAThreadingAttribute',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '',
            'DelayLoadDLLs': 'file1;file2;file3',
            'DelaySign': 'true',
            'Driver': 'Driver',
            'EmbedManagedResourceFile': 'file1;file2;file3',
            'EnableCOMDATFolding': '',
            'EnableUAC': 'true',
            'EntryPointSymbol': 'a_string',
            'FixedBaseAddress': 'false',
            'ForceSymbolReferences': 'file1;file2;file3',
            'FunctionOrder': 'a_file_name',
            'GenerateDebugInformation': 'true',
            'GenerateMapFile': 'true',
            'HeapCommitSize': 'a_string',
            'HeapReserveSize': 'a_string',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
            'ImportLibrary': 'a_file_name',
            'KeyContainer': 'a_file_name',
            'KeyFile': 'a_file_name',
            'LargeAddressAware': 'true',
            'LinkErrorReporting': 'NoErrorReport',
            'LinkTimeCodeGeneration': 'PGInstrument',
            'ManifestFile': 'a_file_name',
            'MapExports': 'true',
            'MapFileName': 'a_file_name',
            'MergedIDLBaseFileName': 'a_file_name',
            'MergeSections': 'a_string',
            'MidlCommandFile': 'a_file_name',
            'ModuleDefinitionFile': 'a_file_name',
            'NoEntryPoint': 'true',
            'OptimizeReferences': '',
            'OutputFile': 'a_file_name',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': 'a_file_name',
            'ProgramDatabaseFile': 'a_file_name',
            'RandomizedBaseAddress': 'false',
            'RegisterOutput': 'true',
            'SetChecksum': 'true',
            'ShowProgress': 'NotSet',
            'StackCommitSize': 'a_string',
            'StackReserveSize': 'a_string',
            'StripPrivateSymbols': 'a_file_name',
            'SubSystem': 'Windows',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'true',
            'SwapRunFromCD': 'true',
            'SwapRunFromNET': 'true',
            'TargetMachine': 'MachineARM',
            'TerminalServerAware': 'true',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'a_file_name',
            'TypeLibraryResourceID': '33',
            'UACExecutionLevel': 'HighestAvailable',
            'UACUIAccess': 'true',
            'Version': 'a_string'},
        'ResourceCompile': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'Culture': '0x03eb',
            'IgnoreStandardIncludePath': 'true',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'ResourceOutputFileName': 'a_string',
            'ShowProgress': 'true',
            'SuppressStartupBanner': 'true',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
        'Midl': {
            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'CPreprocessOptions': 'a_string',
            'DefaultCharType': 'Unsigned',
            'DllDataFileName': 'a_file_name',
            'EnableErrorChecks': 'All',
            'ErrorCheckAllocations': 'true',
            'ErrorCheckBounds': 'true',
            'ErrorCheckEnumRange': 'true',
            'ErrorCheckRefPointers': 'true',
            'ErrorCheckStubData': 'true',
            'GenerateStublessProxies': 'true',
            'GenerateTypeLibrary': 'true',
            'HeaderFileName': 'a_file_name',
            'IgnoreStandardIncludePath': 'true',
            'InterfaceIdentifierFileName': 'a_file_name',
            'MkTypLibCompatible': 'true',
            'OutputDirectory': 'a_string',
            'PreprocessorDefinitions': 'd1;d2;d3',
            'ProxyFileName': 'a_file_name',
            'RedirectOutputAndErrors': 'a_file_name',
            'StructMemberAlignment': '4',
            'SuppressStartupBanner': 'true',
            'TargetEnvironment': 'Win32',
            'TypeLibraryName': 'a_file_name',
            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
            'ValidateAllParameters': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '4'},
        'Lib': {
            'AdditionalDependencies': 'file1;file2;file3',
            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
            'AdditionalOptions': 'a_string',
            'ExportNamedFunctions': 'd1;d2;d3',
            'ForceSymbolReferences': 'a_string',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
            'ModuleDefinitionFile': 'a_file_name',
            'OutputFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'UseUnicodeResponseFiles': 'true'},
        'Manifest': {
            'AdditionalManifestFiles': 'file1;file2;file3',
            'AdditionalOptions': 'a_string',
            'AssemblyIdentity': 'a_string',
            'ComponentFileName': 'a_file_name',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'a_string',
            'OutputManifestFile': 'a_file_name',
            'RegistrarScriptFile': 'a_file_name',
            'ReplacementsFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'TypeLibraryFile': 'a_file_name',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'a_file_name',
            'VerboseOutput': 'true'},
        'ManifestResourceCompile': {
            'ResourceOutputFileName': 'my_name'},
        'ProjectReference': {
            'LinkLibraryDependencies': 'true',
            'UseLibraryDependencyInputs': 'false'},
        '': {
            'EmbedManifest': 'true',
            'GenerateManifest': 'true',
            'IgnoreImportLibrary': 'true',
            'LinkIncremental': 'false'}}
    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
        msvs_settings,
        self.stderr)
    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
    # A fully-supported conversion must complete without any warnings.
    self._ExpectedWarnings([])
  def testConvertToMSBuildSettings_actual(self):
    """Tests the conversion of an actual project.
    A VS2008 project with most of the options defined was created through the
    VS2008 IDE. It was then converted to VS2010. The tool settings found in
    the .vcproj and .vcxproj files were converted to the two dictionaries
    msvs_settings and expected_msbuild_settings.
    Note that for many settings, the VS2010 converter adds macros like
    %(AdditionalIncludeDirectories) to make sure than inherited values are
    included. Since the Gyp projects we generate do not use inheritance,
    we removed these macros. They were:
        ClCompile:
            AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
            AdditionalOptions: ' %(AdditionalOptions)'
            AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
            DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
            ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
            ForcedUsingFiles: ';%(ForcedUsingFiles)',
            PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
            UndefinePreprocessorDefinitions:
                ';%(UndefinePreprocessorDefinitions)',
        Link:
            AdditionalDependencies: ';%(AdditionalDependencies)',
            AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
            AdditionalManifestDependencies:
                ';%(AdditionalManifestDependencies)',
            AdditionalOptions: ' %(AdditionalOptions)',
            AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
            AssemblyLinkResource: ';%(AssemblyLinkResource)',
            DelayLoadDLLs: ';%(DelayLoadDLLs)',
            EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
            ForceSymbolReferences: ';%(ForceSymbolReferences)',
            IgnoreSpecificDefaultLibraries:
                ';%(IgnoreSpecificDefaultLibraries)',
        ResourceCompile:
            AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
            AdditionalOptions: ' %(AdditionalOptions)',
            PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
        Manifest:
            AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
            AdditionalOptions: ' %(AdditionalOptions)',
            InputResourceManifests: ';%(InputResourceManifests)',
    """
    # Tool settings exactly as found in the VS2008 .vcproj file.
    msvs_settings = {
        'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': 'dir1',
            'AdditionalOptions': '/more',
            'AdditionalUsingDirectories': 'test',
            'AssemblerListingLocation': '$(IntDir)\\a',
            'AssemblerOutput': '1',
            'BasicRuntimeChecks': '3',
            'BrowseInformation': '1',
            'BrowseInformationFile': '$(IntDir)\\e',
            'BufferSecurityCheck': 'false',
            'CallingConvention': '1',
            'CompileAs': '1',
            'DebugInformationFormat': '4',
            'DefaultCharIsUnsigned': 'true',
            'Detect64BitPortabilityProblems': 'true',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'abc',
            'EnableEnhancedInstructionSet': '1',
            'EnableFiberSafeOptimizations': 'true',
            'EnableFunctionLevelLinking': 'true',
            'EnableIntrinsicFunctions': 'true',
            'EnablePREfast': 'true',
            'ErrorReporting': '2',
            'ExceptionHandling': '2',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': '2',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': '1',
            'ForceConformanceInForLoopScope': 'false',
            'ForcedIncludeFiles': 'def',
            'ForcedUsingFiles': 'ge',
            'GeneratePreprocessedFile': '2',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': '1',
            'KeepComments': 'true',
            'MinimalRebuild': 'true',
            'ObjectFile': '$(IntDir)\\b',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMP': 'true',
            'Optimization': '3',
            'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
            'PrecompiledHeaderThrough': 'StdAfx.hd',
            'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
            'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
            'RuntimeLibrary': '3',
            'RuntimeTypeInfo': 'false',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '3',
            'SuppressStartupBanner': 'false',
            'TreatWChar_tAsBuiltInType': 'false',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'wer',
            'UseFullPaths': 'true',
            'UsePrecompiledHeader': '0',
            'UseUnicodeResponseFiles': 'false',
            'WarnAsError': 'true',
            'WarningLevel': '3',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': '$(IntDir)\\c'},
        'VCLinkerTool': {
            'AdditionalDependencies': 'zx',
            'AdditionalLibraryDirectories': 'asd',
            'AdditionalManifestDependencies': 's2',
            'AdditionalOptions': '/mor2',
            'AddModuleNamesToAssembly': 'd1',
            'AllowIsolation': 'false',
            'AssemblyDebug': '1',
            'AssemblyLinkResource': 'd5',
            'BaseAddress': '23423',
            'CLRImageType': '3',
            'CLRThreadAttribute': '1',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '0',
            'DelayLoadDLLs': 'd4',
            'DelaySign': 'true',
            'Driver': '2',
            'EmbedManagedResourceFile': 'd2',
            'EnableCOMDATFolding': '1',
            'EnableUAC': 'false',
            'EntryPointSymbol': 'f5',
            'ErrorReporting': '2',
            'FixedBaseAddress': '1',
            'ForceSymbolReferences': 'd3',
            'FunctionOrder': 'fssdfsd',
            'GenerateDebugInformation': 'true',
            'GenerateManifest': 'false',
            'GenerateMapFile': 'true',
            'HeapCommitSize': '13',
            'HeapReserveSize': '12',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreDefaultLibraryNames': 'flob;flok',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreImportLibrary': 'true',
            'ImportLibrary': 'f4',
            'KeyContainer': 'f7',
            'KeyFile': 'f6',
            'LargeAddressAware': '2',
            'LinkIncremental': '0',
            'LinkLibraryDependencies': 'false',
            'LinkTimeCodeGeneration': '1',
            'ManifestFile':
                '$(IntDir)\\$(TargetFileName).2intermediate.manifest',
            'MapExports': 'true',
            'MapFileName': 'd5',
            'MergedIDLBaseFileName': 'f2',
            'MergeSections': 'f5',
            'MidlCommandFile': 'f1',
            'ModuleDefinitionFile': 'sdsd',
            'OptimizeForWindows98': '2',
            'OptimizeReferences': '2',
            'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
            'ProgramDatabaseFile': 'Flob.pdb',
            'RandomizedBaseAddress': '1',
            'RegisterOutput': 'true',
            'ResourceOnlyDLL': 'true',
            'SetChecksum': 'false',
            'ShowProgress': '1',
            'StackCommitSize': '15',
            'StackReserveSize': '14',
            'StripPrivateSymbols': 'd3',
            'SubSystem': '1',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'false',
            'SwapRunFromCD': 'true',
            'SwapRunFromNet': 'true',
            'TargetMachine': '1',
            'TerminalServerAware': '1',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'f3',
            'TypeLibraryResourceID': '12',
            'UACExecutionLevel': '2',
            'UACUIAccess': 'true',
            'UseLibraryDependencyInputs': 'true',
            'UseUnicodeResponseFiles': 'false',
            'Version': '333'},
        'VCResourceCompilerTool': {
            'AdditionalIncludeDirectories': 'f3',
            'AdditionalOptions': '/more3',
            'Culture': '3084',
            'IgnoreStandardIncludePath': 'true',
            'PreprocessorDefinitions': '_UNICODE;UNICODE2',
            'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
            'ShowProgress': 'true'},
        'VCManifestTool': {
            'AdditionalManifestFiles': 'sfsdfsd',
            'AdditionalOptions': 'afdsdafsd',
            'AssemblyIdentity': 'sddfdsadfsa',
            'ComponentFileName': 'fsdfds',
            'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
            'EmbedManifest': 'false',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'asfsfdafs',
            'ManifestResourceFile':
                '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
            'OutputManifestFile': '$(TargetPath).manifestdfs',
            'RegistrarScriptFile': 'sdfsfd',
            'ReplacementsFile': 'sdffsd',
            'SuppressStartupBanner': 'false',
            'TypeLibraryFile': 'sfsd',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'sfsd',
            'UseFAT32Workaround': 'true',
            'UseUnicodeResponseFiles': 'false',
            'VerboseOutput': 'true'}}
    # Tool settings as emitted by the VS2010 converter for the same
    # project (with the inheritance macros stripped, per the docstring).
    expected_msbuild_settings = {
        'ClCompile': {
            'AdditionalIncludeDirectories': 'dir1',
            'AdditionalOptions': '/more /J',
            'AdditionalUsingDirectories': 'test',
            'AssemblerListingLocation': '$(IntDir)a',
            'AssemblerOutput': 'AssemblyCode',
            'BasicRuntimeChecks': 'EnableFastChecks',
            'BrowseInformation': 'true',
            'BrowseInformationFile': '$(IntDir)e',
            'BufferSecurityCheck': 'false',
            'CallingConvention': 'FastCall',
            'CompileAs': 'CompileAsC',
            'DebugInformationFormat': 'EditAndContinue',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'abc',
            'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
            'EnableFiberSafeOptimizations': 'true',
            'EnablePREfast': 'true',
            'ErrorReporting': 'Queue',
            'ExceptionHandling': 'Async',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': 'Size',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': 'Strict',
            'ForceConformanceInForLoopScope': 'false',
            'ForcedIncludeFiles': 'def',
            'ForcedUsingFiles': 'ge',
            'FunctionLevelLinking': 'true',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': 'OnlyExplicitInline',
            'IntrinsicFunctions': 'true',
            'MinimalRebuild': 'true',
            'ObjectFileName': '$(IntDir)b',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMPSupport': 'true',
            'Optimization': 'Full',
            'PrecompiledHeader': 'NotUsing',  # Actual conversion gives ''
            'PrecompiledHeaderFile': 'StdAfx.hd',
            'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
            'PreprocessKeepComments': 'true',
            'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
            'PreprocessSuppressLineNumbers': 'true',
            'PreprocessToFile': 'true',
            'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
            'RuntimeLibrary': 'MultiThreadedDebugDLL',
            'RuntimeTypeInfo': 'false',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '4Bytes',
            'SuppressStartupBanner': 'false',
            'TreatWarningAsError': 'true',
            'TreatWChar_tAsBuiltInType': 'false',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'wer',
            'UseFullPaths': 'true',
            'WarningLevel': 'Level3',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': '$(IntDir)c'},
        'Link': {
            'AdditionalDependencies': 'zx',
            'AdditionalLibraryDirectories': 'asd',
            'AdditionalManifestDependencies': 's2',
            'AdditionalOptions': '/mor2',
            'AddModuleNamesToAssembly': 'd1',
            'AllowIsolation': 'false',
            'AssemblyDebug': 'true',
            'AssemblyLinkResource': 'd5',
            'BaseAddress': '23423',
            'CLRImageType': 'ForceSafeILImage',
            'CLRThreadAttribute': 'MTAThreadingAttribute',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '',
            'DelayLoadDLLs': 'd4',
            'DelaySign': 'true',
            'Driver': 'UpOnly',
            'EmbedManagedResourceFile': 'd2',
            'EnableCOMDATFolding': 'false',
            'EnableUAC': 'false',
            'EntryPointSymbol': 'f5',
            'FixedBaseAddress': 'false',
            'ForceSymbolReferences': 'd3',
            'FunctionOrder': 'fssdfsd',
            'GenerateDebugInformation': 'true',
            'GenerateMapFile': 'true',
            'HeapCommitSize': '13',
            'HeapReserveSize': '12',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreSpecificDefaultLibraries': 'flob;flok',
            'ImportLibrary': 'f4',
            'KeyContainer': 'f7',
            'KeyFile': 'f6',
            'LargeAddressAware': 'true',
            'LinkErrorReporting': 'QueueForNextLogin',
            'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
            'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
            'MapExports': 'true',
            'MapFileName': 'd5',
            'MergedIDLBaseFileName': 'f2',
            'MergeSections': 'f5',
            'MidlCommandFile': 'f1',
            'ModuleDefinitionFile': 'sdsd',
            'NoEntryPoint': 'true',
            'OptimizeReferences': 'true',
            'OutputFile': '$(OutDir)$(ProjectName)2.exe',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
            'ProgramDatabaseFile': 'Flob.pdb',
            'RandomizedBaseAddress': 'false',
            'RegisterOutput': 'true',
            'SetChecksum': 'false',
            'ShowProgress': 'LinkVerbose',
            'StackCommitSize': '15',
            'StackReserveSize': '14',
            'StripPrivateSymbols': 'd3',
            'SubSystem': 'Console',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'false',
            'SwapRunFromCD': 'true',
            'SwapRunFromNET': 'true',
            'TargetMachine': 'MachineX86',
            'TerminalServerAware': 'false',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'f3',
            'TypeLibraryResourceID': '12',
            'UACExecutionLevel': 'RequireAdministrator',
            'UACUIAccess': 'true',
            'Version': '333'},
        'ResourceCompile': {
            'AdditionalIncludeDirectories': 'f3',
            'AdditionalOptions': '/more3',
            'Culture': '0x0c0c',
            'IgnoreStandardIncludePath': 'true',
            'PreprocessorDefinitions': '_UNICODE;UNICODE2',
            'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
            'ShowProgress': 'true'},
        'Manifest': {
            'AdditionalManifestFiles': 'sfsdfsd',
            'AdditionalOptions': 'afdsdafsd',
            'AssemblyIdentity': 'sddfdsadfsa',
            'ComponentFileName': 'fsdfds',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'asfsfdafs',
            'OutputManifestFile': '$(TargetPath).manifestdfs',
            'RegistrarScriptFile': 'sdfsfd',
            'ReplacementsFile': 'sdffsd',
            'SuppressStartupBanner': 'false',
            'TypeLibraryFile': 'sfsd',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'sfsd',
            'VerboseOutput': 'true'},
        'ProjectReference': {
            'LinkLibraryDependencies': 'false',
            'UseLibraryDependencyInputs': 'true'},
        '': {
            'EmbedManifest': 'false',
            'GenerateManifest': 'false',
            'IgnoreImportLibrary': 'true',
            'LinkIncremental': ''
        },
        'ManifestResourceCompile': {
            'ResourceOutputFileName':
                '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
    }
    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
        msvs_settings,
        self.stderr)
    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
    # Real-world project data must convert without warnings.
    self._ExpectedWarnings([])
# Allow running this test file directly, in addition to via a test runner.
if __name__ == '__main__':
  unittest.main()
|
nikolay-fedotov/networking-cisco | refs/heads/master | networking_cisco/plugins/cisco/extensions/qos.py | 31 | # Copyright 2011 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from webob import exc
from neutron.api import api_common as common
from neutron.api import extensions
from neutron import manager
from neutron.plugins.cisco.common import cisco_exceptions as exception
from neutron.plugins.cisco.common import cisco_faults as faults
from neutron.plugins.cisco.extensions import _qos_view as qos_view
from neutron import wsgi
class Qos(extensions.ExtensionDescriptor):
    """Extension descriptor declaring the Cisco QoS API resources."""

    @classmethod
    def get_name(cls):
        """Return the human-readable extension name."""
        return "Cisco qos"

    @classmethod
    def get_alias(cls):
        """Return the alias used to reference this extension."""
        return "Cisco qos"

    @classmethod
    def get_description(cls):
        """Return a short description of the extension."""
        return "qos includes qos_name and qos_desc"

    @classmethod
    def get_namespace(cls):
        """Return the XML namespace for this extension."""
        return "http://docs.ciscocloud.com/api/ext/qos/v1.0"

    @classmethod
    def get_updated(cls):
        """Return the timestamp of the last extension update."""
        return "2011-07-25T13:25:27-06:00"

    @classmethod
    def get_resources(cls):
        """Return the resource extensions exposed by this API."""
        parent = {"member_name": "tenant",
                  "collection_name": "extensions/csco/tenants"}
        controller = QosController(manager.NeutronManager.get_plugin())
        return [extensions.ResourceExtension('qoss', controller,
                                             parent=parent)]
class QosController(common.NeutronController, wsgi.Controller):
    """qos API controller based on NeutronController."""
    # Parameters that must be present in create/update request bodies.
    _qos_ops_param_list = [
        {'param-name': 'qos_name', 'required': True},
        {'param-name': 'qos_desc', 'required': True},
    ]
    # Controls how responses are serialized as XML.
    _serialization_metadata = {
        "application/xml": {
            "attributes": {
                "qos": ["id", "name"],
            },
        },
    }
    def __init__(self, plugin):
        # ``plugin`` performs the actual QoS CRUD operations.
        self._resource_name = 'qos'
        self._plugin = plugin
    def index(self, request, tenant_id):
        """Returns a list of qos ids."""
        return self._items(request, tenant_id, is_detail=False)
    def _items(self, request, tenant_id, is_detail):
        """Returns a list of qoss."""
        qoss = self._plugin.get_all_qoss(tenant_id)
        builder = qos_view.get_view_builder(request)
        result = [builder.build(qos, is_detail)['qos'] for qos in qoss]
        return dict(qoss=result)
    # pylint: disable=no-member
    def show(self, request, tenant_id, id):
        """Returns qos details for the given qos id."""
        try:
            qos = self._plugin.get_qos_details(tenant_id, id)
            builder = qos_view.get_view_builder(request)
            #build response with details
            result = builder.build(qos, True)
            return dict(qoss=result)
        except exception.QosNotFound as exp:
            # Translate the plugin exception into an API fault response.
            return faults.Fault(faults.QosNotFound(exp))
    def create(self, request, tenant_id):
        """Creates a new qos for a given tenant."""
        #look for qos name in request
        try:
            body = self._deserialize(request.body, request.get_content_type())
            req_body = self._prepare_request_body(body,
                                                  self._qos_ops_param_list)
            req_params = req_body[self._resource_name]
        except exc.HTTPError as exp:
            # Malformed/incomplete request body.
            return faults.Fault(exp)
        qos = self._plugin.create_qos(tenant_id,
                                      req_params['qos_name'],
                                      req_params['qos_desc'])
        builder = qos_view.get_view_builder(request)
        result = builder.build(qos)
        return dict(qoss=result)
    def update(self, request, tenant_id, id):
        """Updates the name for the qos with the given id."""
        try:
            body = self._deserialize(request.body, request.get_content_type())
            req_body = self._prepare_request_body(body,
                                                  self._qos_ops_param_list)
            req_params = req_body[self._resource_name]
        except exc.HTTPError as exp:
            return faults.Fault(exp)
        try:
            # NOTE(review): only qos_name is applied even though qos_desc
            # is also required by _qos_ops_param_list -- confirm intended.
            qos = self._plugin.rename_qos(tenant_id, id,
                                          req_params['qos_name'])
            builder = qos_view.get_view_builder(request)
            result = builder.build(qos, True)
            return dict(qoss=result)
        except exception.QosNotFound as exp:
            return faults.Fault(faults.QosNotFound(exp))
    def delete(self, request, tenant_id, id):
        """Destroys the qos with the given id."""
        try:
            self._plugin.delete_qos(tenant_id, id)
            return exc.HTTPOk()
        except exception.QosNotFound as exp:
            return faults.Fault(faults.QosNotFound(exp))
|
class FavouriteManager(object):
    """Favourite manager.

    Keeps the user's favourite estate ids in the session, stored as a
    comma separated string under the 'favourite' key.
    """
    def __init__(self, session):
        self._session = session

    @property
    def _favourite_ids(self):
        """Get favourite estate ids as a list of ints."""
        try:
            # Eager list comprehension (the original lazy ``map`` would
            # defer the ValueError past this try block on Python 3).
            return [int(i) for i in self._session.get('favourite', '').split(',')]
        except ValueError:
            # No stored value (''.split(',') -> ['']) or malformed data.
            return []

    @_favourite_ids.setter
    def _favourite_ids(self, ids):
        """Set favourites ids (deduplicated)."""
        # Bug fix: str.join() requires strings, but ids are ints --
        # the original ','.join(set(ids)) raised TypeError.
        self._session['favourite'] = ','.join(str(i) for i in set(ids))

    def add(self, estate):
        """Add estate to favourite"""
        ids = self._favourite_ids
        ids.append(estate.id)
        self._favourite_ids = ids
JamesMGreene/phantomjs | refs/heads/master | src/qt/qtwebkit/Tools/QueueStatusServer/filters/webkit_extras.py | 121 | # Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
from django.template.defaultfilters import stringfilter
from google.appengine.ext import webapp
# Template-filter registry; the @register.filter decorators below attach
# each filter to it.
register = webapp.template.create_template_register()

# Pre-compiled patterns matching "bug NNNN" / "patch NNNN" references in
# free-form status text.
bug_regexp = re.compile(r"bug (?P<bug_id>\d+)")
patch_regexp = re.compile(r"patch (?P<patch_id>\d+)")
@register.filter
@stringfilter
def webkit_linkify(value):
    """Turn textual bug/patch references in *value* into HTML links."""
    with_bug_links = bug_regexp.sub(
        r'<a href="http://webkit.org/b/\g<bug_id>">bug \g<bug_id></a>', value)
    return patch_regexp.sub(
        r'<a href="https://bugs.webkit.org/attachment.cgi?id=\g<patch_id>&action=prettypatch">patch \g<patch_id></a>',
        with_bug_links)
@register.filter
@stringfilter
def webkit_bug_id(value):
    """Render a bug number as a link to the WebKit bug tracker."""
    return '<a href="http://webkit.org/b/{0}">{0}</a>'.format(value)
@register.filter
@stringfilter
def webkit_attachment_id(value):
    """Render an attachment id as a pretty-patch link on bugs.webkit.org."""
    return '<a href="https://bugs.webkit.org/attachment.cgi?id={0}&action=prettypatch">{0}</a>'.format(value)
@register.filter
@stringfilter
def results_link(status_id):
    """Render a link to the results page for a status entry."""
    return '<a href="/results/{0}">results</a>'.format(status_id)
@register.filter
@stringfilter
def queue_status_link(queue_name, text):
    """Render *text* as a link to the named queue's status page."""
    return '<a href="/queue-status/{0}">{1}</a>'.format(queue_name, text)
@register.filter
@stringfilter
def queue_charts_link(queue_name, text):
    """Render *text* as a link to the named queue's charts page."""
    return '<a href="/queue-charts/{0}">{1}</a>'.format(queue_name, text)
|
trezorg/django | refs/heads/master | django/contrib/sites/models.py | 387 | from django.db import models
from django.utils.translation import ugettext_lazy as _
SITE_CACHE = {}
class SiteManager(models.Manager):
    def get_current(self):
        """
        Returns the current ``Site`` based on the SITE_ID in the
        project's settings. The ``Site`` object is cached the first
        time it's retrieved from the database.
        """
        # Imported lazily so this module can be imported before settings
        # are configured.
        from django.conf import settings
        try:
            sid = settings.SITE_ID
        except AttributeError:
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured("You're using the Django \"sites framework\" without having set the SITE_ID setting. Create a site in your database and set the SITE_ID setting to fix this error.")
        try:
            # Fast path: site already cached for this SITE_ID.
            current_site = SITE_CACHE[sid]
        except KeyError:
            # Cache miss: fetch from the database and remember it.
            current_site = self.get(pk=sid)
            SITE_CACHE[sid] = current_site
        return current_site
    def clear_cache(self):
        """Clears the ``Site`` object cache."""
        global SITE_CACHE
        SITE_CACHE = {}
class Site(models.Model):
    # The two pieces of data the sites framework tracks per site.
    domain = models.CharField(_('domain name'), max_length=100)
    name = models.CharField(_('display name'), max_length=50)
    objects = SiteManager()
    class Meta:
        db_table = 'django_site'
        verbose_name = _('site')
        verbose_name_plural = _('sites')
        ordering = ('domain',)
    def __unicode__(self):
        return self.domain
    def save(self, *args, **kwargs):
        """Save the site and evict any stale cache entry for it."""
        super(Site, self).save(*args, **kwargs)
        # Cached information will likely be incorrect now.
        if self.id in SITE_CACHE:
            del SITE_CACHE[self.id]
    def delete(self):
        """Delete the site and drop it from the module-level cache."""
        # Remember the pk before the row is deleted so the cache entry
        # can still be evicted afterwards.
        pk = self.pk
        super(Site, self).delete()
        try:
            del SITE_CACHE[pk]
        except KeyError:
            pass
class RequestSite(object):
    """
    A class that shares the primary interface of Site (i.e., it has
    ``domain`` and ``name`` attributes) but gets its data from a Django
    HttpRequest object rather than from a database.

    The save() and delete() methods raise NotImplementedError.
    """
    def __init__(self, request):
        # Both attributes mirror the request's host; no database involved.
        host = request.get_host()
        self.domain = host
        self.name = host
    def __unicode__(self):
        return self.domain
    def save(self, force_insert=False, force_update=False):
        raise NotImplementedError('RequestSite cannot be saved.')
    def delete(self):
        raise NotImplementedError('RequestSite cannot be deleted.')
def get_current_site(request):
    """
    Checks if contrib.sites is installed and returns either the current
    ``Site`` object or a ``RequestSite`` object based on the request.
    """
    # Early return keeps both branches flat.
    if Site._meta.installed:
        return Site.objects.get_current()
    return RequestSite(request)
|
ryfeus/lambda-packs | refs/heads/master | Keras_tensorflow_nightly/source2.7/tensorflow/contrib/crf/__init__.py | 25 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Linear-chain CRF layer.
See the @{$python/contrib.crf} guide.
@@crf_binary_score
@@crf_decode
@@crf_log_likelihood
@@crf_log_norm
@@crf_sequence_score
@@crf_unary_score
@@CrfDecodeBackwardRnnCell
@@CrfDecodeForwardRnnCell
@@CrfForwardRnnCell
@@viterbi_decode
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.crf.python.ops.crf import crf_binary_score
from tensorflow.contrib.crf.python.ops.crf import crf_decode
from tensorflow.contrib.crf.python.ops.crf import crf_log_likelihood
from tensorflow.contrib.crf.python.ops.crf import crf_log_norm
from tensorflow.contrib.crf.python.ops.crf import crf_sequence_score
from tensorflow.contrib.crf.python.ops.crf import crf_unary_score
from tensorflow.contrib.crf.python.ops.crf import CrfDecodeBackwardRnnCell
from tensorflow.contrib.crf.python.ops.crf import CrfDecodeForwardRnnCell
from tensorflow.contrib.crf.python.ops.crf import CrfForwardRnnCell
from tensorflow.contrib.crf.python.ops.crf import viterbi_decode
from tensorflow.python.util.all_util import remove_undocumented
remove_undocumented(__name__)
|
def find_min_max(arr):
    """Find the minimum and maximum of arr with the pairwise technique
    (~3n/2 comparisons instead of the naive ~2n).

    Returns a dict with 'max' and 'min' keys.
    Raises ValueError for an empty sequence.
    """
    if not arr:
        raise ValueError("find_min_max() arg is an empty sequence")
    comparisons = 0
    if len(arr) % 2 == 0:
        # Even length: one comparison orders the first pair.
        if arr[0] > arr[1]:
            max_val, min_val = arr[0], arr[1]
        else:
            max_val, min_val = arr[1], arr[0]
        start_index = 2
        comparisons += 1
    else:
        # Odd length: the first element seeds both min and max.
        max_val = arr[0]
        min_val = arr[0]
        start_index = 1
    # Bug fix: step by 2 so each pair is handled once (the original
    # stepped by 1, re-examining every element and doing ~2n work).
    # Per pair: 1 comparison to order it + 1 vs min + 1 vs max.
    for i in range(start_index, len(arr) - 1, 2):
        if arr[i] < arr[i + 1]:
            smaller, larger = arr[i], arr[i + 1]
        else:
            smaller, larger = arr[i + 1], arr[i]
        if smaller < min_val:
            min_val = smaller
        if larger > max_val:
            max_val = larger
        comparisons += 3
    print(comparisons, len(arr))
    return {'max': max_val, 'min': min_val}
# Demo run on the assignment's sample input.
example = find_min_max([40, 9, 3, 5, 10, 1, 7, 12])
print("Max: %d Min: %d" % (example['max'], example['min']))
|
projectcalico/calico-nova | refs/heads/calico-readme | nova/tests/unit/api/openstack/compute/contrib/test_tenant_networks.py | 7 | # Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import webob
from nova.api.openstack.compute.contrib import os_tenant_networks as networks
from nova.api.openstack.compute.plugins.v3 import tenant_networks \
as networks_v21
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
class TenantNetworksTestV21(test.NoDBTestCase):
    # Controller class under test; the v2 subclass below overrides it.
    ctrlr = networks_v21.TenantNetworkController
    def setUp(self):
        super(TenantNetworksTestV21, self).setUp()
        self.controller = self.ctrlr()
        self.flags(enable_network_quota=True)
    @mock.patch('nova.network.api.API.delete',
                side_effect=exception.NetworkInUse(network_id=1))
    def test_network_delete_in_use(self, mock_delete):
        # Deleting a network that is still in use must map to HTTP 409.
        req = fakes.HTTPRequest.blank('/v2/1234/os-tenant-networks/1')
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller.delete, req, 1)
    @mock.patch('nova.quota.QUOTAS.reserve')
    @mock.patch('nova.quota.QUOTAS.rollback')
    @mock.patch('nova.network.api.API.delete')
    def _test_network_delete_exception(self, ex, expex, delete_mock,
                                       rollback_mock, reserve_mock):
        """Helper: assert network-delete failure ``ex`` maps to webob
        error ``expex`` and that the quota reservation is rolled back."""
        req = fakes.HTTPRequest.blank('/v2/1234/os-tenant-networks')
        ctxt = req.environ['nova.context']
        reserve_mock.return_value = 'rv'
        delete_mock.side_effect = ex
        self.assertRaises(expex, self.controller.delete, req, 1)
        delete_mock.assert_called_once_with(ctxt, 1)
        # A failed delete must not consume quota.
        rollback_mock.assert_called_once_with(ctxt, 'rv')
        reserve_mock.assert_called_once_with(ctxt, networks=-1)
    def test_network_delete_exception_network_not_found(self):
        ex = exception.NetworkNotFound(network_id=1)
        expex = webob.exc.HTTPNotFound
        self._test_network_delete_exception(ex, expex)
    def test_network_delete_exception_policy_failed(self):
        ex = exception.PolicyNotAuthorized(action='dummy')
        expex = webob.exc.HTTPForbidden
        self._test_network_delete_exception(ex, expex)
    def test_network_delete_exception_network_in_use(self):
        ex = exception.NetworkInUse(network_id=1)
        expex = webob.exc.HTTPConflict
        self._test_network_delete_exception(ex, expex)
class TenantNetworksTestV2(TenantNetworksTestV21):
    # Re-run the whole V2.1 test case against the legacy v2 contrib
    # controller, which must behave identically.
    ctrlr = networks.NetworkController
|
detiber/ansible-modules-core | refs/heads/devel | files/fetch.py | 94 | # this is a virtual module that is entirely implemented server side
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: fetch
short_description: Fetches a file from remote nodes
description:
- This module works like M(copy), but in reverse. It is used for fetching
files from remote machines and storing them locally in a file tree,
organized by hostname. Note that this module is written to transfer
log files that might not be present, so a missing remote file won't
be an error unless fail_on_missing is set to 'yes'.
version_added: "0.2"
options:
src:
description:
- The file on the remote system to fetch. This I(must) be a file, not a
directory. Recursive fetching may be supported in a later release.
required: true
default: null
aliases: []
dest:
description:
- A directory to save the file into. For example, if the I(dest)
directory is C(/backup) a I(src) file named C(/etc/profile) on host
C(host.example.com), would be saved into
C(/backup/host.example.com/etc/profile)
required: true
default: null
fail_on_missing:
version_added: "1.1"
description:
      - Makes the task fail when the source file is missing.
required: false
choices: [ "yes", "no" ]
default: "no"
validate_checksum:
version_added: "1.4"
description:
- Verify that the source and destination checksums match after the files are fetched.
required: false
choices: [ "yes", "no" ]
default: "yes"
aliases: [ "validate_md5" ]
flat:
version_added: "1.2"
description:
- Allows you to override the default behavior of appending
hostname/path/to/file to the destination. If dest ends with '/', it
will use the basename of the source file, similar to the copy module.
Obviously this is only handy if the filenames are unique.
requirements: []
author:
- "Ansible Core Team"
- "Michael DeHaan"
'''
EXAMPLES = '''
# Store file into /tmp/fetched/host.example.com/tmp/somefile
- fetch: src=/tmp/somefile dest=/tmp/fetched
# Specifying a path directly
- fetch: src=/tmp/somefile dest=/tmp/prefix-{{ ansible_hostname }} flat=yes
# Specifying a destination path
- fetch: src=/tmp/uniquefile dest=/tmp/special/ flat=yes
# Storing in a path relative to the playbook
- fetch: src=/tmp/uniquefile dest=special/prefix-{{ ansible_hostname }} flat=yes
'''
|
cernops/rally | refs/heads/master | rally/cli/commands/plugin.py | 10 | # Copyright 2015: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import textwrap
from rally.cli import cliutils
from rally.common.plugin import plugin
from rally.common import utils
from rally import plugins
class PluginCommands(object):
    """Command allows to manage Rally plugins."""

    @staticmethod
    def _print_plugins_list(plugin_list):
        """Print a name/namespace/title table for the given plugins."""
        rows = [utils.Struct(name=f.get_name(),
                             namespace=f.get_namespace(),
                             title=f.get_info()["title"])
                for f in plugin_list]
        cliutils.print_list(rows, fields=["name", "namespace", "title"])

    @cliutils.args("--name", dest="name", type=str,
                   help="Plugin name.")
    @cliutils.args("--namespace", dest="namespace", type=str,
                   help="Plugin namespace.")
    @plugins.ensure_plugins_are_loaded
    def show(self, name, namespace=None):
        """Show detailed information about Rally plugin.

        :param name: plugin name (case-insensitive substring match)
        :param namespace: optional namespace to narrow the search
        """
        name_lw = name.lower()
        all_plugins = plugin.Plugin.get_all(namespace=namespace)
        found = [p for p in all_plugins if name_lw in p.get_name().lower()]
        exact_match = [p for p in found if name_lw == p.get_name().lower()]
        if not found:
            if namespace:
                print(
                    "There is no plugin: %(name)s in %(namespace)s namespace"
                    % {"name": name, "namespace": namespace}
                )
            else:
                print("There is no plugin: %s" % name)
        elif len(found) == 1 or exact_match:
            # Single hit, or an exact name match among several hits.
            plugin_ = found[0] if len(found) == 1 else exact_match[0]
            plugin_info = plugin_.get_info()
            print(cliutils.make_header(plugin_info["title"]))
            print("NAME\n\t%s" % plugin_info["name"])
            print("NAMESPACE\n\t%s" % plugin_info["namespace"])
            print("MODULE\n\t%s" % plugin_info["module"])
            if plugin_info["description"]:
                print("DESCRIPTION\n\t", end="")
                print(textwrap.fill(plugin_info["description"],
                                    subsequent_indent="\t"))
            if plugin_info["parameters"]:
                print("PARAMETERS")
                # Bug fix: the format string used to be "g%s\n", which
                # prefixed every parameter description with a stray "g".
                rows = [utils.Struct(name=p["name"],
                                     description="%s\n" % p["doc"])
                        for p in plugin_info["parameters"]]
                cliutils.print_list(rows, fields=["name", "description"])
        else:
            print("Multiple plugins found:")
            self._print_plugins_list(found)

    @cliutils.args("--name", dest="name", type=str,
                   help="List only plugins that match passed name.")
    @cliutils.args("--namespace", dest="namespace", type=str,
                   help="List only plugins that are in specified namespace")
    @plugins.ensure_plugins_are_loaded
    def list(self, name=None, namespace=None):
        """List all Rally plugins that match name and namespace."""
        all_plugins = plugin.Plugin.get_all(namespace=namespace)
        matched = all_plugins
        if name:
            name_lw = name.lower()
            matched = [p for p in all_plugins
                       if name_lw in p.get_name().lower()]
        if not all_plugins:
            print("There is no plugin namespace: %s" % namespace)
        elif not matched:
            print("There is no plugin: %s" % name)
        else:
            self._print_plugins_list(matched)
|
inovtec-solutions/OpenERP | refs/heads/branch_openerp | openerp/addons/account/account.py | 1 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from datetime import datetime
from dateutil.relativedelta import relativedelta
from operator import itemgetter
import time
import openerp
from openerp import SUPERUSER_ID
from openerp import pooler, tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round
import openerp.addons.decimal_precision as dp
_logger = logging.getLogger(__name__)
def check_cycle(self, cr, uid, ids, context=None):
    """Walk up the ``parent_id`` chains of ``self._table``.

    Follows parents for at most 100 levels. Returns True when every
    chain terminates (no cycle found), False when 100 iterations were
    not enough to exhaust all chains (a cycle is likely).
    """
    remaining_levels = 100
    while ids:
        cr.execute('SELECT DISTINCT parent_id '\
                   'FROM '+self._table+' '\
                   'WHERE id IN %s '\
                   'AND parent_id IS NOT NULL',(tuple(ids),))
        ids = [row[0] for row in cr.fetchall()]
        if not remaining_levels:
            return False
        remaining_levels -= 1
    return True
class account_payment_term(osv.osv):
    _name = "account.payment.term"
    _description = "Payment Term"
    _columns = {
        'name': fields.char('Payment Term', size=64, translate=True, required=True),
        'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the payment term without removing it."),
        'note': fields.text('Description', translate=True),
        'line_ids': fields.one2many('account.payment.term.line', 'payment_id', 'Terms'),
    }
    _defaults = {
        'active': 1,
    }
    _order = "name"
    def compute(self, cr, uid, id, value, date_ref=False, context=None):
        """Split ``value`` into (due_date, amount) pairs according to
        this payment term's lines.

        :param id: id of the payment term to apply (shadows the builtin)
        :param value: total amount to split
        :param date_ref: reference date as '%Y-%m-%d'; defaults to today
        :return: list of ('%Y-%m-%d', amount) tuples
        """
        if not date_ref:
            date_ref = datetime.now().strftime('%Y-%m-%d')
        pt = self.browse(cr, uid, id, context=context)
        # ``amount`` tracks what is still unallocated; ``value`` stays
        # the original total (used by percentage lines).
        amount = value
        result = []
        obj_precision = self.pool.get('decimal.precision')
        prec = obj_precision.precision_get(cr, uid, 'Account')
        for line in pt.line_ids:
            # NOTE(review): ``amt`` is only assigned for the three known
            # line types; the selection field on the line model is what
            # prevents an unbound/stale value here.
            if line.value == 'fixed':
                amt = round(line.value_amount, prec)
            elif line.value == 'procent':
                # value_amount is a 0-1 ratio of the original total.
                amt = round(value * line.value_amount, prec)
            elif line.value == 'balance':
                # Whatever previous lines have not allocated yet.
                amt = round(amount, prec)
            if amt:
                next_date = (datetime.strptime(date_ref, '%Y-%m-%d') + relativedelta(days=line.days))
                if line.days2 < 0:
                    # Negative day-of-month counts back from the first of
                    # the following month (-1 => last day of this month).
                    next_first_date = next_date + relativedelta(day=1,months=1) #Getting 1st of next month
                    next_date = next_first_date + relativedelta(days=line.days2)
                if line.days2 > 0:
                    # Positive day-of-month: that day in the next month.
                    next_date += relativedelta(day=line.days2, months=1)
                result.append( (next_date.strftime('%Y-%m-%d'), amt) )
                amount -= amt
        # Re-sum what was allocated; any rounding leftover becomes a
        # final installment due today.
        amount = reduce(lambda x,y: x+y[1], result, 0.0)
        dist = round(value-amount, prec)
        if dist:
            result.append( (time.strftime('%Y-%m-%d'), dist) )
        return result
class account_payment_term_line(osv.osv):
    _name = "account.payment.term.line"
    _description = "Payment Term Line"
    _columns = {
        'value': fields.selection([('procent', 'Percent'),
                                   ('balance', 'Balance'),
                                   ('fixed', 'Fixed Amount')], 'Computation',
                                   required=True, help="""Select here the kind of valuation related to this payment term line. Note that you should have your last line with the type 'Balance' to ensure that the whole amount will be treated."""),
        'value_amount': fields.float('Amount To Pay', digits_compute=dp.get_precision('Payment Term'), help="For percent enter a ratio between 0-1."),
        'days': fields.integer('Number of Days', required=True, help="Number of days to add before computation of the day of month." \
            "If Date=15/01, Number of Days=22, Day of Month=-1, then the due date is 28/02."),
        'days2': fields.integer('Day of the Month', required=True, help="Day of the month, set -1 for the last day of the current month. If it's positive, it gives the day of the next month. Set 0 for net days (otherwise it's based on the beginning of the month)."),
        'payment_id': fields.many2one('account.payment.term', 'Payment Term', required=True, select=True, ondelete='cascade'),
    }
    _defaults = {
        'value': 'balance',
        'days': 30,
        'days2': 0,
    }
    _order = "value desc,days"
    def _check_percent(self, cr, uid, ids, context=None):
        # Only 'procent' lines are range-checked; the ratio must lie in
        # [0, 1] (0.02 means 2%). Other line types always pass.
        obj = self.browse(cr, uid, ids[0], context=context)
        if obj.value == 'procent' and ( obj.value_amount < 0.0 or obj.value_amount > 1.0):
            return False
        return True
    _constraints = [
        (_check_percent, 'Percentages for Payment Term Line must be between 0 and 1, Example: 0.02 for 2%.', ['value_amount']),
    ]
account_payment_term_line()
class account_account_type(osv.osv):
    _name = "account.account.type"
    _description = "Account Type"
    def _get_financial_report_ref(self, cr, uid, context=None):
        # Map the four P&L/BS categories to their predefined
        # account.financial.report browse records, skipping any report
        # whose XML id is missing in this database.
        obj_data = self.pool.get('ir.model.data')
        obj_financial_report = self.pool.get('account.financial.report')
        financial_report_ref = {}
        for key, financial_report in [
                ('asset','account_financial_report_assets0'),
                ('liability','account_financial_report_liability0'),
                ('income','account_financial_report_income0'),
                ('expense','account_financial_report_expense0'),
            ]:
            try:
                financial_report_ref[key] = obj_financial_report.browse(cr, uid,
                    obj_data.get_object_reference(cr, uid, 'account', financial_report)[1],
                    context=context)
            except ValueError:
                pass
        return financial_report_ref
    def _get_current_report_type(self, cr, uid, ids, name, arg, context=None):
        # Functional getter: 'none' unless the type is attached to one of
        # the four predefined financial reports.
        res = {}
        financial_report_ref = self._get_financial_report_ref(cr, uid, context=context)
        for record in self.browse(cr, uid, ids, context=context):
            res[record.id] = 'none'
            for key, financial_report in financial_report_ref.items():
                list_ids = [x.id for x in financial_report.account_type_ids]
                if record.id in list_ids:
                    res[record.id] = key
        return res
    def _save_report_type(self, cr, uid, account_type_id, field_name, field_value, arg, context=None):
        # Functional setter: move the account type to the report matching
        # the selected category.
        field_value = field_value or 'none'
        obj_financial_report = self.pool.get('account.financial.report')
        #unlink if it exists somewhere in the financial reports related to BS or PL
        financial_report_ref = self._get_financial_report_ref(cr, uid, context=context)
        for key, financial_report in financial_report_ref.items():
            list_ids = [x.id for x in financial_report.account_type_ids]
            if account_type_id in list_ids:
                obj_financial_report.write(cr, uid, [financial_report.id], {'account_type_ids': [(3, account_type_id)]})
        #write it in the good place
        if field_value != 'none':
            return obj_financial_report.write(cr, uid, [financial_report_ref[field_value].id], {'account_type_ids': [(4, account_type_id)]})
    _columns = {
        'name': fields.char('Account Type', size=64, required=True, translate=True),
        'code': fields.char('Code', size=32, required=True, select=True),
        'close_method': fields.selection([('none', 'None'), ('balance', 'Balance'), ('detail', 'Detail'), ('unreconciled', 'Unreconciled')], 'Deferral Method', required=True, help="""Set here the method that will be used to generate the end of year journal entries for all the accounts of this type.
 'None' means that nothing will be done.
 'Balance' will generally be used for cash accounts.
 'Detail' will copy each existing journal item of the previous year, even the reconciled ones.
 'Unreconciled' will copy only the journal items that were unreconciled on the first day of the new fiscal year."""),
        'report_type': fields.function(_get_current_report_type, fnct_inv=_save_report_type, type='selection', string='P&L / BS Category', store=True,
            selection= [('none','/'),
                        ('income', _('Profit & Loss (Income account)')),
                        ('expense', _('Profit & Loss (Expense account)')),
                        ('asset', _('Balance Sheet (Asset account)')),
                        ('liability', _('Balance Sheet (Liability account)'))], help="This field is used to generate legal reports: profit and loss, balance sheet.", required=True),
        'note': fields.text('Description'),
    }
    _defaults = {
        'close_method': 'none',
        'report_type': 'none',
    }
    _order = "code"
account_account_type()
def _code_get(self, cr, uid, context=None):
    """Selection helper: (code, name) pairs for every account type."""
    type_pool = self.pool.get('account.account.type')
    type_ids = type_pool.search(cr, uid, [])
    records = type_pool.read(cr, uid, type_ids, ['code', 'name'], context=context)
    return [(record['code'], record['name']) for record in records]
#----------------------------------------------------------
# Accounts
#----------------------------------------------------------
class account_tax(osv.osv):
    # Registers the 'account.tax' model early.
    # NOTE(review): presumably the full definition (columns, methods) is
    # added further down in this module -- confirm before relying on it.
    _name = 'account.tax'
account_tax()
class account_account(osv.osv):
_order = "parent_left"
_parent_order = "code"
_name = "account.account"
_description = "Account"
_parent_store = True
    def search(self, cr, uid, args, offset=0, limit=None, order=None,
               context=None, count=False):
        """Search accounts, rewriting some domain terms first:

        - 'code like/ilike X' becomes a prefix match 'code =like X%';
        - 'journal_id' terms are translated into the account/type
          restrictions configured on that journal.
        With context['consolidate_children'], the consolidated children
        of context['account_id'] are appended to the result.
        """
        if context is None:
            context = {}
        pos = 0
        while pos < len(args):
            if args[pos][0] == 'code' and args[pos][1] in ('like', 'ilike') and args[pos][2]:
                # Turn substring search on code into a prefix search.
                args[pos] = ('code', '=like', tools.ustr(args[pos][2].replace('%', ''))+'%')
            if args[pos][0] == 'journal_id':
                if not args[pos][2]:
                    # Empty journal filter: drop the term and re-examine
                    # the element now at this index.
                    del args[pos]
                    continue
                jour = self.pool.get('account.journal').browse(cr, uid, args[pos][2], context=context)
                if (not (jour.account_control_ids or jour.type_control_ids)) or not args[pos][2]:
                    # Journal has no controls: just exclude non-postable
                    # account kinds. The rewritten term is re-checked by
                    # the loop (its field is 'type', so it falls through).
                    args[pos] = ('type','not in',('consolidation','view'))
                    continue
                ids3 = map(lambda x: x.id, jour.type_control_ids)
                ids1 = super(account_account, self).search(cr, uid, [('user_type', 'in', ids3)])
                ids1 += map(lambda x: x.id, jour.account_control_ids)
                args[pos] = ('id', 'in', ids1)
            pos += 1
        if context and context.has_key('consolidate_children'): #add consolidated children of accounts
            ids = super(account_account, self).search(cr, uid, args, offset, limit,
                order, context=context, count=count)
            for consolidate_child in self.browse(cr, uid, context['account_id'], context=context).child_consol_ids:
                ids.append(consolidate_child.id)
            return ids
        return super(account_account, self).search(cr, uid, args, offset, limit,
            order, context=context, count=count)
def _get_children_and_consol(self, cr, uid, ids, context=None):
#this function search for all the children and all consolidated children (recursively) of the given account ids
ids2 = self.search(cr, uid, [('parent_id', 'child_of', ids)], context=context)
ids3 = []
for rec in self.browse(cr, uid, ids2, context=context):
for child in rec.child_consol_ids:
ids3.append(child.id)
if ids3:
ids3 = self._get_children_and_consol(cr, uid, ids3, context)
return ids2 + ids3
    def __compute(self, cr, uid, ids, field_names, arg=None, context=None,
                  query='', query_params=()):
        """ compute the balance, debit and/or credit for the provided
        account ids
        Arguments:
        `ids`: account ids
        `field_names`: the fields to compute (a list of any of
                       'balance', 'debit' and 'credit')
        `arg`: unused fields.function stuff
        `query`: additional query filter (as a string)
        `query_params`: parameters for the provided query string
                        (__compute will handle their escaping) as a
                        tuple

        Returns a dict mapping each requested id to a dict of
        field_name -> amount (all zeroes when there are no move lines).
        """
        # SQL snippet per computable field; each one aggregates move lines
        mapping = {
            'balance': "COALESCE(SUM(l.debit),0) - COALESCE(SUM(l.credit), 0) as balance",
            'debit': "COALESCE(SUM(l.debit), 0) as debit",
            'credit': "COALESCE(SUM(l.credit), 0) as credit",
            # by convention, foreign_balance is 0 when the account has no secondary currency, because the amounts may be in different currencies
            'foreign_balance': "(SELECT CASE WHEN currency_id IS NULL THEN 0 ELSE COALESCE(SUM(l.amount_currency), 0) END FROM account_account WHERE id IN (l.account_id)) as foreign_balance",
        }
        #get all the necessary accounts
        children_and_consolidated = self._get_children_and_consol(cr, uid, ids, context=context)
        #compute for each account the balance/debit/credit from the move lines
        accounts = {}
        res = {}
        null_result = dict((fn, 0.0) for fn in field_names)
        if children_and_consolidated:
            # _query_get returns the move-line filter implied by the context
            # (fiscal year, periods, posted-only, ...)
            aml_query = self.pool.get('account.move.line')._query_get(cr, uid, context=context)
            wheres = [""]
            if query.strip():
                wheres.append(query.strip())
            if aml_query.strip():
                wheres.append(aml_query.strip())
            filters = " AND ".join(wheres)
            # IN might not work ideally in case there are too many
            # children_and_consolidated, in that case join on a
            # values() e.g.:
            # SELECT l.account_id as id FROM account_move_line l
            # INNER JOIN (VALUES (id1), (id2), (id3), ...) AS tmp (id)
            # ON l.account_id = tmp.id
            # or make _get_children_and_consol return a query and join on that
            request = ("SELECT l.account_id as id, " +\
                    ', '.join(mapping.values()) +
                    " FROM account_move_line l" \
                    " WHERE l.account_id IN %s " \
                            + filters +
                    " GROUP BY l.account_id")
            params = (tuple(children_and_consolidated),) + query_params
            cr.execute(request, params)
            for row in cr.dictfetchall():
                accounts[row['id']] = row

            # consolidate accounts with direct children
            # (reversed order: children are summed before their parents)
            children_and_consolidated.reverse()
            brs = list(self.browse(cr, uid, children_and_consolidated, context=context))
            sums = {}
            currency_obj = self.pool.get('res.currency')
            while brs:
                current = brs.pop(0)
#                can_compute = True
#                for child in current.child_id:
#                    if child.id not in sums:
#                        can_compute = False
#                        try:
#                            brs.insert(0, brs.pop(brs.index(child)))
#                        except ValueError:
#                            brs.insert(0, child)
#                if can_compute:
                for fn in field_names:
                    sums.setdefault(current.id, {})[fn] = accounts.get(current.id, {}).get(fn, 0.0)
                    for child in current.child_id:
                        if child.company_id.currency_id.id == current.company_id.currency_id.id:
                            sums[current.id][fn] += sums[child.id][fn]
                        else:
                            # convert the child amount into the parent company currency
                            sums[current.id][fn] += currency_obj.compute(cr, uid, child.company_id.currency_id.id, current.company_id.currency_id.id, sums[child.id][fn], context=context)

                # as we have to relay on values computed before this is calculated separately than previous fields
                if current.currency_id and current.exchange_rate and \
                        ('adjusted_balance' in field_names or 'unrealized_gain_loss' in field_names):
                    # Computing Adjusted Balance and Unrealized Gains and losses
                    # Adjusted Balance = Foreign Balance / Exchange Rate
                    # Unrealized Gains and losses = Adjusted Balance - Balance
                    adj_bal = sums[current.id].get('foreign_balance', 0.0) / current.exchange_rate
                    sums[current.id].update({'adjusted_balance': adj_bal, 'unrealized_gain_loss': adj_bal - sums[current.id].get('balance', 0.0)})

            for id in ids:
                res[id] = sums.get(id, null_result)
        else:
            for id in ids:
                res[id] = null_result
        return res
def _get_company_currency(self, cr, uid, ids, field_name, arg, context=None):
result = {}
for rec in self.browse(cr, uid, ids, context=context):
result[rec.id] = (rec.company_id.currency_id.id,rec.company_id.currency_id.symbol)
return result
def _get_child_ids(self, cr, uid, ids, field_name, arg, context=None):
result = {}
for record in self.browse(cr, uid, ids, context=context):
if record.child_parent_ids:
result[record.id] = [x.id for x in record.child_parent_ids]
else:
result[record.id] = []
if record.child_consol_ids:
for acc in record.child_consol_ids:
if acc.id not in result[record.id]:
result[record.id].append(acc.id)
return result
def _get_level(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
#we may not know the level of the parent at the time of computation, so we
# can't simply do res[account.id] = account.parent_id.level + 1
level = 0
parent = account.parent_id
while parent:
level += 1
parent = parent.parent_id
res[account.id] = level
return res
def _set_credit_debit(self, cr, uid, account_id, name, value, arg, context=None):
if context.get('config_invisible', True):
return True
account = self.browse(cr, uid, account_id, context=context)
diff = value - getattr(account,name)
if not diff:
return True
journal_obj = self.pool.get('account.journal')
jids = journal_obj.search(cr, uid, [('type','=','situation'),('centralisation','=',1),('company_id','=',account.company_id.id)], context=context)
if not jids:
raise osv.except_osv(_('Error!'),_("You need an Opening journal with centralisation checked to set the initial balance."))
period_obj = self.pool.get('account.period')
pids = period_obj.search(cr, uid, [('special','=',True),('company_id','=',account.company_id.id)], context=context)
if not pids:
raise osv.except_osv(_('Error!'),_("There is no opening/closing period defined, please create one to set the initial balance."))
move_obj = self.pool.get('account.move.line')
move_id = move_obj.search(cr, uid, [
('journal_id','=',jids[0]),
('period_id','=',pids[0]),
('account_id','=', account_id),
(name,'>', 0.0),
('name','=', _('Opening Balance'))
], context=context)
if move_id:
move = move_obj.browse(cr, uid, move_id[0], context=context)
move_obj.write(cr, uid, move_id[0], {
name: diff+getattr(move,name)
}, context=context)
else:
if diff<0.0:
raise osv.except_osv(_('Error!'),_("Unable to adapt the initial balance (negative value)."))
nameinv = (name=='credit' and 'debit') or 'credit'
move_id = move_obj.create(cr, uid, {
'name': _('Opening Balance'),
'account_id': account_id,
'journal_id': jids[0],
'period_id': pids[0],
name: diff,
nameinv: 0.0
}, context=context)
return True
    # ---- column definitions for account.account ----
    _columns = {
        'name': fields.char('Name', size=256, required=True, select=True),
        'currency_id': fields.many2one('res.currency', 'Secondary Currency', help="Forces all moves for this account to have this secondary currency."),
        'code': fields.char('Code', size=64, required=True, select=1),
        'type': fields.selection([
            ('view', 'View'),
            ('other', 'Regular'),
            ('receivable', 'Receivable'),
            ('payable', 'Payable'),
            ('liquidity','Liquidity'),
            ('consolidation', 'Consolidation'),
            ('closed', 'Closed'),
        ], 'Internal Type', required=True, help="The 'Internal Type' is used for features available on "\
            "different types of accounts: view can not have journal items, consolidation are accounts that "\
            "can have children accounts for multi-company consolidations, payable/receivable are for "\
            "partners accounts (for debit/credit computations), closed for depreciated accounts."),
        'user_type': fields.many2one('account.account.type', 'Account Type', required=True,
            help="Account Type is used for information purpose, to generate "
              "country-specific legal reports, and set the rules to close a fiscal year and generate opening entries."),
        'financial_report_ids': fields.many2many('account.financial.report', 'account_account_financial_report', 'account_id', 'report_line_id', 'Financial Reports'),
        'parent_id': fields.many2one('account.account', 'Parent', ondelete='cascade', domain=[('type','=','view')]),
        'child_parent_ids': fields.one2many('account.account','parent_id','Children'),
        'child_consol_ids': fields.many2many('account.account', 'account_account_consol_rel', 'child_id', 'parent_id', 'Consolidated Children'),
        'child_id': fields.function(_get_child_ids, type='many2many', relation="account.account", string="Child Accounts"),
        # all the 'balance' multi fields below are computed together by __compute
        'balance': fields.function(__compute, digits_compute=dp.get_precision('Account'), string='Balance', multi='balance'),
        'credit': fields.function(__compute, fnct_inv=_set_credit_debit, digits_compute=dp.get_precision('Account'), string='Credit', multi='balance'),
        'debit': fields.function(__compute, fnct_inv=_set_credit_debit, digits_compute=dp.get_precision('Account'), string='Debit', multi='balance'),
        'foreign_balance': fields.function(__compute, digits_compute=dp.get_precision('Account'), string='Foreign Balance', multi='balance',
                                           help="Total amount (in Secondary currency) for transactions held in secondary currency for this account."),
        'adjusted_balance': fields.function(__compute, digits_compute=dp.get_precision('Account'), string='Adjusted Balance', multi='balance',
                                            help="Total amount (in Company currency) for transactions held in secondary currency for this account."),
        'unrealized_gain_loss': fields.function(__compute, digits_compute=dp.get_precision('Account'), string='Unrealized Gain or Loss', multi='balance',
                                                help="Value of Loss or Gain due to changes in exchange rate when doing multi-currency transactions."),
        'reconcile': fields.boolean('Allow Reconciliation', help="Check this box if this account allows reconciliation of journal items."),
        'exchange_rate': fields.related('currency_id', 'rate', type='float', string='Exchange Rate', digits=(12,6)),
        'shortcut': fields.char('Shortcut', size=12),
        'tax_ids': fields.many2many('account.tax', 'account_account_tax_default_rel',
            'account_id', 'tax_id', 'Default Taxes'),
        'note': fields.text('Internal Notes'),
        'company_currency_id': fields.function(_get_company_currency, type='many2one', relation='res.currency', string='Company Currency'),
        'company_id': fields.many2one('res.company', 'Company', required=True),
        'active': fields.boolean('Active', select=2, help="If the active field is set to False, it will allow you to hide the account without removing it."),
        'parent_left': fields.integer('Parent Left', select=1),
        'parent_right': fields.integer('Parent Right', select=1),
        'currency_mode': fields.selection([('current', 'At Date'), ('average', 'Average Rate')], 'Outgoing Currencies Rate',
            help=
            'This will select how the current currency rate for outgoing transactions is computed. '\
            'In most countries the legal method is "average" but only a few software systems are able to '\
            'manage this. So if you import from another software system you may have to use the rate at date. ' \
            'Incoming transactions always use the rate at date.', \
            required=True),
        'level': fields.function(_get_level, string='Level', method=True, type='integer',
             store={
                    'account.account': (_get_children_and_consol, ['level', 'parent_id'], 10),
                   }),
    }

    # ---- defaults for new accounts ----
    _defaults = {
        'type': 'other',
        'reconcile': False,
        'active': True,
        'currency_mode': 'current',
        'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(cr, uid, 'account.account', context=c),
    }

    def _check_recursion(self, cr, uid, ids, context=None):
        """Constraint helper: detect cycles through both the parent_id
        hierarchy and the consolidation (child_consol_ids) links.

        Starting from the edited record, repeatedly expands the set of
        consolidated children (SQL) and their parent_id descendants, and
        returns False as soon as the record itself or its parent reappears.
        """
        obj_self = self.browse(cr, uid, ids[0], context=context)
        p_id = obj_self.parent_id and obj_self.parent_id.id
        # NOTE(review): 'is' compares object identity, not equality; this only
        # works for small ints under CPython interning — '==' would be safer.
        # Behavior kept as-is.
        if (obj_self in obj_self.child_consol_ids) or (p_id and (p_id is obj_self.id)):
            return False
        while(ids):
            cr.execute('SELECT DISTINCT child_id '\
                       'FROM account_account_consol_rel '\
                       'WHERE parent_id IN %s', (tuple(ids),))
            child_ids = map(itemgetter(0), cr.fetchall())
            c_ids = child_ids
            if (p_id and (p_id in c_ids)) or (obj_self.id in c_ids):
                return False
            while len(c_ids):
                # walk down the regular hierarchy under the consolidated children
                s_ids = self.search(cr, uid, [('parent_id', 'in', c_ids)])
                if p_id and (p_id in s_ids):
                    return False
                c_ids = s_ids
            ids = child_ids
        return True
def _check_type(self, cr, uid, ids, context=None):
if context is None:
context = {}
accounts = self.browse(cr, uid, ids, context=context)
for account in accounts:
if account.child_id and account.type not in ('view', 'consolidation'):
return False
return True
def _check_account_type(self, cr, uid, ids, context=None):
for account in self.browse(cr, uid, ids, context=context):
if account.type in ('receivable', 'payable') and account.user_type.close_method != 'unreconciled':
return False
return True
def _check_company_account(self, cr, uid, ids, context=None):
for account in self.browse(cr, uid, ids, context=context):
if account.parent_id:
if account.company_id != account.parent_id.company_id:
return False
return True
_constraints = [
(_check_recursion, 'Error!\nYou cannot create recursive accounts.', ['parent_id']),
(_check_type, 'Configuration Error!\nYou cannot define children to an account with internal type different of "View".', ['type']),
(_check_account_type, 'Configuration Error!\nYou cannot select an account type with a deferral method different of "Unreconciled" for accounts with internal type "Payable/Receivable".', ['user_type','type']),
(_check_company_account, 'Error!\nYou cannot create an account which has parent account of different company.', ['parent_id']),
]
_sql_constraints = [
('code_company_uniq', 'unique (code,company_id)', 'The code of the account must be unique per company !')
]
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
args = args[:]
ids = []
try:
if name and str(name).startswith('partner:'):
part_id = int(name.split(':')[1])
part = self.pool.get('res.partner').browse(cr, user, part_id, context=context)
args += [('id', 'in', (part.property_account_payable.id, part.property_account_receivable.id))]
name = False
if name and str(name).startswith('type:'):
type = name.split(':')[1]
args += [('type', '=', type)]
name = False
except:
pass
if name:
ids = self.search(cr, user, [('code', '=like', name+"%")]+args, limit=limit)
if not ids:
ids = self.search(cr, user, [('shortcut', '=', name)]+ args, limit=limit)
if not ids:
ids = self.search(cr, user, [('name', operator, name)]+ args, limit=limit)
if not ids and len(name.split()) >= 2:
#Separating code and name of account for searching
operand1,operand2 = name.split(' ',1) #name can contain spaces e.g. OpenERP S.A.
ids = self.search(cr, user, [('code', operator, operand1), ('name', operator, operand2)]+ args, limit=limit)
else:
ids = self.search(cr, user, args, context=context, limit=limit)
return self.name_get(cr, user, ids, context=context)
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
if isinstance(ids, (int, long)):
ids = [ids]
reads = self.read(cr, uid, ids, ['name', 'code'], context=context)
res = []
for record in reads:
name = record['name']
if record['code']:
name = record['code'] + ' ' + name
res.append((record['id'], name))
return res
    def copy(self, cr, uid, id, default=None, context=None, done_list=None, local=False):
        """Duplicate an account and, recursively, its children.

        `done_list` tracks already-copied account ids so an account reached
        twice through the recursion is not copied again (returns False for
        it); `local` is True only on the recursive child calls so the list
        survives across them. The copy's code gets a ' (copy)' suffix.
        """
        default = {} if default is None else default.copy()
        if done_list is None:
            done_list = []
        account = self.browse(cr, uid, id, context=context)
        new_child_ids = []
        default.update(code=_("%s (copy)") % (account['code'] or ''))
        if not local:
            # top-level call: start a fresh visited list
            done_list = []
        if account.id in done_list:
            return False
        done_list.append(account.id)
        if account:
            for child in account.child_id:
                child_ids = self.copy(cr, uid, child.id, default, context=context, done_list=done_list, local=True)
                if child_ids:
                    new_child_ids.append(child_ids)
            default['child_parent_ids'] = [(6, 0, new_child_ids)]
        else:
            default['child_parent_ids'] = False
        return super(account_account, self).copy(cr, uid, id, default, context=context)
def _check_moves(self, cr, uid, ids, method, context=None):
line_obj = self.pool.get('account.move.line')
account_ids = self.search(cr, uid, [('id', 'child_of', ids)])
if line_obj.search(cr, uid, [('account_id', 'in', account_ids)]):
if method == 'write':
raise osv.except_osv(_('Error!'), _('You cannot deactivate an account that contains journal items.'))
elif method == 'unlink':
raise osv.except_osv(_('Error!'), _('You cannot remove an account that contains journal items.'))
#Checking whether the account is set as a property to any Partner or not
value = 'account.account,' + str(ids[0])
partner_prop_acc = self.pool.get('ir.property').search(cr, uid, [('value_reference','=',value)], context=context)
if partner_prop_acc:
raise osv.except_osv(_('Warning!'), _('You cannot remove/deactivate an account which is set on a customer or supplier.'))
return True
    def _check_allow_type_change(self, cr, uid, ids, new_type, context=None):
        """Refuse type changes that would be inconsistent with existing
        journal items: leaving 'closed', or moving to 'consolidation'/'view',
        is forbidden when the account (or any of its children) has items.

        :param new_type: the internal type being written
        :raises osv.except_osv: when the change is not allowed
        """
        restricted_groups = ['consolidation','view']
        line_obj = self.pool.get('account.move.line')
        for account in self.browse(cr, uid, ids, context=context):
            old_type = account.type
            account_ids = self.search(cr, uid, [('id', 'child_of', [account.id])])
            if line_obj.search(cr, uid, [('account_id', 'in', account_ids)]):
                #Check for 'Closed' type
                if old_type == 'closed' and new_type !='closed':
                    raise osv.except_osv(_('Warning!'), _("You cannot change the type of account from 'Closed' to any other type as it contains journal items!"))
                # Forbid to change an account type for restricted_groups as it contains journal items (or if one of its children does)
                if (new_type in restricted_groups):
                    raise osv.except_osv(_('Warning!'), _("You cannot change the type of account to '%s' type as it contains journal items!") % (new_type,))
        return True

    # For legal reason (forbiden to modify journal entries which belongs to a closed fy or period), Forbid to modify
    # the code of an account if journal entries have been already posted on this account. This cannot be simply
    # 'configurable' since it can lead to a lack of confidence in OpenERP and this is what we want to change.
    def _check_allow_code_change(self, cr, uid, ids, context=None):
        """Refuse a code change when the account or any child carries journal items.

        :raises osv.except_osv: when journal items exist under the account
        """
        line_obj = self.pool.get('account.move.line')
        for account in self.browse(cr, uid, ids, context=context):
            account_ids = self.search(cr, uid, [('id', 'child_of', [account.id])], context=context)
            if line_obj.search(cr, uid, [('account_id', 'in', account_ids)], context=context):
                raise osv.except_osv(_('Warning !'), _("You cannot change the code of account which contains journal items!"))
        return True
def write(self, cr, uid, ids, vals, context=None):
if context is None:
context = {}
if not ids:
return True
if isinstance(ids, (int, long)):
ids = [ids]
# Dont allow changing the company_id when account_move_line already exist
if 'company_id' in vals:
move_lines = self.pool.get('account.move.line').search(cr, uid, [('account_id', 'in', ids)])
if move_lines:
# Allow the write if the value is the same
for i in [i['company_id'][0] for i in self.read(cr,uid,ids,['company_id'])]:
if vals['company_id']!=i:
raise osv.except_osv(_('Warning!'), _('You cannot change the owner company of an account that already contains journal items.'))
if 'active' in vals and not vals['active']:
self._check_moves(cr, uid, ids, "write", context=context)
if 'type' in vals.keys():
self._check_allow_type_change(cr, uid, ids, vals['type'], context=context)
if 'code' in vals.keys():
self._check_allow_code_change(cr, uid, ids, context=context)
return super(account_account, self).write(cr, uid, ids, vals, context=context)
    def unlink(self, cr, uid, ids, context=None):
        """Delete accounts; _check_moves raises if any carries journal items
        or is referenced as a partner property."""
        self._check_moves(cr, uid, ids, "unlink", context=context)
        return super(account_account, self).unlink(cr, uid, ids, context=context)

account_account()  # register the model (old-style OpenERP registration)
class account_journal(osv.osv):
    _name = "account.journal"
    _description = "Journal"
    # ---- column definitions for account.journal ----
    _columns = {
        'with_last_closing_balance' : fields.boolean('Opening With Last Closing Balance'),
        'name': fields.char('Journal Name', size=64, required=True),
        'code': fields.char('Code', size=5, required=True, help="The code will be displayed on reports."),
        'type': fields.selection([('sale', 'Sale'),('sale_refund','Sale Refund'), ('purchase', 'Purchase'), ('purchase_refund','Purchase Refund'), ('cash', 'Cash'), ('bank', 'Bank and Checks'), ('general', 'General'), ('situation', 'Opening/Closing Situation')], 'Type', size=32, required=True,
                                 help="Select 'Sale' for customer invoices journals."\
                                 " Select 'Purchase' for supplier invoices journals."\
                                 " Select 'Cash' or 'Bank' for journals that are used in customer or supplier payments."\
                                 " Select 'General' for miscellaneous operations journals."\
                                 " Select 'Opening/Closing Situation' for entries generated for new fiscal years."),
        # which account types / accounts may be used in this journal (see account_account.search)
        'type_control_ids': fields.many2many('account.account.type', 'account_journal_type_rel', 'journal_id','type_id', 'Type Controls', domain=[('code','<>','view'), ('code', '<>', 'closed')]),
        'account_control_ids': fields.many2many('account.account', 'account_account_type_rel', 'journal_id','account_id', 'Account', domain=[('type','<>','view'), ('type', '<>', 'closed')]),
        'default_credit_account_id': fields.many2one('account.account', 'Default Credit Account', domain="[('type','!=','view')]", help="It acts as a default account for credit amount"),
        'default_debit_account_id': fields.many2one('account.account', 'Default Debit Account', domain="[('type','!=','view')]", help="It acts as a default account for debit amount"),
        'centralisation': fields.boolean('Centralized Counterpart', help="Check this box to determine that each entry of this journal won't create a new counterpart but will share the same counterpart. This is used in fiscal year closing."),
        'update_posted': fields.boolean('Allow Cancelling Entries', help="Check this box if you want to allow the cancellation the entries related to this journal or of the invoice related to this journal"),
        'group_invoice_lines': fields.boolean('Group Invoice Lines', help="If this box is checked, the system will try to group the accounting lines when generating them from invoices."),
        'sequence_id': fields.many2one('ir.sequence', 'Entry Sequence', help="This field contains the information related to the numbering of the journal entries of this journal.", required=True),
        'user_id': fields.many2one('res.users', 'User', help="The user responsible for this journal"),
        'groups_id': fields.many2many('res.groups', 'account_journal_group_rel', 'journal_id', 'group_id', 'Groups'),
        'currency': fields.many2one('res.currency', 'Currency', help='The currency used to enter statement'),
        'entry_posted': fields.boolean('Skip \'Draft\' State for Manual Entries', help='Check this box if you don\'t want new journal entries to pass through the \'draft\' state and instead goes directly to the \'posted state\' without any manual validation. \nNote that journal entries that are automatically created by the system are always skipping that state.'),
        'company_id': fields.many2one('res.company', 'Company', required=True, select=1, help="Company related to this journal"),
        'allow_date':fields.boolean('Check Date in Period', help= 'If set to True then do not accept the entry if the entry date is not into the period dates'),

        'profit_account_id' : fields.many2one('account.account', 'Profit Account'),
        'loss_account_id' : fields.many2one('account.account', 'Loss Account'),
        'internal_account_id' : fields.many2one('account.account', 'Internal Transfers Account', select=1),
        'cash_control' : fields.boolean('Cash Control', help='If you want the journal should be control at opening/closing, check this option'),
    }

    _defaults = {
        'cash_control' : False,
        'with_last_closing_balance' : False,
        'user_id': lambda self, cr, uid, context: uid,
        'company_id': lambda self, cr, uid, c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id,
    }
    _sql_constraints = [
        ('code_company_uniq', 'unique (code, company_id)', 'The code of the journal must be unique per company !'),
        ('name_company_uniq', 'unique (name, company_id)', 'The name of the journal must be unique per company !'),
    ]

    _order = 'code'

    def _check_currency(self, cr, uid, ids, context=None):
        """Constraint: when the journal has its own currency, both default
        accounts (if set) must use that same secondary currency."""
        for journal in self.browse(cr, uid, ids, context=context):
            if journal.currency:
                if journal.default_credit_account_id and not journal.default_credit_account_id.currency_id.id == journal.currency.id:
                    return False
                if journal.default_debit_account_id and not journal.default_debit_account_id.currency_id.id == journal.currency.id:
                    return False
        return True

    _constraints = [
        (_check_currency, 'Configuration error!\nThe currency chosen should be shared by the default accounts too.', ['currency','default_debit_account_id','default_credit_account_id']),
    ]
def copy(self, cr, uid, id, default=None, context=None, done_list=None, local=False):
default = {} if default is None else default.copy()
if done_list is None:
done_list = []
journal = self.browse(cr, uid, id, context=context)
default.update(
code=_("%s (copy)") % (journal['code'] or ''),
name=_("%s (copy)") % (journal['name'] or ''),
sequence_id=False)
return super(account_journal, self).copy(cr, uid, id, default, context=context)
def write(self, cr, uid, ids, vals, context=None):
if context is None:
context = {}
if isinstance(ids, (int, long)):
ids = [ids]
for journal in self.browse(cr, uid, ids, context=context):
if 'company_id' in vals and journal.company_id.id != vals['company_id']:
move_lines = self.pool.get('account.move.line').search(cr, uid, [('journal_id', 'in', ids)])
if move_lines:
raise osv.except_osv(_('Warning!'), _('This journal already contains items, therefore you cannot modify its company field.'))
return super(account_journal, self).write(cr, uid, ids, vals, context=context)
def create_sequence(self, cr, uid, vals, context=None):
""" Create new no_gap entry sequence for every new Joural
"""
# in account.journal code is actually the prefix of the sequence
# whereas ir.sequence code is a key to lookup global sequences.
prefix = vals['code'].upper()
seq = {
'name': vals['name'],
'implementation':'no_gap',
'prefix': prefix + "/%(year)s/",
'padding': 4,
'number_increment': 1
}
if 'company_id' in vals:
seq['company_id'] = vals['company_id']
return self.pool.get('ir.sequence').create(cr, uid, seq)
def create(self, cr, uid, vals, context=None):
if not 'sequence_id' in vals or not vals['sequence_id']:
# if we have the right to create a journal, we should be able to
# create it's sequence.
vals.update({'sequence_id': self.create_sequence(cr, SUPERUSER_ID, vals, context)})
return super(account_journal, self).create(cr, uid, vals, context)
def name_get(self, cr, user, ids, context=None):
"""
Returns a list of tupples containing id, name.
result format: {[(id, name), (id, name), ...]}
@param cr: A database cursor
@param user: ID of the user currently logged in
@param ids: list of ids for which name should be read
@param context: context arguments, like lang, time zone
@return: Returns a list of tupples containing id, name
"""
if not ids:
return []
if isinstance(ids, (int, long)):
ids = [ids]
result = self.browse(cr, user, ids, context=context)
res = []
for rs in result:
if rs.currency:
currency = rs.currency
else:
currency = rs.company_id.currency_id
name = "%s (%s)" % (rs.name, currency.name)
res += [(rs.id, name)]
return res
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
if context is None:
context = {}
ids = []
if context.get('journal_type', False):
args += [('type','=',context.get('journal_type'))]
if name:
ids = self.search(cr, user, [('code', 'ilike', name)]+ args, limit=limit, context=context)
if not ids:
ids = self.search(cr, user, [('name', 'ilike', name)]+ args, limit=limit, context=context)#fix it ilike should be replace with operator
return self.name_get(cr, user, ids, context=context)
account_journal()  # register the model (old-style OpenERP registration)
class account_fiscalyear(osv.osv):
    _name = "account.fiscalyear"
    _description = "Fiscal Year"
    _columns = {
        'name': fields.char('Fiscal Year', size=64, required=True),
        'code': fields.char('Code', size=6, required=True),
        'company_id': fields.many2one('res.company', 'Company', required=True),
        'date_start': fields.date('Start Date', required=True),
        'date_stop': fields.date('End Date', required=True),
        'period_ids': fields.one2many('account.period', 'fiscalyear_id', 'Periods'),
        'state': fields.selection([('draft','Open'), ('done','Closed')], 'Status', readonly=True),
    }
    _defaults = {
        'state': 'draft',
        'company_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id,
    }
    _order = "date_start, id"

    def _check_duration(self, cr, uid, ids, context=None):
        """Constraint: the fiscal year may not stop before it starts."""
        obj_fy = self.browse(cr, uid, ids[0], context=context)
        if obj_fy.date_stop < obj_fy.date_start:
            return False
        return True

    _constraints = [
        (_check_duration, 'Error!\nThe start date of a fiscal year must precede its end date.', ['date_start','date_stop'])
    ]

    def create_period3(self, cr, uid, ids, context=None):
        """Shortcut: create quarterly (3-month) periods."""
        return self.create_period(cr, uid, ids, context, 3)

    def create_period(self, cr, uid, ids, context=None, interval=1):
        """Create the periods of the fiscal year(s): one special opening
        period (zero-length, code '00/<year>') plus regular periods of
        `interval` months each, clamped to the fiscal year's end date."""
        period_obj = self.pool.get('account.period')
        for fy in self.browse(cr, uid, ids, context=context):
            ds = datetime.strptime(fy.date_start, '%Y-%m-%d')
            # special opening period: starts and stops on the first day
            # NOTE(review): ds is passed as a datetime here (not formatted
            # like the regular periods below); presumably the ORM serializes
            # it — confirm
            period_obj.create(cr, uid, {
                    'name':  "%s %s" % (_('Opening Period'), ds.strftime('%Y')),
                    'code': ds.strftime('00/%Y'),
                    'date_start': ds,
                    'date_stop': ds,
                    'special': True,
                    'fiscalyear_id': fy.id,
                })
            while ds.strftime('%Y-%m-%d') < fy.date_stop:
                de = ds + relativedelta(months=interval, days=-1)

                if de.strftime('%Y-%m-%d') > fy.date_stop:
                    # clamp the last period to the fiscal year's end date
                    de = datetime.strptime(fy.date_stop, '%Y-%m-%d')

                period_obj.create(cr, uid, {
                    'name': ds.strftime('%m/%Y'),
                    'code': ds.strftime('%m/%Y'),
                    'date_start': ds.strftime('%Y-%m-%d'),
                    'date_stop': de.strftime('%Y-%m-%d'),
                    'fiscalyear_id': fy.id,
                })
                ds = ds + relativedelta(months=interval)
        return True
def find(self, cr, uid, dt=None, exception=True, context=None):
res = self.finds(cr, uid, dt, exception, context=context)
return res and res[0] or False
    def finds(self, cr, uid, dt=None, exception=True, context=None):
        """Return the ids of the fiscal years containing `dt` (default:
        today) for the context company, or the user's company otherwise.

        :param exception: when True, raise instead of returning [] if no
            fiscal year matches
        :raises osv.except_osv: when nothing is found and `exception` is True
        """
        if context is None: context = {}
        if not dt:
            dt = fields.date.context_today(self,cr,uid,context=context)
        args = [('date_start', '<=' ,dt), ('date_stop', '>=', dt)]
        if context.get('company_id', False):
            company_id = context['company_id']
        else:
            company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
        args.append(('company_id', '=', company_id))
        ids = self.search(cr, uid, args, context=context)
        if not ids:
            if exception:
                raise osv.except_osv(_('Error!'), _('There is no fiscal year defined for this date.\nPlease create one from the configuration of the accounting menu.'))
            else:
                return []
        return ids
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=80):
if args is None:
args = []
if context is None:
context = {}
ids = []
if name:
ids = self.search(cr, user, [('code', 'ilike', name)]+ args, limit=limit)
if not ids:
ids = self.search(cr, user, [('name', operator, name)]+ args, limit=limit)
return self.name_get(cr, user, ids, context=context)
account_fiscalyear()  # register the model (old-style OpenERP registration)
class account_period(osv.osv):
    _name = "account.period"
    _description = "Account period"
    _columns = {
        'name': fields.char('Period Name', size=64, required=True),
        'code': fields.char('Code', size=12),
        # NOTE(review): size has no meaning on a boolean field — looks like a
        # copy-paste leftover from 'code' above
        'special': fields.boolean('Opening/Closing Period', size=12,
            help="These periods can overlap."),
        'date_start': fields.date('Start of Period', required=True, states={'done':[('readonly',True)]}),
        'date_stop': fields.date('End of Period', required=True, states={'done':[('readonly',True)]}),
        'fiscalyear_id': fields.many2one('account.fiscalyear', 'Fiscal Year', required=True, states={'done':[('readonly',True)]}, select=True),
        'state': fields.selection([('draft','Open'), ('done','Closed')], 'Status', readonly=True,
                                  help='When monthly periods are created. The status is \'Draft\'. At the end of monthly period it is in \'Done\' status.'),
        'company_id': fields.related('fiscalyear_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True)
    }
    _defaults = {
        'state': 'draft',
    }
    _order = "date_start, special desc"
    _sql_constraints = [
        ('name_company_uniq', 'unique(name, company_id)', 'The name of the period must be unique per company!'),
    ]
def _check_duration(self,cr,uid,ids,context=None):
obj_period = self.browse(cr, uid, ids[0], context=context)
if obj_period.date_stop < obj_period.date_start:
return False
return True
def _check_year_limit(self,cr,uid,ids,context=None):
for obj_period in self.browse(cr, uid, ids, context=context):
if obj_period.special:
continue
if obj_period.fiscalyear_id.date_stop < obj_period.date_stop or \
obj_period.fiscalyear_id.date_stop < obj_period.date_start or \
obj_period.fiscalyear_id.date_start > obj_period.date_start or \
obj_period.fiscalyear_id.date_start > obj_period.date_stop:
return False
pids = self.search(cr, uid, [('date_stop','>=',obj_period.date_start),('date_start','<=',obj_period.date_stop),('special','=',False),('id','<>',obj_period.id)])
for period in self.browse(cr, uid, pids):
if period.fiscalyear_id.company_id.id==obj_period.fiscalyear_id.company_id.id:
return False
return True
_constraints = [
(_check_duration, 'Error!\nThe duration of the Period(s) is/are invalid.', ['date_stop']),
(_check_year_limit, 'Error!\nThe period is invalid. Either some periods are overlapping or the period\'s dates are not matching the scope of the fiscal year.', ['date_stop'])
]
def next(self, cr, uid, period, step, context=None):
ids = self.search(cr, uid, [('date_start','>',period.date_start)])
if len(ids)>=step:
return ids[step-1]
return False
def find(self, cr, uid, dt=None, context=None):
if context is None: context = {}
if not dt:
dt = fields.date.context_today(self, cr, uid, context=context)
args = [('date_start', '<=' ,dt), ('date_stop', '>=', dt)]
if context.get('company_id', False):
args.append(('company_id', '=', context['company_id']))
else:
company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
args.append(('company_id', '=', company_id))
result = []
#WARNING: in next version the default value for account_periof_prefer_normal will be True
if context.get('account_period_prefer_normal'):
# look for non-special periods first, and fallback to all if no result is found
result = self.search(cr, uid, args + [('special', '=', False)], context=context)
if not result:
result = self.search(cr, uid, args, context=context)
if not result:
raise osv.except_osv(_('Error!'), _('There is no period defined for this date: %s.\nPlease create one.')%dt)
return result
def action_draft(self, cr, uid, ids, *args):
mode = 'draft'
for period in self.browse(cr, uid, ids):
if period.fiscalyear_id.state == 'done':
raise osv.except_osv(_('Warning!'), _('You can not re-open a period which belongs to closed fiscal year'))
cr.execute('update account_journal_period set state=%s where period_id in %s', (mode, tuple(ids),))
cr.execute('update account_period set state=%s where id in %s', (mode, tuple(ids),))
return True
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
if args is None:
args = []
if context is None:
context = {}
ids = []
if name:
ids = self.search(cr, user,
[('code', 'ilike', name)] + args,
limit=limit,
context=context)
if not ids:
ids = self.search(cr, user,
[('name', operator, name)] + args,
limit=limit,
context=context)
return self.name_get(cr, user, ids, context=context)
def write(self, cr, uid, ids, vals, context=None):
if 'company_id' in vals:
move_lines = self.pool.get('account.move.line').search(cr, uid, [('period_id', 'in', ids)])
if move_lines:
raise osv.except_osv(_('Warning!'), _('This journal already contains items for this period, therefore you cannot modify its company field.'))
return super(account_period, self).write(cr, uid, ids, vals, context=context)
def build_ctx_periods(self, cr, uid, period_from_id, period_to_id):
if period_from_id == period_to_id:
return [period_from_id]
period_from = self.browse(cr, uid, period_from_id)
period_date_start = period_from.date_start
company1_id = period_from.company_id.id
period_to = self.browse(cr, uid, period_to_id)
period_date_stop = period_to.date_stop
company2_id = period_to.company_id.id
if company1_id != company2_id:
raise osv.except_osv(_('Error!'), _('You should choose the periods that belong to the same company.'))
if period_date_start > period_date_stop:
raise osv.except_osv(_('Error!'), _('Start period should precede then end period.'))
# /!\ We do not include a criterion on the company_id field below, to allow producing consolidated reports
# on multiple companies. It will only work when start/end periods are selected and no fiscal year is chosen.
#for period from = january, we want to exclude the opening period (but it has same date_from, so we have to check if period_from is special or not to include that clause or not in the search).
if period_from.special:
return self.search(cr, uid, [('date_start', '>=', period_date_start), ('date_stop', '<=', period_date_stop)])
return self.search(cr, uid, [('date_start', '>=', period_date_start), ('date_stop', '<=', period_date_stop), ('special', '=', False)])
account_period()
class account_journal_period(osv.osv):
    # Link object between a journal and a period; carries its own state so a
    # journal can be closed/printed per period independently of the period.
    _name = "account.journal.period"
    _description = "Journal Period"
    def _icon_get(self, cr, uid, ids, field_name, arg=None, context=None):
        """Function field: map each record's state to a GTK stock icon name
        for the (legacy) client UI; unknown states fall back to STOCK_NEW."""
        result = {}.fromkeys(ids, 'STOCK_NEW')
        for r in self.read(cr, uid, ids, ['state']):
            result[r['id']] = {
                'draft': 'STOCK_NEW',
                'printed': 'STOCK_PRINT_PREVIEW',
                'done': 'STOCK_DIALOG_AUTHENTICATION',
            }.get(r['state'], 'STOCK_NEW')
        return result
    _columns = {
        'name': fields.char('Journal-Period Name', size=64, required=True),
        'journal_id': fields.many2one('account.journal', 'Journal', required=True, ondelete="cascade"),
        'period_id': fields.many2one('account.period', 'Period', required=True, ondelete="cascade"),
        'icon': fields.function(_icon_get, string='Icon', type='char', size=32),
        'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the journal period without removing it."),
        'state': fields.selection([('draft','Draft'), ('printed','Printed'), ('done','Done')], 'Status', required=True, readonly=True,
                                  help='When journal period is created. The status is \'Draft\'. If a report is printed it comes to \'Printed\' status. When all transactions are done, it comes in \'Done\' status.'),
        'fiscalyear_id': fields.related('period_id', 'fiscalyear_id', string='Fiscal Year', type='many2one', relation='account.fiscalyear'),
        'company_id': fields.related('journal_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True)
    }
    def _check(self, cr, uid, ids, context=None):
        """Raise if any journal item already exists for the record's
        journal/period pair (cheap raw-SQL existence test); else True."""
        for obj in self.browse(cr, uid, ids, context=context):
            cr.execute('select * from account_move_line where journal_id=%s and period_id=%s limit 1', (obj.journal_id.id, obj.period_id.id))
            res = cr.fetchall()
            if res:
                raise osv.except_osv(_('Error!'), _('You cannot modify/delete a journal with entries for this period.'))
        return True
    def write(self, cr, uid, ids, vals, context=None):
        """Write, but only when no journal items exist (see _check)."""
        self._check(cr, uid, ids, context=context)
        return super(account_journal_period, self).write(cr, uid, ids, vals, context=context)
    def create(self, cr, uid, vals, context=None):
        """Create the journal period, inheriting the state of its period."""
        period_id = vals.get('period_id',False)
        if period_id:
            period = self.pool.get('account.period').browse(cr, uid, period_id, context=context)
            vals['state']=period.state
        return super(account_journal_period, self).create(cr, uid, vals, context)
    def unlink(self, cr, uid, ids, context=None):
        """Delete, but only when no journal items exist (see _check)."""
        self._check(cr, uid, ids, context=context)
        return super(account_journal_period, self).unlink(cr, uid, ids, context=context)
    _defaults = {
        'state': 'draft',
        'active': True,
    }
    _order = "period_id"
account_journal_period()
class account_fiscalyear(osv.osv):
    """Extend account.fiscalyear with a link to the closing-entries
    journal period created by the fiscal year closing wizard."""
    _inherit = "account.fiscalyear"
    _description = "Fiscal Year"
    _columns = {
        'end_journal_period_id': fields.many2one(
            'account.journal.period', 'End of Year Entries Journal',
            readonly=True),
    }
    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a fiscal year without carrying over its periods or
        its closing journal period (a copy starts from a clean slate)."""
        if default is None:
            vals = {}
        else:
            vals = default.copy()
        vals['period_ids'] = []
        vals['end_journal_period_id'] = False
        return super(account_fiscalyear, self).copy(cr, uid, id, default=vals, context=context)
account_fiscalyear()
#----------------------------------------------------------
# Entries
#----------------------------------------------------------
class account_move(osv.osv):
    """Journal entry: a dated set of journal items (account.move.line) that
    must balance (sum of debits == sum of credits) before it can be posted."""
    _name = "account.move"
    _description = "Account Entry"
    _order = 'id desc'
    def account_move_prepare(self, cr, uid, journal_id, date=False, ref='', company_id=False, context=None):
        '''
        Prepares and returns a dictionary of values, ready to be passed to create() based on the parameters received.
        '''
        if not date:
            date = fields.date.today()
        period_obj = self.pool.get('account.period')
        if not company_id:
            user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
            company_id = user.company_id.id
        if context is None:
            context = {}
        #put the company in context to find the good period
        ctx = context.copy()
        ctx.update({'company_id': company_id, 'account_period_prefer_normal': True})
        return {
            'journal_id': journal_id,
            'date': date,
            'period_id': period_obj.find(cr, uid, date, context=ctx)[0],
            'ref': ref,
            'company_id': company_id,
        }
    def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=80):
        """
        Returns a list of tupples containing id, name, as internally it is called {def name_get}
        result format: {[(id, name), (id, name), ...]}
        @param cr: A database cursor
        @param user: ID of the user currently logged in
        @param name: name to search
        @param args: other arguments
        @param operator: default operator is 'ilike', it can be changed
        @param context: context arguments, like lang, time zone
        @param limit: Returns first 'n' ids of complete result, default is 80.
        @return: Returns a list of tuples containing id and name
        """
        if not args:
            args = []
        ids = []
        if name:
            ids += self.search(cr, user, [('name','ilike',name)]+args, limit=limit, context=context)
        if not ids and name and type(name) == int:
            ids += self.search(cr, user, [('id','=',name)]+args, limit=limit, context=context)
        if not ids:
            ids += self.search(cr, user, args, limit=limit, context=context)
        return self.name_get(cr, user, ids, context=context)
    def name_get(self, cursor, user, ids, context=None):
        """Draft moves display as '*<id>'; posted moves use their name."""
        if isinstance(ids, (int, long)):
            ids = [ids]
        if not ids:
            return []
        res = []
        data_move = self.pool.get('account.move').browse(cursor, user, ids, context=context)
        for move in data_move:
            if move.state=='draft':
                name = '*' + str(move.id)
            else:
                name = move.name
            res.append((move.id, name))
        return res
    def _get_period(self, cr, uid, context=None):
        """Default period: the current period (preferring non-special ones)."""
        ctx = dict(context or {}, account_period_prefer_normal=True)
        period_ids = self.pool.get('account.period').find(cr, uid, context=ctx)
        return period_ids[0]
    def _amount_compute(self, cr, uid, ids, name, args, context, where =''):
        """Function field: total debit of each move's lines (0.0 when none)."""
        if not ids: return {}
        cr.execute( 'SELECT move_id, SUM(debit) '\
                'FROM account_move_line '\
                'WHERE move_id IN %s '\
                'GROUP BY move_id', (tuple(ids),))
        result = dict(cr.fetchall())
        for id in ids:
            result.setdefault(id, 0.0)
        return result
    def _search_amount(self, cr, uid, obj, name, args, context):
        """fnct_search for 'amount': compare SUM(debit) per move in SQL.

        Successive conditions are intersected; an empty result yields the
        always-false domain [('id', '=', '0')].
        """
        ids = set()
        for cond in args:
            amount = cond[2]
            if isinstance(cond[2],(list,tuple)):
                if cond[1] in ['in','not in']:
                    amount = tuple(cond[2])
                else:
                    continue
            else:
                if cond[1] in ['=like', 'like', 'not like', 'ilike', 'not ilike', 'in', 'not in', 'child_of']:
                    continue
            # Security hardening: the operator is interpolated into the SQL
            # string below, so refuse anything outside this whitelist.
            if cond[1] not in ('=', '!=', '<>', '<', '<=', '>', '>=', 'in', 'not in'):
                continue
            cr.execute("select move_id from account_move_line group by move_id having sum(debit) %s %%s" % (cond[1]),(amount,))
            res_ids = set(id[0] for id in cr.fetchall())
            ids = ids and (ids & res_ids) or res_ids
        if ids:
            return [('id', 'in', tuple(ids))]
        return [('id', '=', '0')]
    def _get_move_from_lines(self, cr, uid, ids, context=None):
        """store trigger: moves impacted by changes on the given move lines."""
        line_obj = self.pool.get('account.move.line')
        return [line.move_id.id for line in line_obj.browse(cr, uid, ids, context=context)]
    _columns = {
        'name': fields.char('Number', size=64, required=True),
        'ref': fields.char('Reference', size=64),
        'period_id': fields.many2one('account.period', 'Period', required=True, states={'posted':[('readonly',True)]}),
        'journal_id': fields.many2one('account.journal', 'Journal', required=True, states={'posted':[('readonly',True)]}),
        'state': fields.selection([('draft','Unposted'), ('posted','Posted')], 'Status', required=True, readonly=True,
            help='All manually created new journal entries are usually in the status \'Unposted\', but you can set the option to skip that status on the related journal. In that case, they will behave as journal entries automatically created by the system on document validation (invoices, bank statements...) and will be created in \'Posted\' status.'),
        'line_id': fields.one2many('account.move.line', 'move_id', 'Entries', states={'posted':[('readonly',True)]}),
        'to_check': fields.boolean('To Review', help='Check this box if you are unsure of that journal entry and if you want to note it as \'to be reviewed\' by an accounting expert.'),
        'partner_id': fields.related('line_id', 'partner_id', type="many2one", relation="res.partner", string="Partner", store={
            _name: (lambda self, cr,uid,ids,c: ids, ['line_id'], 10),
            'account.move.line': (_get_move_from_lines, ['partner_id'],10)
        }),
        'amount': fields.function(_amount_compute, string='Amount', digits_compute=dp.get_precision('Account'), type='float', fnct_search=_search_amount),
        'date': fields.date('Date', required=True, states={'posted':[('readonly',True)]}, select=True),
        'narration':fields.text('Internal Note'),
        'company_id': fields.related('journal_id','company_id',type='many2one',relation='res.company',string='Company', store=True, readonly=True),
        'balance': fields.float('balance', digits_compute=dp.get_precision('Account'), help="This is a field only used for internal purpose and shouldn't be displayed"),
    }
    _defaults = {
        'name': '/',
        'state': 'draft',
        'period_id': _get_period,
        'date': fields.date.context_today,
        'company_id': lambda self, cr, uid, c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id,
    }
    def _check_centralisation(self, cursor, user, ids, context=None):
        """Constraint: at most one move per period on a centralized journal."""
        for move in self.browse(cursor, user, ids, context=context):
            if move.journal_id.centralisation:
                move_ids = self.search(cursor, user, [
                    ('period_id', '=', move.period_id.id),
                    ('journal_id', '=', move.journal_id.id),
                    ])
                if len(move_ids) > 1:
                    return False
        return True
    _constraints = [
        (_check_centralisation,
            'You cannot create more than one move per period on a centralized journal.',
            ['journal_id']),
    ]
    def post(self, cr, uid, ids, context=None):
        """Validate the moves, assign each its final name (invoice number or
        journal sequence) and switch them to the 'posted' state."""
        if context is None:
            context = {}
        invoice = context.get('invoice', False)
        valid_moves = self.validate(cr, uid, ids, context)
        if not valid_moves:
            raise osv.except_osv(_('Error!'), _('You cannot validate a non-balanced entry.\nMake sure you have configured payment terms properly.\nThe latest payment term line should be of the "Balance" type.'))
        obj_sequence = self.pool.get('ir.sequence')
        for move in self.browse(cr, uid, valid_moves, context=context):
            if move.name =='/':
                new_name = False
                journal = move.journal_id
                if invoice and invoice.internal_number:
                    new_name = invoice.internal_number
                else:
                    if journal.sequence_id:
                        # use the fiscal-year-aware subsequence of the journal
                        c = {'fiscalyear_id': move.period_id.fiscalyear_id.id}
                        new_name = obj_sequence.next_by_id(cr, uid, journal.sequence_id.id, c)
                    else:
                        raise osv.except_osv(_('Error!'), _('Please define a sequence on the journal.'))
                if new_name:
                    self.write(cr, uid, [move.id], {'name':new_name})
        cr.execute('UPDATE account_move '\
                   'SET state=%s '\
                   'WHERE id IN %s',
                   ('posted', tuple(valid_moves),))
        return True
    def button_validate(self, cursor, user, ids, context=None):
        """Ensure every line's account belongs to the same chart of accounts
        (same topmost ancestor), then post the moves."""
        for move in self.browse(cursor, user, ids, context=context):
            # check that all accounts have the same topmost ancestor
            top_common = None
            for line in move.line_id:
                account = line.account_id
                top_account = account
                while top_account.parent_id:
                    top_account = top_account.parent_id
                if not top_common:
                    top_common = top_account
                elif top_account.id != top_common.id:
                    raise osv.except_osv(_('Error!'),
                                         _('You cannot validate this journal entry because account "%s" does not belong to chart of accounts "%s".') % (account.name, top_common.name))
        return self.post(cursor, user, ids, context=context)
    def button_cancel(self, cr, uid, ids, context=None):
        """Reset moves to draft; allowed only on journals with update_posted."""
        for line in self.browse(cr, uid, ids, context=context):
            if not line.journal_id.update_posted:
                raise osv.except_osv(_('Error!'), _('You cannot modify a posted entry of this journal.\nFirst you should set the journal to allow cancelling entries.'))
        if ids:
            cr.execute('UPDATE account_move '\
                       'SET state=%s '\
                       'WHERE id IN %s', ('draft', tuple(ids),))
        return True
    def write(self, cr, uid, ids, vals, context=None):
        """Write with per-line validation deferred ('novalidate'), then run a
        single validate() pass over the whole moves."""
        if context is None:
            context = {}
        c = context.copy()
        c['novalidate'] = True
        result = super(account_move, self).write(cr, uid, ids, vals, c)
        self.validate(cr, uid, ids, context=context)
        return result
    #
    # TODO: Check if period is closed !
    #
    def create(self, cr, uid, vals, context=None):
        """Create a move, propagating journal/period to its lines, then
        validate it and auto-post when the journal has entry_posted set.

        Fix: the three line loops below used vals['line_id'] unguarded and
        raised KeyError when a move was created without lines.
        """
        if context is None:
            context = {}
        if 'line_id' in vals and context.get('copy'):
            for l in vals['line_id']:
                if not l[0]:
                    # strip copy-sensitive values from (0, 0, {...}) commands
                    l[2].update({
                        'reconcile_id':False,
                        'reconcile_partial_id':False,
                        'analytic_lines':False,
                        'invoice':False,
                        'ref':False,
                        'balance':False,
                        'account_tax_id':False,
                        'statement_id': False,
                    })
        if 'journal_id' in vals and vals.get('journal_id', False):
            for l in vals.get('line_id') or []:
                if not l[0]:
                    l[2]['journal_id'] = vals['journal_id']
            context['journal_id'] = vals['journal_id']
        if 'period_id' in vals:
            for l in vals.get('line_id') or []:
                if not l[0]:
                    l[2]['period_id'] = vals['period_id']
            context['period_id'] = vals['period_id']
        else:
            default_period = self._get_period(cr, uid, context)
            for l in vals.get('line_id') or []:
                if not l[0]:
                    l[2]['period_id'] = default_period
            context['period_id'] = default_period
        if vals.get('line_id', False):
            c = context.copy()
            c['novalidate'] = True
            c['period_id'] = vals['period_id'] if 'period_id' in vals else self._get_period(cr, uid, context)
            c['journal_id'] = vals['journal_id']
            if 'date' in vals: c['date'] = vals['date']
            result = super(account_move, self).create(cr, uid, vals, c)
            tmp = self.validate(cr, uid, [result], context)
            journal = self.pool.get('account.journal').browse(cr, uid, vals['journal_id'], context)
            if journal.entry_posted and tmp:
                self.button_validate(cr,uid, [result], context)
        else:
            result = super(account_move, self).create(cr, uid, vals, context)
        return result
    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a move as an unnumbered draft; line sanitising is done
        in create() via the 'copy' context flag."""
        default = {} if default is None else default.copy()
        context = {} if context is None else context.copy()
        default.update({
            'state':'draft',
            'ref': False,
            'name':'/',
        })
        context.update({
            'copy':True
        })
        return super(account_move, self).copy(cr, uid, id, default, context)
    def unlink(self, cr, uid, ids, context=None, check=True):
        """Delete draft moves and their lines; refuse posted moves and moves
        still linked to an invoice."""
        if context is None:
            context = {}
        if isinstance(ids, (int, long)):
            ids = [ids]
        toremove = []
        obj_move_line = self.pool.get('account.move.line')
        for move in self.browse(cr, uid, ids, context=context):
            if move['state'] != 'draft':
                raise osv.except_osv(_('User Error!'),
                        _('You cannot delete a posted journal entry "%s".') % \
                            move['name'])
            for line in move.line_id:
                if line.invoice:
                    raise osv.except_osv(_('User Error!'),
                            _("Move cannot be deleted if linked to an invoice. (Invoice: %s - Move ID:%s)") % \
                                    (line.invoice.number,move.name))
            line_ids = map(lambda x: x.id, move.line_id)
            context['journal_id'] = move.journal_id.id
            context['period_id'] = move.period_id.id
            obj_move_line._update_check(cr, uid, line_ids, context)
            obj_move_line.unlink(cr, uid, line_ids, context=context)
            toremove.append(move.id)
        result = super(account_move, self).unlink(cr, uid, toremove, context)
        return result
    def _compute_balance(self, cr, uid, id, context=None):
        """Return sum(debit - credit) over the move's lines."""
        move = self.browse(cr, uid, id, context=context)
        amount = 0
        for line in move.line_id:
            amount+= (line.debit - line.credit)
        return amount
    def _centralise(self, cr, uid, move, mode, context=None):
        """Balance the move by writing the counterpart amount onto the
        journal's centralisation line for *mode* (created on demand), and
        mirror any foreign-currency amounts on 'currency' adjustment lines."""
        assert mode in ('debit', 'credit'), 'Invalid Mode' #to prevent sql injection
        currency_obj = self.pool.get('res.currency')
        if context is None:
            context = {}
        if mode=='credit':
            account_id = move.journal_id.default_debit_account_id.id
            mode2 = 'debit'
            if not account_id:
                raise osv.except_osv(_('User Error!'),
                        _('There is no default debit account defined \n' \
                                'on journal "%s".') % move.journal_id.name)
        else:
            account_id = move.journal_id.default_credit_account_id.id
            mode2 = 'credit'
            if not account_id:
                raise osv.except_osv(_('User Error!'),
                        _('There is no default credit account defined \n' \
                                'on journal "%s".') % move.journal_id.name)
        # find the first line of this move with the current mode
        # or create it if it doesn't exist
        cr.execute('select id from account_move_line where move_id=%s and centralisation=%s limit 1', (move.id, mode))
        res = cr.fetchone()
        if res:
            line_id = res[0]
        else:
            context.update({'journal_id': move.journal_id.id, 'period_id': move.period_id.id})
            line_id = self.pool.get('account.move.line').create(cr, uid, {
                'name': _(mode.capitalize()+' Centralisation'),
                'centralisation': mode,
                'partner_id': False,
                'account_id': account_id,
                'move_id': move.id,
                'journal_id': move.journal_id.id,
                'period_id': move.period_id.id,
                'date': move.period_id.date_stop,
                'debit': 0.0,
                'credit': 0.0,
            }, context)
        # find the first line of this move with the other mode
        # so that we can exclude it from our calculation
        cr.execute('select id from account_move_line where move_id=%s and centralisation=%s limit 1', (move.id, mode2))
        res = cr.fetchone()
        if res:
            line_id2 = res[0]
        else:
            line_id2 = 0
        cr.execute('SELECT SUM(%s) FROM account_move_line WHERE move_id=%%s AND id!=%%s' % (mode,), (move.id, line_id2))
        result = cr.fetchone()[0] or 0.0
        cr.execute('update account_move_line set '+mode2+'=%s where id=%s', (result, line_id))
        #adjust also the amount in currency if needed
        cr.execute("select currency_id, sum(amount_currency) as amount_currency from account_move_line where move_id = %s and currency_id is not null group by currency_id", (move.id,))
        for row in cr.dictfetchall():
            currency_id = currency_obj.browse(cr, uid, row['currency_id'], context=context)
            if not currency_obj.is_zero(cr, uid, currency_id, row['amount_currency']):
                amount_currency = row['amount_currency'] * -1
                account_id = amount_currency > 0 and move.journal_id.default_debit_account_id.id or move.journal_id.default_credit_account_id.id
                # BUG FIX: the original query read "currency_id = %slimit 1"
                # (missing space), producing invalid SQL such as "= 3limit 1".
                cr.execute('select id from account_move_line where move_id=%s and centralisation=\'currency\' and currency_id = %s limit 1', (move.id, row['currency_id']))
                res = cr.fetchone()
                if res:
                    cr.execute('update account_move_line set amount_currency=%s , account_id=%s where id=%s', (amount_currency, account_id, res[0]))
                else:
                    context.update({'journal_id': move.journal_id.id, 'period_id': move.period_id.id})
                    line_id = self.pool.get('account.move.line').create(cr, uid, {
                        'name': _('Currency Adjustment'),
                        'centralisation': 'currency',
                        'partner_id': False,
                        'account_id': account_id,
                        'move_id': move.id,
                        'journal_id': move.journal_id.id,
                        'period_id': move.period_id.id,
                        'date': move.period_id.date_stop,
                        'debit': 0.0,
                        'credit': 0.0,
                        'currency_id': row['currency_id'],
                        'amount_currency': amount_currency,
                    }, context)
        return True
    #
    # Validate a balanced move. If it is a centralised journal, create a move.
    #
    def validate(self, cr, uid, ids, context=None):
        """Validate balanced moves (or centralise unbalanced ones on
        centralised journals) and (re)create their analytic lines.

        Returns the list of valid move ids, or False when none are valid.
        """
        if context and ('__last_update' in context):
            del context['__last_update']
        valid_moves = [] #Maintains a list of moves which can be responsible to create analytic entries
        obj_analytic_line = self.pool.get('account.analytic.line')
        obj_move_line = self.pool.get('account.move.line')
        for move in self.browse(cr, uid, ids, context):
            # Unlink old analytic lines on move_lines
            for obj_line in move.line_id:
                for obj in obj_line.analytic_lines:
                    obj_analytic_line.unlink(cr,uid,obj.id)
            journal = move.journal_id
            amount = 0
            line_ids = []
            line_draft_ids = []
            company_id = None
            for line in move.line_id:
                amount += line.debit - line.credit
                line_ids.append(line.id)
                if line.state=='draft':
                    line_draft_ids.append(line.id)
                if not company_id:
                    company_id = line.account_id.company_id.id
                if not company_id == line.account_id.company_id.id:
                    raise osv.except_osv(_('Error!'), _("Cannot create moves for different companies."))
                if line.account_id.currency_id and line.currency_id:
                    if line.account_id.currency_id.id != line.currency_id.id and (line.account_id.currency_id.id != line.account_id.company_id.currency_id.id):
                        # BUG FIX: the original literal had no %s placeholders,
                        # so the "%" formatting raised TypeError instead of
                        # showing the intended error to the user.
                        raise osv.except_osv(_('Error!'), _('Cannot create move with a currency different from the one of the account "%s %s".') % (line.account_id.code, line.account_id.name))
            if abs(amount) < 10 ** -4:
                # If the move is balanced
                # Add to the list of valid moves
                # (analytic lines will be created later for valid moves)
                valid_moves.append(move)
                # Check whether the move lines are confirmed
                if not line_draft_ids:
                    continue
                # Update the move lines (set them as valid)
                obj_move_line.write(cr, uid, line_draft_ids, {
                    'state': 'valid'
                }, context, check=False)
                # NOTE(review): account/account2 appear never to be populated
                # in this block, so the tax-propagation loop below looks
                # inert — confirm against the full file history.
                account = {}
                account2 = {}
                if journal.type in ('purchase','sale'):
                    for line in move.line_id:
                        code = amount = 0
                        key = (line.account_id.id, line.tax_code_id.id)
                        if key in account2:
                            code = account2[key][0]
                            amount = account2[key][1] * (line.debit + line.credit)
                        elif line.account_id.id in account:
                            code = account[line.account_id.id][0]
                            amount = account[line.account_id.id][1] * (line.debit + line.credit)
                        if (code or amount) and not (line.tax_code_id or line.tax_amount):
                            obj_move_line.write(cr, uid, [line.id], {
                                'tax_code_id': code,
                                'tax_amount': amount
                            }, context, check=False)
            elif journal.centralisation:
                # If the move is not balanced, it must be centralised...
                # Add to the list of valid moves
                # (analytic lines will be created later for valid moves)
                valid_moves.append(move)
                #
                # Update the move lines (set them as valid)
                #
                self._centralise(cr, uid, move, 'debit', context=context)
                self._centralise(cr, uid, move, 'credit', context=context)
                obj_move_line.write(cr, uid, line_draft_ids, {
                    'state': 'valid'
                }, context, check=False)
            else:
                # We can't validate it (it's unbalanced)
                # Setting the lines as draft
                not_draft_line_ids = list(set(line_ids) - set(line_draft_ids))
                if not_draft_line_ids:
                    obj_move_line.write(cr, uid, not_draft_line_ids, {
                        'state': 'draft'
                    }, context, check=False)
        # Create analytic lines for the valid moves
        for record in valid_moves:
            obj_move_line.create_analytic_lines(cr, uid, [line.id for line in record.line_id], context)
        valid_moves = [move.id for move in valid_moves]
        return len(valid_moves) > 0 and valid_moves or False
account_move()
class account_move_reconcile(osv.osv):
    """Full or partial reconciliation grouping journal items together."""
    _name = "account.move.reconcile"
    _description = "Account Reconciliation"
    _columns = {
        'name': fields.char('Name', size=64, required=True),
        'type': fields.char('Type', size=16, required=True),
        'line_id': fields.one2many('account.move.line', 'reconcile_id', 'Entry Lines'),
        'line_partial_ids': fields.one2many('account.move.line', 'reconcile_partial_id', 'Partial Entry lines'),
        'create_date': fields.date('Creation date', readonly=True),
        'opening_reconciliation': fields.boolean('Opening Entries Reconciliation', help="Is this reconciliation produced by the opening of a new fiscal year ?."),
    }
    _defaults = {
        'name': lambda self,cr,uid,ctx=None: self.pool.get('ir.sequence').get(cr, uid, 'account.reconcile', context=ctx) or '/',
    }
    # You cannot unlink a reconciliation if it is an opening_reconciliation
    # one; you should use the generate opening entries wizard for that
    def unlink(self, cr, uid, ids, context=None):
        """Delete reconciliations, refusing those created by the fiscal year
        opening/closing process.

        Fix: the original error message used a backslash line continuation
        *inside* the string literal, which embedded a long run of spaces in
        the user-visible text, and read "they has been".
        """
        for move_rec in self.browse(cr, uid, ids, context=context):
            if move_rec.opening_reconciliation:
                raise osv.except_osv(_('Error!'), _('You cannot unreconcile journal items if they have been generated by the opening/closing fiscal year process.'))
        return super(account_move_reconcile, self).unlink(cr, uid, ids, context=context)
    # Look in the line_id and line_partial_ids to ensure the partner is the same or empty
    # on all lines. We allow that only for opening/closing period
    def _check_same_partner(self, cr, uid, ids, context=None):
        """Constraint: all receivable/payable lines of a (non-opening)
        reconciliation must share the partner of the first line."""
        for reconcile in self.browse(cr, uid, ids, context=context):
            move_lines = []
            if not reconcile.opening_reconciliation:
                if reconcile.line_id:
                    first_partner = reconcile.line_id[0].partner_id.id
                    move_lines = reconcile.line_id
                elif reconcile.line_partial_ids:
                    first_partner = reconcile.line_partial_ids[0].partner_id.id
                    move_lines = reconcile.line_partial_ids
                if any([(line.account_id.type in ('receivable', 'payable') and line.partner_id.id != first_partner) for line in move_lines]):
                    return False
        return True
    _constraints = [
        (_check_same_partner, 'You can only reconcile journal items with the same partner.', ['line_id']),
    ]
    def reconcile_partial_check(self, cr, uid, ids, type='auto', context=None):
        """Promote a partial reconciliation to a full one when the partial
        lines now balance (in currency when the account has one)."""
        total = 0.0
        for rec in self.browse(cr, uid, ids, context=context):
            for line in rec.line_partial_ids:
                if line.account_id.currency_id:
                    total += line.amount_currency
                else:
                    total += (line.debit or 0.0) - (line.credit or 0.0)
        if not total:
            self.pool.get('account.move.line').write(cr, uid,
                map(lambda x: x.id, rec.line_partial_ids),
                {'reconcile_id': rec.id }
            )
        return True
    def name_get(self, cr, uid, ids, context=None):
        """Append the outstanding partial balance to the name when non-zero."""
        if not ids:
            return []
        result = []
        for r in self.browse(cr, uid, ids, context=context):
            total = reduce(lambda y,t: (t.debit or 0.0) - (t.credit or 0.0) + y, r.line_partial_ids, 0.0)
            if total:
                name = '%s (%.2f)' % (r.name, total)
                result.append((r.id,name))
            else:
                result.append((r.id,r.name))
        return result
account_move_reconcile()
#----------------------------------------------------------
# Tax
#----------------------------------------------------------
"""
a documenter
child_depend: la taxe depend des taxes filles
"""
class account_tax_code(osv.osv):
"""
A code for the tax object.
This code is used for some tax declarations.
"""
    def _sum(self, cr, uid, ids, name, args, context, where ='', where_params=()):
        """Shared engine for the 'sum' function fields: total tax_amount per
        tax code, including all descendant codes weighted by their sign.

        With context['based_on'] == 'payments', only lines whose invoice is
        paid (or that have no invoice) are counted; otherwise all lines.
        `where`/`where_params` let callers append period/state filters.
        """
        # Include every descendant code so child amounts can be aggregated.
        parent_ids = tuple(self.search(cr, uid, [('parent_id', 'child_of', ids)]))
        if context.get('based_on', 'invoices') == 'payments':
            cr.execute('SELECT line.tax_code_id, sum(line.tax_amount) \
                    FROM account_move_line AS line, \
                    account_move AS move \
                    LEFT JOIN account_invoice invoice ON \
                        (invoice.move_id = move.id) \
                    WHERE line.tax_code_id IN %s '+where+' \
                    AND move.id = line.move_id \
                    AND ((invoice.state = \'paid\') \
                        OR (invoice.id IS NULL)) \
                    GROUP BY line.tax_code_id',
                    (parent_ids,) + where_params)
        else:
            cr.execute('SELECT line.tax_code_id, sum(line.tax_amount) \
                    FROM account_move_line AS line, \
                    account_move AS move \
                    WHERE line.tax_code_id IN %s '+where+' \
                    AND move.id = line.move_id \
                    GROUP BY line.tax_code_id',
                    (parent_ids,) + where_params)
        res=dict(cr.fetchall())
        obj_precision = self.pool.get('decimal.precision')
        res2 = {}
        for record in self.browse(cr, uid, ids, context=context):
            # Recursively fold child code amounts into the parent, each
            # child weighted by its 'sign' coefficient.
            def _rec_get(record):
                amount = res.get(record.id, 0.0)
                for rec in record.child_ids:
                    amount += _rec_get(rec) * rec.sign
                return amount
            res2[record.id] = round(_rec_get(record), obj_precision.precision_get(cr, uid, 'Account'))
        return res2
    def _sum_year(self, cr, uid, ids, name, args, context=None):
        """Function field 'sum': tax code totals over a whole fiscal year.

        Uses context['fiscalyear_id'] when given, else the current fiscal
        year(s). context['state'] != 'all' restricts to posted moves.
        """
        if context is None:
            context = {}
        move_state = ('posted', )
        if context.get('state', 'all') == 'all':
            move_state = ('draft', 'posted', )
        if context.get('fiscalyear_id', False):
            fiscalyear_id = [context['fiscalyear_id']]
        else:
            # exception=False: no current fiscal year just yields no filter
            fiscalyear_id = self.pool.get('account.fiscalyear').finds(cr, uid, exception=False)
        where = ''
        where_params = ()
        if fiscalyear_id:
            # Collect all period ids of the selected fiscal year(s).
            pids = []
            for fy in fiscalyear_id:
                pids += map(lambda x: str(x.id), self.pool.get('account.fiscalyear').browse(cr, uid, fy).period_ids)
            if pids:
                where = ' AND line.period_id IN %s AND move.state IN %s '
                where_params = (tuple(pids), move_state)
        return self._sum(cr, uid, ids, name, args, context,
                where=where, where_params=where_params)
def _sum_period(self, cr, uid, ids, name, args, context):
if context is None:
context = {}
move_state = ('posted', )
if context.get('state', False) == 'all':
move_state = ('draft', 'posted', )
if context.get('period_id', False):
period_id = context['period_id']
else:
ctx = dict(context, account_period_prefer_normal=True)
period_id = self.pool.get('account.period').find(cr, uid, context=ctx)
if not period_id:
return dict.fromkeys(ids, 0.0)
period_id = period_id[0]
return self._sum(cr, uid, ids, name, args, context,
where=' AND line.period_id=%s AND move.state IN %s', where_params=(period_id, move_state))
_name = 'account.tax.code'
_description = 'Tax Code'
_rec_name = 'code'
_columns = {
'name': fields.char('Tax Case Name', size=64, required=True, translate=True),
'code': fields.char('Case Code', size=64),
'info': fields.text('Description'),
'sum': fields.function(_sum_year, string="Year Sum"),
'sum_period': fields.function(_sum_period, string="Period Sum"),
'parent_id': fields.many2one('account.tax.code', 'Parent Code', select=True),
'child_ids': fields.one2many('account.tax.code', 'parent_id', 'Child Codes'),
'line_ids': fields.one2many('account.move.line', 'tax_code_id', 'Lines'),
'company_id': fields.many2one('res.company', 'Company', required=True),
'sign': fields.float('Coefficent for parent', required=True, help='You can specify here the coefficient that will be used when consolidating the amount of this case into its parent. For example, set 1/-1 if you want to add/substract it.'),
'notprintable':fields.boolean("Not Printable in Invoice", help="Check this box if you don't want any tax related to this tax code to appear on invoices"),
'sequence': fields.integer('Sequence', help="Determine the display order in the report 'Accounting \ Reporting \ Generic Reporting \ Taxes \ Taxes Report'"),
}
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=80):
if not args:
args = []
if context is None:
context = {}
ids = self.search(cr, user, ['|',('name',operator,name),('code',operator,name)] + args, limit=limit, context=context)
return self.name_get(cr, user, ids, context)
def name_get(self, cr, uid, ids, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
if not ids:
return []
if isinstance(ids, (int, long)):
ids = [ids]
reads = self.read(cr, uid, ids, ['name','code'], context, load='_classic_write')
return [(x['id'], (x['code'] and (x['code'] + ' - ') or '') + x['name']) \
for x in reads]
def _default_company(self, cr, uid, context=None):
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
if user.company_id:
return user.company_id.id
return self.pool.get('res.company').search(cr, uid, [('parent_id', '=', False)])[0]
_defaults = {
'company_id': _default_company,
'sign': 1.0,
'notprintable': False,
}
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
default = default.copy()
default.update({'line_ids': []})
return super(account_tax_code, self).copy(cr, uid, id, default, context)
_check_recursion = check_cycle
_constraints = [
(_check_recursion, 'Error!\nYou cannot create recursive accounts.', ['parent_id'])
]
_order = 'code'
account_tax_code()
def get_precision_tax():
    """Return a callable computing the float digits used for tax amounts:
    the configured 'Account' decimal precision plus three extra places."""
    def change_digit_tax(cr):
        # Resolved lazily per-cursor so configuration changes are honoured.
        precision = pooler.get_pool(cr.dbname).get('decimal.precision').precision_get(cr, SUPERUSER_ID, 'Account')
        return (16, precision + 3)
    return change_digit_tax
class account_tax(osv.osv):
"""
A tax object.
Type: percent, fixed, none, code
PERCENT: tax = price * amount
FIXED: tax = price + amount
NONE: no tax line
CODE: execute python code. localcontext = {'price_unit':pu}
return result in the context
Ex: result=round(price_unit*0.21,4)
"""
def copy_data(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
name = self.read(cr, uid, id, ['name'], context=context)['name']
default = default.copy()
default.update({'name': name + _(' (Copy)')})
return super(account_tax, self).copy_data(cr, uid, id, default=default, context=context)
_name = 'account.tax'
_description = 'Tax'
_columns = {
'name': fields.char('Tax Name', size=64, required=True, translate=True, help="This name will be displayed on reports"),
'sequence': fields.integer('Sequence', required=True, help="The sequence field is used to order the tax lines from the lowest sequences to the higher ones. The order is important if you have a tax with several tax children. In this case, the evaluation order is important."),
'amount': fields.float('Amount', required=True, digits_compute=get_precision_tax(), help="For taxes of type percentage, enter % ratio between 0-1."),
'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the tax without removing it."),
'type': fields.selection( [('percent','Percentage'), ('fixed','Fixed Amount'), ('none','None'), ('code','Python Code'), ('balance','Balance')], 'Tax Type', required=True,
help="The computation method for the tax amount."),
'applicable_type': fields.selection( [('true','Always'), ('code','Given by Python Code')], 'Applicability', required=True,
help="If not applicable (computed through a Python code), the tax won't appear on the invoice."),
'domain':fields.char('Domain', size=32, help="This field is only used if you develop your own module allowing developers to create specific taxes in a custom domain."),
'account_collected_id':fields.many2one('account.account', 'Invoice Tax Account', help="Set the account that will be set by default on invoice tax lines for invoices. Leave empty to use the expense account."),
'account_paid_id':fields.many2one('account.account', 'Refund Tax Account', help="Set the account that will be set by default on invoice tax lines for refunds. Leave empty to use the expense account."),
'account_analytic_collected_id':fields.many2one('account.analytic.account', 'Invoice Tax Analytic Account', help="Set the analytic account that will be used by default on the invoice tax lines for invoices. Leave empty if you don't want to use an analytic account on the invoice tax lines by default."),
'account_analytic_paid_id':fields.many2one('account.analytic.account', 'Refund Tax Analytic Account', help="Set the analytic account that will be used by default on the invoice tax lines for refunds. Leave empty if you don't want to use an analytic account on the invoice tax lines by default."),
'parent_id':fields.many2one('account.tax', 'Parent Tax Account', select=True),
'child_ids':fields.one2many('account.tax', 'parent_id', 'Child Tax Accounts'),
'child_depend':fields.boolean('Tax on Children', help="Set if the tax computation is based on the computation of child taxes rather than on the total amount."),
'python_compute':fields.text('Python Code'),
'python_compute_inv':fields.text('Python Code (reverse)'),
'python_applicable':fields.text('Python Code'),
#
# Fields used for the Tax declaration
#
'base_code_id': fields.many2one('account.tax.code', 'Account Base Code', help="Use this code for the tax declaration."),
'tax_code_id': fields.many2one('account.tax.code', 'Account Tax Code', help="Use this code for the tax declaration."),
'base_sign': fields.float('Base Code Sign', help="Usually 1 or -1."),
'tax_sign': fields.float('Tax Code Sign', help="Usually 1 or -1."),
# Same fields for refund invoices
'ref_base_code_id': fields.many2one('account.tax.code', 'Refund Base Code', help="Use this code for the tax declaration."),
'ref_tax_code_id': fields.many2one('account.tax.code', 'Refund Tax Code', help="Use this code for the tax declaration."),
'ref_base_sign': fields.float('Base Code Sign', help="Usually 1 or -1."),
'ref_tax_sign': fields.float('Tax Code Sign', help="Usually 1 or -1."),
'include_base_amount': fields.boolean('Included in base amount', help="Indicates if the amount of tax must be included in the base amount for the computation of the next taxes"),
'company_id': fields.many2one('res.company', 'Company', required=True),
'description': fields.char('Tax Code'),
'price_include': fields.boolean('Tax Included in Price', help="Check this if the price you use on the product and invoices includes this tax."),
'type_tax_use': fields.selection([('sale','Sale'),('purchase','Purchase'),('all','All')], 'Tax Application', required=True)
}
_sql_constraints = [
('name_company_uniq', 'unique(name, company_id)', 'Tax Name must be unique per company!'),
]
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=80):
"""
Returns a list of tupples containing id, name, as internally it is called {def name_get}
result format: {[(id, name), (id, name), ...]}
@param cr: A database cursor
@param user: ID of the user currently logged in
@param name: name to search
@param args: other arguments
@param operator: default operator is 'ilike', it can be changed
@param context: context arguments, like lang, time zone
@param limit: Returns first 'n' ids of complete result, default is 80.
@return: Returns a list of tupples containing id and name
"""
if not args:
args = []
if context is None:
context = {}
ids = []
if name:
ids = self.search(cr, user, [('description', '=', name)] + args, limit=limit, context=context)
if not ids:
ids = self.search(cr, user, [('name', operator, name)] + args, limit=limit, context=context)
else:
ids = self.search(cr, user, args, limit=limit, context=context or {})
return self.name_get(cr, user, ids, context=context)
def write(self, cr, uid, ids, vals, context=None):
if vals.get('type', False) and vals['type'] in ('none', 'code'):
vals.update({'amount': 0.0})
return super(account_tax, self).write(cr, uid, ids, vals, context=context)
def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
journal_pool = self.pool.get('account.journal')
if context and context.has_key('type'):
if context.get('type') in ('out_invoice','out_refund'):
args += [('type_tax_use','in',['sale','all'])]
elif context.get('type') in ('in_invoice','in_refund'):
args += [('type_tax_use','in',['purchase','all'])]
if context and context.has_key('journal_id'):
journal = journal_pool.browse(cr, uid, context.get('journal_id'))
if journal.type in ('sale', 'purchase'):
args += [('type_tax_use','in',[journal.type,'all'])]
return super(account_tax, self).search(cr, uid, args, offset, limit, order, context, count)
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
res = []
for record in self.read(cr, uid, ids, ['description','name'], context=context):
name = record['description'] and record['description'] or record['name']
res.append((record['id'],name ))
return res
def _default_company(self, cr, uid, context=None):
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
if user.company_id:
return user.company_id.id
return self.pool.get('res.company').search(cr, uid, [('parent_id', '=', False)])[0]
_defaults = {
'python_compute': '''# price_unit\n# or False\n# product: product.product object or None\n# partner: res.partner object or None\n\nresult = price_unit * 0.10''',
'python_compute_inv': '''# price_unit\n# product: product.product object or False\n\nresult = price_unit * 0.10''',
'applicable_type': 'true',
'type': 'percent',
'amount': 0,
'price_include': 0,
'active': 1,
'type_tax_use': 'all',
'sequence': 1,
'ref_tax_sign': 1,
'ref_base_sign': 1,
'tax_sign': 1,
'base_sign': 1,
'include_base_amount': False,
'company_id': _default_company,
}
_order = 'sequence'
def _applicable(self, cr, uid, taxes, price_unit, product=None, partner=None):
res = []
for tax in taxes:
if tax.applicable_type=='code':
localdict = {'price_unit':price_unit, 'product':product, 'partner':partner}
exec tax.python_applicable in localdict
if localdict.get('result', False):
res.append(tax)
else:
res.append(tax)
return res
def _unit_compute(self, cr, uid, taxes, price_unit, product=None, partner=None, quantity=0):
taxes = self._applicable(cr, uid, taxes, price_unit ,product, partner)
res = []
cur_price_unit=price_unit
for tax in taxes:
# we compute the amount for the current tax object and append it to the result
data = {'id':tax.id,
'name':tax.description and tax.description + " - " + tax.name or tax.name,
'account_collected_id':tax.account_collected_id.id,
'account_paid_id':tax.account_paid_id.id,
'account_analytic_collected_id': tax.account_analytic_collected_id.id,
'account_analytic_paid_id': tax.account_analytic_paid_id.id,
'base_code_id': tax.base_code_id.id,
'ref_base_code_id': tax.ref_base_code_id.id,
'sequence': tax.sequence,
'base_sign': tax.base_sign,
'tax_sign': tax.tax_sign,
'ref_base_sign': tax.ref_base_sign,
'ref_tax_sign': tax.ref_tax_sign,
'price_unit': cur_price_unit,
'tax_code_id': tax.tax_code_id.id,
'ref_tax_code_id': tax.ref_tax_code_id.id,
}
res.append(data)
if tax.type=='percent':
amount = cur_price_unit * tax.amount
data['amount'] = amount
elif tax.type=='fixed':
data['amount'] = tax.amount
data['tax_amount']=quantity
# data['amount'] = quantity
elif tax.type=='code':
localdict = {'price_unit':cur_price_unit, 'product':product, 'partner':partner}
exec tax.python_compute in localdict
amount = localdict['result']
data['amount'] = amount
elif tax.type=='balance':
data['amount'] = cur_price_unit - reduce(lambda x,y: y.get('amount',0.0)+x, res, 0.0)
data['balance'] = cur_price_unit
amount2 = data.get('amount', 0.0)
if tax.child_ids:
if tax.child_depend:
latest = res.pop()
amount = amount2
child_tax = self._unit_compute(cr, uid, tax.child_ids, amount, product, partner, quantity)
res.extend(child_tax)
if tax.child_depend:
for r in res:
for name in ('base','ref_base'):
if latest[name+'_code_id'] and latest[name+'_sign'] and not r[name+'_code_id']:
r[name+'_code_id'] = latest[name+'_code_id']
r[name+'_sign'] = latest[name+'_sign']
r['price_unit'] = latest['price_unit']
latest[name+'_code_id'] = False
for name in ('tax','ref_tax'):
if latest[name+'_code_id'] and latest[name+'_sign'] and not r[name+'_code_id']:
r[name+'_code_id'] = latest[name+'_code_id']
r[name+'_sign'] = latest[name+'_sign']
r['amount'] = data['amount']
latest[name+'_code_id'] = False
if tax.include_base_amount:
cur_price_unit+=amount2
return res
def compute_all(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None, force_excluded=False):
"""
:param force_excluded: boolean used to say that we don't want to consider the value of field price_include of
tax. It's used in encoding by line where you don't matter if you encoded a tax with that boolean to True or
False
RETURN: {
'total': 0.0, # Total without taxes
'total_included: 0.0, # Total with taxes
'taxes': [] # List of taxes, see compute for the format
}
"""
# By default, for each tax, tax amount will first be computed
# and rounded at the 'Account' decimal precision for each
# PO/SO/invoice line and then these rounded amounts will be
# summed, leading to the total amount for that tax. But, if the
# company has tax_calculation_rounding_method = round_globally,
# we still follow the same method, but we use a much larger
# precision when we round the tax amount for each line (we use
# the 'Account' decimal precision + 5), and that way it's like
# rounding after the sum of the tax amounts of each line
precision = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account')
tax_compute_precision = precision
if taxes and taxes[0].company_id.tax_calculation_rounding_method == 'round_globally':
tax_compute_precision += 5
totalin = totalex = float_round(price_unit * quantity, precision)
tin = []
tex = []
for tax in taxes:
if not tax.price_include or force_excluded:
tex.append(tax)
else:
tin.append(tax)
tin = self.compute_inv(cr, uid, tin, price_unit, quantity, product=product, partner=partner, precision=tax_compute_precision)
for r in tin:
totalex -= r.get('amount', 0.0)
totlex_qty = 0.0
try:
totlex_qty = totalex/quantity
except:
pass
tex = self._compute(cr, uid, tex, totlex_qty, quantity, product=product, partner=partner, precision=tax_compute_precision)
for r in tex:
totalin += r.get('amount', 0.0)
return {
'total': totalex,
'total_included': totalin,
'taxes': tin + tex
}
def compute(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None):
_logger.warning("Deprecated, use compute_all(...)['taxes'] instead of compute(...) to manage prices with tax included.")
return self._compute(cr, uid, taxes, price_unit, quantity, product, partner)
def _compute(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None, precision=None):
"""
Compute tax values for given PRICE_UNIT, QUANTITY and a buyer/seller ADDRESS_ID.
RETURN:
[ tax ]
tax = {'name':'', 'amount':0.0, 'account_collected_id':1, 'account_paid_id':2}
one tax for each tax id in IDS and their children
"""
if not precision:
precision = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account')
res = self._unit_compute(cr, uid, taxes, price_unit, product, partner, quantity)
total = 0.0
for r in res:
if r.get('balance',False):
r['amount'] = round(r.get('balance', 0.0) * quantity, precision) - total
else:
r['amount'] = round(r.get('amount', 0.0) * quantity, precision)
total += r['amount']
return res
def _unit_compute_inv(self, cr, uid, taxes, price_unit, product=None, partner=None):
taxes = self._applicable(cr, uid, taxes, price_unit, product, partner)
res = []
taxes.reverse()
cur_price_unit = price_unit
tax_parent_tot = 0.0
for tax in taxes:
if (tax.type=='percent') and not tax.include_base_amount:
tax_parent_tot += tax.amount
for tax in taxes:
if (tax.type=='fixed') and not tax.include_base_amount:
cur_price_unit -= tax.amount
for tax in taxes:
if tax.type=='percent':
if tax.include_base_amount:
amount = cur_price_unit - (cur_price_unit / (1 + tax.amount))
else:
amount = (cur_price_unit / (1 + tax_parent_tot)) * tax.amount
elif tax.type=='fixed':
amount = tax.amount
elif tax.type=='code':
localdict = {'price_unit':cur_price_unit, 'product':product, 'partner':partner}
exec tax.python_compute_inv in localdict
amount = localdict['result']
elif tax.type=='balance':
amount = cur_price_unit - reduce(lambda x,y: y.get('amount',0.0)+x, res, 0.0)
if tax.include_base_amount:
cur_price_unit -= amount
todo = 0
else:
todo = 1
res.append({
'id': tax.id,
'todo': todo,
'name': tax.name,
'amount': amount,
'account_collected_id': tax.account_collected_id.id,
'account_paid_id': tax.account_paid_id.id,
'account_analytic_collected_id': tax.account_analytic_collected_id.id,
'account_analytic_paid_id': tax.account_analytic_paid_id.id,
'base_code_id': tax.base_code_id.id,
'ref_base_code_id': tax.ref_base_code_id.id,
'sequence': tax.sequence,
'base_sign': tax.base_sign,
'tax_sign': tax.tax_sign,
'ref_base_sign': tax.ref_base_sign,
'ref_tax_sign': tax.ref_tax_sign,
'price_unit': cur_price_unit,
'tax_code_id': tax.tax_code_id.id,
'ref_tax_code_id': tax.ref_tax_code_id.id,
})
if tax.child_ids:
if tax.child_depend:
del res[-1]
amount = price_unit
parent_tax = self._unit_compute_inv(cr, uid, tax.child_ids, amount, product, partner)
res.extend(parent_tax)
total = 0.0
for r in res:
if r['todo']:
total += r['amount']
for r in res:
r['price_unit'] -= total
r['todo'] = 0
return res
def compute_inv(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None, precision=None):
"""
Compute tax values for given PRICE_UNIT, QUANTITY and a buyer/seller ADDRESS_ID.
Price Unit is a Tax included price
RETURN:
[ tax ]
tax = {'name':'', 'amount':0.0, 'account_collected_id':1, 'account_paid_id':2}
one tax for each tax id in IDS and their children
"""
if not precision:
precision = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account')
res = self._unit_compute_inv(cr, uid, taxes, price_unit, product, partner=None)
total = 0.0
for r in res:
if r.get('balance',False):
r['amount'] = round(r['balance'] * quantity, precision) - total
else:
r['amount'] = round(r['amount'] * quantity, precision)
total += r['amount']
return res
account_tax()
# ---------------------------------------------------------
# Account Entries Models
# ---------------------------------------------------------
class account_model(osv.osv):
    """Template of recurring journal entries: generate() instantiates the
    model's lines as real account moves for a given date."""
    _name = "account.model"
    _description = "Account Model"
    _columns = {
        'name': fields.char('Model Name', size=64, required=True, help="This is a model for recurring accounting entries"),
        'journal_id': fields.many2one('account.journal', 'Journal', required=True),
        'company_id': fields.related('journal_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
        'lines_id': fields.one2many('account.model.line', 'model_id', 'Model Entries'),
        'legend': fields.text('Legend', readonly=True, size=100),
    }

    _defaults = {
        'legend': lambda self, cr, uid, context:_('You can specify year, month and date in the name of the model using the following labels:\n\n%(year)s: To Specify Year \n%(month)s: To Specify Month \n%(date)s: Current Date\n\ne.g. My model on %(date)s'),
    }

    def generate(self, cr, uid, ids, data=None, context=None):
        """Create one account move per model in ``ids``.

        :param data: optional dict; ``data['date']`` overrides the move date
                     (defaults to today).
        :return: list of created account.move ids.
        :raises osv.except_osv: on a bad %(...)s expression in the model name,
                                a missing analytic journal, or a model line
                                needing a partner that has none.
        """
        if data is None:
            data = {}
        move_ids = []
        entry = {}
        account_move_obj = self.pool.get('account.move')
        account_move_line_obj = self.pool.get('account.move.line')
        pt_obj = self.pool.get('account.payment.term')
        period_obj = self.pool.get('account.period')

        if context is None:
            context = {}

        if data.get('date', False):
            context.update({'date': data['date']})

        move_date = context.get('date', time.strftime('%Y-%m-%d'))
        move_date = datetime.strptime(move_date,"%Y-%m-%d")
        for model in self.browse(cr, uid, ids, context=context):
            ctx = context.copy()
            ctx.update({'company_id': model.company_id.id, 'account_period_prefer_normal': True})
            period_ids = period_obj.find(cr, uid, dt=context.get('date', False), context=ctx)
            period_id = period_ids and period_ids[0] or False
            ctx.update({'journal_id': model.journal_id.id,'period_id': period_id})
            try:
                # Expand %(year)s / %(month)s / %(date)s placeholders.
                entry['name'] = model.name%{'year': move_date.strftime('%Y'), 'month': move_date.strftime('%m'), 'date': move_date.strftime('%Y-%m')}
            except (KeyError, ValueError, TypeError):
                # Only the %-formatting failures mean a bad expression; the
                # previous bare ``except:`` also hid unrelated errors.
                raise osv.except_osv(_('Wrong Model!'), _('You have a wrong expression "%(...)s" in your model!'))
            move_id = account_move_obj.create(cr, uid, {
                'ref': entry['name'],
                'period_id': period_id,
                'journal_id': model.journal_id.id,
                'date': context.get('date', fields.date.context_today(self,cr,uid,context=context))
            })
            move_ids.append(move_id)
            for line in model.lines_id:
                analytic_account_id = False
                if line.analytic_account_id:
                    if not model.journal_id.analytic_journal_id:
                        raise osv.except_osv(_('No Analytic Journal!'),_("You have to define an analytic journal on the '%s' journal!") % (model.journal_id.name,))
                    analytic_account_id = line.analytic_account_id.id
                val = {
                    'move_id': move_id,
                    'journal_id': model.journal_id.id,
                    'period_id': period_id,
                    'analytic_account_id': analytic_account_id
                }

                date_maturity = context.get('date',time.strftime('%Y-%m-%d'))
                if line.date_maturity == 'partner':
                    if not line.partner_id:
                        raise osv.except_osv(_('Error!'), _("Maturity date of entry line generated by model line '%s' of model '%s' is based on partner payment term!" \
                                                            "\nPlease define partner on it!")%(line.name, model.name))

                    payment_term_id = False
                    if model.journal_id.type in ('purchase', 'purchase_refund') and line.partner_id.property_supplier_payment_term:
                        payment_term_id = line.partner_id.property_supplier_payment_term.id
                    elif line.partner_id.property_payment_term:
                        payment_term_id = line.partner_id.property_payment_term.id
                    if payment_term_id:
                        # Use the latest installment date as the maturity date.
                        pterm_list = pt_obj.compute(cr, uid, payment_term_id, value=1, date_ref=date_maturity)
                        if pterm_list:
                            pterm_list = [l[0] for l in pterm_list]
                            pterm_list.sort()
                            date_maturity = pterm_list[-1]

                val.update({
                    'name': line.name,
                    'quantity': line.quantity,
                    'debit': line.debit,
                    'credit': line.credit,
                    'account_id': line.account_id.id,
                    'move_id': move_id,
                    'partner_id': line.partner_id.id,
                    'date': context.get('date', fields.date.context_today(self,cr,uid,context=context)),
                    'date_maturity': date_maturity
                })
                account_move_line_obj.create(cr, uid, val, context=ctx)

        return move_ids

    def onchange_journal_id(self, cr, uid, ids, journal_id, context=None):
        # Keep the related company in sync with the chosen journal.
        company_id = False

        if journal_id:
            journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context)
            if journal.company_id.id:
                company_id = journal.company_id.id

        return {'value': {'company_id': company_id}}

account_model()
class account_model_line(osv.osv):
    # One template line of an account.model; instantiated as a real
    # account.move.line by account_model.generate().
    _name = "account.model.line"
    _description = "Account Model Entries"
    _columns = {
        'name': fields.char('Name', size=64, required=True),
        'sequence': fields.integer('Sequence', required=True, help="The sequence field is used to order the resources from lower sequences to higher ones."),
        'quantity': fields.float('Quantity', digits_compute=dp.get_precision('Account'), help="The optional quantity on entries."),
        'debit': fields.float('Debit', digits_compute=dp.get_precision('Account')),
        'credit': fields.float('Credit', digits_compute=dp.get_precision('Account')),
        'account_id': fields.many2one('account.account', 'Account', required=True, ondelete="cascade"),
        'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', ondelete="cascade"),
        'model_id': fields.many2one('account.model', 'Model', required=True, ondelete="cascade", select=True),
        'amount_currency': fields.float('Amount Currency', help="The amount expressed in an optional other currency."),
        'currency_id': fields.many2one('res.currency', 'Currency'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        'date_maturity': fields.selection([('today','Date of the day'), ('partner','Partner Payment Term')], 'Maturity Date', help="The maturity date of the generated entries for this model. You can choose between the creation date or the creation date of the entries plus the partner payment terms."),
    }
    _order = 'sequence'
    # A line must carry either a debit or a credit (not both), and never a
    # negative value — enforced at the database level.
    _sql_constraints = [
        ('credit_debit1', 'CHECK (credit*debit=0)',  'Wrong credit or debit value in model, they must be positive!'),
        ('credit_debit2', 'CHECK (credit+debit>=0)', 'Wrong credit or debit value in model, they must be positive!'),
    ]
account_model_line()
# ---------------------------------------------------------
# Account Subscription
# ---------------------------------------------------------
class account_subscription(osv.osv):
    """A schedule of recurring entries: compute() spreads one
    account.subscription.line per period starting at date_start; the lines
    are later turned into moves, after which check() closes the record."""
    _name = "account.subscription"
    _description = "Account Subscription"
    _columns = {
        'name': fields.char('Name', size=64, required=True),
        'ref': fields.char('Reference', size=16),
        'model_id': fields.many2one('account.model', 'Model', required=True),
        'date_start': fields.date('Start Date', required=True),
        'period_total': fields.integer('Number of Periods', required=True),
        'period_nbr': fields.integer('Period', required=True),
        'period_type': fields.selection([('day','days'),('month','month'),('year','year')], 'Period Type', required=True),
        'state': fields.selection([('draft','Draft'),('running','Running'),('done','Done')], 'Status', required=True, readonly=True),
        'lines_id': fields.one2many('account.subscription.line', 'subscription_id', 'Subscription Lines')
    }
    _defaults = {
        'date_start': fields.date.context_today,
        'period_type': 'month',
        'period_total': 12,
        'period_nbr': 1,
        'state': 'draft',
    }

    def state_draft(self, cr, uid, ids, context=None):
        """Reset the subscriptions to the draft state."""
        self.write(cr, uid, ids, {'state': 'draft'})
        return False

    def check(self, cr, uid, ids, context=None):
        """Mark as done every subscription whose lines all have a move."""
        done_ids = [sub.id
                    for sub in self.browse(cr, uid, ids, context=context)
                    if all(line.move_id.id for line in sub.lines_id)]
        if done_ids:
            self.write(cr, uid, done_ids, {'state': 'done'})
        return False

    def remove_line(self, cr, uid, ids, context=None):
        """Delete the lines not yet turned into moves and go back to draft."""
        pending_line_ids = []
        for sub in self.browse(cr, uid, ids, context=context):
            pending_line_ids.extend(line.id for line in sub.lines_id
                                    if not line.move_id.id)
        if pending_line_ids:
            self.pool.get('account.subscription.line').unlink(cr, uid, pending_line_ids)
        self.write(cr, uid, ids, {'state': 'draft'})
        return False

    def compute(self, cr, uid, ids, context=None):
        """Create period_total lines, one every period_nbr day(s)/month(s)/
        year(s) from date_start, then switch the subscription to running."""
        line_obj = self.pool.get('account.subscription.line')
        step_unit = {'day': 'days', 'month': 'months', 'year': 'years'}
        for sub in self.browse(cr, uid, ids, context=context):
            current_date = sub.date_start
            for _step in range(sub.period_total):
                line_obj.create(cr, uid, {
                    'date': current_date,
                    'subscription_id': sub.id,
                })
                unit = step_unit.get(sub.period_type)
                if unit:
                    next_date = datetime.strptime(current_date, '%Y-%m-%d') + relativedelta(**{unit: sub.period_nbr})
                    current_date = next_date.strftime('%Y-%m-%d')
        self.write(cr, uid, ids, {'state': 'running'})
        return True

account_subscription()
class account_subscription_line(osv.osv):
    """One scheduled occurrence of a subscription; move_create() materializes
    it as real account moves through the subscription's model."""
    _name = "account.subscription.line"
    _description = "Account Subscription Line"
    _columns = {
        'subscription_id': fields.many2one('account.subscription', 'Subscription', required=True, select=True),
        'date': fields.date('Date', required=True),
        'move_id': fields.many2one('account.move', 'Entry'),
    }

    def move_create(self, cr, uid, ids, context=None):
        """Generate the moves for each line, link them back, and let the
        parent subscriptions re-check whether they are now done.

        :return: list of all created account.move ids.
        """
        model_obj = self.pool.get('account.model')
        touched_sub_ids = []
        created_move_ids = []
        for line in self.browse(cr, uid, ids, context=context):
            move_ids = model_obj.generate(cr, uid, [line.subscription_id.model_id.id], {'date': line.date}, context)
            if line.subscription_id.id not in touched_sub_ids:
                touched_sub_ids.append(line.subscription_id.id)
            # Remember the first generated move on the line itself.
            self.write(cr, uid, [line.id], {'move_id': move_ids[0]})
            created_move_ids.extend(move_ids)
        if touched_sub_ids:
            self.pool.get('account.subscription').check(cr, uid, touched_sub_ids, context)
        return created_move_ids

    _rec_name = 'date'

account_subscription_line()
# ---------------------------------------------------------------
# Account Templates: Account, Tax, Tax Code and chart. + Wizard
# ---------------------------------------------------------------
class account_tax_template(osv.osv):
    # Forward declaration: only the model name is registered here so other
    # templates can reference 'account.tax.template'. NOTE(review): the
    # actual columns are presumably added by a later redefinition of this
    # class further down the file — confirm.
    _name = 'account.tax.template'
account_tax_template()
class account_account_template(osv.osv):
_order = "code"
_name = "account.account.template"
_description ='Templates for Accounts'
_columns = {
'name': fields.char('Name', size=256, required=True, select=True),
'currency_id': fields.many2one('res.currency', 'Secondary Currency', help="Forces all moves for this account to have this secondary currency."),
'code': fields.char('Code', size=64, required=True, select=1),
'type': fields.selection([
('receivable','Receivable'),
('payable','Payable'),
('view','View'),
('consolidation','Consolidation'),
('liquidity','Liquidity'),
('other','Regular'),
('closed','Closed'),
], 'Internal Type', required=True,help="This type is used to differentiate types with "\
"special effects in OpenERP: view can not have entries, consolidation are accounts that "\
"can have children accounts for multi-company consolidations, payable/receivable are for "\
"partners accounts (for debit/credit computations), closed for depreciated accounts."),
'user_type': fields.many2one('account.account.type', 'Account Type', required=True,
help="These types are defined according to your country. The type contains more information "\
"about the account and its specificities."),
'financial_report_ids': fields.many2many('account.financial.report', 'account_template_financial_report', 'account_template_id', 'report_line_id', 'Financial Reports'),
'reconcile': fields.boolean('Allow Reconciliation', help="Check this option if you want the user to reconcile entries in this account."),
'shortcut': fields.char('Shortcut', size=12),
'note': fields.text('Note'),
'parent_id': fields.many2one('account.account.template', 'Parent Account Template', ondelete='cascade', domain=[('type','=','view')]),
'child_parent_ids':fields.one2many('account.account.template', 'parent_id', 'Children'),
'tax_ids': fields.many2many('account.tax.template', 'account_account_template_tax_rel', 'account_id', 'tax_id', 'Default Taxes'),
'nocreate': fields.boolean('Optional create', help="If checked, the new chart of accounts will not contain this by default."),
'chart_template_id': fields.many2one('account.chart.template', 'Chart Template', help="This optional field allow you to link an account template to a specific chart template that may differ from the one its root parent belongs to. This allow you to define chart templates that extend another and complete it with few new accounts (You don't need to define the whole structure that is common to both several times)."),
}
_defaults = {
'reconcile': False,
'type': 'view',
'nocreate': False,
}
_check_recursion = check_cycle
_constraints = [
(_check_recursion, 'Error!\nYou cannot create recursive account templates.', ['parent_id']),
]
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
reads = self.read(cr, uid, ids, ['name','code'], context=context)
res = []
for record in reads:
name = record['name']
if record['code']:
name = record['code']+' '+name
res.append((record['id'],name ))
return res
    def generate_account(self, cr, uid, chart_template_id, tax_template_ref, acc_template_ref, code_digits, company_id, context=None):
        """
        Create account.account records for a company from the account templates
        attached to a chart template.

        :param chart_template_id: id of the chart template chosen in the wizard
        :param tax_template_ref: mapping tax template id -> created tax id, used
            to fill taxes_id on the generated accounts
        :param acc_template_ref: mapping account template id -> real account id;
            updated in place by this method
        :param code_digits: number of digits for account codes (from
            wizard.multi.charts.accounts); shorter non-view codes are zero-padded
        :param company_id: company_id selected from wizard.multi.charts.accounts
        :returns: acc_template_ref, updated with the accounts created here
        :rtype: dict
        """
        if context is None:
            context = {}
        obj_acc = self.pool.get('account.account')
        company_name = self.pool.get('res.company').browse(cr, uid, company_id, context=context).name
        template = self.pool.get('account.chart.template').browse(cr, uid, chart_template_id, context=context)
        # deactivate the parent_store functionality on account_account for speed;
        # parent_left/parent_right are recomputed once at the end
        ctx = context.copy()
        ctx.update({'defer_parent_store_computation': True})
        level_ref = {}
        children_acc_criteria = [('chart_template_id','=', chart_template_id)]
        if template.account_root_id.id:
            # also pick up chart-less templates that live under the root account
            children_acc_criteria = ['|'] + children_acc_criteria + ['&',('parent_id','child_of', [template.account_root_id.id]),('chart_template_id','=', False)]
        children_acc_template = self.search(cr, uid, [('nocreate','!=',True)] + children_acc_criteria, order='id')
        for account_template in self.browse(cr, uid, children_acc_template, context=context):
            # skip the root of COA if it's not the main one
            if (template.account_root_id.id == account_template.id) and template.parent_id:
                continue
            tax_ids = []
            for tax in account_template.tax_ids:
                tax_ids.append(tax_template_ref[tax.id])
            code_main = account_template.code and len(account_template.code) or 0
            code_acc = account_template.code or ''
            # zero-pad codes shorter than code_digits; 'view' accounts keep theirs as-is
            if code_main > 0 and code_main <= code_digits and account_template.type != 'view':
                code_acc = str(code_acc) + (str('0'*(code_digits-code_main)))
            parent_id = account_template.parent_id and ((account_template.parent_id.id in acc_template_ref) and acc_template_ref[account_template.parent_id.id]) or False
            #the level as to be given as well at the creation time, because of the defer_parent_store_computation in
            #context. Indeed because of this, the parent_left and parent_right are not computed and thus the child_of
            #operator does not return the expected values, with result of having the level field not computed at all.
            if parent_id:
                level = parent_id in level_ref and level_ref[parent_id] + 1 or obj_acc._get_level(cr, uid, [parent_id], 'level', None, context=context)[parent_id] + 1
            else:
                level = 0
            vals={
                'name': (template.account_root_id.id == account_template.id) and company_name or account_template.name,
                'currency_id': account_template.currency_id and account_template.currency_id.id or False,
                'code': code_acc,
                'type': account_template.type,
                'user_type': account_template.user_type and account_template.user_type.id or False,
                'reconcile': account_template.reconcile,
                'shortcut': account_template.shortcut,
                'note': account_template.note,
                'financial_report_ids': account_template.financial_report_ids and [(6,0,[x.id for x in account_template.financial_report_ids])] or False,
                'parent_id': parent_id,
                'tax_ids': [(6,0,tax_ids)],
                'company_id': company_id,
                'level': level,
            }
            new_account = obj_acc.create(cr, uid, vals, context=ctx)
            acc_template_ref[account_template.id] = new_account
            level_ref[new_account] = level
        # reactivate the parent_store functionality on account_account
        obj_acc._parent_store_compute(cr)
        return acc_template_ref
# Instantiate the model so it is registered in the ORM (pre-v8 OpenERP style).
account_account_template()
class account_add_tmpl_wizard(osv.osv_memory):
    """Add one more account from the template.

    With the 'nocreate' option, some accounts may not be created. Use this to
    add them later.  Expects the caller to pass the chosen template id in
    ``context['tmpl_ids']``.
    """
    _name = 'account.addtmpl.wizard'
    def _get_def_cparent(self, cr, uid, context=None):
        """Default parent account: the real account whose code matches the
        code of the template's parent.

        :raises osv.except_osv: when the parent template has no code
        """
        acc_obj = self.pool.get('account.account')
        tmpl_obj = self.pool.get('account.account.template')
        tids = tmpl_obj.read(cr, uid, [context['tmpl_ids']], ['parent_id'])
        if not tids or not tids[0]['parent_id']:
            return False
        # parent_id read value is a (id, name) pair; resolve its code
        ptids = tmpl_obj.read(cr, uid, [tids[0]['parent_id'][0]], ['code'])
        res = None
        if not ptids or not ptids[0]['code']:
            raise osv.except_osv(_('Error!'), _('There is no parent code for the template account.'))
        res = acc_obj.search(cr, uid, [('code','=',ptids[0]['code'])])
        return res and res[0] or False
    _columns = {
        'cparent_id':fields.many2one('account.account', 'Parent target', help="Creates an account with the selected template under this existing parent.", required=True),
    }
    _defaults = {
        'cparent_id': _get_def_cparent,
    }
    def action_create(self,cr,uid,ids,context=None):
        """Create the real account from the template under the chosen parent."""
        if context is None:
            context = {}
        acc_obj = self.pool.get('account.account')
        tmpl_obj = self.pool.get('account.account.template')
        data = self.read(cr, uid, ids)[0]
        # company is taken from the selected parent account
        company_id = acc_obj.read(cr, uid, [data['cparent_id'][0]], ['company_id'])[0]['company_id'][0]
        account_template = tmpl_obj.browse(cr, uid, context['tmpl_ids'])
        vals = {
            'name': account_template.name,
            'currency_id': account_template.currency_id and account_template.currency_id.id or False,
            'code': account_template.code,
            'type': account_template.type,
            'user_type': account_template.user_type and account_template.user_type.id or False,
            'reconcile': account_template.reconcile,
            'shortcut': account_template.shortcut,
            'note': account_template.note,
            'parent_id': data['cparent_id'][0],
            'company_id': company_id,
            }
        acc_obj.create(cr, uid, vals)
        return {'type':'state', 'state': 'end' }
    def action_cancel(self, cr, uid, ids, context=None):
        """Close the wizard without creating anything."""
        return { 'type': 'state', 'state': 'end' }
account_add_tmpl_wizard()
class account_tax_code_template(osv.osv):
    """Template for tax codes (the tax-declaration hierarchy).

    Real account.tax.code records are generated from these templates by
    :meth:`generate_tax_code` when a chart of accounts is installed for a
    company.
    """
    _name = 'account.tax.code.template'
    _description = 'Tax Code Template'
    # Fix: a dead "_order = 'code'" assignment used to appear here and was
    # silently overridden by the 'code,name' assignment further down; only the
    # effective value is kept.
    _order = 'code,name'
    _rec_name = 'code'
    _columns = {
        'name': fields.char('Tax Case Name', size=64, required=True),
        'code': fields.char('Case Code', size=64),
        'info': fields.text('Description'),
        'parent_id': fields.many2one('account.tax.code.template', 'Parent Code', select=True),
        'child_ids': fields.one2many('account.tax.code.template', 'parent_id', 'Child Codes'),
        'sign': fields.float('Sign For Parent', required=True),
        'notprintable':fields.boolean("Not Printable in Invoice", help="Check this box if you don't want any tax related to this tax Code to appear on invoices."),
    }
    _defaults = {
        'sign': 1.0,
        'notprintable': False,
    }
    def generate_tax_code(self, cr, uid, tax_code_root_id, company_id, context=None):
        '''
        Generate the tax codes from the templates of tax code that are children
        of the given one, for a company.

        :param tax_code_root_id: id of the root of all the tax code templates to process
        :param company_id: id of the company the wizard is running for
        :returns: dictionary with the mapping between the templates and the real objects.
        :rtype: dict
        '''
        obj_tax_code_template = self.pool.get('account.tax.code.template')
        obj_tax_code = self.pool.get('account.tax.code')
        tax_code_template_ref = {}
        company = self.pool.get('res.company').browse(cr, uid, company_id, context=context)
        #find all the children of the tax_code_root_id
        children_tax_code_template = tax_code_root_id and obj_tax_code_template.search(cr, uid, [('parent_id','child_of',[tax_code_root_id])], order='id') or []
        for tax_code_template in obj_tax_code_template.browse(cr, uid, children_tax_code_template, context=context):
            vals = {
                # the root code takes the company's name
                'name': (tax_code_root_id == tax_code_template.id) and company.name or tax_code_template.name,
                'code': tax_code_template.code,
                'info': tax_code_template.info,
                'parent_id': tax_code_template.parent_id and ((tax_code_template.parent_id.id in tax_code_template_ref) and tax_code_template_ref[tax_code_template.parent_id.id]) or False,
                'company_id': company_id,
                'sign': tax_code_template.sign,
            }
            #check if this tax code already exists
            rec_list = obj_tax_code.search(cr, uid, [('name', '=', vals['name']),('code', '=', vals['code']),('company_id', '=', vals['company_id'])], context=context)
            if not rec_list:
                #if not yet, create it
                new_tax_code = obj_tax_code.create(cr, uid, vals)
                #recording the new tax code to do the mapping
                tax_code_template_ref[tax_code_template.id] = new_tax_code
        return tax_code_template_ref
    def name_get(self, cr, uid, ids, context=None):
        """Display tax code templates as "<code> - <name>"."""
        if not ids:
            return []
        if isinstance(ids, (int, long)):
            ids = [ids]
        reads = self.read(cr, uid, ids, ['name','code'], context, load='_classic_write')
        return [(x['id'], (x['code'] and x['code'] + ' - ' or '') + x['name']) \
                for x in reads]
    _check_recursion = check_cycle
    _constraints = [
        (_check_recursion, 'Error!\nYou cannot create recursive Tax Codes.', ['parent_id'])
    ]
account_tax_code_template()
class account_chart_template(osv.osv):
    """Template of a whole chart of accounts.

    Groups the root account/tax-code templates, the tax templates and the
    default property accounts used when a localization chart is installed.
    """
    _name="account.chart.template"
    _description= "Templates for Account Chart"
    _columns={
        'name': fields.char('Name', size=64, required=True),
        'parent_id': fields.many2one('account.chart.template', 'Parent Chart Template'),
        'code_digits': fields.integer('# of Digits', required=True, help="No. of Digits to use for account code"),
        'visible': fields.boolean('Can be Visible?', help="Set this to False if you don't want this template to be used actively in the wizard that generate Chart of Accounts from templates, this is useful when you want to generate accounts of this template only when loading its child template."),
        'complete_tax_set': fields.boolean('Complete Set of Taxes', help='This boolean helps you to choose if you want to propose to the user to encode the sale and purchase rates or choose from list of taxes. This last choice assumes that the set of tax defined on this template is complete'),
        'account_root_id': fields.many2one('account.account.template', 'Root Account', domain=[('parent_id','=',False)]),
        'tax_code_root_id': fields.many2one('account.tax.code.template', 'Root Tax Code', domain=[('parent_id','=',False)]),
        'tax_template_ids': fields.one2many('account.tax.template', 'chart_template_id', 'Tax Template List', help='List of all the taxes that have to be installed by the wizard'),
        'bank_account_view_id': fields.many2one('account.account.template', 'Bank Account'),
        # default property accounts assigned by generate_properties()
        'property_account_receivable': fields.many2one('account.account.template', 'Receivable Account'),
        'property_account_payable': fields.many2one('account.account.template', 'Payable Account'),
        'property_account_expense_categ': fields.many2one('account.account.template', 'Expense Category Account'),
        'property_account_income_categ': fields.many2one('account.account.template', 'Income Category Account'),
        'property_account_expense': fields.many2one('account.account.template', 'Expense Account on Product Template'),
        'property_account_income': fields.many2one('account.account.template', 'Income Account on Product Template'),
        'property_account_income_opening': fields.many2one('account.account.template', 'Opening Entries Income Account'),
        'property_account_expense_opening': fields.many2one('account.account.template', 'Opening Entries Expense Account'),
    }
    _defaults = {
        'visible': True,
        'code_digits': 6,
        'complete_tax_set': True,
    }
account_chart_template()
class account_tax_template(osv.osv):
    """Template of a tax; real account.tax records are generated from these by
    :meth:`_generate_tax` during chart installation."""
    _name = 'account.tax.template'
    _description = 'Templates for Taxes'
    _columns = {
        'chart_template_id': fields.many2one('account.chart.template', 'Chart Template', required=True),
        'name': fields.char('Tax Name', size=64, required=True),
        'sequence': fields.integer('Sequence', required=True, help="The sequence field is used to order the taxes lines from lower sequences to higher ones. The order is important if you have a tax that has several tax children. In this case, the evaluation order is important."),
        'amount': fields.float('Amount', required=True, digits_compute=get_precision_tax(), help="For Tax Type percent enter % ratio between 0-1."),
        'type': fields.selection( [('percent','Percent'), ('fixed','Fixed'), ('none','None'), ('code','Python Code'), ('balance','Balance')], 'Tax Type', required=True),
        'applicable_type': fields.selection( [('true','True'), ('code','Python Code')], 'Applicable Type', required=True, help="If not applicable (computed through a Python code), the tax won't appear on the invoice."),
        'domain':fields.char('Domain', size=32, help="This field is only used if you develop your own module allowing developers to create specific taxes in a custom domain."),
        'account_collected_id':fields.many2one('account.account.template', 'Invoice Tax Account'),
        'account_paid_id':fields.many2one('account.account.template', 'Refund Tax Account'),
        'parent_id':fields.many2one('account.tax.template', 'Parent Tax Account', select=True),
        'child_depend':fields.boolean('Tax on Children', help="Set if the tax computation is based on the computation of child taxes rather than on the total amount."),
        'python_compute':fields.text('Python Code'),
        'python_compute_inv':fields.text('Python Code (reverse)'),
        'python_applicable':fields.text('Python Code'),
        #
        # Fields used for the Tax declaration
        #
        'base_code_id': fields.many2one('account.tax.code.template', 'Base Code', help="Use this code for the tax declaration."),
        'tax_code_id': fields.many2one('account.tax.code.template', 'Tax Code', help="Use this code for the tax declaration."),
        'base_sign': fields.float('Base Code Sign', help="Usually 1 or -1."),
        'tax_sign': fields.float('Tax Code Sign', help="Usually 1 or -1."),
        # Same fields for refund invoices
        'ref_base_code_id': fields.many2one('account.tax.code.template', 'Refund Base Code', help="Use this code for the tax declaration."),
        'ref_tax_code_id': fields.many2one('account.tax.code.template', 'Refund Tax Code', help="Use this code for the tax declaration."),
        'ref_base_sign': fields.float('Base Code Sign', help="Usually 1 or -1."),
        'ref_tax_sign': fields.float('Tax Code Sign', help="Usually 1 or -1."),
        'include_base_amount': fields.boolean('Include in Base Amount', help="Set if the amount of tax must be included in the base amount before computing the next taxes."),
        'description': fields.char('Internal Name'),
        'type_tax_use': fields.selection([('sale','Sale'),('purchase','Purchase'),('all','All')], 'Tax Use In', required=True,),
        'price_include': fields.boolean('Tax Included in Price', help="Check this if the price you use on the product and invoices includes this tax."),
    }
    def name_get(self, cr, uid, ids, context=None):
        """Display templates by their internal name ('description') when set,
        falling back to the tax name."""
        if not ids:
            return []
        res = []
        for record in self.read(cr, uid, ids, ['description','name'], context=context):
            name = record['description'] and record['description'] or record['name']
            res.append((record['id'],name ))
        return res
    def _default_company(self, cr, uid, context=None):
        """Default company: the current user's, else the first root company."""
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        if user.company_id:
            return user.company_id.id
        return self.pool.get('res.company').search(cr, uid, [('parent_id', '=', False)])[0]
    _defaults = {
        'python_compute': lambda *a: '''# price_unit\n# product: product.product object or None\n# partner: res.partner object or None\n\nresult = price_unit * 0.10''',
        'python_compute_inv': lambda *a: '''# price_unit\n# product: product.product object or False\n\nresult = price_unit * 0.10''',
        'applicable_type': 'true',
        'type': 'percent',
        'amount': 0,
        'sequence': 1,
        'ref_tax_sign': 1,
        'ref_base_sign': 1,
        'tax_sign': 1,
        'base_sign': 1,
        'include_base_amount': False,
        'type_tax_use': 'all',
        'price_include': 0,
    }
    _order = 'sequence'
    def _generate_tax(self, cr, uid, tax_templates, tax_code_template_ref, company_id, context=None):
        """
        This method generates taxes from templates.

        :param tax_templates: list of browse records of the tax templates to process
        :param tax_code_template_ref: mapping tax code template id -> created tax code id
        :param company_id: id of the company the wizard is running for
        :returns:
            {
            'tax_template_to_tax': mapping between tax template and the newly generated taxes corresponding,
            'account_dict': dictionary containing a to-do list with all the accounts to assign on new taxes
            }
        """
        if context is None:
            context = {}
        res = {}
        todo_dict = {}
        tax_template_to_tax = {}
        for tax in tax_templates:
            vals_tax = {
                'name':tax.name,
                'sequence': tax.sequence,
                'amount': tax.amount,
                'type': tax.type,
                'applicable_type': tax.applicable_type,
                'domain': tax.domain,
                # parent taxes are processed first, so the parent mapping already exists
                'parent_id': tax.parent_id and ((tax.parent_id.id in tax_template_to_tax) and tax_template_to_tax[tax.parent_id.id]) or False,
                'child_depend': tax.child_depend,
                'python_compute': tax.python_compute,
                'python_compute_inv': tax.python_compute_inv,
                'python_applicable': tax.python_applicable,
                'base_code_id': tax.base_code_id and ((tax.base_code_id.id in tax_code_template_ref) and tax_code_template_ref[tax.base_code_id.id]) or False,
                'tax_code_id': tax.tax_code_id and ((tax.tax_code_id.id in tax_code_template_ref) and tax_code_template_ref[tax.tax_code_id.id]) or False,
                'base_sign': tax.base_sign,
                'tax_sign': tax.tax_sign,
                'ref_base_code_id': tax.ref_base_code_id and ((tax.ref_base_code_id.id in tax_code_template_ref) and tax_code_template_ref[tax.ref_base_code_id.id]) or False,
                'ref_tax_code_id': tax.ref_tax_code_id and ((tax.ref_tax_code_id.id in tax_code_template_ref) and tax_code_template_ref[tax.ref_tax_code_id.id]) or False,
                'ref_base_sign': tax.ref_base_sign,
                'ref_tax_sign': tax.ref_tax_sign,
                'include_base_amount': tax.include_base_amount,
                'description': tax.description,
                'company_id': company_id,
                'type_tax_use': tax.type_tax_use,
                'price_include': tax.price_include
            }
            new_tax = self.pool.get('account.tax').create(cr, uid, vals_tax)
            tax_template_to_tax[tax.id] = new_tax
            #as the accounts have not been created yet, we have to wait before filling these fields
            todo_dict[new_tax] = {
                'account_collected_id': tax.account_collected_id and tax.account_collected_id.id or False,
                'account_paid_id': tax.account_paid_id and tax.account_paid_id.id or False,
            }
        res.update({'tax_template_to_tax': tax_template_to_tax, 'account_dict': todo_dict})
        return res
account_tax_template()
# Fiscal Position Templates
class account_fiscal_position_template(osv.osv):
    """Template for fiscal positions (per-partner tax and account mappings)."""
    _name = 'account.fiscal.position.template'
    _description = 'Template for Fiscal Position'
    _columns = {
        'name': fields.char('Fiscal Position Template', size=64, required=True),
        'chart_template_id': fields.many2one('account.chart.template', 'Chart Template', required=True),
        'account_ids': fields.one2many('account.fiscal.position.account.template', 'position_id', 'Account Mapping'),
        'tax_ids': fields.one2many('account.fiscal.position.tax.template', 'position_id', 'Tax Mapping'),
        'note': fields.text('Notes', translate=True),
    }
    def generate_fiscal_position(self, cr, uid, chart_temp_id, tax_template_ref, acc_template_ref, company_id, context=None):
        """
        This method generates Fiscal Positions, Fiscal Position Accounts and
        Fiscal Position Taxes from templates.

        :param chart_temp_id: Chart Template Id.
        :param tax_template_ref: mapping tax template id -> created tax id, for generating account.fiscal.position.tax.
        :param acc_template_ref: mapping account template id -> created account id, for generating account.fiscal.position.account.
        :param company_id: company_id selected from wizard.multi.charts.accounts.
        :returns: True
        """
        if context is None:
            context = {}
        obj_tax_fp = self.pool.get('account.fiscal.position.tax')
        obj_ac_fp = self.pool.get('account.fiscal.position.account')
        obj_fiscal_position = self.pool.get('account.fiscal.position')
        fp_ids = self.search(cr, uid, [('chart_template_id', '=', chart_temp_id)])
        for position in self.browse(cr, uid, fp_ids, context=context):
            new_fp = obj_fiscal_position.create(cr, uid, {'company_id': company_id, 'name': position.name, 'note': position.note})
            # map each template tax pair to the real taxes created earlier
            for tax in position.tax_ids:
                obj_tax_fp.create(cr, uid, {
                    'tax_src_id': tax_template_ref[tax.tax_src_id.id],
                    'tax_dest_id': tax.tax_dest_id and tax_template_ref[tax.tax_dest_id.id] or False,
                    'position_id': new_fp
                })
            # same for account pairs
            for acc in position.account_ids:
                obj_ac_fp.create(cr, uid, {
                    'account_src_id': acc_template_ref[acc.account_src_id.id],
                    'account_dest_id': acc_template_ref[acc.account_dest_id.id],
                    'position_id': new_fp
                })
        return True
account_fiscal_position_template()
class account_fiscal_position_tax_template(osv.osv):
    """One source->replacement tax mapping line of a fiscal position template."""
    _name = 'account.fiscal.position.tax.template'
    _description = 'Template Tax Fiscal Position'
    _rec_name = 'position_id'
    _columns = {
        'position_id': fields.many2one('account.fiscal.position.template', 'Fiscal Position', required=True, ondelete='cascade'),
        'tax_src_id': fields.many2one('account.tax.template', 'Tax Source', required=True),
        'tax_dest_id': fields.many2one('account.tax.template', 'Replacement Tax')
    }
account_fiscal_position_tax_template()
class account_fiscal_position_account_template(osv.osv):
    """One source->destination account mapping line of a fiscal position template."""
    _name = 'account.fiscal.position.account.template'
    _description = 'Template Account Fiscal Mapping'
    _rec_name = 'position_id'
    _columns = {
        'position_id': fields.many2one('account.fiscal.position.template', 'Fiscal Mapping', required=True, ondelete='cascade'),
        'account_src_id': fields.many2one('account.account.template', 'Account Source', domain=[('type','<>','view')], required=True),
        'account_dest_id': fields.many2one('account.account.template', 'Account Destination', domain=[('type','<>','view')], required=True)
    }
account_fiscal_position_account_template()
# ---------------------------------------------------------
# Account generation from template wizards
# ---------------------------------------------------------
class wizard_multi_charts_accounts(osv.osv_memory):
"""
Create a new account chart for a company.
Wizards ask for:
* a company
* an account chart template
* a number of digits for formatting code of non-view accounts
* a list of bank accounts owned by the company
Then, the wizard:
* generates all accounts from the template and assigns them to the right company
* generates all taxes and tax codes, changing account assignations
* generates all accounting properties and assigns them correctly
"""
_name='wizard.multi.charts.accounts'
_inherit = 'res.config'
_columns = {
'company_id':fields.many2one('res.company', 'Company', required=True),
'currency_id': fields.many2one('res.currency', 'Currency', help="Currency as per company's country."),
'only_one_chart_template': fields.boolean('Only One Chart Template Available'),
'chart_template_id': fields.many2one('account.chart.template', 'Chart Template', required=True),
'bank_accounts_id': fields.one2many('account.bank.accounts.wizard', 'bank_account_id', 'Cash and Banks', required=True),
'code_digits':fields.integer('# of Digits', required=True, help="No. of Digits to use for account code"),
"sale_tax": fields.many2one("account.tax.template", "Default Sale Tax"),
"purchase_tax": fields.many2one("account.tax.template", "Default Purchase Tax"),
'sale_tax_rate': fields.float('Sales Tax(%)'),
'purchase_tax_rate': fields.float('Purchase Tax(%)'),
'complete_tax_set': fields.boolean('Complete Set of Taxes', help='This boolean helps you to choose if you want to propose to the user to encode the sales and purchase rates or use the usual m2o fields. This last choice assumes that the set of tax defined for the chosen template is complete'),
}
def _get_chart_parent_ids(self, cr, uid, chart_template, context=None):
""" Returns the IDs of all ancestor charts, including the chart itself.
(inverse of child_of operator)
:param browse_record chart_template: the account.chart.template record
:return: the IDS of all ancestor charts, including the chart itself.
"""
result = [chart_template.id]
while chart_template.parent_id:
chart_template = chart_template.parent_id
result.append(chart_template.id)
return result
def onchange_tax_rate(self, cr, uid, ids, rate=False, context=None):
return {'value': {'purchase_tax_rate': rate or False}}
    def onchange_chart_template_id(self, cr, uid, ids, chart_template_id=False, context=None):
        """Refresh currency, default sale/purchase taxes and code digits when
        the chart template selection changes."""
        res = {}
        tax_templ_obj = self.pool.get('account.tax.template')
        ir_values = self.pool.get('ir.values')
        res['value'] = {'complete_tax_set': False, 'sale_tax': False, 'purchase_tax': False}
        if chart_template_id:
            data = self.pool.get('account.chart.template').browse(cr, uid, chart_template_id, context=context)
            # set currency_id based on the selected chart template using ir.values,
            # else fall back to the current user's company currency
            value_id = ir_values.search(cr, uid, [('model', '=', 'account.chart.template'), ('res_id', '=', chart_template_id)], limit=1, context=context)
            if value_id:
                currency_id = int(ir_values.browse(cr, uid, value_id[0], context=context).value)
            else:
                currency_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
            res['value'].update({'complete_tax_set': data.complete_tax_set, 'currency_id': currency_id})
            if data.complete_tax_set:
            # default tax is given by the lowest sequence. For same sequence we will take the latest created as it will be the case for tax created while installing the generic chart of account
                chart_ids = self._get_chart_parent_ids(cr, uid, data, context=context)
                base_tax_domain = [("chart_template_id", "in", chart_ids), ('parent_id', '=', False)]
                sale_tax_domain = base_tax_domain + [('type_tax_use', 'in', ('sale','all'))]
                purchase_tax_domain = base_tax_domain + [('type_tax_use', 'in', ('purchase','all'))]
                sale_tax_ids = tax_templ_obj.search(cr, uid, sale_tax_domain, order="sequence, id desc")
                purchase_tax_ids = tax_templ_obj.search(cr, uid, purchase_tax_domain, order="sequence, id desc")
                res['value'].update({'sale_tax': sale_tax_ids and sale_tax_ids[0] or False,
                                     'purchase_tax': purchase_tax_ids and purchase_tax_ids[0] or False})
                res.setdefault('domain', {})
                res['domain']['sale_tax'] = repr(sale_tax_domain)
                res['domain']['purchase_tax'] = repr(purchase_tax_domain)
            if data.code_digits:
                res['value'].update({'code_digits': data.code_digits})
        return res
def default_get(self, cr, uid, fields, context=None):
res = super(wizard_multi_charts_accounts, self).default_get(cr, uid, fields, context=context)
tax_templ_obj = self.pool.get('account.tax.template')
account_chart_template = self.pool['account.chart.template']
data_obj = self.pool.get('ir.model.data')
if 'bank_accounts_id' in fields:
res.update({'bank_accounts_id': [{'acc_name': _('Cash'), 'account_type': 'cash'},{'acc_name': _('Bank'), 'account_type': 'bank'}]})
if 'company_id' in fields:
res.update({'company_id': self.pool.get('res.users').browse(cr, uid, [uid], context=context)[0].company_id.id})
if 'currency_id' in fields:
company_id = res.get('company_id') or False
if company_id:
company_obj = self.pool.get('res.company')
country_id = company_obj.browse(cr, uid, company_id, context=context).country_id.id
currency_id = company_obj.on_change_country(cr, uid, company_id, country_id, context=context)['value']['currency_id']
res.update({'currency_id': currency_id})
ids = account_chart_template.search(cr, uid, [('visible', '=', True)], context=context)
if ids:
#in order to set default chart which was last created set max of ids.
chart_id = max(ids)
if context.get("default_charts"):
data_ids = data_obj.search(cr, uid, [('model', '=', 'account.chart.template'), ('module', '=', context.get("default_charts"))], limit=1, context=context)
if data_ids:
chart_id = data_obj.browse(cr, uid, data_ids[0], context=context).res_id
chart = account_chart_template.browse(cr, uid, chart_id, context=context)
chart_hierarchy_ids = self._get_chart_parent_ids(cr, uid, chart, context=context)
if 'chart_template_id' in fields:
res.update({'only_one_chart_template': len(ids) == 1,
'chart_template_id': chart_id})
if 'sale_tax' in fields:
sale_tax_ids = tax_templ_obj.search(cr, uid, [("chart_template_id", "in", chart_hierarchy_ids),
('type_tax_use', 'in', ('sale','all'))],
order="sequence")
res.update({'sale_tax': sale_tax_ids and sale_tax_ids[0] or False})
if 'purchase_tax' in fields:
purchase_tax_ids = tax_templ_obj.search(cr, uid, [("chart_template_id", "in", chart_hierarchy_ids),
('type_tax_use', 'in', ('purchase','all'))],
order="sequence")
res.update({'purchase_tax': purchase_tax_ids and purchase_tax_ids[0] or False})
res.update({
'purchase_tax_rate': 15.0,
'sale_tax_rate': 15.0,
})
return res
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
if context is None:context = {}
res = super(wizard_multi_charts_accounts, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar,submenu=False)
cmp_select = []
acc_template_obj = self.pool.get('account.chart.template')
company_obj = self.pool.get('res.company')
company_ids = company_obj.search(cr, uid, [], context=context)
#display in the widget selection of companies, only the companies that haven't been configured yet (but don't care about the demo chart of accounts)
cr.execute("SELECT company_id FROM account_account WHERE active = 't' AND account_account.parent_id IS NULL AND name != %s", ("Chart For Automated Tests",))
configured_cmp = [r[0] for r in cr.fetchall()]
unconfigured_cmp = list(set(company_ids)-set(configured_cmp))
for field in res['fields']:
if field == 'company_id':
res['fields'][field]['domain'] = [('id','in',unconfigured_cmp)]
res['fields'][field]['selection'] = [('', '')]
if unconfigured_cmp:
cmp_select = [(line.id, line.name) for line in company_obj.browse(cr, uid, unconfigured_cmp)]
res['fields'][field]['selection'] = cmp_select
return res
def check_created_journals(self, cr, uid, vals_journal, company_id, context=None):
"""
This method used for checking journals already created or not. If not then create new journal.
"""
obj_journal = self.pool.get('account.journal')
rec_list = obj_journal.search(cr, uid, [('name','=', vals_journal['name']),('company_id', '=', company_id)], context=context)
if not rec_list:
obj_journal.create(cr, uid, vals_journal, context=context)
return True
def generate_journals(self, cr, uid, chart_template_id, acc_template_ref, company_id, context=None):
"""
This method is used for creating journals.
:param chart_temp_id: Chart Template Id.
:param acc_template_ref: Account templates reference.
:param company_id: company_id selected from wizard.multi.charts.accounts.
:returns: True
"""
journal_data = self._prepare_all_journals(cr, uid, chart_template_id, acc_template_ref, company_id, context=context)
for vals_journal in journal_data:
self.check_created_journals(cr, uid, vals_journal, company_id, context=context)
return True
    def _prepare_all_journals(self, cr, uid, chart_template_id, acc_template_ref, company_id, context=None):
        """Build the list of values dictionaries for the company's default
        journals (sale, purchase, their refunds, miscellaneous and opening).

        :param chart_template_id: id of the chart template, used to resolve the
            default income/expense/opening accounts
        :param acc_template_ref: mapping account template id -> created account id
        :param company_id: company the journals will belong to
        :returns: list of dicts suitable for account.journal create()
        """
        def _get_analytic_journal(journal_type):
            # Get the analytic journal
            data = False
            if journal_type in ('sale', 'sale_refund'):
                data = obj_data.get_object_reference(cr, uid, 'account', 'analytic_journal_sale')
            elif journal_type in ('purchase', 'purchase_refund'):
                # no analytic journal linked by default for these types
                pass
            elif journal_type == 'general':
                pass
            return data and data[1] or False
        def _get_default_account(journal_type, type='debit'):
            # Get the default accounts
            default_account = False
            if journal_type in ('sale', 'sale_refund'):
                default_account = acc_template_ref.get(template.property_account_income_categ.id)
            elif journal_type in ('purchase', 'purchase_refund'):
                default_account = acc_template_ref.get(template.property_account_expense_categ.id)
            elif journal_type == 'situation':
                # opening journal: expense account on the debit side, income on credit
                if type == 'debit':
                    default_account = acc_template_ref.get(template.property_account_expense_opening.id)
                else:
                    default_account = acc_template_ref.get(template.property_account_income_opening.id)
            return default_account
        journal_names = {
            'sale': _('Sales Journal'),
            'purchase': _('Purchase Journal'),
            'sale_refund': _('Sales Refund Journal'),
            'purchase_refund': _('Purchase Refund Journal'),
            'general': _('Miscellaneous Journal'),
            'situation': _('Opening Entries Journal'),
        }
        journal_codes = {
            'sale': _('SAJ'),
            'purchase': _('EXJ'),
            'sale_refund': _('SCNJ'),
            'purchase_refund': _('ECNJ'),
            'general': _('MISC'),
            'situation': _('OPEJ'),
        }
        obj_data = self.pool.get('ir.model.data')
        analytic_journal_obj = self.pool.get('account.analytic.journal')
        template = self.pool.get('account.chart.template').browse(cr, uid, chart_template_id, context=context)
        journal_data = []
        for journal_type in ['sale', 'purchase', 'sale_refund', 'purchase_refund', 'general', 'situation']:
            vals = {
                'type': journal_type,
                'name': journal_names[journal_type],
                'code': journal_codes[journal_type],
                'company_id': company_id,
                # only the opening-entries journal is centralised
                'centralisation': journal_type == 'situation',
                'analytic_journal_id': _get_analytic_journal(journal_type),
                'default_credit_account_id': _get_default_account(journal_type, 'credit'),
                'default_debit_account_id': _get_default_account(journal_type, 'debit'),
            }
            journal_data.append(vals)
        return journal_data
    def generate_properties(self, cr, uid, chart_template_id, acc_template_ref, company_id, context=None):
        """
        This method used for creating properties.

        :param chart_template_id: id of the current chart template for which we need to create properties
        :param acc_template_ref: Mapping between ids of account templates and real accounts created from them
        :param company_id: company_id selected from wizard.multi.charts.accounts.
        :returns: True
        """
        property_obj = self.pool.get('ir.property')
        field_obj = self.pool.get('ir.model.fields')
        # Each tuple is (property field name, model it applies to, relation model).
        todo_list = [
            ('property_account_receivable','res.partner','account.account'),
            ('property_account_payable','res.partner','account.account'),
            ('property_account_expense_categ','product.category','account.account'),
            ('property_account_income_categ','product.category','account.account'),
            ('property_account_expense','product.template','account.account'),
            ('property_account_income','product.template','account.account'),
        ]
        template = self.pool.get('account.chart.template').browse(cr, uid, chart_template_id, context=context)
        for record in todo_list:
            # Resolve the account template configured on the chart template,
            # then translate it into the real account created from it.
            account = getattr(template, record[0])
            value = account and 'account.account,' + str(acc_template_ref[account.id]) or False
            if value:
                field = field_obj.search(cr, uid, [('name', '=', record[0]),('model', '=', record[1]),('relation', '=', record[2])], context=context)
                vals = {
                    'name': record[0],
                    'company_id': company_id,
                    'fields_id': field[0],
                    'value': value,
                }
                property_ids = property_obj.search(cr, uid, [('name','=', record[0]),('company_id', '=', company_id)], context=context)
                if property_ids:
                    #the property exist: modify it
                    property_obj.write(cr, uid, property_ids, vals, context=context)
                else:
                    #create the property
                    property_obj.create(cr, uid, vals, context=context)
        return True
    def _install_template(self, cr, uid, template_id, company_id, code_digits=None, obj_wizard=None, acc_ref=None, taxes_ref=None, tax_code_ref=None, context=None):
        '''
        This function recursively loads the template objects and create the real objects from them.

        :param template_id: id of the chart template to load
        :param company_id: id of the company the wizard is running for
        :param code_digits: integer that depicts the number of digits the accounts code should have in the COA
        :param obj_wizard: the current wizard for generating the COA from the templates
        :param acc_ref: Mapping between ids of account templates and real accounts created from them
        :param taxes_ref: Mapping between ids of tax templates and real taxes created from them
        :param tax_code_ref: Mapping between ids of tax code templates and real tax codes created from them
        :returns: return a tuple with a dictionary containing
            * the mapping between the account template ids and the ids of the real accounts that have been generated
              from them, as first item,
            * a similar dictionary for mapping the tax templates and taxes, as second item,
            * a last identical containing the mapping of tax code templates and tax codes
        :rtype: tuple(dict, dict, dict)
        '''
        # Use fresh dicts when no accumulators were passed in (avoids the
        # mutable-default-argument pitfall).
        if acc_ref is None:
            acc_ref = {}
        if taxes_ref is None:
            taxes_ref = {}
        if tax_code_ref is None:
            tax_code_ref = {}
        template = self.pool.get('account.chart.template').browse(cr, uid, template_id, context=context)
        if template.parent_id:
            # Install the parent template first so the child template can
            # extend/override what the parent created.
            tmp1, tmp2, tmp3 = self._install_template(cr, uid, template.parent_id.id, company_id, code_digits=code_digits, acc_ref=acc_ref, taxes_ref=taxes_ref, tax_code_ref=tax_code_ref, context=context)
            acc_ref.update(tmp1)
            taxes_ref.update(tmp2)
            tax_code_ref.update(tmp3)
        # Now create the objects defined by this template itself.
        tmp1, tmp2, tmp3 = self._load_template(cr, uid, template_id, company_id, code_digits=code_digits, obj_wizard=obj_wizard, account_ref=acc_ref, taxes_ref=taxes_ref, tax_code_ref=tax_code_ref, context=context)
        acc_ref.update(tmp1)
        taxes_ref.update(tmp2)
        tax_code_ref.update(tmp3)
        return acc_ref, taxes_ref, tax_code_ref
    def _load_template(self, cr, uid, template_id, company_id, code_digits=None, obj_wizard=None, account_ref=None, taxes_ref=None, tax_code_ref=None, context=None):
        '''
        This function generates all the objects from the templates

        :param template_id: id of the chart template to load
        :param company_id: id of the company the wizard is running for
        :param code_digits: integer that depicts the number of digits the accounts code should have in the COA
        :param obj_wizard: the current wizard for generating the COA from the templates
        :param account_ref: Mapping between ids of account templates and real accounts created from them
        :param taxes_ref: Mapping between ids of tax templates and real taxes created from them
        :param tax_code_ref: Mapping between ids of tax code templates and real tax codes created from them
        :returns: return a tuple with a dictionary containing
            * the mapping between the account template ids and the ids of the real accounts that have been generated
              from them, as first item,
            * a similar dictionary for mapping the tax templates and taxes, as second item,
            * a last identical containing the mapping of tax code templates and tax codes
        :rtype: tuple(dict, dict, dict)
        '''
        if account_ref is None:
            account_ref = {}
        if taxes_ref is None:
            taxes_ref = {}
        if tax_code_ref is None:
            tax_code_ref = {}
        template = self.pool.get('account.chart.template').browse(cr, uid, template_id, context=context)
        obj_tax_code_template = self.pool.get('account.tax.code.template')
        obj_acc_tax = self.pool.get('account.tax')
        obj_tax_temp = self.pool.get('account.tax.template')
        obj_acc_template = self.pool.get('account.account.template')
        obj_fiscal_position_template = self.pool.get('account.fiscal.position.template')
        # create all the tax code.
        tax_code_ref.update(obj_tax_code_template.generate_tax_code(cr, uid, template.tax_code_root_id.id, company_id, context=context))
        # Generate taxes from templates.
        tax_templates = [x for x in template.tax_template_ids]
        generated_tax_res = obj_tax_temp._generate_tax(cr, uid, tax_templates, tax_code_ref, company_id, context=context)
        taxes_ref.update(generated_tax_res['tax_template_to_tax'])
        # Generating Accounts from templates.
        account_template_ref = obj_acc_template.generate_account(cr, uid, template_id, taxes_ref, account_ref, code_digits, company_id, context=context)
        account_ref.update(account_template_ref)
        # writing account values on tax after creation of accounts
        # (taxes were created before the accounts they reference existed, so
        # the collected/paid account ids are patched in here).
        for key,value in generated_tax_res['account_dict'].items():
            if value['account_collected_id'] or value['account_paid_id']:
                obj_acc_tax.write(cr, uid, [key], {
                    'account_collected_id': account_ref.get(value['account_collected_id'], False),
                    'account_paid_id': account_ref.get(value['account_paid_id'], False),
                })
        # Create Journals
        self.generate_journals(cr, uid, template_id, account_ref, company_id, context=context)
        # generate properties function
        self.generate_properties(cr, uid, template_id, account_ref, company_id, context=context)
        # Generate Fiscal Position , Fiscal Position Accounts and Fiscal Position Taxes from templates
        obj_fiscal_position_template.generate_fiscal_position(cr, uid, template_id, taxes_ref, account_ref, company_id, context=context)
        return account_ref, taxes_ref, tax_code_ref
def _create_tax_templates_from_rates(self, cr, uid, obj_wizard, company_id, context=None):
'''
This function checks if the chosen chart template is configured as containing a full set of taxes, and if
it's not the case, it creates the templates for account.tax.code and for account.account.tax objects accordingly
to the provided sale/purchase rates. Then it saves the new tax templates as default taxes to use for this chart
template.
:param obj_wizard: browse record of wizard to generate COA from templates
:param company_id: id of the company for wich the wizard is running
:return: True
'''
obj_tax_code_template = self.pool.get('account.tax.code.template')
obj_tax_temp = self.pool.get('account.tax.template')
chart_template = obj_wizard.chart_template_id
vals = {}
all_parents = self._get_chart_parent_ids(cr, uid, chart_template, context=context)
# create tax templates and tax code templates from purchase_tax_rate and sale_tax_rate fields
if not chart_template.complete_tax_set:
value = obj_wizard.sale_tax_rate
ref_tax_ids = obj_tax_temp.search(cr, uid, [('type_tax_use','in', ('sale','all')), ('chart_template_id', 'in', all_parents)], context=context, order="sequence, id desc", limit=1)
obj_tax_temp.write(cr, uid, ref_tax_ids, {'amount': value/100.0, 'name': _('Tax %.2f%%') % value})
value = obj_wizard.purchase_tax_rate
ref_tax_ids = obj_tax_temp.search(cr, uid, [('type_tax_use','in', ('purchase','all')), ('chart_template_id', 'in', all_parents)], context=context, order="sequence, id desc", limit=1)
obj_tax_temp.write(cr, uid, ref_tax_ids, {'amount': value/100.0, 'name': _('Purchase Tax %.2f%%') % value})
return True
    def execute(self, cr, uid, ids, context=None):
        '''
        This function is called at the confirmation of the wizard to generate the COA from the templates. It will read
        all the provided information to create the accounts, the banks, the journals, the taxes, the tax codes, the
        accounting properties... accordingly for the chosen company.

        :param ids: list of wizard record ids; only the first one is processed.
        :returns: empty dict (closes the wizard).
        :raises openerp.exceptions.AccessError: if the user is not an ERP manager.
        '''
        if uid != SUPERUSER_ID and not self.pool['res.users'].has_group(cr, uid, 'base.group_erp_manager'):
            raise openerp.exceptions.AccessError(_("Only administrators can change the settings"))
        obj_data = self.pool.get('ir.model.data')
        ir_values_obj = self.pool.get('ir.values')
        obj_wizard = self.browse(cr, uid, ids[0])
        company_id = obj_wizard.company_id.id
        self.pool.get('res.company').write(cr, uid, [company_id], {'currency_id': obj_wizard.currency_id.id})
        # When we install the CoA of first company, set the currency to price types and pricelists
        if company_id==1:
            for ref in (('product','list_price'),('product','standard_price'),('product','list0'),('purchase','list0')):
                try:
                    tmp2 = obj_data.get_object_reference(cr, uid, *ref)
                    if tmp2:
                        self.pool.get(tmp2[0]).write(cr, uid, tmp2[1], {
                            'currency_id': obj_wizard.currency_id.id
                        })
                except ValueError:
                    # The referenced record may not exist (module not installed);
                    # skip it silently.
                    pass
        # If the floats for sale/purchase rates have been filled, create templates from them
        self._create_tax_templates_from_rates(cr, uid, obj_wizard, company_id, context=context)
        # Install all the templates objects and generate the real objects
        acc_template_ref, taxes_ref, tax_code_ref = self._install_template(cr, uid, obj_wizard.chart_template_id.id, company_id, code_digits=obj_wizard.code_digits, obj_wizard=obj_wizard, context=context)
        # write values of default taxes for product as super user
        # (regular users may not have the right to set company-wide defaults)
        if obj_wizard.sale_tax and taxes_ref:
            ir_values_obj.set_default(cr, SUPERUSER_ID, 'product.product', "taxes_id", [taxes_ref[obj_wizard.sale_tax.id]], for_all_users=True, company_id=company_id)
        if obj_wizard.purchase_tax and taxes_ref:
            ir_values_obj.set_default(cr, SUPERUSER_ID, 'product.product', "supplier_taxes_id", [taxes_ref[obj_wizard.purchase_tax.id]], for_all_users=True, company_id=company_id)
        # Create Bank journals
        self._create_bank_journals_from_o2m(cr, uid, obj_wizard, company_id, acc_template_ref, context=context)
        return {}
def _prepare_bank_journal(self, cr, uid, line, current_num, default_account_id, company_id, context=None):
'''
This function prepares the value to use for the creation of a bank journal created through the wizard of
generating COA from templates.
:param line: dictionary containing the values encoded by the user related to his bank account
:param current_num: integer corresponding to a counter of the already created bank journals through this wizard.
:param default_account_id: id of the default debit.credit account created before for this journal.
:param company_id: id of the company for which the wizard is running
:return: mapping of field names and values
:rtype: dict
'''
obj_data = self.pool.get('ir.model.data')
obj_journal = self.pool.get('account.journal')
# we need to loop again to find next number for journal code
# because we can't rely on the value current_num as,
# its possible that we already have bank journals created (e.g. by the creation of res.partner.bank)
# and the next number for account code might have been already used before for journal
for num in xrange(current_num, 100):
# journal_code has a maximal size of 5, hence we can enforce the boundary num < 100
journal_code = _('BNK')[:3] + str(num)
ids = obj_journal.search(cr, uid, [('code', '=', journal_code), ('company_id', '=', company_id)], context=context)
if not ids:
break
else:
raise osv.except_osv(_('Error!'), _('Cannot generate an unused journal code.'))
vals = {
'name': line['acc_name'],
'code': journal_code,
'type': line['account_type'] == 'cash' and 'cash' or 'bank',
'company_id': company_id,
'analytic_journal_id': False,
'currency': False,
'default_credit_account_id': default_account_id,
'default_debit_account_id': default_account_id,
}
if line['currency_id']:
vals['currency'] = line['currency_id']
return vals
    def _prepare_bank_account(self, cr, uid, line, new_code, acc_template_ref, ref_acc_bank, company_id, context=None):
        '''
        This function prepares the value to use for the creation of the default debit and credit accounts of a
        bank journal created through the wizard of generating COA from templates.

        :param line: dictionary containing the values encoded by the user related to his bank account
        :param new_code: integer corresponding to the next available number to use as account code
        :param acc_template_ref: the dictionary containing the mapping between the ids of account templates and the ids
            of the accounts that have been generated from them.
        :param ref_acc_bank: browse record of the account template set as root of all bank accounts for the chosen
            template
        :param company_id: id of the company for which the wizard is running
        :return: mapping of field names and values
        :rtype: dict
        '''
        obj_data = self.pool.get('ir.model.data')

        # Get the id of the user types for cash and bank
        tmp = obj_data.get_object_reference(cr, uid, 'account', 'data_account_type_cash')
        cash_type = tmp and tmp[1] or False
        tmp = obj_data.get_object_reference(cr, uid, 'account', 'data_account_type_bank')
        bank_type = tmp and tmp[1] or False
        return {
                'name': line['acc_name'],
                'currency_id': line['currency_id'],
                'code': new_code,
                'type': 'liquidity',
                'user_type': line['account_type'] == 'cash' and cash_type or bank_type,
                # Real bank accounts hang below the chart's bank root account.
                'parent_id': acc_template_ref[ref_acc_bank.id] or False,
                'company_id': company_id,
        }
    def _create_bank_journals_from_o2m(self, cr, uid, obj_wizard, company_id, acc_template_ref, context=None):
        '''
        This function creates bank journals and its accounts for each line encoded in the field bank_accounts_id of the
        wizard.

        :param obj_wizard: the current wizard that generates the COA from the templates.
        :param company_id: the id of the company for which the wizard is running.
        :param acc_template_ref: the dictionary containing the mapping between the ids of account templates and the ids
            of the accounts that have been generated from them.
        :return: True
        :raises osv.except_osv: if bank lines exist but the chart's bank root
            account has no code to derive the new account codes from.
        '''
        obj_acc = self.pool.get('account.account')
        obj_journal = self.pool.get('account.journal')
        code_digits = obj_wizard.code_digits

        # Build a list with all the data to process
        journal_data = []
        if obj_wizard.bank_accounts_id:
            for acc in obj_wizard.bank_accounts_id:
                vals = {
                    'acc_name': acc.acc_name,
                    'account_type': acc.account_type,
                    'currency_id': acc.currency_id.id,
                }
                journal_data.append(vals)
        ref_acc_bank = obj_wizard.chart_template_id.bank_account_view_id
        if journal_data and not ref_acc_bank.code:
            raise osv.except_osv(_('Configuration Error!'), _('You have to set a code for the bank account defined on the selected chart of accounts.'))

        current_num = 1
        for line in journal_data:
            # Seek the next available number for the account code
            # (pad the bank root code with zeros, then append the counter).
            while True:
                new_code = str(ref_acc_bank.code.ljust(code_digits-len(str(current_num)), '0')) + str(current_num)
                ids = obj_acc.search(cr, uid, [('code', '=', new_code), ('company_id', '=', company_id)])
                if not ids:
                    break
                else:
                    current_num += 1
            # Create the default debit/credit accounts for this bank journal
            vals = self._prepare_bank_account(cr, uid, line, new_code, acc_template_ref, ref_acc_bank, company_id, context=context)
            default_account_id  = obj_acc.create(cr, uid, vals, context=context)

            #create the bank journal
            vals_journal = self._prepare_bank_journal(cr, uid, line, current_num, default_account_id, company_id, context=context)
            obj_journal.create(cr, uid, vals_journal)
            current_num += 1
        return True
# Instantiate to register the model with the ORM (old-style OpenERP 6/7 API).
wizard_multi_charts_accounts()
class account_bank_accounts_wizard(osv.osv_memory):
    """Transient line model holding one bank account entry of the
    chart-of-accounts generation wizard (one2many lines of
    ``wizard.multi.charts.accounts``)."""
    _name='account.bank.accounts.wizard'

    _columns = {
        # Display name of the bank/cash account to create.
        'acc_name': fields.char('Account Name.', size=64, required=True),
        # Parent wizard record; lines are deleted with the wizard.
        'bank_account_id': fields.many2one('wizard.multi.charts.accounts', 'Bank Account', required=True, ondelete='cascade'),
        # Optional secondary currency forced on all moves of this account.
        'currency_id': fields.many2one('res.currency', 'Secondary Currency', help="Forces all moves for this account to have this secondary currency."),
        'account_type': fields.selection([('cash','Cash'), ('check','Check'), ('bank','Bank')], 'Account Type', size=32),
    }

# Instantiate to register the model with the ORM (old-style OpenERP 6/7 API).
account_bank_accounts_wizard()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
apixandru/intellij-community | refs/heads/master | python/lib/Lib/site-packages/django/contrib/gis/db/backends/spatialite/models.py | 403 | """
The GeometryColumns and SpatialRefSys models for the SpatiaLite backend.
"""
from django.db import models
from django.contrib.gis.db.backends.base import SpatialRefSysMixin
class GeometryColumns(models.Model):
    """
    The 'geometry_columns' table from SpatiaLite.
    """
    f_table_name = models.CharField(max_length=256)
    f_geometry_column = models.CharField(max_length=256)
    type = models.CharField(max_length=30)
    coord_dimension = models.IntegerField()
    srid = models.IntegerField(primary_key=True)
    spatial_index_enabled = models.IntegerField()

    class Meta:
        # Maps onto SpatiaLite's own metadata table; Django must not manage it.
        db_table = 'geometry_columns'
        managed = False

    @classmethod
    def table_name_col(cls):
        """
        Returns the name of the metadata column used to store the
        feature table name.
        """
        return 'f_table_name'

    @classmethod
    def geom_col_name(cls):
        """
        Returns the name of the metadata column used to store the
        feature geometry column.
        """
        return 'f_geometry_column'

    def __unicode__(self):
        return "%s.%s - %dD %s field (SRID: %d)" % \
               (self.f_table_name, self.f_geometry_column,
                self.coord_dimension, self.type, self.srid)
class SpatialRefSys(models.Model, SpatialRefSysMixin):
    """
    The 'spatial_ref_sys' table from SpatiaLite.
    """
    srid = models.IntegerField(primary_key=True)
    auth_name = models.CharField(max_length=256)
    auth_srid = models.IntegerField()
    ref_sys_name = models.CharField(max_length=256)
    proj4text = models.CharField(max_length=2048)

    @property
    def wkt(self):
        # SpatiaLite stores only the PROJ.4 string; derive WKT via GDAL.
        from django.contrib.gis.gdal import SpatialReference
        return SpatialReference(self.proj4text).wkt

    class Meta:
        # Maps onto SpatiaLite's own metadata table; Django must not manage it.
        db_table = 'spatial_ref_sys'
        managed = False
|
psnj/petl | refs/heads/master | examples/io/json.py | 4 | from __future__ import division, print_function, absolute_import
# fromjson()
############
# Load a table from a JSON file containing a list of objects; keys become
# the table header.
import petl as etl
data = '''
[{"foo": "a", "bar": 1},
{"foo": "b", "bar": 2},
{"foo": "c", "bar": 2}]
'''
with open('example.json', 'w') as f:
    f.write(data)
table1 = etl.fromjson('example.json')
table1
# fromdicts()
#############
# Same as fromjson() but starting from an in-memory list of dicts.
import petl as etl
dicts = [{"foo": "a", "bar": 1},
         {"foo": "b", "bar": 2},
         {"foo": "c", "bar": 2}]
table1 = etl.fromdicts(dicts)
table1
# tojson()
##########
# Write a table out as a JSON list of objects (one object per row).
import petl as etl
table1 = [['foo', 'bar'],
          ['a', 1],
          ['b', 2],
          ['c', 2]]
etl.tojson(table1, 'example.json', sort_keys=True)
# check what it did
print(open('example.json').read())
# tojsonarrays()
################
# Write a table out as a JSON list of arrays (row values only, no keys).
import petl as etl
table1 = [['foo', 'bar'],
          ['a', 1],
          ['b', 2],
          ['c', 2]]
etl.tojsonarrays(table1, 'example.json')
# check what it did
print(open('example.json').read())
|
jhona22baz/blog-flask | refs/heads/master | python2.7/lib/python2.7/site-packages/werkzeug/testsuite/contrib/securecookie.py | 102 | # -*- coding: utf-8 -*-
"""
werkzeug.testsuite.securecookie
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests the secure cookie.
:copyright: (c) 2013 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import unittest
from werkzeug.testsuite import WerkzeugTestCase
from werkzeug.utils import parse_cookie
from werkzeug.wrappers import Request, Response
from werkzeug.contrib.securecookie import SecureCookie
class SecureCookieTestCase(WerkzeugTestCase):
    """Tests for werkzeug.contrib.securecookie.SecureCookie."""

    def test_basic_support(self):
        """Round-trip serialize/unserialize; a wrong key yields an empty cookie."""
        c = SecureCookie(secret_key=b'foo')
        assert c.new
        assert not c.modified
        assert not c.should_save
        c['x'] = 42
        assert c.modified
        assert c.should_save
        s = c.serialize()
        c2 = SecureCookie.unserialize(s, b'foo')
        assert c is not c2
        assert not c2.new
        assert not c2.modified
        assert not c2.should_save
        self.assert_equal(c2, c)
        # A bad secret key must not raise; it yields an empty, untrusted cookie.
        c3 = SecureCookie.unserialize(s, b'wrong foo')
        assert not c3.modified
        assert not c3.new
        self.assert_equal(c3, {})

    def test_wrapper_support(self):
        """load_cookie/save_cookie round trip through Request/Response objects."""
        req = Request.from_values()
        resp = Response()
        c = SecureCookie.load_cookie(req, secret_key=b'foo')
        assert c.new
        c['foo'] = 42
        self.assert_equal(c.secret_key, b'foo')
        c.save_cookie(resp)

        # Feed the Set-Cookie header back in as a request cookie.
        req = Request.from_values(headers={
            'Cookie':  'session="%s"' % parse_cookie(resp.headers['set-cookie'])['session']
        })
        c2 = SecureCookie.load_cookie(req, secret_key=b'foo')
        assert not c2.new
        self.assert_equal(c2, c)
def suite():
    """Assemble the unittest suite for the secure cookie tests."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(SecureCookieTestCase))
    return tests
|
braintreeps/moto | refs/heads/master | moto/s3bucket_path/__init__.py | 7 | from __future__ import unicode_literals
from moto import mock_s3

# The bucket-path addressing style is handled by the same S3 mock backend;
# re-export it under its own name for API parity with other moto services.
mock_s3bucket_path = mock_s3
|
ehthiede/EMUS | refs/heads/master | src/emus/usutils.py | 1 | # -*- coding: utf-8 -*-
"""Module containing methods useful for analyzing umbrella sampling
calculations that do not rely directly on the EMUS estimator.
"""
from __future__ import absolute_import
import numpy as np
from ._defaults import DEFAULT_KT, DEFAULT_T, DEFAULT_K_B
import numbers
def neighbors_harmonic(centers, fks, kTs=DEFAULT_KT, period=None, nsig=6):
    """Calculates neighborlist for harmonic windows. Neighbors are chosen
    such that neighboring umbrellas are no more than nsig standard
    deviations away on a flat potential.

    Parameters
    ----------
    centers : 2D array-like
        The locations of the centers of each window.  The first dimension is the window index, and the second is the collective variable index.
    fks : 2D array-like or scalar
        If array or list, data structure where the first dimension corresponds to the window index and the second corresponds to the collective variable.  If scalar, windows are assumed to have that force constant in every dimension.
    kTs : 2D array-like or scalar
        1D array with the Boltzmann factor or a single value which will be used in all windows.  Default value is the scalar 1.
    period : 1D array-like or float
        Period of the collective variable e.g. 360 for an angle. If None, all collective variables are taken to be aperiodic.  If scalar, assumed to be period of each collective variable. If 1D iterable with each value a scalar or None, each cv has periodicity of that size.
    nsig : scalar
        Number of standard deviations of the gaussians to include in the neighborlist.

    Returns
    -------
    nbrs : 2D list
        List where element i is a list with the indices of all windows neighboring window i.

    """
    L = len(centers) # Number of Windows

    # Enforce Typing
    if isinstance(kTs, numbers.Number):
        kTs = kTs*np.ones(L)
    if isinstance(fks, numbers.Number):
        fks = fks*np.ones(np.shape(centers))
    # Broadcast per-window kT across every collective variable dimension.
    kTs = np.outer(kTs, np.ones(np.shape(fks[0])))
    # Cutoff radius per window/dimension: nsig standard deviations of the
    # harmonic bias, sigma = sqrt(kT/fk).
    rad = nsig*np.sqrt(kTs/fks)
    if period is not None:
        if isinstance(period, numbers.Number):  # Check if period is scalar
            # NOTE(review): a scalar period becomes a length-1 list here, but
            # _minimage_traj indexes period[d] per dimension — presumably only
            # valid for 1D collective variables; TODO confirm for multi-d cvs.
            period = [period]

    # Iterate through window centers and find neighboring umbrellas.
    nbrs = []
    for i, cntr_i in enumerate(centers):
        rad_i = rad[i]
        nbrs_i = []
        rv = centers - cntr_i
        rvmin = _minimage_traj(rv, period)
        for j, rv in enumerate(rvmin):
            # Window j is a neighbor only if it is within the cutoff in
            # EVERY collective variable dimension.
            if (np.abs(rv) < rad_i).all():
                nbrs_i.append(j)
        nbrs.append(nbrs_i)
    return nbrs
def unpack_nbrs(compd_array, neighbors, L):
    """Expand neighborlist-compressed data back to full window indexing.

    Axis 0 of ``compd_array`` is assumed to be the compressed (neighbor)
    axis.  Windows not present in ``neighbors`` are filled with zeros.

    Parameters
    ----------
    compd_array : array-like
        The compressed array, calculated using neighborlists.
    neighbors : array-like
        Sequence of ints giving the window index of each compressed entry.
    L : int
        The total number of windows.

    Returns
    -------
    expd_array : ndarray
        The expanded array, with shape equal to ``compd_array`` except that
        axis 0 has length ``L``.
    """
    full_shape = list(np.shape(compd_array))
    full_shape[0] = L
    expd_array = np.zeros(full_shape)
    for packed_idx, window_idx in enumerate(neighbors):
        expd_array[window_idx] = compd_array[packed_idx]
    return expd_array
def calc_harmonic_psis(cv_traj, centers, fks, kTs, period=None):
    """Evaluate every window's harmonic bias function on one trajectory.

    Parameters
    ----------
    cv_traj : array-like
        Trajectory in collective variable space; 1D (single cv) or 2D with
        the first axis being time and the second the cv index.
    centers : array-like
        Window centers; first axis is the window index.
    fks : 2D array-like or scalar
        Force constants per window/cv; a scalar is broadcast to all.
    kTs : array-like or scalar
        Boltzmann factor per window; a scalar is broadcast to all.
    period : 1D array-like or float, optional
        Periodicity of each cv; None means aperiodic.

    Returns
    -------
    psis : 2D ndarray
        psis[t, j] is the bias of window j evaluated at time point t.
    """
    n_windows = len(centers)
    if isinstance(kTs, numbers.Number):
        kTs = kTs * np.ones(n_windows)
    if isinstance(fks, numbers.Number):
        fks = fks * np.ones(np.shape(centers))
    # One column per window; stack then transpose to (time, window).
    columns = [_calc_harmonic_psi_ij(cv_traj, centers[j], fks[j], kTs[j],
                                     period=period)
               for j in range(n_windows)]
    return np.array(columns).T
def _calc_harmonic_psi_ij(cv_traj, win_center, win_fk, kT=1.0, period=None):
    """Helper routine for calc_harm_psis. Evaluates the value of the bias
    function for a single harmonic window over a trajectory.

    Parameters
    ----------
    cv_traj : array-like
        Trajectory in collective variable space.  Can be 1-dimensional (one cv) or 2-dimensional (many cvs).  The first dimension is the time index, and (optional) second corresponds to the collective variable.
    win_center : array-like or scalar
        Array of the centers of the window.
    win_fk : array-like or scalar
        Force constants for the window, one per collective variable.
    kT : scalar, optional
        Boltzmann factor for this window.  Default 1.0.
    period : 1D array-like or float, optional
        Period of the collective variables.  See documentation for calc_harmonic_psis.

    Returns
    -------
    psivals : 1D array
        Value of :math:`\psi_{ij}(x)` evaluated at each point in the trajectory.

    """
    try:
        ndim = len(win_center)
    except TypeError:
        # win_center is a scalar: single collective variable.
        ndim = 1
    if period is not None:
        if not hasattr(period, '__getitem__'):  # Check if period is a scalar
            # Replicate a scalar period across every cv dimension.
            period = [period]*ndim
    # Displacement from the window center, wrapped by minimum image.
    rv = cv_traj - win_center
    # Enforce Minimum Image Convention.
    rvmin = _minimage_traj(rv, period)

    # Calculate psi_ij: harmonic energy U = sum_d fk_d * dx_d^2 / 2,
    # Boltzmann-weighted by kT.
    U = rvmin*rvmin*win_fk
    if len(np.shape(U)) == 2:
        U = np.sum(U, axis=1)
    U /= 2.
    return np.exp(-U/kT)
def fxn_data_from_meta(filepath):
    """Parse a meta file listing observable data files and load them.

    Each non-blank, non-comment line of the meta file is a path to a text
    file whose first column is ignored (e.g. a time index) and whose
    remaining columns are observable values.

    Parameters
    ----------
    filepath : string
        Path to the meta file containing the paths of the observable data.

    Returns
    -------
    fxndata : list of lists of 1D arrays
        fxndata[k][w] is the trajectory of observable k in window w.
    """
    data_paths = []
    with open(filepath, 'r') as meta:
        for raw_line in meta:
            stripped = raw_line.strip()
            # Skip blank lines and comments.
            if (not stripped) or stripped.startswith("#"):
                continue
            data_paths.append(stripped)
    fxndata = []
    nfxns = None  # Set from the first file's column count.
    for path_index, data_path in enumerate(data_paths):
        window_data = np.loadtxt(data_path)
        if path_index == 0:
            nfxns = int(len(window_data[0]) - 1)
            fxndata = [[window_data[:, col + 1]] for col in range(nfxns)]
        else:
            for col in range(nfxns):
                fxndata[col].append(window_data[:, col + 1])
    return fxndata
def data_from_meta(filepath, dim, T=DEFAULT_T, k_B=DEFAULT_K_B, nsig=None, period=None):
    """Reads collective variable data from as tabulated by a meta file of the same format used in Grossfield's implementation of the WHAM algorithm, and calculates the value of the biasing functions.

    Parameters
    ----------
    filepath : string
        The path to the meta file.
    dim : int
        The number of dimensions of the cv space.
    T : scalar, optional
        Temperature of the system if not provided in the meta file.
    k_B : scalar, optional
        Boltzmann Constant for the system. Default is in natural units (1.0)
    nsig : scalar or None, optional
        Number of standard deviations of the gaussians to include in the neighborlist.  If None, does not use neighbor lists.
    period : 1D array-like or float, optional
        Variable with the periodicity information of the system.  See the Data Structures section of the documentation for a detailed explanation.

    Returns
    -------
    psis : List of 2D arrays
        The values of the bias functions evaluated each window and timepoint.  See `datastructures <../datastructures.html#data-from-sampling>`__ for more information.
    trajs : List of 2D arrays
        The trajectory in collective variable space for each window.
    neighbors : 2D array-like
        The neighborlist for each window.

    """
    # Parse Meta file.
    trajlocs, cntrs, fks, iats, temps = _parse_metafile(filepath, dim)
    L = len(cntrs)
    # Calculate kT for each window.  ``temps`` is a numpy array, so truth-
    # testing it directly (``if not temps``) raises a ValueError when more
    # than one temperature is present; test its length instead.
    if len(temps) == 0:
        try:
            temps = np.ones(L)*T
        except TypeError:
            # T was None (or otherwise not multipliable): no usable temperature.
            raise TypeError(
                'No Temperatures were found in the meta file, and no valid Temperature was provided as input.')
    kT = k_B * temps
    if nsig is not None:
        neighbors = neighbors_harmonic(
            cntrs, fks, kTs=kT, period=period, nsig=nsig)
    else:
        # No neighborlist: every window neighbors every other window.
        neighbors = np.outer(np.ones(L), range(L)).astype(int)

    # Load in the trajectories into the cv space
    trajs = []
    for i, trajloc in enumerate(trajlocs):
        trajs.append(np.loadtxt(trajloc)[:, 1:1+dim])

    # Calculate psi values
    psis = []
    for i, traj in enumerate(trajs):
        nbrs_i = neighbors[i]
        psi_i = calc_harmonic_psis(traj, cntrs[nbrs_i], fks, kT, period=period)
        psis.append(psi_i)
    return psis, trajs, neighbors
def _parse_metafile(filepath, dim):
"""
Parses the meta file located at filepath. Assumes Wham-like Syntax.
Parameters
----------
filepath : string
The path to the meta file.
dim : int
The number of dimensions of the cv space.
Returns
-------
traj_paths : list of strings
A list containing the paths to the trajectories for each window.
centers : 2D array of floats
Array with the center of each harmonic window. See calc_harm_psis for syntax.
fks : 2D array of floats
Array with the force constants for each harmonic window. See calc_harm_psis for syntax.
iats : 1D array of floats or None
Array with the integrated autocorrelation times of each window. None if not given in
the meta file
temps : 1D array of floats or None
Array with the temperature of each window in the umbrella sampling calculation. If not given in the meta file, this will just be None.
"""
traj_paths = []
fks = []
centers = []
iats = []
temps = []
with open(filepath, 'r') as f:
for full_line in f:
line = full_line.strip()
if not line:
continue
if line.startswith("#"):
continue
windowparams = line.split()
traj_paths.append(windowparams[0])
centers.append(windowparams[1:1+dim])
fks.append(windowparams[1+dim:1+2*dim])
if len(windowparams) > 1+2*dim: # If Correlation Time provided
iats.append(windowparams[1+2*dim])
if len(windowparams) > 2+2*dim: # If Temperature is provided
temps.append(windowparams[2+2*dim])
# Move to numpy arrays, convert to appropriate data types
fks = np.array(fks).astype('float')
centers = np.array(centers).astype('float')
iats = np.array(iats).astype('float')
temps = np.array(temps).astype('float')
return traj_paths, centers, fks, iats, temps
def _minimage(rv, period):
"""Calculates the minimum vector.
Parameters
----------
rv : array-like or scalar
Minimum image vector
period : array-like or scalar
Periodicity in each dimension.
Returns
-------
minimage : array-like or scalar
minimum image vector.
"""
return rv - period * np.rint(rv/period)
def _minimage_traj(rv, period):
"""Calculates the minimum trajectory
Parameters
----------
rv : 1 or 2D array-like
Minimum image trajectory
period : array-like or scalar
Periodicity in each dimension.
Returns
-------
minimage : array-like
minimum image trajectory
"""
rvmin = np.array(np.copy(rv))
if len(np.shape(rv)) == 1: # 1D trajectory array provided
if period is not None:
p = period[0]
if (p is not None) and (p != 0):
rvmin -= p*np.rint(rvmin/p)
elif len(np.shape(rv)) == 2: # 2D trajectory array provided
ndim = len(rv[0])
if period is not None:
for d in range(ndim):
p = period[d]
if (p is not None) and (p != 0):
rvmin[:, d] -= p*np.rint(rvmin[:, d]/p)
else: # User provided something weird...
raise ValueError("Trajectory provided has wrong dimensionality %d, " +
"dimension should be 1 or 2." % len(np.shape(rv)))
return rvmin
|
w1kke/pylearn2 | refs/heads/master | pylearn2/scripts/show_examples.py | 44 | #!/usr/bin/env python
"""
.. todo::
WRITEME
"""
from __future__ import print_function
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
import argparse
import numpy as np
from theano.compat.six.moves import xrange
from pylearn2.gui import patch_viewer
from pylearn2.config import yaml_parse
def show_examples(path, rows, cols, rescale='global', out=None):
    """
    Display or save a grid of example patches from a dataset or model.

    Parameters
    ----------
    path : string
        The pickle or YAML file to show examples of.  A ``.pkl`` file is
        unpickled; a ``.yaml`` path or inline YAML string is parsed.  The
        loaded object may be a Dataset (examples are drawn from it) or a
        Model (examples are sampled from it).
    rows : int
        Number of rows in the displayed grid.
    cols : int
        Number of columns in the displayed grid.
    rescale : {'none', 'global', 'individual'}
        How to rescale patch values for display: not at all, jointly over
        all examples, or per patch.  Default is 'global'.
    out : string, optional
        If given, save the image to this path instead of displaying it.
    """
    if rescale == 'none':
        global_rescale = False
        patch_rescale = False
    elif rescale == 'global':
        global_rescale = True
        patch_rescale = False
    elif rescale == 'individual':
        global_rescale = False
        patch_rescale = True
    else:
        # Bug fix: an unrecognized value previously fell through and caused
        # a NameError when global_rescale was first read below.
        raise ValueError("rescale must be 'none', 'global' or 'individual', "
                         "got %r" % (rescale,))

    if path.endswith('.pkl'):
        from pylearn2.utils import serial
        obj = serial.load(path)
    elif path.endswith('.yaml'):
        print('Building dataset from yaml...')
        obj = yaml_parse.load_path(path)
        print('...done')
    else:
        obj = yaml_parse.load(path)

    if hasattr(obj, 'get_batch_topo'):
        # obj is a Dataset
        dataset = obj
        examples = dataset.get_batch_topo(rows*cols)
    else:
        # obj is a Model: sample a design matrix from it, then recover the
        # topological view via the model's own dataset.
        model = obj
        from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
        theano_rng = RandomStreams(42)
        design_examples_var = model.random_design_matrix(
            batch_size=rows * cols, theano_rng=theano_rng
        )
        from theano import function
        print('compiling sampling function')
        f = function([], design_examples_var)
        print('sampling')
        design_examples = f()
        print('loading dataset')
        dataset = yaml_parse.load(model.dataset_yaml_src)
        examples = dataset.get_topological_view(design_examples)

    # Report basic statistics so the user can sanity-check the data.
    norms = np.asarray([np.sqrt(np.sum(np.square(examples[i, :])))
                        for i in xrange(examples.shape[0])])
    print('norms of examples: ')
    print('\tmin: ', norms.min())
    print('\tmean: ', norms.mean())
    print('\tmax: ', norms.max())

    print('range of elements of examples', (examples.min(), examples.max()))
    print('dtype: ', examples.dtype)

    examples = dataset.adjust_for_viewer(examples)

    if global_rescale:
        examples /= np.abs(examples).max()

    # Only (batch, rows, cols, channels) image data can be displayed.
    if len(examples.shape) != 4:
        print('sorry, view_examples.py only supports image examples for now.')
        print('this dataset has ' + str(len(examples.shape) - 2), end='')
        print('topological dimensions')
        quit(-1)

    if examples.shape[3] == 1:
        is_color = False
    elif examples.shape[3] == 3:
        is_color = True
    else:
        print('got unknown image format with', str(examples.shape[3]), end='')
        print('channels')
        print('supported formats are 1 channel greyscale or three channel RGB')
        quit(-1)

    print(examples.shape[1:3])

    pv = patch_viewer.PatchViewer((rows, cols), examples.shape[1:3],
                                  is_color=is_color)

    for i in xrange(rows*cols):
        pv.add_patch(examples[i, :, :, :], activation=0.0,
                     rescale=patch_rescale)

    if out is None:
        pv.show()
    else:
        pv.save(out)
if __name__ == "__main__":
    # Command-line entry point: parse grid size and display options, then
    # delegate to show_examples() with the dataset/model path.
    parser = argparse.ArgumentParser()
    parser.add_argument('--rows', default=20, type=int)
    parser.add_argument('--cols', default=20, type=int)
    parser.add_argument('--rescale', default='global',
                        choices=['none', 'global', 'individual'],
                        help="how to rescale the patches for display")
    parser.add_argument('--out',
                        help='if not specified, displays an image. '
                             'otherwise saves an image to the specified path')
    parser.add_argument("path")
    args = parser.parse_args()
    show_examples(args.path, args.rows, args.cols, args.rescale, args.out)
|
openSUSE/docmanager | refs/heads/develop | test/test_docmanager_exitcodes.py | 1 | #!/usr/bin/python3
import pytest
import shlex
from docmanager.cli import parsecli
from docmanager.action import Actions
from docmanager.core import ReturnCodes
from xml.sax._exceptions import SAXParseException
def test_exitcodes_0(tmp_broken_xml):
    """ call docmanager without params """
    # Bug fix: the original try/except passed vacuously if no SystemExit
    # was raised; pytest.raises makes a missing exit a test failure.
    with pytest.raises(SystemExit) as excinfo:
        parsecli([])
    assert excinfo.value.code == ReturnCodes.E_CALL_WITHOUT_PARAMS, \
        "Expected exit code {} but got {}.".format(ReturnCodes.E_CALL_WITHOUT_PARAMS,
                                                   excinfo.value.code)
def test_exitcodes_1(tmp_broken_xml):
    """ parse broken xml file in get """
    # Bug fix: the original try/except passed vacuously if no SystemExit
    # was raised; pytest.raises makes a missing exit a test failure.
    clicmd = "get {}".format(tmp_broken_xml)
    with pytest.raises(SystemExit) as excinfo:
        Actions(parsecli(shlex.split(clicmd)))
    assert excinfo.value.code == ReturnCodes.E_XML_PARSE_ERROR, \
        "Expected exit code {} but got {}.".format(ReturnCodes.E_XML_PARSE_ERROR,
                                                   excinfo.value.code)
def test_exitcodes_2(tmp_invalid_db5_file):
    """ check for an invalid DocBook 5 file """
    # Bug fix: the original try/except passed vacuously if no SystemExit
    # was raised; pytest.raises makes a missing exit a test failure.
    clicmd = "get {}".format(tmp_invalid_db5_file)
    with pytest.raises(SystemExit) as excinfo:
        Actions(parsecli(shlex.split(clicmd)))
    assert excinfo.value.code == ReturnCodes.E_INVALID_XML_DOCUMENT, \
        "Expected exit code {} but got {}.".format(ReturnCodes.E_INVALID_XML_DOCUMENT,
                                                   excinfo.value.code)
def test_exitcodes_3(tmp_invalid_db5_file):
    """ check for a missing file """
    # Bug fix: the original try/except passed vacuously if no SystemExit
    # was raised; pytest.raises makes a missing exit a test failure.
    # (Docstring corrected: this test exercises a nonexistent file path,
    # not an invalid DocBook 5 file.)
    clicmd = "get invalid_file_name.xml"
    with pytest.raises(SystemExit) as excinfo:
        a = Actions(parsecli(shlex.split(clicmd)))
        a.parse()
    assert excinfo.value.code == ReturnCodes.E_FILE_NOT_FOUND, \
        "Expected exit code {} but got {}.".format(ReturnCodes.E_FILE_NOT_FOUND,
                                                   excinfo.value.code)
|
varunarya10/python-neutronclient | refs/heads/master | neutronclient/tests/unit/test_cli20_subnet.py | 4 | # Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import sys
from mox3 import mox
from neutronclient.common import exceptions
from neutronclient.neutron.v2_0 import subnet
from neutronclient.tests.unit import test_cli20
class CLITestV20SubnetJSON(test_cli20.CLITestV20Base):
    """Subnet create/list/show/update/delete CLI tests (JSON request format).

    Each test builds the CLI argument list and asserts the resulting API
    request via the helpers inherited from CLITestV20Base.
    """

    def setUp(self):
        # 'tags' pluralizes irregularly for this resource; register it.
        super(CLITestV20SubnetJSON, self).setUp(plurals={'tags': 'tag'})

    def test_create_subnet(self):
        """Create subnet: --gateway gateway netid cidr."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'cidrvalue'
        gateway = 'gatewayvalue'
        args = ['--gateway', gateway, netid, cidr]
        position_names = ['ip_version', 'network_id', 'cidr', 'gateway_ip']
        position_values = [4, netid, cidr, gateway]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values)

    def test_create_subnet_with_no_gateway(self):
        """Create subnet: --no-gateway netid cidr."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'cidrvalue'
        args = ['--no-gateway', netid, cidr]
        position_names = ['ip_version', 'network_id', 'cidr', 'gateway_ip']
        position_values = [4, netid, cidr, None]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values)

    def test_create_subnet_with_bad_gateway_option(self):
        """Create subnet: --gateway gateway --no-gateway netid cidr."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'cidrvalue'
        gateway = 'gatewayvalue'
        # --gateway and --no-gateway are mutually exclusive; expect failure.
        args = ['--gateway', gateway, '--no-gateway', netid, cidr]
        position_names = ['ip_version', 'network_id', 'cidr', 'gateway_ip']
        position_values = [4, netid, cidr, None]
        try:
            self._test_create_resource(resource, cmd, name, myid, args,
                                       position_names, position_values)
        except Exception:
            return
        self.fail('No exception for bad gateway option')

    def _test_create_resource_and_catch_command_error(self, tested_args,
                                                      should_fail,
                                                      *args):
        """Run _test_create_resource and assert whether CommandError occurs.

        tested_args are the option strings under test (used only for the
        failure message); should_fail says if CommandError is expected.
        """
        _j = lambda args: ' '.join(args)
        try:
            self._test_create_resource(*args)
        except exceptions.CommandError:
            if not should_fail:
                self.fail(
                    'Unexpected exception raised for %s options' %
                    _j(tested_args))
            self.mox.UnsetStubs()
        else:
            if should_fail:
                self.fail(
                    'No exception for %s options' % _j(tested_args))

    def test_create_subnet_with_enable_and_disable_dhcp(self):
        """Create subnet: --enable-dhcp and --disable-dhcp."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'cidrvalue'
        position_names = ['ip_version', 'network_id', 'cidr', 'enable_dhcp']
        # enable_dhcp value is appended later inside the loop
        position_values = [4, netid, cidr]
        for enable_dhcp_arg, should_fail in (
            ('--enable-dhcp=False', False),
            ('--enable-dhcp=True', True),
            ('--enable-dhcp', True)
        ):
            tested_args = [enable_dhcp_arg, '--disable-dhcp']
            args = tested_args + [netid, cidr]
            pos_values = position_values + [should_fail]
            self._test_create_resource_and_catch_command_error(
                tested_args, should_fail,
                resource, cmd, name, myid, args, position_names, pos_values)

    def test_create_subnet_with_multiple_enable_dhcp(self):
        """Create subnet with multiple --enable-dhcp arguments passed."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'cidrvalue'
        position_names = ['ip_version', 'network_id', 'cidr', 'enable_dhcp']
        # enable_dhcp value is appended later inside the loop
        position_values = [4, netid, cidr]
        # Placeholder pos_value for cases expected to fail before use.
        _ = 'UNUSED_MARKER'
        for tested_args, should_fail, pos_value in (
            (['--enable-dhcp', '--enable-dhcp=True'], False, True),
            (['--enable-dhcp', '--enable-dhcp=False'], True, _),
            (['--enable-dhcp=False', '--enable-dhcp'], True, _),
            (['--enable-dhcp=True', '--enable-dhcp=False'], True, _),
            (['--enable-dhcp=False', '--enable-dhcp=True'], True, _)
        ):
            args = tested_args + [netid, cidr]
            pos_values = position_values + [pos_value]
            self._test_create_resource_and_catch_command_error(
                tested_args, should_fail,
                resource, cmd, name, myid, args, position_names, pos_values)

    def test_create_subnet_tenant(self):
        """Create subnet: --tenant_id tenantid netid cidr."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid', netid, cidr]
        position_names = ['ip_version', 'network_id', 'cidr']
        position_values = [4, netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_tags(self):
        """Create subnet: netid cidr --tags a b."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = [netid, cidr, '--tags', 'a', 'b']
        position_names = ['ip_version', 'network_id', 'cidr']
        position_values = [4, netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tags=['a', 'b'])

    def test_create_subnet_allocation_pool(self):
        """Create subnet: --tenant_id tenantid <allocation_pool> netid cidr.

        The <allocation_pool> is --allocation_pool start=1.1.1.10,end=1.1.1.20
        """
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--allocation_pool', 'start=1.1.1.10,end=1.1.1.20',
                netid, cidr]
        position_names = ['ip_version', 'allocation_pools', 'network_id',
                          'cidr']
        pool = [{'start': '1.1.1.10', 'end': '1.1.1.20'}]
        position_values = [4, pool, netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_allocation_pools(self):
        """Create subnet: --tenant-id tenantid <pools> netid cidr.

        The <pools> are --allocation_pool start=1.1.1.10,end=1.1.1.20 and
        --allocation_pool start=1.1.1.30,end=1.1.1.40
        """
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--allocation_pool', 'start=1.1.1.10,end=1.1.1.20',
                '--allocation_pool', 'start=1.1.1.30,end=1.1.1.40',
                netid, cidr]
        position_names = ['ip_version', 'allocation_pools', 'network_id',
                          'cidr']
        pools = [{'start': '1.1.1.10', 'end': '1.1.1.20'},
                 {'start': '1.1.1.30', 'end': '1.1.1.40'}]
        position_values = [4, pools, netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_host_route(self):
        """Create subnet: --tenant_id tenantid <host_route> netid cidr.

        The <host_route> is
        --host-route destination=172.16.1.0/24,nexthop=1.1.1.20
        """
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--host-route', 'destination=172.16.1.0/24,nexthop=1.1.1.20',
                netid, cidr]
        position_names = ['ip_version', 'host_routes', 'network_id',
                          'cidr']
        route = [{'destination': '172.16.1.0/24', 'nexthop': '1.1.1.20'}]
        position_values = [4, route, netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_host_routes(self):
        """Create subnet: --tenant-id tenantid <host_routes> netid cidr.

        The <host_routes> are
        --host-route destination=172.16.1.0/24,nexthop=1.1.1.20 and
        --host-route destination=172.17.7.0/24,nexthop=1.1.1.40
        """
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--host-route', 'destination=172.16.1.0/24,nexthop=1.1.1.20',
                '--host-route', 'destination=172.17.7.0/24,nexthop=1.1.1.40',
                netid, cidr]
        position_names = ['ip_version', 'host_routes', 'network_id',
                          'cidr']
        routes = [{'destination': '172.16.1.0/24', 'nexthop': '1.1.1.20'},
                  {'destination': '172.17.7.0/24', 'nexthop': '1.1.1.40'}]
        position_values = [4, routes, netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_dns_nameservers(self):
        """Create subnet: --tenant-id tenantid <dns-nameservers> netid cidr.

        The <dns-nameservers> are
        --dns-nameserver 1.1.1.20 and --dns-nameserver 1.1.1.40
        """
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--dns-nameserver', '1.1.1.20',
                '--dns-nameserver', '1.1.1.40',
                netid, cidr]
        position_names = ['ip_version', 'dns_nameservers', 'network_id',
                          'cidr']
        nameservers = ['1.1.1.20', '1.1.1.40']
        position_values = [4, nameservers, netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_with_disable_dhcp(self):
        """Create subnet: --tenant-id tenantid --disable-dhcp netid cidr."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--disable-dhcp',
                netid, cidr]
        position_names = ['ip_version', 'enable_dhcp', 'network_id',
                          'cidr']
        position_values = [4, False, netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_merge_single_plurar(self):
        """Create subnet: merge one --allocation-pool with --allocation-pools."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--allocation-pool', 'start=1.1.1.10,end=1.1.1.20',
                netid, cidr,
                '--allocation-pools', 'list=true', 'type=dict',
                'start=1.1.1.30,end=1.1.1.40']
        position_names = ['ip_version', 'allocation_pools', 'network_id',
                          'cidr']
        pools = [{'start': '1.1.1.10', 'end': '1.1.1.20'},
                 {'start': '1.1.1.30', 'end': '1.1.1.40'}]
        position_values = [4, pools, netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_merge_plurar(self):
        """Create subnet: plural --allocation-pools form only."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                netid, cidr,
                '--allocation-pools', 'list=true', 'type=dict',
                'start=1.1.1.30,end=1.1.1.40']
        position_names = ['ip_version', 'allocation_pools', 'network_id',
                          'cidr']
        pools = [{'start': '1.1.1.30', 'end': '1.1.1.40'}]
        position_values = [4, pools, netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_merge_single_single(self):
        """Create subnet: merge two single --allocation-pool options."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--allocation-pool', 'start=1.1.1.10,end=1.1.1.20',
                netid, cidr,
                '--allocation-pool',
                'start=1.1.1.30,end=1.1.1.40']
        position_names = ['ip_version', 'allocation_pools', 'network_id',
                          'cidr']
        pools = [{'start': '1.1.1.10', 'end': '1.1.1.20'},
                 {'start': '1.1.1.30', 'end': '1.1.1.40'}]
        position_values = [4, pools, netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_max_v4_cidr(self):
        """Create subnet: --gateway gateway netid 192.168.0.1/32 (warns)."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = '192.168.0.1/32'
        gateway = 'gatewayvalue'
        args = ['--gateway', gateway, netid, cidr]
        position_names = ['ip_version', 'network_id', 'cidr', 'gateway_ip']
        position_values = [4, netid, cidr, gateway]
        # A /32 CIDR should trigger a warning log; stub it out and verify.
        self.mox.StubOutWithMock(cmd.log, 'warning')
        cmd.log.warning(mox.IgnoreArg())
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values)
        self.mox.VerifyAll()
        self.mox.UnsetStubs()

    def test_create_subnet_with_ipv6_ra_mode(self):
        """Create an IPv6 subnet with --ipv6-ra-mode."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--ip-version', '6',
                '--ipv6-ra-mode', 'dhcpv6-stateful',
                netid, cidr]
        position_names = ['ip_version', 'ipv6_ra_mode',
                          'network_id', 'cidr']
        position_values = [6, 'dhcpv6-stateful', netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_with_ipv6_address_mode(self):
        """Create an IPv6 subnet with --ipv6-address-mode."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--ip-version', '6',
                '--ipv6-address-mode', 'dhcpv6-stateful',
                netid, cidr]
        position_names = ['ip_version', 'ipv6_address_mode',
                          'network_id', 'cidr']
        position_values = [6, 'dhcpv6-stateful', netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_with_ipv6_modes(self):
        """Create an IPv6 subnet with both ipv6 modes set to slaac."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--ip-version', '6',
                '--ipv6-address-mode', 'slaac',
                '--ipv6-ra-mode', 'slaac',
                netid, cidr]
        position_names = ['ip_version', 'ipv6_address_mode',
                          'ipv6_ra_mode', 'network_id', 'cidr']
        position_values = [6, 'slaac', 'slaac', netid, cidr]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values,
                                   tenant_id='tenantid')

    def test_create_subnet_with_ipv6_ra_mode_ipv4(self):
        """--ipv6-ra-mode on an IPv4 subnet must raise CommandError."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--ip-version', '4',
                '--ipv6-ra-mode', 'slaac',
                netid, cidr]
        position_names = ['ip_version', 'ipv6_ra_mode',
                          'network_id', 'cidr']
        position_values = [4, None, netid, cidr]
        self.assertRaises(exceptions.CommandError, self._test_create_resource,
                          resource, cmd, name, myid, args, position_names,
                          position_values, tenant_id='tenantid')

    def test_create_subnet_with_ipv6_address_mode_ipv4(self):
        """--ipv6-address-mode on an IPv4 subnet must raise CommandError."""
        resource = 'subnet'
        cmd = subnet.CreateSubnet(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        cidr = 'prefixvalue'
        args = ['--tenant_id', 'tenantid',
                '--ip-version', '4',
                '--ipv6-address-mode', 'slaac',
                netid, cidr]
        position_names = ['ip_version', 'ipv6_address_mode',
                          'network_id', 'cidr']
        position_values = [4, None, netid, cidr]
        self.assertRaises(exceptions.CommandError, self._test_create_resource,
                          resource, cmd, name, myid, args, position_names,
                          position_values, tenant_id='tenantid')

    def test_list_subnets_detail(self):
        """List subnets: -D."""
        resources = "subnets"
        cmd = subnet.ListSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_list_resources(resources, cmd, True)

    def test_list_subnets_tags(self):
        """List subnets: -- --tags a b."""
        resources = "subnets"
        cmd = subnet.ListSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_list_resources(resources, cmd, tags=['a', 'b'])

    def test_list_subnets_known_option_after_unknown(self):
        """List subnets: -- --tags a b."""
        resources = "subnets"
        cmd = subnet.ListSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_list_resources(resources, cmd, tags=['a', 'b'])

    def test_list_subnets_detail_tags(self):
        """List subnets: -D -- --tags a b."""
        resources = "subnets"
        cmd = subnet.ListSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_list_resources(resources, cmd, detail=True, tags=['a', 'b'])

    def test_list_subnets_fields(self):
        """List subnets: --fields a --fields b -- --fields c d."""
        resources = "subnets"
        cmd = subnet.ListSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_list_resources(resources, cmd,
                                  fields_1=['a', 'b'], fields_2=['c', 'd'])

    def test_list_subnets_pagination(self):
        """List subnets with pagination support."""
        resources = "subnets"
        cmd = subnet.ListSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_list_resources_with_pagination(resources, cmd)

    def test_list_subnets_sort(self):
        """List subnets: --sort-key name --sort-key id --sort-key asc
        --sort-key desc
        """
        resources = "subnets"
        cmd = subnet.ListSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_list_resources(resources, cmd,
                                  sort_key=["name", "id"],
                                  sort_dir=["asc", "desc"])

    def test_list_subnets_limit(self):
        """List subnets: -P."""
        resources = "subnets"
        cmd = subnet.ListSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_list_resources(resources, cmd, page_size=1000)

    def test_update_subnet(self):
        """Update subnet: myid --name myname --tags a b."""
        resource = 'subnet'
        cmd = subnet.UpdateSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_update_resource(resource, cmd, 'myid',
                                   ['myid', '--name', 'myname',
                                    '--tags', 'a', 'b'],
                                   {'name': 'myname', 'tags': ['a', 'b'], }
                                   )

    def test_update_subnet_known_option_before_id(self):
        """Update subnet: --request-format json myid --name myname."""
        # --request-format json is a known option
        resource = 'subnet'
        cmd = subnet.UpdateSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_update_resource(resource, cmd, 'myid',
                                   ['--request-format', 'json',
                                    'myid', '--name', 'myname'],
                                   {'name': 'myname', }
                                   )

    def test_update_subnet_known_option_after_id(self):
        """Update subnet: myid --name myname --request-format json."""
        # --request-format json is a known option
        resource = 'subnet'
        cmd = subnet.UpdateSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_update_resource(resource, cmd, 'myid',
                                   ['myid', '--name', 'myname',
                                    '--request-format', 'json'],
                                   {'name': 'myname', }
                                   )

    def test_update_subnet_allocation_pools(self):
        """Update subnet: myid --allocation-pool start=...,end=...."""
        resource = 'subnet'
        cmd = subnet.UpdateSubnet(test_cli20.MyApp(sys.stdout), None)
        self._test_update_resource(resource, cmd, 'myid',
                                   ['myid', '--allocation-pool',
                                    'start=1.2.0.2,end=1.2.0.127',
                                    '--request-format', 'json'],
                                   {'allocation_pools': [{'start': '1.2.0.2',
                                                          'end': '1.2.0.127'}]}
                                   )

    def test_update_subnet_enable_disable_dhcp(self):
        """Update subnet: --enable-dhcp and --disable-dhcp."""
        resource = 'subnet'
        cmd = subnet.UpdateSubnet(test_cli20.MyApp(sys.stdout), None)
        try:
            self._test_update_resource(resource, cmd, 'myid',
                                       ['myid', '--name', 'myname',
                                        '--enable-dhcp', '--disable-dhcp'],
                                       {'name': 'myname', }
                                       )
        except exceptions.CommandError:
            return
        self.fail('No exception for --enable-dhcp --disable-dhcp option')

    def test_show_subnet(self):
        """Show subnet: --fields id --fields name myid."""
        resource = 'subnet'
        cmd = subnet.ShowSubnet(test_cli20.MyApp(sys.stdout), None)
        args = ['--fields', 'id', '--fields', 'name', self.test_id]
        self._test_show_resource(resource, cmd, self.test_id,
                                 args, ['id', 'name'])

    def test_delete_subnet(self):
        """Delete subnet: subnetid."""
        resource = 'subnet'
        cmd = subnet.DeleteSubnet(test_cli20.MyApp(sys.stdout), None)
        myid = 'myid'
        args = [myid]
        self._test_delete_resource(resource, cmd, myid, args)
class CLITestV20SubnetXML(CLITestV20SubnetJSON):
    """Re-run the whole subnet CLI test suite with the XML request format."""
    format = 'xml'
|
VasuAgrawal/tartanHacks2015 | refs/heads/master | site/flask/lib/python2.7/site-packages/pip/commands/completion.py | 435 | from __future__ import absolute_import
import sys
from pip.basecommand import Command
BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""
COMPLETION_SCRIPTS = {
'bash': """
_pip_completion()
{
COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
COMP_CWORD=$COMP_CWORD \\
PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
""", 'zsh': """
function _pip_completion {
local words cword
read -Ac words
read -cn cword
reply=( $( COMP_WORDS="$words[*]" \\
COMP_CWORD=$(( cword-1 )) \\
PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
"""}
class CompletionCommand(Command):
    """A helper command to be used for command completion."""
    name = 'completion'
    summary = 'A helper command to be used for command completion'
    hidden = True

    def __init__(self, *args, **kw):
        super(CompletionCommand, self).__init__(*args, **kw)

        opts = self.cmd_opts
        opts.add_option(
            '--bash', '-b',
            action='store_const',
            const='bash',
            dest='shell',
            help='Emit completion code for bash')
        opts.add_option(
            '--zsh', '-z',
            action='store_const',
            const='zsh',
            dest='shell',
            help='Emit completion code for zsh')

        self.parser.insert_option_group(0, opts)

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        if options.shell in COMPLETION_SCRIPTS:
            # Known shell: wrap its script in the start/end markers.
            print(BASE_COMPLETION % {
                'script': COMPLETION_SCRIPTS.get(options.shell, ''),
                'shell': options.shell,
            })
        else:
            # Unknown/missing shell: list the supported flags on stderr.
            supported = ['--' + shell for shell in sorted(COMPLETION_SCRIPTS)]
            sys.stderr.write(
                'ERROR: You must pass %s\n' % ' or '.join(supported)
            )
|
caotianwei/django | refs/heads/master | django/shortcuts.py | 129 | """
This module collects helper functions and classes that "span" multiple levels
of MVC. In other words, these functions/classes introduce controlled coupling
for convenience's sake.
"""
import warnings
from django.core import urlresolvers
from django.db.models.base import ModelBase
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
from django.http import (
Http404, HttpResponse, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.template import RequestContext, loader
from django.template.context import _current_app_undefined
from django.template.engine import (
_context_instance_undefined, _dictionary_undefined, _dirs_undefined,
)
from django.utils import six
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text
from django.utils.functional import Promise
def render_to_response(template_name, context=None,
                       context_instance=_context_instance_undefined,
                       content_type=None, status=None, dirs=_dirs_undefined,
                       dictionary=_dictionary_undefined, using=None):
    """
    Returns a HttpResponse whose content is filled with the result of calling
    django.template.loader.render_to_string() with the passed arguments.
    """
    # The sentinel defaults distinguish "argument not given" from an explicit
    # None, so deprecated keyword arguments can be detected reliably.
    if (context_instance is _context_instance_undefined
            and dirs is _dirs_undefined
            and dictionary is _dictionary_undefined):
        # No deprecated arguments were passed - use the new code path
        content = loader.render_to_string(template_name, context, using=using)
    else:
        # Some deprecated arguments were passed - use the legacy code path
        content = loader.render_to_string(
            template_name, context, context_instance, dirs, dictionary,
            using=using)
    return HttpResponse(content, content_type, status)
def render(request, template_name, context=None,
           context_instance=_context_instance_undefined,
           content_type=None, status=None, current_app=_current_app_undefined,
           dirs=_dirs_undefined, dictionary=_dictionary_undefined,
           using=None):
    """
    Returns a HttpResponse whose content is filled with the result of calling
    django.template.loader.render_to_string() with the passed arguments.
    Uses a RequestContext by default.
    """
    # Sentinel defaults let us tell "argument not given" apart from an
    # explicit None, so deprecated keyword arguments can be detected.
    if (context_instance is _context_instance_undefined
            and current_app is _current_app_undefined
            and dirs is _dirs_undefined
            and dictionary is _dictionary_undefined):
        # No deprecated arguments were passed - use the new code path
        # In Django 1.10, request should become a positional argument.
        content = loader.render_to_string(
            template_name, context, request=request, using=using)
    else:
        # Some deprecated arguments were passed - use the legacy code path
        if context_instance is not _context_instance_undefined:
            if current_app is not _current_app_undefined:
                # current_app must be set on the supplied context_instance,
                # not passed alongside it; reject the ambiguous combination.
                raise ValueError('If you provide a context_instance you must '
                                 'set its current_app before calling render()')
        else:
            context_instance = RequestContext(request)
            if current_app is not _current_app_undefined:
                warnings.warn(
                    "The current_app argument of render is deprecated. "
                    "Set the current_app attribute of request instead.",
                    RemovedInDjango110Warning, stacklevel=2)
                request.current_app = current_app
                # Directly set the private attribute to avoid triggering the
                # warning in RequestContext.__init__.
                context_instance._current_app = current_app
        content = loader.render_to_string(
            template_name, context, context_instance, dirs, dictionary,
            using=using)
    return HttpResponse(content, content_type, status)
def redirect(to, *args, **kwargs):
    """
    Returns an HttpResponseRedirect to the appropriate URL for the arguments
    passed.

    The arguments could be:

        * A model: the model's `get_absolute_url()` function will be called.

        * A view name, possibly with arguments: `urlresolvers.reverse()` will
          be used to reverse-resolve the name.

        * A URL, which will be used as-is for the redirect location.

    By default issues a temporary redirect; pass permanent=True to issue a
    permanent redirect
    """
    permanent = kwargs.pop('permanent', False)
    response_class = (HttpResponsePermanentRedirect if permanent
                      else HttpResponseRedirect)
    return response_class(resolve_url(to, *args, **kwargs))
def _get_queryset(klass):
    """
    Return a QuerySet derived from a Model, Manager, or QuerySet.

    Shared plumbing that keeps get_object_or_404 and get_list_or_404 DRY.
    Raises ValueError when ``klass`` is none of the supported types.
    """
    if isinstance(klass, QuerySet):
        return klass
    if isinstance(klass, Manager):
        return klass.all()
    if isinstance(klass, ModelBase):
        return klass._default_manager.all()
    # Build a readable type name for the error message: class name when a
    # class was passed, otherwise the instance's class name.
    if isinstance(klass, type):
        klass__name = klass.__name__
    else:
        klass__name = klass.__class__.__name__
    raise ValueError("Object is of type '%s', but must be a Django Model, "
                     "Manager, or QuerySet" % klass__name)
def get_object_or_404(klass, *args, **kwargs):
    """
    Call get() on the queryset derived from ``klass`` and return the match,
    raising Http404 when no object exists.

    ``klass`` may be a Model, Manager, or QuerySet; every other positional
    and keyword argument is forwarded to get().  As with get(), a
    MultipleObjectsReturned exception propagates when more than one object
    matches.
    """
    qs = _get_queryset(klass)
    try:
        return qs.get(*args, **kwargs)
    except qs.model.DoesNotExist:
        raise Http404('No %s matches the given query.' % qs.model._meta.object_name)
def get_list_or_404(klass, *args, **kwargs):
    """
    Return list(filter(...)) on the queryset derived from ``klass``,
    raising Http404 when the resulting list is empty.

    ``klass`` may be a Model, Manager, or QuerySet; every other positional
    and keyword argument is forwarded to filter().
    """
    qs = _get_queryset(klass)
    matches = list(qs.filter(*args, **kwargs))
    if matches:
        return matches
    raise Http404('No %s matches the given query.' % qs.model._meta.object_name)
def resolve_url(to, *args, **kwargs):
    """
    Return a URL appropriate for the arguments passed.

    ``to`` may be a model (its ``get_absolute_url()`` is used), a view name
    (reverse-resolved via ``urlresolvers.reverse()``), or a URL, which is
    returned as-is.
    """
    # Models (anything exposing get_absolute_url) win outright.
    if hasattr(to, 'get_absolute_url'):
        return to.get_absolute_url()
    # Force lazy translation/reverse proxies into a real string first, as
    # lazy objects confuse functions like urlparse further down the line.
    if isinstance(to, Promise):
        to = force_text(to)
    # Anything that is not a string at this point is assumed to be a URL.
    if not isinstance(to, six.string_types):
        return to
    # Explicitly relative URLs pass through untouched.
    if to.startswith(('./', '../')):
        return to
    try:
        return urlresolvers.reverse(to, args=args, kwargs=kwargs)
    except urlresolvers.NoReverseMatch:
        # A callable that failed to reverse is a programming error: re-raise.
        if callable(to):
            raise
        # Re-raise as well when the string doesn't "feel" like a URL.
        if '/' not in to and '.' not in to:
            raise
    # Finally, fall back and assume it's a URL.
    return to
|
aphexddb/agocontrol | refs/heads/master | devices/webcam/agowebcam.py | 2 | #!/usr/bin/python
# ago client webcam device
#
# copyright (c) 2013 Harald Klein <hari+ago@vt100.at>
#
import agoclient
import urllib2
import base64
client = agoclient.AgoConnection("webcam")
def messageHandler(internalid, content):
	"""Handle a command sent to a webcam device (Python 2 code).

	internalid is the camera URL, optionally carrying credentials in the
	form "proto://user:pass@host/path".  For the 'getvideoframe' command
	the frame is fetched via HTTP(S) and returned base64-encoded under
	result["image"]; result["result"] is 0 on success, -1 otherwise.
	"""
	result = {}
	result["result"] = -1;
	if "command" in content:
		if content['command'] == 'getvideoframe':
			print "getting video frame"
			try:
				# Split off the scheme, then peel any "user:pass@" prefix.
				protocol, urldata = internalid.split("://")
				if "@" in urldata:
					logindata, urlpart = urldata.split("@")
					username, password = logindata.split(":")
				else:
					urlpart = urldata
					username = ''
					password = ''
				# Rebuild the URL without the embedded credentials.
				url = protocol + "://" + urlpart
				if password != '' and username != '':
					# Install a global opener with HTTP basic auth for this URL.
					authmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
					authmgr.add_password(None, url, username, password)
					handler = urllib2.HTTPBasicAuthHandler(authmgr)
					opener = urllib2.build_opener(handler)
					urllib2.install_opener(opener)
					u = urllib2.urlopen(url)
				else:
					u = urllib2.urlopen(url)
				# Whole frame is read into memory and base64-encoded for the bus.
				buffer = u.read()
				result["image"] = base64.b64encode(buffer)
				result["result"] = 0;
			except urllib2.URLError, e:
				print ('Error opening URL %s' % (url) + ' - Reason: ' + e.reason)
	return result
# Register the command handler and announce every configured camera.
client.add_handler(messageHandler)
# Comma-separated camera URLs read from config section [webcam], key "devices".
devicelist=agoclient.get_config_option("webcam", "devices", "")
try:
	devices = map(str, devicelist.split(','))
except:
	print "error reading device list"
else:
	for device in devices:
		print "announcing device", device
		# RTSP sources are announced as ONVIF network video transmitters,
		# everything else as a plain camera.
		if "rtsp://" in device:
			client.add_device(device, "onvifnvt")
		else:
			client.add_device(device, "camera")
client.run()
|
ZhangXinNan/tensorflow | refs/heads/master | tensorflow/contrib/tensorrt/python/trt_convert.py | 3 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Exposes the Python wrapper conversion to trt_graph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long
import six as _six
from tensorflow.contrib.tensorrt.wrap_conversion import add_test_value
from tensorflow.contrib.tensorrt.wrap_conversion import calib_convert
from tensorflow.contrib.tensorrt.wrap_conversion import clear_test_values
from tensorflow.contrib.tensorrt.wrap_conversion import enable_test_value
from tensorflow.contrib.tensorrt.wrap_conversion import get_linked_tensorrt_version
from tensorflow.contrib.tensorrt.wrap_conversion import get_loaded_tensorrt_version
from tensorflow.contrib.tensorrt.wrap_conversion import get_test_value
from tensorflow.contrib.tensorrt.wrap_conversion import is_tensorrt_enabled
from tensorflow.core.framework import graph_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.framework import errors_impl as _impl
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.platform import tf_logging
from tensorflow.python.training import saver
# pylint: enable=unused-import,line-too-long
def create_inference_graph(input_graph_def,
                           outputs,
                           max_batch_size=1,
                           max_workspace_size_bytes=2 << 20,
                           precision_mode="FP32",
                           minimum_segment_size=3,
                           is_dynamic_op=False,
                           maximum_cached_engines=1,
                           cached_engine_batches=None):
  """Python wrapper for the TRT transformation.

  Args:
    input_graph_def: GraphDef object containing a model to be transformed.
    outputs: list of tensors or node names for the model outputs.
    max_batch_size: max size for the input batch
    max_workspace_size_bytes: parameter to control memory allocation (in Bytes)
    precision_mode: one of 'FP32', 'FP16' and 'INT8'
    minimum_segment_size: the minimum number of nodes required for a subgraph to
      be replaced by TRTEngineOp.
    is_dynamic_op: whether to generate dynamic TRT ops which will build the TRT
      network and engine at run time.
    maximum_cached_engines: max number of cached TRT engines in dynamic TRT ops.
    cached_engine_batches: batch sizes used to pre-create cached engines.

  Returns:
    New GraphDef with TRTEngineOps placed in graph replacing subgraphs.

  Raises:
    ValueError: if the provided precision mode is invalid.
    RuntimeError: if the TensorRT library loaded at runtime has a lower major
      version than the one TensorFlow was compiled against.
    TypeError: if cached_engine_batches is provided but is not a list.
  """
  supported_precision_modes = {"FP32": 0, "FP16": 1, "INT8": 2}
  if precision_mode.upper() not in supported_precision_modes:
    raise ValueError(("precision mode '{}' is not supported."
                      "It should be one of {}").format(
                          precision_mode, "{'FP32', 'FP16', 'INT8'}"))

  # Refuse to run when the loaded TensorRT runtime has a lower major version
  # than the one TensorFlow was compiled with; merely warn on any other
  # component mismatch.
  compiled_version = get_linked_tensorrt_version()
  loaded_version = get_loaded_tensorrt_version()
  version_mismatch = False
  if loaded_version[0] < compiled_version[0]:
    tf_logging.error(
        "TensorRT version mismatch. Tensorflow was compiled against " +
        "TensorRT %s but library loaded from environment is TensorRT %s" %
        (".".join([str(x) for x in compiled_version]),
         ".".join([str(x) for x in loaded_version])) +
        ". Please make sure that correct version of TensorRT " +
        "is available in the system and added to ldconfig or LD_LIBRARY_PATH")
    raise RuntimeError("Incompatible TensorRT library version")
  for i in zip(loaded_version, compiled_version):
    if i[0] != i[1]:
      tf_logging.warn("TensorRT mismatch. Compiled against version " +
                      "%s, but loaded %s. Things may not work" %
                      (".".join([str(x) for x in compiled_version]),
                       ".".join([str(x) for x in loaded_version])))
      version_mismatch = True
      break
  if not version_mismatch:
    tf_logging.info("Running against TensorRT version %s" % ".".join(
        [str(x) for x in loaded_version]))

  # Proto string fields need bytes; pick the identity on PY2 and a
  # surrogateescape-safe encoder on PY3.
  def py2bytes(inp):
    return inp

  def py3bytes(inp):
    return inp.encode("utf-8", errors="surrogateescape")

  if _six.PY2:
    to_bytes = py2bytes
  else:
    to_bytes = py3bytes

  # Create MetaGraphDef wrapping the input graph.
  graph = ops.Graph()
  with graph.as_default():
    importer.import_graph_def(input_graph_def, name="")
  meta_graph = saver.export_meta_graph(
      graph_def=graph.as_graph_def(), graph=graph)
  if outputs:
    output_collection = meta_graph_pb2.CollectionDef()
    output_list = output_collection.node_list.value
    for i in outputs:
      if isinstance(i, ops.Tensor):
        output_list.append(to_bytes(i.name))
      else:
        output_list.append(to_bytes(i))
    # Grappler reads the fetch nodes from the "train_op" collection.
    meta_graph.collection_def["train_op"].CopyFrom(output_collection)

  # Create RewriterConfig that runs constant folding and layout, then the
  # custom TensorRT optimizer with all user parameters.
  rewriter_cfg = rewriter_config_pb2.RewriterConfig()
  rewriter_cfg.optimizers.extend(["constfold", "layout"])
  optimizer = rewriter_cfg.custom_optimizers.add()
  optimizer.name = "TensorRTOptimizer"
  optimizer.parameter_map["minimum_segment_size"].i = minimum_segment_size
  optimizer.parameter_map["max_batch_size"].i = max_batch_size
  optimizer.parameter_map["is_dynamic_op"].b = is_dynamic_op
  optimizer.parameter_map[
      "max_workspace_size_bytes"].i = max_workspace_size_bytes
  optimizer.parameter_map["precision_mode"].s = to_bytes(precision_mode)
  optimizer.parameter_map["maximum_cached_engines"].i = maximum_cached_engines
  if cached_engine_batches:
    if not isinstance(cached_engine_batches, list):
      raise TypeError("cached_engine_batches should be a list.")
    optimizer.parameter_map["cached_engine_batches"].list.i.extend(
        cached_engine_batches)

  return tf_optimizer.OptimizeGraph(
      rewriter_cfg, meta_graph, graph_id=b"tf_graph")
def calib_graph_to_infer_graph(calibration_graph_def, is_dynamic_op=False):
  """Convert an existing calibration graph to inference graph.

  Args:
    calibration_graph_def: the calibration GraphDef object with calibration
      data.
    is_dynamic_op: whether to create dynamic static engines from calibration.

  Returns:
    New GraphDef with TRTEngineOps placed in graph replacing calibration
    nodes, or None when the input does not look like a calibration graph.

  Raises:
    RuntimeError: if the returned status message is malformed.
  """
  # Returned status is bytes on PY3; normalize to str for parsing below.
  if _six.PY2:
    to_string = lambda s: s
  else:
    to_string = lambda s: s.decode("utf-8")

  # A calibration graph must contain at least one TRTEngineOp whose
  # calibration_data attribute has not been filled in yet.
  found_calibration_node = False
  for node in calibration_graph_def.node:
    if node.op == "TRTEngineOp":
      if not node.attr["calibration_data"].s:
        found_calibration_node = True
  if not found_calibration_node:
    tf_logging.error(
        "Not a calib graph. Doesn't seem to contain any calibration nodes.")
    return None

  serialized_graph = calibration_graph_def.SerializeToString()
  conversion_result = calib_convert(serialized_graph, is_dynamic_op)
  status = to_string(conversion_result[0])
  converted_graph_string = conversion_result[1]
  del serialized_graph  # Save some memory
  if len(status) < 2:
    raise _impl.UnknownError(None, None, status)
  if status[:2] != "OK":
    # Status is "<code>;<msg...>" on failure; translate it into the matching
    # TensorFlow exception type.
    msg = status.split(";")
    if len(msg) == 1:
      raise RuntimeError("Status message is malformed {}".format(status))
    # pylint: disable=protected-access
    raise _impl._make_specific_exception(None, None, ";".join(msg[1:]),
                                         int(msg[0]))
    # pylint: enable=protected-access
  converted_graph = graph_pb2.GraphDef()
  converted_graph.ParseFromString(converted_graph_string)
  del converted_graph_string  # Save some memory
  return converted_graph
|
sandeepdsouza93/TensorFlow-15712 | refs/heads/master | tensorflow/python/framework/tensor_util_test.py | 6 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for tensor_util."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_state_ops
class TensorUtilTest(tf.test.TestCase):
  """Round-trip tests for tensor_util.make_tensor_proto / MakeNdarray.

  Each test builds a TensorProto from Python/NumPy values, checks its exact
  text-proto serialization, then (where enough values are present) converts
  back with MakeNdarray and verifies dtype and contents.
  """
  # --- float dtypes -------------------------------------------------------
  def testFloat(self):
    value = 10.0
    t = tensor_util.make_tensor_proto(value)
    self.assertProtoEquals("""
      dtype: DT_FLOAT
      tensor_shape {}
      float_val: %.1f
      """ % value, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.float32, a.dtype)
    self.assertAllClose(np.array(value, dtype=np.float32), a)
  def testFloatN(self):
    t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0])
    self.assertProtoEquals("""
      dtype: DT_FLOAT
      tensor_shape { dim { size: 3 } }
      tensor_content: "\000\000 A\000\000\240A\000\000\360A"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.float32, a.dtype)
    self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
  def testFloatTyped(self):
    t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], dtype=tf.float32)
    self.assertProtoEquals("""
      dtype: DT_FLOAT
      tensor_shape { dim { size: 3 } }
      tensor_content: "\000\000 A\000\000\240A\000\000\360A"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.float32, a.dtype)
    self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
  def testFloatTypeCoerce(self):
    # Integer inputs are coerced to the requested float dtype.
    t = tensor_util.make_tensor_proto([10, 20, 30], dtype=tf.float32)
    self.assertProtoEquals("""
      dtype: DT_FLOAT
      tensor_shape { dim { size: 3 } }
      tensor_content: "\000\000 A\000\000\240A\000\000\360A"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.float32, a.dtype)
    self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
  def testFloatTypeCoerceNdarray(self):
    arr = np.asarray([10, 20, 30], dtype="int")
    t = tensor_util.make_tensor_proto(arr, dtype=tf.float32)
    self.assertProtoEquals("""
      dtype: DT_FLOAT
      tensor_shape { dim { size: 3 } }
      tensor_content: "\000\000 A\000\000\240A\000\000\360A"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.float32, a.dtype)
    self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
  def testFloatSizes(self):
    t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], shape=[1, 3])
    self.assertProtoEquals("""
      dtype: DT_FLOAT
      tensor_shape { dim { size: 1 } dim { size: 3 } }
      tensor_content: "\000\000 A\000\000\240A\000\000\360A"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.float32, a.dtype)
    self.assertAllClose(np.array([[10.0, 20.0, 30.0]], dtype=np.float32), a)
  def testFloatSizes2(self):
    t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], shape=[3, 1])
    self.assertProtoEquals("""
      dtype: DT_FLOAT
      tensor_shape { dim { size: 3 } dim { size: 1 } }
      tensor_content: "\000\000 A\000\000\240A\000\000\360A"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.float32, a.dtype)
    self.assertAllClose(np.array([[10.0], [20.0], [30.0]], dtype=np.float32),
                        a)
  def testFloatSizesLessValues(self):
    # A single scalar with a larger shape stores one float_val.
    t = tensor_util.make_tensor_proto(10.0, shape=[1, 3])
    self.assertProtoEquals("""
      dtype: DT_FLOAT
      tensor_shape { dim { size: 1 } dim { size: 3 } }
      float_val: 10.0
      """, t)
    # No conversion to Ndarray for this one: not enough values.
  def testFloatNpArrayFloat64(self):
    t = tensor_util.make_tensor_proto(
        np.array([[10.0, 20.0, 30.0]], dtype=np.float64))
    self.assertProtoEquals("""
      dtype: DT_DOUBLE
      tensor_shape { dim { size: 1 } dim { size: 3 } }
      tensor_content: "\000\000\000\000\000\000$@\000\000\000\000\000\0004@\000\000\000\000\000\000>@"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.float64, a.dtype)
    self.assertAllClose(np.array([[10.0, 20.0, 30.0]], dtype=np.float64),
                        tensor_util.MakeNdarray(t))
  def testFloatTypesWithImplicitRepeat(self):
    # A single value with a larger shape fills the whole tensor.
    for dtype, nptype in [
        (tf.float32, np.float32), (tf.float64, np.float64)]:
      t = tensor_util.make_tensor_proto([10.0], shape=[3, 4], dtype=dtype)
      a = tensor_util.MakeNdarray(t)
      self.assertAllClose(np.array([[10.0, 10.0, 10.0, 10.0],
                                    [10.0, 10.0, 10.0, 10.0],
                                    [10.0, 10.0, 10.0, 10.0]], dtype=nptype), a)
  def testHalf(self):
    t = tensor_util.make_tensor_proto(np.array([10.0, 20.0], dtype=np.float16))
    self.assertProtoEquals("""
      dtype: DT_HALF
      tensor_shape {
        dim {
          size: 2
        }
      }
      half_val: 18688
      half_val: 19712
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.float16, a.dtype)
    self.assertAllClose(np.array([10.0, 20.0], dtype=np.float16), a)
  # --- integer dtypes -----------------------------------------------------
  def testInt(self):
    t = tensor_util.make_tensor_proto(10)
    self.assertProtoEquals("""
      dtype: DT_INT32
      tensor_shape {}
      int_val: 10
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.int32, a.dtype)
    self.assertAllClose(np.array(10, dtype=np.int32), a)
  def testLargeInt(self):
    value = np.iinfo(np.int64).max
    t = tensor_util.make_tensor_proto(value)
    self.assertProtoEquals("""
      dtype: DT_INT64
      tensor_shape {}
      int64_val: %d
      """ % value, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.int64, a.dtype)
    self.assertAllClose(np.array(value, dtype=np.int64), a)
  def testLargeNegativeInt(self):
    # We don't use the min np.int64 value here
    # because it breaks np.abs().
    #
    # np.iinfo(np.int64).min = -9223372036854775808
    # np.iinfo(np.int64).max = 9223372036854775807
    # np.abs(-9223372036854775808) = -9223372036854775808
    value = np.iinfo(np.int64).min + 1
    t = tensor_util.make_tensor_proto(value)
    self.assertProtoEquals("""
      dtype: DT_INT64
      tensor_shape {}
      int64_val: %d
      """ % value, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.int64, a.dtype)
    self.assertAllClose(np.array(value, dtype=np.int64), a)
  def testIntNDefaultType(self):
    t = tensor_util.make_tensor_proto([10, 20, 30, 40], shape=[2, 2])
    self.assertProtoEquals("""
      dtype: DT_INT32
      tensor_shape { dim { size: 2 } dim { size: 2 } }
      tensor_content: "\\n\000\000\000\024\000\000\000\036\000\000\000(\000\000\000"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.int32, a.dtype)
    self.assertAllClose(np.array([[10, 20], [30, 40]], dtype=np.int32), a)
  def testIntTypes(self):
    for dtype, nptype in [
        (tf.int32, np.int32),
        (tf.uint8, np.uint8),
        (tf.uint16, np.uint16),
        (tf.int16, np.int16),
        (tf.int8, np.int8)]:
      # Test with array.
      t = tensor_util.make_tensor_proto([10, 20, 30], dtype=dtype)
      self.assertEquals(dtype, t.dtype)
      self.assertProtoEquals("dim { size: 3 }", t.tensor_shape)
      a = tensor_util.MakeNdarray(t)
      self.assertEquals(nptype, a.dtype)
      self.assertAllClose(np.array([10, 20, 30], dtype=nptype), a)
      # Test with ndarray.
      t = tensor_util.make_tensor_proto(np.array([10, 20, 30], dtype=nptype))
      self.assertEquals(dtype, t.dtype)
      self.assertProtoEquals("dim { size: 3 }", t.tensor_shape)
      a = tensor_util.MakeNdarray(t)
      self.assertEquals(nptype, a.dtype)
      self.assertAllClose(np.array([10, 20, 30], dtype=nptype), a)
  def testIntTypesWithImplicitRepeat(self):
    for dtype, nptype in [
        (tf.int64, np.int64),
        (tf.int32, np.int32),
        (tf.uint8, np.uint8),
        (tf.uint16, np.uint16),
        (tf.int16, np.int16),
        (tf.int8, np.int8)]:
      t = tensor_util.make_tensor_proto([10], shape=[3, 4], dtype=dtype)
      a = tensor_util.MakeNdarray(t)
      self.assertAllEqual(np.array([[10, 10, 10, 10],
                                    [10, 10, 10, 10],
                                    [10, 10, 10, 10]], dtype=nptype), a)
  def testLong(self):
    t = tensor_util.make_tensor_proto(10, dtype=tf.int64)
    self.assertProtoEquals("""
      dtype: DT_INT64
      tensor_shape {}
      int64_val: 10
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.int64, a.dtype)
    self.assertAllClose(np.array(10, dtype=np.int64), a)
  def testLongN(self):
    t = tensor_util.make_tensor_proto([10, 20, 30], shape=[1, 3],
                                      dtype=tf.int64)
    self.assertProtoEquals("""
      dtype: DT_INT64
      tensor_shape { dim { size: 1 } dim { size: 3 } }
      tensor_content: "\\n\000\000\000\000\000\000\000\024\000\000\000\000\000\000\000\036\000\000\000\000\000\000\000"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.int64, a.dtype)
    self.assertAllClose(np.array([[10, 20, 30]], dtype=np.int64), a)
  def testLongNpArray(self):
    t = tensor_util.make_tensor_proto(np.array([10, 20, 30]))
    self.assertProtoEquals("""
      dtype: DT_INT64
      tensor_shape { dim { size: 3 } }
      tensor_content: "\\n\000\000\000\000\000\000\000\024\000\000\000\000\000\000\000\036\000\000\000\000\000\000\000"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.int64, a.dtype)
    self.assertAllClose(np.array([10, 20, 30], dtype=np.int64), a)
  # --- quantized dtypes ---------------------------------------------------
  def testQuantizedTypes(self):
    # Test with array.
    data = [(21,), (22,), (23,)]
    t = tensor_util.make_tensor_proto(data, dtype=tf.qint32)
    self.assertProtoEquals("""
      dtype: DT_QINT32
      tensor_shape { dim { size: 3 } }
      tensor_content: "\025\000\000\000\026\000\000\000\027\000\000\000"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(tf.qint32.as_numpy_dtype, a.dtype)
    self.assertAllEqual(np.array(data, dtype=a.dtype), a)
    t = tensor_util.make_tensor_proto(data, dtype=tf.quint8)
    self.assertProtoEquals("""
      dtype: DT_QUINT8
      tensor_shape { dim { size: 3 } }
      tensor_content: "\025\026\027"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(tf.quint8.as_numpy_dtype, a.dtype)
    self.assertAllEqual(np.array(data, dtype=a.dtype), a)
    t = tensor_util.make_tensor_proto(data, dtype=tf.qint8)
    self.assertProtoEquals("""
      dtype: DT_QINT8
      tensor_shape { dim { size: 3 } }
      tensor_content: "\025\026\027"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(tf.qint8.as_numpy_dtype, a.dtype)
    self.assertAllEqual(np.array(data, dtype=a.dtype), a)
    t = tensor_util.make_tensor_proto(data, dtype=tf.quint16)
    self.assertProtoEquals("""
      dtype: DT_QUINT16
      tensor_shape { dim { size: 3 } }
      tensor_content: "\025\000\026\000\027\000"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(tf.quint16.as_numpy_dtype, a.dtype)
    self.assertAllEqual(np.array(data, dtype=a.dtype), a)
    t = tensor_util.make_tensor_proto(data, dtype=tf.qint16)
    self.assertProtoEquals("""
      dtype: DT_QINT16
      tensor_shape { dim { size: 3 } }
      tensor_content: "\025\000\026\000\027\000"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(tf.qint16.as_numpy_dtype, a.dtype)
    self.assertAllEqual(np.array(data, dtype=a.dtype), a)
  # --- string dtype (stored as np.object arrays of bytes) -----------------
  def testString(self):
    t = tensor_util.make_tensor_proto("foo")
    self.assertProtoEquals("""
      dtype: DT_STRING
      tensor_shape {}
      string_val: "foo"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.object, a.dtype)
    self.assertEquals([b"foo"], a)
  def testStringWithImplicitRepeat(self):
    t = tensor_util.make_tensor_proto("f", shape=[3, 4])
    a = tensor_util.MakeNdarray(t)
    self.assertAllEqual(np.array([[b"f"] * 4] * 3, dtype=np.object), a)
  def testStringN(self):
    t = tensor_util.make_tensor_proto([b"foo", b"bar", b"baz"], shape=[1, 3])
    self.assertProtoEquals("""
      dtype: DT_STRING
      tensor_shape { dim { size: 1 } dim { size: 3 } }
      string_val: "foo"
      string_val: "bar"
      string_val: "baz"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.object, a.dtype)
    self.assertAllEqual(np.array([[b"foo", b"bar", b"baz"]]), a)
  def testStringNpArray(self):
    t = tensor_util.make_tensor_proto(np.array([[b"a", b"ab"],
                                                [b"abc", b"abcd"]]))
    self.assertProtoEquals("""
      dtype: DT_STRING
      tensor_shape { dim { size: 2 } dim { size: 2 } }
      string_val: "a"
      string_val: "ab"
      string_val: "abc"
      string_val: "abcd"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.object, a.dtype)
    self.assertAllEqual(np.array([[b"a", b"ab"], [b"abc", b"abcd"]]), a)
  def testStringTuple(self):
    t = tensor_util.make_tensor_proto((b"a", b"ab", b"abc", b"abcd"))
    self.assertProtoEquals("""
      dtype: DT_STRING
      tensor_shape { dim { size: 4 } }
      string_val: "a"
      string_val: "ab"
      string_val: "abc"
      string_val: "abcd"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.object, a.dtype)
    self.assertAllEqual(np.array((b"a", b"ab", b"abc", b"abcd")), a)
  def testStringNestedTuple(self):
    t = tensor_util.make_tensor_proto(((b"a", b"ab"), (b"abc", b"abcd")))
    self.assertProtoEquals("""
      dtype: DT_STRING
      tensor_shape { dim { size: 2 } dim { size: 2 } }
      string_val: "a"
      string_val: "ab"
      string_val: "abc"
      string_val: "abcd"
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.object, a.dtype)
    self.assertAllEqual(np.array(((b"a", b"ab"), (b"abc", b"abcd"))), a)
  # --- complex dtypes -----------------------------------------------------
  def testComplex64(self):
    t = tensor_util.make_tensor_proto((1+2j), dtype=tf.complex64)
    self.assertProtoEquals("""
      dtype: DT_COMPLEX64
      tensor_shape {}
      scomplex_val: 1
      scomplex_val: 2
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.complex64, a.dtype)
    self.assertAllEqual(np.array(1 + 2j), a)
  def testComplex128(self):
    t = tensor_util.make_tensor_proto((1+2j), dtype=tf.complex128)
    self.assertProtoEquals("""
      dtype: DT_COMPLEX128
      tensor_shape {}
      dcomplex_val: 1
      dcomplex_val: 2
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.complex128, a.dtype)
    self.assertAllEqual(np.array(1 + 2j), a)
  def testComplexWithImplicitRepeat(self):
    for dtype, np_dtype in [(tf.complex64, np.complex64),
                            (tf.complex128, np.complex128)]:
      t = tensor_util.make_tensor_proto((1+1j), shape=[3, 4],
                                        dtype=dtype)
      a = tensor_util.MakeNdarray(t)
      self.assertAllClose(np.array([[(1+1j), (1+1j), (1+1j), (1+1j)],
                                    [(1+1j), (1+1j), (1+1j), (1+1j)],
                                    [(1+1j), (1+1j), (1+1j), (1+1j)]],
                                   dtype=np_dtype), a)
  def testComplex64N(self):
    t = tensor_util.make_tensor_proto([(1+2j), (3+4j), (5+6j)], shape=[1, 3],
                                      dtype=tf.complex64)
    self.assertProtoEquals("""
      dtype: DT_COMPLEX64
      tensor_shape { dim { size: 1 } dim { size: 3 } }
      scomplex_val: 1
      scomplex_val: 2
      scomplex_val: 3
      scomplex_val: 4
      scomplex_val: 5
      scomplex_val: 6
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.complex64, a.dtype)
    self.assertAllEqual(np.array([[(1+2j), (3+4j), (5+6j)]]), a)
  def testComplex128N(self):
    t = tensor_util.make_tensor_proto([(1+2j), (3+4j), (5+6j)], shape=[1, 3],
                                      dtype=tf.complex128)
    self.assertProtoEquals("""
      dtype: DT_COMPLEX128
      tensor_shape { dim { size: 1 } dim { size: 3 } }
      dcomplex_val: 1
      dcomplex_val: 2
      dcomplex_val: 3
      dcomplex_val: 4
      dcomplex_val: 5
      dcomplex_val: 6
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.complex128, a.dtype)
    self.assertAllEqual(np.array([[(1+2j), (3+4j), (5+6j)]]), a)
  def testComplex64NpArray(self):
    t = tensor_util.make_tensor_proto(
        np.array([[(1+2j), (3+4j)], [(5+6j), (7+8j)]]), dtype=tf.complex64)
    # scomplex_val are real_0, imag_0, real_1, imag_1, ...
    self.assertProtoEquals("""
      dtype: DT_COMPLEX64
      tensor_shape { dim { size: 2 } dim { size: 2 } }
      scomplex_val: 1
      scomplex_val: 2
      scomplex_val: 3
      scomplex_val: 4
      scomplex_val: 5
      scomplex_val: 6
      scomplex_val: 7
      scomplex_val: 8
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.complex64, a.dtype)
    self.assertAllEqual(np.array([[(1+2j), (3+4j)], [(5+6j), (7+8j)]]), a)
  def testComplex128NpArray(self):
    t = tensor_util.make_tensor_proto(
        np.array([[(1+2j), (3+4j)], [(5+6j), (7+8j)]]), dtype=tf.complex128)
    # dcomplex_val are real_0, imag_0, real_1, imag_1, ...
    self.assertProtoEquals("""
      dtype: DT_COMPLEX128
      tensor_shape { dim { size: 2 } dim { size: 2 } }
      dcomplex_val: 1
      dcomplex_val: 2
      dcomplex_val: 3
      dcomplex_val: 4
      dcomplex_val: 5
      dcomplex_val: 6
      dcomplex_val: 7
      dcomplex_val: 8
      """, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEquals(np.complex128, a.dtype)
    self.assertAllEqual(np.array([[(1+2j), (3+4j)], [(5+6j), (7+8j)]]), a)
  # --- error and shape handling -------------------------------------------
  def testUnsupportedDTypes(self):
    with self.assertRaises(TypeError):
      tensor_util.make_tensor_proto(np.array([1]), 0)
    with self.assertRaises(TypeError):
      tensor_util.make_tensor_proto(3, dtype=tf.qint8)
    with self.assertRaises(TypeError):
      tensor_util.make_tensor_proto([3], dtype=tf.qint8)
  def testTensorShapeVerification(self):
    array = np.array([[1], [2]])
    correct_shape = (2, 1)
    incorrect_shape = (1, 2)
    tensor_util.make_tensor_proto(array, shape=correct_shape,
                                  verify_shape=True)
    with self.assertRaises(TypeError):
      tensor_util.make_tensor_proto(array, shape=incorrect_shape,
                                    verify_shape=True)
  def testShapeTooLarge(self):
    with self.assertRaises(ValueError):
      tensor_util.make_tensor_proto(np.array([1, 2]), shape=[1])
  def testLowRankSupported(self):
    t = tensor_util.make_tensor_proto(np.array(7))
    self.assertProtoEquals("""
      dtype: DT_INT64
      tensor_shape {}
      int64_val: 7
      """, t)
  def testShapeEquals(self):
    t = tensor_util.make_tensor_proto([10, 20, 30, 40], shape=[2, 2])
    self.assertTrue(tensor_util.ShapeEquals(t, [2, 2]))
    self.assertTrue(tensor_util.ShapeEquals(t, (2, 2)))
    self.assertTrue(tensor_util.ShapeEquals(
        t, tensor_shape.as_shape([2, 2]).as_proto()))
    self.assertFalse(tensor_util.ShapeEquals(t, [5, 3]))
    self.assertFalse(tensor_util.ShapeEquals(t, [1, 4]))
    self.assertFalse(tensor_util.ShapeEquals(t, [4]))
class ConstantValueTest(tf.test.TestCase):
  """Tests for tf.contrib.util.constant_value.

  constant_value extracts the statically-known value of a tensor as an
  ndarray, or returns None when the value cannot be determined (e.g. when
  the graph contains variables or placeholders).
  """
  def testConstant(self):
    np_val = np.random.rand(3, 4, 7).astype(np.float32)
    tf_val = tf.constant(np_val)
    self.assertAllClose(np_val, tf.contrib.util.constant_value(tf_val))
    # Zero-sized dimension is still statically known.
    np_val = np.random.rand(3, 0, 7).astype(np.float32)
    tf_val = tf.constant(np_val)
    self.assertAllClose(np_val, tf.contrib.util.constant_value(tf_val))
  def testUnknown(self):
    # A variable's value is not statically known.
    tf_val = gen_state_ops._variable(shape=[3, 4, 7], dtype=tf.float32,
                                     name="tf_val", container="", shared_name="")
    self.assertIs(None, tf.contrib.util.constant_value(tf_val))
  def testShape(self):
    np_val = np.array([1, 2, 3], dtype=np.int32)
    tf_val = tf.shape(tf.constant(0.0, shape=[1, 2, 3]))
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertAllEqual(np_val, c_val)
    self.assertEqual(np.int32, c_val.dtype)
  def testSize(self):
    tf_val = tf.size(tf.constant(0.0, shape=[1, 2, 3]))
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertEqual(6, c_val)
  def testSizeOfScalar(self):
    tf_val = tf.size(tf.constant(0.0))
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertEqual(1, c_val)
    self.assertEqual(np.ndarray, type(c_val))
  def testRank(self):
    tf_val = tf.rank(tf.constant(0.0, shape=[1, 2, 3]))
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertEqual(np.ndarray, type(c_val))
    self.assertEqual((), c_val.shape)
    self.assertEqual(3, c_val)
    # Repeat test using array_ops.rank_internal to avoid the optimization that
    # happens in the rank function.
    tf_val = array_ops.rank_internal(tf.constant(0.0, shape=[1, 2, 3]),
                                     optimize=False)
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertEqual(np.ndarray, type(c_val))
    self.assertEqual((), c_val.shape)
    self.assertEqual(3, c_val)
    self.assertEqual([3], c_val)
  def testCast(self):
    np_val = np.random.rand(3, 4, 7).astype(np.float32)
    tf_val = tf.cast(tf.constant(np_val), tf.float64)
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertAllClose(np_val.astype(np.float64), c_val)
    np_val = np.random.rand(3, 0, 7).astype(np.float32)
    tf_val = tf.cast(tf.constant(np_val), tf.float64)
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertAllClose(np_val.astype(np.float64), c_val)
  def testConcat(self):
    np_val = np.random.rand(3, 4, 7).astype(np.float32)
    tf_val = tf.concat(
        0, [np_val[0:1, :, :], np_val[1:2, :, :], np_val[2:3, :, :]])
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertAllClose(np_val, c_val)
    # Unknown concat axis -> value cannot be determined.
    tf_val = tf.concat(
        tf.placeholder(tf.int32),
        [np_val[0, :, :], np_val[1, :, :], np_val[2, :, :]])
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertIs(None, c_val)
    # One unknown input -> value cannot be determined.
    tf_val = tf.concat(
        1,
        [np_val[0, :, :], tf.placeholder(tf.float32),
         np_val[2, :, :]])
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertIs(None, c_val)
  def testPack(self):
    inputs = [np.random.rand(4, 7) for _ in range(3)]
    np_val = np.array(inputs)
    tf_val = tf.stack(inputs)
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertAllClose(np_val, c_val)
    # One unknown input -> value cannot be determined.
    tf_val = tf.stack([inputs[0], tf.placeholder(tf.float32), inputs[2]])
    c_val = tf.contrib.util.constant_value(tf_val)
    self.assertIs(None, c_val)
class ConstantValueAsShapeTest(tf.test.TestCase):
  """Checks constant_value_as_shape on shape-producing graph ops."""

  def testConstant(self):
    # A constant int vector maps directly onto a fully-known TensorShape.
    shape_vec = np.random.rand(3).astype(np.int32)
    self.assertEqual(tf.TensorShape(shape_vec),
                     tensor_util.constant_value_as_shape(
                         tf.constant(shape_vec)))

    empty = tf.constant([], dtype=tf.int32)
    self.assertEqual(tf.TensorShape([]),
                     tensor_util.constant_value_as_shape(empty))

  def testShape(self):
    shape_tensor = tf.shape(tf.constant(0.0, shape=[1, 2, 3]))
    self.assertEqual(tf.TensorShape([1, 2, 3]),
                     tensor_util.constant_value_as_shape(shape_tensor))

  def testPack(self):
    # Placeholder entries in the stack become unknown (None) dimensions.
    packed = tf.stack([tf.constant(16), 37, tf.placeholder(tf.int32)])
    self.assertEqual([16, 37, None],
                     tensor_util.constant_value_as_shape(packed).as_list())

  def testConcat(self):
    # Unknown-but-sized placeholder segments contribute None dimensions.
    joined = tf.concat(0, [[16, 37], tf.placeholder(tf.int32, shape=(2,))])
    self.assertEqual([16, 37, None, None],
                     tensor_util.constant_value_as_shape(joined).as_list())

    joined = tf.concat(
        0, [[16, 37], tf.placeholder(tf.int32, shape=(1,)), [48]])
    self.assertEqual([16, 37, None, 48],
                     tensor_util.constant_value_as_shape(joined).as_list())
# Run every test case in this module under the TensorFlow test runner.
if __name__ == "__main__":
  tf.test.main()
|
anastue/netforce | refs/heads/stable-3.1 | netforce/netforce/layout.py | 2 | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from .model import get_model
from . import template
import os
from lxml import etree
import json
from . import module
import shutil
import pkg_resources
import py_compile
import sys
_xml_layouts = {}
def load_xml_layouts():
    """Scan every loaded module for layouts/*.xml and cache them in _xml_layouts.

    Each cached entry maps the layout name (file name minus the ".xml"
    extension) to a dict with the owning module, the layout type (the XML
    root tag, lower-cased), optional model/inherit/priority attributes from
    the root element, and the raw XML source text.
    """
    print("loading layouts...")
    _xml_layouts.clear()
    loaded_modules = module.get_loaded_modules()
    for m in loaded_modules:
        if not pkg_resources.resource_exists(m, "layouts"):
            continue
        for fname in pkg_resources.resource_listdir(m, "layouts"):
            # Require a real ".xml" extension; the previous endswith("xml")
            # test also matched names like "fooxml".
            if not fname.endswith(".xml"):
                continue
            data = pkg_resources.resource_string(m, "layouts/" + fname)
            try:
                root = etree.fromstring(data)
                vals = {
                    "module": m,
                }
                # Strip only the trailing extension (replace() would also
                # remove an interior ".xml" substring).
                vals["name"] = fname[:-len(".xml")]
                vals["type"] = root.tag.lower()
                if root.attrib.get("model"):
                    vals["model"] = root.attrib["model"]
                if root.attrib.get("inherit"):
                    vals["inherit"] = root.attrib["inherit"]
                if root.attrib.get("priority"):
                    vals["priority"] = int(root.attrib["priority"])
                vals["layout"] = data.decode()
                _xml_layouts[vals["name"]] = vals
            except Exception as e:
                # Keep loading the remaining layouts; report the broken one.
                print("ERROR: Failed to load XML layout: %s/%s (%s)" % (m, fname, e))
    print(" %d layouts loaded" % len(_xml_layouts))
def layouts_to_json(modules=None):
    """Return the cached layouts, optionally restricted to *modules*.

    With modules=None the shared registry itself is returned; otherwise a
    new dict containing only layouts owned by one of the given modules.
    """
    if modules is None:
        return _xml_layouts
    selected = {}
    for name, vals in _xml_layouts.items():
        if vals["module"] in modules:
            selected[name] = vals
    return selected
|
niharathomas/mongodb-tornado-angular | refs/heads/master | utils/generator/create_app.py | 2 | #!/usr/bin/python
import sys, getopt
import os
from templates.handler import create_handler
from templates.controller import create_controller
from templates.service import create_service
from templates.view import create_view
def camel_case(word):
    """Convert snake_case *word* to CamelCase.

    Empty segments (from leading/trailing/double underscores, or an empty
    word) are rendered as a literal '_' placeholder.
    """
    pieces = word.split('_')
    return ''.join(piece.capitalize() or '_' for piece in pieces)
print(camel_case('some_module'))
def main(argv):
    """Generate handler/controller/service/view scaffolding for a module.

    argv[0] is the snake_case module name; a directory of that name is
    created in the current working directory and one file per artifact is
    written into it.

    Raises IndexError when no module name was given, and OSError when the
    target directory already exists.
    """
    module_name = argv[0]
    os.mkdir(module_name)
    class_name = camel_case(module_name)
    print("module :{}, {}".format(class_name, module_name))

    # (renderer, output-path template) for each generated artifact.
    artifacts = [
        (create_handler, '{}/{}_handler.py'),
        (create_controller, '{}/{}_controller.js'),
        (create_service, '{}/{}_service.js'),
        (create_view, '{}/{}.html'),
    ]
    for render, path_fmt in artifacts:
        # "with" guarantees the file is closed even if a write fails,
        # unlike the previous manual open()/close() sequence.
        with open(path_fmt.format(module_name, module_name), 'w') as out:
            out.write(render(class_name, module_name))
# Script entry point: pass the CLI args (minus the program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
anshul313/chibleetestbackend | refs/heads/master | node_modules/node-gyp/gyp/PRESUBMIT.py | 1369 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for GYP.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
# Files excluded from pylint entirely.
PYLINT_BLACKLIST = [
    # TODO: fix me.
    # From SCons, not done in google style.
    'test/lib/TestCmd.py',
    'test/lib/TestCommon.py',
    'test/lib/TestGyp.py',
]


# Pylint warning/error codes suppressed repo-wide; each group below carries
# the reason it is (temporarily) tolerated.
PYLINT_DISABLED_WARNINGS = [
    # TODO: fix me.
    # Many tests include modules they don't use.
    'W0611',
    # Possible unbalanced tuple unpacking with sequence.
    'W0632',
    # Attempting to unpack a non-sequence.
    'W0633',
    # Include order doesn't properly include local files?
    'F0401',
    # Some use of built-in names.
    'W0622',
    # Some unused variables.
    'W0612',
    # Operator not preceded/followed by space.
    'C0323',
    'C0322',
    # Unnecessary semicolon.
    'W0301',
    # Unused argument.
    'W0613',
    # String has no effect (docstring in wrong place).
    'W0105',
    # map/filter on lambda could be replaced by comprehension.
    'W0110',
    # Use of eval.
    'W0123',
    # Comma not followed by space.
    'C0324',
    # Access to a protected member.
    'W0212',
    # Bad indent.
    'W0311',
    # Line too long.
    'C0301',
    # Undefined variable.
    'E0602',
    # Not exception type specified.
    'W0702',
    # No member of that name.
    'E1101',
    # Dangerous default {}.
    'W0102',
    # Cyclic import.
    'R0401',
    # Others, too many to sort.
    'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
    'R0201', 'E0101', 'C0321',
    # ************* Module copy
    # W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect
    'W0104',
]
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook at upload time: run only the standard project checks."""
  return list(input_api.canned_checks.PanProjectChecks(
      input_api, output_api))
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook at commit time: license header, tree status and pylint."""
  report = []

  # Accept any year number from 2009 to the current year.
  current_year = int(input_api.time.strftime('%Y'))
  allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1)))
  years_re = '(' + '|'.join(allowed_years) + ')'

  # The (c) is deprecated, but tolerate it until it's removed from all files.
  license = (
      r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
      r'.*? Use of this source code is governed by a BSD-style license that '
        r'can be\n'
      r'.*? found in the LICENSE file\.\n'
  ) % {
      'year': years_re,
  }

  report.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api, license_header=license))
  report.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api, output_api,
      'http://gyp-status.appspot.com/status',
      'http://gyp-status.appspot.com/current'))

  # Imported locally (and sys.path patched) only for the pylint run below.
  import os
  import sys
  old_sys_path = sys.path
  try:
    sys.path = ['pylib', 'test/lib'] + sys.path
    blacklist = PYLINT_BLACKLIST
    if sys.platform == 'win32':
      # Escape backslashes so the paths survive pylint's regex matching.
      blacklist = [os.path.normpath(x).replace('\\', '\\\\')
                   for x in PYLINT_BLACKLIST]
    report.extend(input_api.canned_checks.RunPylint(
        input_api,
        output_api,
        black_list=blacklist,
        disabled_warnings=PYLINT_DISABLED_WARNINGS))
  finally:
    # Always restore sys.path, even if pylint raises.
    sys.path = old_sys_path

  return report
# Trybot names, one per supported platform.
TRYBOTS = [
  'linux_try',
  'mac_try',
  'win_try',
]
def GetPreferredTryMasters(_, change):
  """Route the client.gyp try master to 'defaulttests' on every trybot."""
  tests_by_bot = {}
  for bot in TRYBOTS:
    tests_by_bot[bot] = set(['defaulttests'])
  return {'client.gyp': tests_by_bot}
|
t0mm13b/CAF-Zte-Blade-Android-MSM-2.6.35 | refs/heads/master | arch/ia64/scripts/unwcheck.py | 13143 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Exactly one argument is required: the object file whose unwind info to check.
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)

# readelf binary to invoke (overridable via the READELF environment variable).
readelf = os.getenv("READELF", "readelf")

# Patterns for the two "readelf -u" output lines we care about: a function
# header with its address range, and a per-region "rlen=N" length line.
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    """Report an error if *func*'s region lengths don't sum to its slot count."""
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        # Fall back to the raw address range when the symbol name is empty.
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
# Running totals across the whole object file.
num_funcs = 0
num_errors = 0
# State for the function currently being scanned.
func = False
slots = 0
rlen_sum = 0
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        # New function header: validate the previously scanned function first.
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # 3 instruction slots per 16-byte bundle (this script lives in
        # arch/ia64).
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Validate the final function in the file.
check_func(func, slots, rlen_sum)

if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
|
srvg/ansible | refs/heads/devel | test/integration/targets/script/files/no_shebang.py | 35 | from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
sys.stdout.write("Script with shebang omitted")
|
arpith/zulip | refs/heads/master | zerver/lib/url_preview/parsers/generic.py | 33 | from __future__ import absolute_import
from typing import Any, Dict
from zerver.lib.url_preview.parsers.base import BaseParser
class GenericParser(BaseParser):
    """Fallback URL-preview parser.

    Derives a title, description and image from common HTML structure
    (title/h1/meta/p/img tags) when no richer metadata is present.
    """

    def extract_data(self):
        # type: () -> Dict
        return {
            'title': self._get_title(),
            'description': self._get_description(),
            'image': self._get_image()}

    def _get_title(self):
        # type: () -> Any
        document = self._soup
        # Prefer <title>, then fall back to the first <h1>.
        for candidate in (document.title, document.h1):
            if candidate and candidate.text != '':
                return candidate.text
        return None

    def _get_description(self):
        # type: () -> Any
        document = self._soup
        meta = document.find('meta', attrs={'name': 'description'})
        if meta and meta['content'] != '':
            return meta['content']
        heading = document.find('h1')
        if heading:
            paragraph = heading.find_next('p')
            if paragraph and paragraph.string != '':
                return paragraph.text
        paragraph = document.find('p')
        if paragraph and paragraph.string != '':
            return paragraph.string
        return None

    def _get_image(self):
        # type: () -> Any
        """
        Finding a first image after the h1 header.
        Presumably it will be the main image.
        """
        heading = self._soup.find('h1')
        if not heading:
            return None
        image = heading.find_next_sibling('img')
        if image and image['src'] != '':
            return image['src']
        return None
|
tinkerinestudio/Tinkerine-Suite | refs/heads/master | TinkerineSuite/pypy/lib-python/2.7/nturl2path.py | 228 | """Convert a NT pathname to a file URL and vice versa."""
def url2pathname(url):
"""OS-specific conversion from a relative URL of the 'file' scheme
to a file system path; not recommended for general use."""
# e.g.
# ///C|/foo/bar/spam.foo
# becomes
# C:\foo\bar\spam.foo
import string, urllib
# Windows itself uses ":" even in URLs.
url = url.replace(':', '|')
if not '|' in url:
# No drive specifier, just convert slashes
if url[:4] == '////':
# path is something like ////host/path/on/remote/host
# convert this to \\host\path\on\remote\host
# (notice halving of slashes at the start of the path)
url = url[2:]
components = url.split('/')
# make sure not to convert quoted slashes :-)
return urllib.unquote('\\'.join(components))
comp = url.split('|')
if len(comp) != 2 or comp[0][-1] not in string.ascii_letters:
error = 'Bad URL: ' + url
raise IOError, error
drive = comp[0][-1].upper()
path = drive + ':'
components = comp[1].split('/')
for comp in components:
if comp:
path = path + '\\' + urllib.unquote(comp)
# Issue #11474: url like '/C|/' should convert into 'C:\\'
if path.endswith(':') and url.endswith('/'):
path += '\\'
return path
def pathname2url(p):
"""OS-specific conversion from a file system path to a relative URL
of the 'file' scheme; not recommended for general use."""
# e.g.
# C:\foo\bar\spam.foo
# becomes
# ///C|/foo/bar/spam.foo
import urllib
if not ':' in p:
# No drive specifier, just convert slashes and quote the name
if p[:2] == '\\\\':
# path is something like \\host\path\on\remote\host
# convert this to ////host/path/on/remote/host
# (notice doubling of slashes at the start of the path)
p = '\\\\' + p
components = p.split('\\')
return urllib.quote('/'.join(components))
comp = p.split(':')
if len(comp) != 2 or len(comp[0]) > 1:
error = 'Bad path: ' + p
raise IOError, error
drive = urllib.quote(comp[0].upper())
components = comp[1].split('\\')
path = '///' + drive + ':'
for comp in components:
if comp:
path = path + '/' + urllib.quote(comp)
return path
|
neerja28/Tempest | refs/heads/master | tempest/api/compute/admin/test_live_migration.py | 3 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from tempest.api.compute import base
from tempest import config
from tempest import test
CONF = config.CONF
class LiveBlockMigrationTestJSON(base.BaseV2ComputeAdminTest):
    """Admin tests for live (block) migration of servers between hosts.

    All tests require at least two compute nodes and skip themselves
    otherwise.
    """

    # Key under which the compute API reports a server's hypervisor host.
    _host_key = 'OS-EXT-SRV-ATTR:host'

    @classmethod
    def setup_clients(cls):
        # Admin clients are needed: host listing and live-migration are
        # admin-only compute APIs.
        super(LiveBlockMigrationTestJSON, cls).setup_clients()
        cls.admin_hosts_client = cls.os_adm.hosts_client
        cls.admin_servers_client = cls.os_adm.servers_client

    @classmethod
    def resource_setup(cls):
        super(LiveBlockMigrationTestJSON, cls).resource_setup()
        # Servers created lazily by _get_an_active_server and reused between
        # tests of this class.
        cls.created_server_ids = []

    def _get_compute_hostnames(self):
        # Host names of every compute service in the deployment.
        body = self.admin_hosts_client.list_hosts()
        return [
            host_record['host_name']
            for host_record in body
            if host_record['service'] == 'compute'
        ]

    def _get_server_details(self, server_id):
        # Full server record as seen by the admin API.
        body = self.admin_servers_client.get_server(server_id)
        return body

    def _get_host_for_server(self, server_id):
        # Hypervisor host currently running the server.
        return self._get_server_details(server_id)[self._host_key]

    def _migrate_server_to(self, server_id, dest_host):
        # Trigger the live migration; block-migration mode comes from config.
        body = self.admin_servers_client.live_migrate_server(
            server_id, dest_host,
            CONF.compute_feature_enabled.block_migration_for_live_migration)
        return body

    def _get_host_other_than(self, host):
        # First compute host different from *host* (None if there is none;
        # callers guard against that by checking the host count first).
        for target_host in self._get_compute_hostnames():
            if host != target_host:
                return target_host

    def _get_server_status(self, server_id):
        return self._get_server_details(server_id)['status']

    def _get_an_active_server(self):
        # Reuse a previously created ACTIVE server if possible; otherwise
        # boot a fresh one and remember it (for...else runs when no cached
        # server qualified).
        for server_id in self.created_server_ids:
            if 'ACTIVE' == self._get_server_status(server_id):
                return server_id
        else:
            server = self.create_test_server(wait_until="ACTIVE")
            server_id = server['id']
            self.created_server_ids.append(server_id)
            return server_id

    def _volume_clean_up(self, server_id, volume_id):
        # Detach first if the volume is still attached, then delete it.
        body = self.volumes_client.show_volume(volume_id)
        if body['status'] == 'in-use':
            self.servers_client.detach_volume(server_id, volume_id)
            self.volumes_client.wait_for_volume_status(volume_id, 'available')
        self.volumes_client.delete_volume(volume_id)

    def _test_live_block_migration(self, state='ACTIVE'):
        """Tests live block migration between two hosts.

        Requires CONF.compute_feature_enabled.live_migration to be True.

        :param state: The vm_state the migrated server should be in before and
                      after the live migration. Supported values are 'ACTIVE'
                      and 'PAUSED'.
        """
        # Live block migrate an instance to another host
        if len(self._get_compute_hostnames()) < 2:
            raise self.skipTest(
                "Less than 2 compute nodes, skipping migration test.")
        server_id = self._get_an_active_server()
        actual_host = self._get_host_for_server(server_id)
        target_host = self._get_host_other_than(actual_host)

        if state == 'PAUSED':
            # Pause before migrating so both ends of the migration see the
            # requested vm_state.
            self.admin_servers_client.pause_server(server_id)
            self.admin_servers_client.wait_for_server_status(server_id, state)

        self._migrate_server_to(server_id, target_host)
        self.servers_client.wait_for_server_status(server_id, state)
        self.assertEqual(target_host, self._get_host_for_server(server_id))

    @test.idempotent_id('1dce86b8-eb04-4c03-a9d8-9c1dc3ee0c7b')
    @testtools.skipUnless(CONF.compute_feature_enabled.live_migration,
                          'Live migration not available')
    def test_live_block_migration(self):
        self._test_live_block_migration()

    @test.idempotent_id('1e107f21-61b2-4988-8f22-b196e938ab88')
    @testtools.skipUnless(CONF.compute_feature_enabled.live_migration,
                          'Live migration not available')
    @testtools.skipUnless(CONF.compute_feature_enabled.pause,
                          'Pause is not available.')
    @testtools.skipUnless(CONF.compute_feature_enabled
                          .live_migrate_paused_instances,
                          'Live migration of paused instances is not '
                          'available.')
    def test_live_block_migration_paused(self):
        self._test_live_block_migration(state='PAUSED')

    @test.idempotent_id('e19c0cc6-6720-4ed8-be83-b6603ed5c812')
    @testtools.skipIf(not CONF.compute_feature_enabled.live_migration or not
                      CONF.compute_feature_enabled.
                      block_migration_for_live_migration,
                      'Block Live migration not available')
    @testtools.skipIf(not CONF.compute_feature_enabled.
                      block_migrate_cinder_iscsi,
                      'Block Live migration not configured for iSCSI')
    def test_iscsi_volume(self):
        # Live block migrate an instance to another host
        if len(self._get_compute_hostnames()) < 2:
            raise self.skipTest(
                "Less than 2 compute nodes, skipping migration test.")
        server_id = self._get_an_active_server()
        actual_host = self._get_host_for_server(server_id)
        target_host = self._get_host_other_than(actual_host)

        volume = self.volumes_client.create_volume(display_name='test')

        self.volumes_client.wait_for_volume_status(volume['id'],
                                                   'available')
        self.addCleanup(self._volume_clean_up, server_id, volume['id'])

        # Attach the volume to the server
        self.servers_client.attach_volume(server_id, volume['id'],
                                          device='/dev/xvdb')
        self.volumes_client.wait_for_volume_status(volume['id'], 'in-use')

        self._migrate_server_to(server_id, target_host)
        self.servers_client.wait_for_server_status(server_id, 'ACTIVE')
        self.assertEqual(target_host, self._get_host_for_server(server_id))
|
miaecle/deepchem | refs/heads/master | contrib/tensorflow_models/progressive_multitask.py | 5 | from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import warnings
import time
import numpy as np
import tensorflow as tf
from deepchem.utils.save import log
from deepchem.metrics import to_one_hot
from deepchem.metrics import from_one_hot
from deepchem.nn import model_ops
class ProgressiveMultitaskRegressor(TensorflowMultiTaskRegressor):
"""Implements a progressive multitask neural network.
Progressive Networks: https://arxiv.org/pdf/1606.04671v3.pdf
Progressive networks allow for multitask learning where each task
gets a new column of weights. As a result, there is no exponential
forgetting where previous tasks are ignored.
TODO(rbharath): This class is unnecessarily complicated. Can we simplify the
structure of the code here?
"""
def __init__(self, n_tasks, n_features, alpha_init_stddevs=[.02], **kwargs):
"""Creates a progressive network.
Only listing parameters specific to progressive networks here.
Parameters
----------
n_tasks: int
Number of tasks
n_features: int
Number of input features
alpha_init_stddevs: list
List of standard-deviations for alpha in adapter layers.
"""
warnings.warn(
"ProgressiveMultitaskRegressor is deprecated. "
"Will be removed in DeepChem 1.4.", DeprecationWarning)
self.alpha_init_stddevs = alpha_init_stddevs
super(ProgressiveMultitaskRegressor, self).__init__(n_tasks, n_features,
**kwargs)
# Consistency check
lengths_set = {
len(self.layer_sizes),
len(self.weight_init_stddevs),
len(self.alpha_init_stddevs),
len(self.bias_init_consts),
len(self.dropouts),
}
assert len(lengths_set) == 1, "All layer params must have same length."
  def construct_graph(self, training, seed):
    """Returns a TensorflowGraph object.

    Parameters
    ----------
    training: bool
      If True, also build the per-task training-cost (loss) ops.
    seed: int or None
      Graph-level random seed; skipped when None.
    """
    graph = tf.Graph()

    # Lazily created by _get_shared_session().
    shared_session = None

    # Cache of TensorFlow scopes, to prevent '_1' appended scope names
    # when subclass-overridden methods use the same scopes.
    name_scopes = {}

    # Setup graph
    with graph.as_default():
      if seed is not None:
        tf.set_random_seed(seed)
      # NOTE(review): 'features' is unused here; the lattice reaches the
      # input placeholder through self.mol_features instead.
      features, labels, weights = self.add_placeholders(graph, name_scopes)
      outputs = self.add_progressive_lattice(graph, name_scopes, training)

      if training:
        loss = self.add_task_training_costs(graph, name_scopes, outputs, labels,
                                            weights)
      else:
        loss = None

    return TensorflowGraph(
        graph=graph,
        session=shared_session,
        name_scopes=name_scopes,
        output=outputs,
        labels=labels,
        weights=weights,
        loss=loss)
  def add_placeholders(self, graph, name_scopes):
    """Adds all placeholders for this model.

    Creates the shared 'mol_features' input placeholder (stored on self)
    plus one 1-D labels and one 1-D weights placeholder per task.

    Returns a (mol_features, labels, weights) tuple where labels/weights
    are lists of per-task tensors.
    """
    # Create placeholders
    placeholder_scope = TensorflowGraph.get_placeholder_scope(
        graph, name_scopes)
    labels, weights = [], []
    n_features = self.n_features
    with placeholder_scope:
      self.mol_features = tf.placeholder(
          tf.float32, shape=[None, n_features], name='mol_features')
      for task in range(self.n_tasks):
        # tf.identity gives each placeholder a stable, addressable tensor.
        weights.append(
            tf.identity(
                tf.placeholder(
                    tf.float32, shape=[
                        None,
                    ], name='weights_%d' % task)))
        labels.append(
            tf.identity(
                tf.placeholder(
                    tf.float32, shape=[
                        None,
                    ], name='labels_%d' % task)))
    return self.mol_features, labels, weights
  def add_progressive_lattice(self, graph, name_scopes, training):
    """Constructs the graph architecture as specified in its config.

    Builds one column of hidden layers per task; columns for task > 0
    additionally receive lateral "adapter" connections from all earlier
    columns (see add_adapter). Returns a list with one output tensor per
    task.

    This method creates the following Placeholders:
      mol_features: Molecule descriptor (e.g. fingerprint) tensor with shape
        batch_size x n_features.
    """
    n_features = self.n_features
    # NOTE(review): placeholder_scope is unused in this method.
    placeholder_scope = TensorflowGraph.get_placeholder_scope(
        graph, name_scopes)
    with graph.as_default():
      layer_sizes = self.layer_sizes
      weight_init_stddevs = self.weight_init_stddevs
      bias_init_consts = self.bias_init_consts
      dropouts = self.dropouts
      # All per-layer hyperparameter lists must agree on the layer count.
      lengths_set = {
          len(layer_sizes),
          len(weight_init_stddevs),
          len(bias_init_consts),
          len(dropouts),
      }
      assert len(lengths_set) == 1, 'All layer params must have same length.'
      n_layers = lengths_set.pop()
      assert n_layers > 0, 'Must have some layers defined.'
      prev_layer = self.mol_features
      prev_layer_size = n_features
      # all_layers maps (layer_index, task) -> activation tensor.
      all_layers = {}
      for i in range(n_layers):
        for task in range(self.n_tasks):
          task_scope = TensorflowGraph.shared_name_scope(
              "task%d_ops" % task, graph, name_scopes)
          print("Adding weights for task %d, layer %d" % (task, i))
          with task_scope as scope:
            if i == 0:
              prev_layer = self.mol_features
              prev_layer_size = self.n_features
            else:
              prev_layer = all_layers[(i - 1, task)]
              prev_layer_size = layer_sizes[i - 1]
              if task > 0:
                # Lateral input from every earlier task's previous layer.
                lateral_contrib = self.add_adapter(all_layers, task, i)
            print("Creating W_layer_%d_task%d of shape %s" %
                  (i, task, str([prev_layer_size, layer_sizes[i]])))
            W = tf.Variable(
                tf.truncated_normal(
                    shape=[prev_layer_size, layer_sizes[i]],
                    stddev=self.weight_init_stddevs[i]),
                name='W_layer_%d_task%d' % (i, task),
                dtype=tf.float32)
            print("Creating b_layer_%d_task%d of shape %s" %
                  (i, task, str([layer_sizes[i]])))
            b = tf.Variable(
                tf.constant(
                    value=self.bias_init_consts[i], shape=[layer_sizes[i]]),
                name='b_layer_%d_task%d' % (i, task),
                dtype=tf.float32)
            layer = tf.matmul(prev_layer, W) + b
            if i > 0 and task > 0:
              layer = layer + lateral_contrib
            layer = tf.nn.relu(layer)
            layer = model_ops.dropout(layer, dropouts[i], training)
            all_layers[(i, task)] = layer
      # Output layer: one scalar head per task (i is the last hidden
      # layer's index after the loop above).
      output = []
      for task in range(self.n_tasks):
        prev_layer = all_layers[(i, task)]
        prev_layer_size = layer_sizes[i]
        task_scope = TensorflowGraph.shared_name_scope("task%d" % task, graph,
                                                       name_scopes)
        with task_scope as scope:
          if task > 0:
            lateral_contrib = tf.squeeze(
                self.add_adapter(all_layers, task, i + 1))
          weight_init = tf.truncated_normal(
              shape=[prev_layer_size, 1], stddev=weight_init_stddevs[i])
          bias_init = tf.constant(value=bias_init_consts[i], shape=[1])
          print("Creating W_output_task%d of shape %s" %
                (task, str([prev_layer_size, 1])))
          w = tf.Variable(
              weight_init, name='W_output_task%d' % task, dtype=tf.float32)
          print("Creating b_output_task%d of shape %s" % (task, str([1])))
          b = tf.Variable(
              bias_init, name='b_output_task%d' % task, dtype=tf.float32)
          layer = tf.squeeze(tf.matmul(prev_layer, w) + b)
          if i > 0 and task > 0:
            layer = layer + lateral_contrib
          output.append(layer)

      return output
  def add_adapter(self, all_layers, task, layer_num):
    """Add an adapter connection for given task/layer combo.

    Concatenates the previous-layer activations of all earlier tasks,
    scales them by a learned scalar alpha, projects through V (+ bias),
    and maps to the current layer's width through U. Returns the lateral
    contribution tensor to be added to the task's own pre-activation.
    """
    i = layer_num
    prev_layers = []
    # Handle output layer
    if i < len(self.layer_sizes):
      layer_sizes = self.layer_sizes
      alpha_init_stddev = self.alpha_init_stddevs[i]
      weight_init_stddev = self.weight_init_stddevs[i]
      bias_init_const = self.bias_init_consts[i]
    elif i == len(self.layer_sizes):
      # Output layer: width 1; reuse the last hidden layer's init params.
      layer_sizes = self.layer_sizes + [1]
      alpha_init_stddev = self.alpha_init_stddevs[-1]
      weight_init_stddev = self.weight_init_stddevs[-1]
      bias_init_const = self.bias_init_consts[-1]
    else:
      raise ValueError("layer_num too large for add_adapter.")
    # Iterate over all previous tasks.
    for prev_task in range(task):
      prev_layers.append(all_layers[(i - 1, prev_task)])
    # prev_layers is a list with elements of size
    # (batch_size, layer_sizes[i-1])
    prev_layer = tf.concat(axis=1, values=prev_layers)
    # Learned scalar gate on the concatenated lateral inputs.
    alpha = tf.Variable(
        tf.truncated_normal([
            1,
        ], stddev=alpha_init_stddev),
        name="alpha_layer_%d_task%d" % (i, task))
    prev_layer = tf.multiply(alpha, prev_layer)
    prev_layer_size = task * layer_sizes[i - 1]
    print("Creating V_layer_%d_task%d of shape %s" %
          (i, task, str([prev_layer_size, layer_sizes[i - 1]])))
    V = tf.Variable(
        tf.truncated_normal(
            shape=[prev_layer_size, layer_sizes[i - 1]],
            stddev=weight_init_stddev),
        name="V_layer_%d_task%d" % (i, task),
        dtype=tf.float32)
    print("Creating b_lat_layer_%d_task%d of shape %s" %
          (i, task, str([layer_sizes[i - 1]])))
    b_lat = tf.Variable(
        tf.constant(value=bias_init_const, shape=[layer_sizes[i - 1]]),
        name='b_lat_layer_%d_task%d' % (i, task),
        dtype=tf.float32)
    prev_layer = tf.matmul(prev_layer, V) + b_lat
    print("Creating U_layer_%d_task%d of shape %s" %
          (i, task, str([layer_sizes[i - 1], layer_sizes[i]])))
    U = tf.Variable(
        tf.truncated_normal(
            shape=[layer_sizes[i - 1], layer_sizes[i]],
            stddev=weight_init_stddev),
        name="U_layer_%d_task%d" % (i, task),
        dtype=tf.float32)
    return tf.matmul(prev_layer, U)
def get_training_op(self, graph, loss):
"""Get training op for applying gradients to variables.
Subclasses that need to do anything fancy with gradients should override
this method.
Returns:
A training op.
"""
with graph.as_default():
opt = model_ops.optimizer(self.optimizer, self.learning_rate,
self.momentum)
return opt.minimize(loss, name='train')
  def add_training_costs(self, graph, name_scopes, output, labels, weights):
    """Build a single aggregated training loss over all tasks.

    Sums the per-task weighted costs (each averaged over the batch) and
    optionally adds a weight-decay penalty.
    """
    with graph.as_default():
      epsilon = 1e-3  # small float to avoid dividing by zero
      weighted_costs = []  # weighted costs for each example
      gradient_costs = []  # costs used for gradient calculation

      with TensorflowGraph.shared_name_scope('costs', graph, name_scopes):
        for task in range(self.n_tasks):
          # Zero-pad the task index so scope names sort lexicographically.
          task_str = str(task).zfill(len(str(self.n_tasks)))
          with TensorflowGraph.shared_name_scope('cost_{}'.format(task_str),
                                                 graph, name_scopes):
            with tf.name_scope('weighted'):
              weighted_cost = self.cost(output[task], labels[task],
                                        weights[task])
              weighted_costs.append(weighted_cost)

            with tf.name_scope('gradient'):
              # Note that we divide by the batch size and not the number of
              # non-zero weight examples in the batch. Also, instead of using
              # tf.reduce_mean (which can put ops on the CPU) we explicitly
              # calculate with div/sum so it stays on the GPU.
              gradient_cost = tf.math.divide(
                  tf.reduce_sum(weighted_cost), self.batch_size)
              gradient_costs.append(gradient_cost)

        # aggregated costs
        with TensorflowGraph.shared_name_scope('aggregated', graph,
                                               name_scopes):
          with tf.name_scope('gradient'):
            loss = tf.add_n(gradient_costs)

          # weight decay
          if self.penalty != 0.0:
            penalty = model_ops.weight_decay(self.penalty_type, self.penalty)
            loss += penalty

      return loss
def construct_feed_dict(self, X_b, y_b=None, w_b=None, ids_b=None):
"""Construct a feed dictionary from minibatch data.
TODO(rbharath): ids_b is not used here. Can we remove it?
Args:
X_b: np.ndarray of shape (batch_size, n_features)
y_b: np.ndarray of shape (batch_size, n_tasks)
w_b: np.ndarray of shape (batch_size, n_tasks)
ids_b: List of length (batch_size) with datapoint identifiers.
"""
orig_dict = {}
orig_dict["mol_features"] = X_b
for task in range(self.n_tasks):
if y_b is not None:
orig_dict["labels_%d" % task] = y_b[:, task]
else:
# Dummy placeholders
orig_dict["labels_%d" % task] = np.squeeze(np.zeros((self.batch_size,)))
if w_b is not None:
orig_dict["weights_%d" % task] = w_b[:, task]
else:
# Dummy placeholders
orig_dict["weights_%d" % task] = np.ones((self.batch_size,))
return TensorflowGraph.get_feed_dict(orig_dict)
  def predict_on_batch(self, X, pad_batch=False):
    """Return model output for the provided input.

    Restore(checkpoint) must have previously been called on this object.

    Args:
      dataset: dc.data.Dataset object.

    Returns:
      Tuple of three numpy arrays with shape n_examples x n_tasks (x ...):
        output: Model outputs.
        labels: True labels.
        weights: Example weights.
      Note that the output and labels arrays may be more than 2D, e.g. for
      classifier models that return class probabilities.

    Raises:
      AssertionError: If model is not in evaluation mode.
      ValueError: If output and labels are not both 3D or both 2D.
    """
    if not self._restored_model:
      self.restore()
    with self.eval_graph.graph.as_default():

      # run eval data through the model
      n_tasks = self.n_tasks
      with self._get_shared_session(train=False).as_default():
        n_samples = len(X)
        feed_dict = self.construct_feed_dict(X)
        data = self._get_shared_session(train=False).run(
            self.eval_graph.output, feed_dict=feed_dict)

        # Shape (n_tasks, n__samples)
        batch_outputs = np.asarray(data[:n_tasks], dtype=float)
        # reshape to batch_size x n_tasks x ...
        if batch_outputs.ndim == 3:
          batch_outputs = batch_outputs.transpose((1, 0, 2))
        elif batch_outputs.ndim == 2:
          batch_outputs = batch_outputs.transpose((1, 0))
        # Handle edge case when batch-size is 1.
        elif batch_outputs.ndim == 1:
          n_samples = len(X)
          batch_outputs = batch_outputs.reshape((n_samples, n_tasks))
        else:
          raise ValueError('Unrecognized rank combination for output: %s' %
                           (batch_outputs.shape))

        # Drop singleton dimensions (e.g. a batch of 1) before returning.
        outputs = np.squeeze(batch_outputs)

    return outputs
  def fit(self,
          dataset,
          tasks=None,
          close_session=True,
          max_checkpoints_to_keep=5,
          **kwargs):
    """Fit the model.

    Progressive networks are fit by training one task at a time. Iteratively
    fits one task at a time with other weights frozen.

    Parameters
    ----------
    dataset: dc.data.Dataset
      Dataset object holding training data

    Raises
    ------
    AssertionError
      If model is not in training mode.
    """
    if tasks is None:
      tasks = range(self.n_tasks)
    with self.train_graph.graph.as_default():
      # One training op per task; each only updates that task's variables
      # (see get_task_training_op), freezing all other columns.
      task_train_ops = {}
      for task in tasks:
        task_train_ops[task] = self.get_task_training_op(
            self.train_graph.graph, self.train_graph.loss, task)

      sess = self._get_shared_session(train=True)
      #with self._get_shared_session(train=True) as sess:
      sess.run(tf.global_variables_initializer())
      # Save an initial checkpoint.
      saver = tf.train.Saver(max_to_keep=max_checkpoints_to_keep)
      saver.save(sess, self._save_path, global_step=0)
      for task in tasks:
        print("Fitting on task %d" % task)
        self.fit_task(sess, dataset, task, task_train_ops[task], **kwargs)
        # Checkpoint after each task so partial progress is recoverable.
        saver.save(sess, self._save_path, global_step=task)
      # Always save a final checkpoint when complete.
      saver.save(sess, self._save_path, global_step=self.n_tasks)
      if close_session:
        sess.close()
def get_task_training_op(self, graph, losses, task):
"""Get training op for applying gradients to variables.
Subclasses that need to do anything fancy with gradients should override
this method.
Parameters
----------
graph: tf.Graph
Graph for this op
losses: dict
Dictionary mapping task to losses
Returns
-------
A training op.
"""
with graph.as_default():
task_loss = losses[task]
task_root = "task%d_ops" % task
task_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, task_root)
opt = model_ops.optimizer(self.optimizer, self.learning_rate,
self.momentum)
return opt.minimize(task_loss, name='train', var_list=task_vars)
def add_task_training_costs(self, graph, name_scopes, outputs, labels,
weights):
"""Adds the training costs for each task.
Since each task is trained separately, each task is optimized w.r.t a separate
task.
TODO(rbharath): Figure out how to support weight decay for this model.
Since each task is trained separately, weight decay should only be used
on weights in column for that task.
Parameters
----------
graph: tf.Graph
Graph for the model.
name_scopes: dict
Contains all the scopes for model
outputs: list
List of output tensors from model.
weights: list
List of weight placeholders for model.
"""
task_costs = {}
with TensorflowGraph.shared_name_scope('costs', graph, name_scopes):
for task in range(self.n_tasks):
with TensorflowGraph.shared_name_scope('cost_%d' % task, graph,
name_scopes):
weighted_cost = self.cost(outputs[task], labels[task], weights[task])
# Note that we divide by the batch size and not the number of
# non-zero weight examples in the batch. Also, instead of using
# tf.reduce_mean (which can put ops on the CPU) we explicitly
# calculate with div/sum so it stays on the GPU.
task_cost = tf.math.divide(
tf.reduce_sum(weighted_cost), self.batch_size)
task_costs[task] = task_cost
return task_costs
def construct_task_feed_dict(self,
this_task,
X_b,
y_b=None,
w_b=None,
ids_b=None):
"""Construct a feed dictionary from minibatch data.
TODO(rbharath): ids_b is not used here. Can we remove it?
Args:
X_b: np.ndarray of shape (batch_size, n_features)
y_b: np.ndarray of shape (batch_size, n_tasks)
w_b: np.ndarray of shape (batch_size, n_tasks)
ids_b: List of length (batch_size) with datapoint identifiers.
"""
orig_dict = {}
orig_dict["mol_features"] = X_b
n_samples = len(X_b)
for task in range(self.n_tasks):
if (this_task == task) and y_b is not None:
#orig_dict["labels_%d" % task] = np.reshape(y_b[:, task], (n_samples, 1))
orig_dict["labels_%d" % task] = np.reshape(y_b[:, task], (n_samples,))
else:
# Dummy placeholders
#orig_dict["labels_%d" % task] = np.zeros((n_samples, 1))
orig_dict["labels_%d" % task] = np.zeros((n_samples,))
if (this_task == task) and w_b is not None:
#orig_dict["weights_%d" % task] = np.reshape(w_b[:, task], (n_samples, 1))
orig_dict["weights_%d" % task] = np.reshape(w_b[:, task], (n_samples,))
else:
# Dummy placeholders
#orig_dict["weights_%d" % task] = np.zeros((n_samples, 1))
orig_dict["weights_%d" % task] = np.zeros((n_samples,))
return TensorflowGraph.get_feed_dict(orig_dict)
def _get_shared_session(self, train):
# allow_soft_placement=True allows ops without a GPU implementation
# to run on the CPU instead.
if train:
if not self.train_graph.session:
config = tf.ConfigProto(allow_soft_placement=True)
self.train_graph.session = tf.Session(config=config)
return self.train_graph.session
else:
if not self.eval_graph.session:
config = tf.ConfigProto(allow_soft_placement=True)
self.eval_graph.session = tf.Session(config=config)
return self.eval_graph.session
def fit_task(self,
sess,
dataset,
task,
task_train_op,
nb_epoch=10,
log_every_N_batches=50,
checkpoint_interval=10):
"""Fit the model.
Fit one task.
TODO(rbharath): Figure out if the logging will work correctly with the
global_step set as it is.
Parameters
----------
dataset: dc.data.Dataset
Dataset object holding training data
task: int
The index of the task to train on.
nb_epoch: 10
Number of training epochs.
max_checkpoints_to_keep: int
Maximum number of checkpoints to keep; older checkpoints will be deleted.
log_every_N_batches: int
Report every N batches. Useful for training on very large datasets,
where epochs can take long time to finish.
checkpoint_interval: int
Frequency at which to write checkpoints, measured in epochs
Raises
------
AssertionError
If model is not in training mode.
"""
############################################################## TIMING
time1 = time.time()
############################################################## TIMING
log("Training task %d for %d epochs" % (task, nb_epoch), self.verbose)
for epoch in range(nb_epoch):
avg_loss, n_batches = 0., 0
for ind, (X_b, y_b, w_b, ids_b) in enumerate(
# Turns out there are valid cases where we don't want pad-batches
# on by default.
#dataset.iterbatches(batch_size, pad_batches=True)):
dataset.iterbatches(self.batch_size, pad_batches=self.pad_batches)):
if ind % log_every_N_batches == 0:
log("On batch %d" % ind, self.verbose)
feed_dict = self.construct_task_feed_dict(task, X_b, y_b, w_b, ids_b)
fetches = self.train_graph.output + [
task_train_op, self.train_graph.loss[task]
]
fetched_values = sess.run(fetches, feed_dict=feed_dict)
output = fetched_values[:len(self.train_graph.output)]
loss = fetched_values[-1]
avg_loss += loss
y_pred = np.squeeze(np.array(output))
y_b = y_b.flatten()
n_batches += 1
#if epoch%checkpoint_interval == checkpoint_interval-1:
# saver.save(sess, self._save_path, global_step=epoch)
avg_loss = float(avg_loss) / n_batches
log('Ending epoch %d: Average loss %g' % (epoch, avg_loss), self.verbose)
############################################################## TIMING
time2 = time.time()
print("TIMING: model fitting took %0.3f s" % (time2 - time1), self.verbose)
############################################################## TIMING
|
marcsans/cnn-physics-perception | refs/heads/master | phy/lib/python2.7/site-packages/scipy/optimize/tests/test__differential_evolution.py | 21 | """
Unit tests for the differential global minimization algorithm.
"""
from scipy.optimize import _differentialevolution
from scipy.optimize._differentialevolution import DifferentialEvolutionSolver
from scipy.optimize import differential_evolution
import numpy as np
from scipy.optimize import rosen
from numpy.testing import (assert_equal, TestCase, assert_allclose,
run_module_suite, assert_almost_equal,
assert_string_equal, assert_raises, assert_)
class TestDifferentialEvolutionSolver(TestCase):
    """Unit tests for DifferentialEvolutionSolver and the
    differential_evolution convenience function.
    Two small solvers are built in setUp: ``dummy_solver`` for parameter
    scaling tests and ``dummy_solver2`` (with a hand-crafted 7-member
    population) for checking the mutation strategies numerically.
    """
    def setUp(self):
        # Raise on invalid float ops so numerical problems surface as errors.
        self.old_seterr = np.seterr(invalid='raise')
        self.limits = np.array([[0., 0.],
                                [2., 2.]])
        self.bounds = [(0., 2.), (0., 2.)]
        self.dummy_solver = DifferentialEvolutionSolver(self.quadratic,
                                                        [(0, 100)])
        # dummy_solver2 will be used to test mutation strategies
        self.dummy_solver2 = DifferentialEvolutionSolver(self.quadratic,
                                                         [(0, 1)],
                                                         popsize=7,
                                                         mutation=0.5)
        # create a population that's only 7 members long
        # [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7]
        population = np.atleast_2d(np.arange(0.1, 0.8, 0.1)).T
        self.dummy_solver2.population = population
    def tearDown(self):
        # Restore the float-error state changed in setUp.
        np.seterr(**self.old_seterr)
    def quadratic(self, x):
        # Simple convex objective used throughout these tests.
        return x[0]**2
    def test__strategy_resolves(self):
        # test that the correct mutation function is resolved by
        # different requested strategy arguments
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='best1exp')
        assert_equal(solver.strategy, 'best1exp')
        assert_equal(solver.mutation_func.__name__, '_best1')
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='best1bin')
        assert_equal(solver.strategy, 'best1bin')
        assert_equal(solver.mutation_func.__name__, '_best1')
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='rand1bin')
        assert_equal(solver.strategy, 'rand1bin')
        assert_equal(solver.mutation_func.__name__, '_rand1')
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='rand1exp')
        assert_equal(solver.strategy, 'rand1exp')
        assert_equal(solver.mutation_func.__name__, '_rand1')
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='rand2exp')
        assert_equal(solver.strategy, 'rand2exp')
        assert_equal(solver.mutation_func.__name__, '_rand2')
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='best2bin')
        assert_equal(solver.strategy, 'best2bin')
        assert_equal(solver.mutation_func.__name__, '_best2')
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='rand2bin')
        assert_equal(solver.strategy, 'rand2bin')
        assert_equal(solver.mutation_func.__name__, '_rand2')
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='rand2exp')
        assert_equal(solver.strategy, 'rand2exp')
        assert_equal(solver.mutation_func.__name__, '_rand2')
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='randtobest1bin')
        assert_equal(solver.strategy, 'randtobest1bin')
        assert_equal(solver.mutation_func.__name__, '_randtobest1')
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='randtobest1exp')
        assert_equal(solver.strategy, 'randtobest1exp')
        assert_equal(solver.mutation_func.__name__, '_randtobest1')
    def test__mutate1(self):
        # strategies */1/*, i.e. rand/1/bin, best/1/exp, etc.
        result = np.array([0.05])
        trial = self.dummy_solver2._best1((2, 3, 4, 5, 6))
        assert_allclose(trial, result)
        result = np.array([0.25])
        trial = self.dummy_solver2._rand1((2, 3, 4, 5, 6))
        assert_allclose(trial, result)
    def test__mutate2(self):
        # strategies */2/*, i.e. rand/2/bin, best/2/exp, etc.
        # [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7]
        result = np.array([-0.1])
        trial = self.dummy_solver2._best2((2, 3, 4, 5, 6))
        assert_allclose(trial, result)
        result = np.array([0.1])
        trial = self.dummy_solver2._rand2((2, 3, 4, 5, 6))
        assert_allclose(trial, result)
    def test__randtobest1(self):
        # strategies randtobest/1/*
        result = np.array([0.1])
        trial = self.dummy_solver2._randtobest1(1, (2, 3, 4, 5, 6))
        assert_allclose(trial, result)
    def test_can_init_with_dithering(self):
        # a (min, max) mutation tuple requests dithering.
        mutation = (0.5, 1)
        solver = DifferentialEvolutionSolver(self.quadratic,
                                             self.bounds,
                                             mutation=mutation)
        self.assertEqual(solver.dither, list(mutation))
    def test_invalid_mutation_values_arent_accepted(self):
        # mutation constants must lie in [0, 2) and be finite.
        func = rosen
        mutation = (0.5, 3)
        self.assertRaises(ValueError,
                          DifferentialEvolutionSolver,
                          func,
                          self.bounds,
                          mutation=mutation)
        mutation = (-1, 1)
        self.assertRaises(ValueError,
                          DifferentialEvolutionSolver,
                          func,
                          self.bounds,
                          mutation=mutation)
        mutation = (0.1, np.nan)
        self.assertRaises(ValueError,
                          DifferentialEvolutionSolver,
                          func,
                          self.bounds,
                          mutation=mutation)
        mutation = 0.5
        solver = DifferentialEvolutionSolver(func,
                                             self.bounds,
                                             mutation=mutation)
        assert_equal(0.5, solver.scale)
        assert_equal(None, solver.dither)
    def test__scale_parameters(self):
        trial = np.array([0.3])
        assert_equal(30, self.dummy_solver._scale_parameters(trial))
        # it should also work with the limits reversed
        self.dummy_solver.limits = np.array([[100], [0.]])
        assert_equal(30, self.dummy_solver._scale_parameters(trial))
    def test__unscale_parameters(self):
        trial = np.array([30])
        assert_equal(0.3, self.dummy_solver._unscale_parameters(trial))
        # it should also work with the limits reversed
        self.dummy_solver.limits = np.array([[100], [0.]])
        assert_equal(0.3, self.dummy_solver._unscale_parameters(trial))
    def test__ensure_constraint(self):
        # out-of-range entries are replaced in place with values in [0, 1].
        trial = np.array([1.1, -100, 2., 300., -0.00001])
        self.dummy_solver._ensure_constraint(trial)
        assert_equal(np.all(trial <= 1), True)
    def test_differential_evolution(self):
        # test that the Jmin of DifferentialEvolutionSolver
        # is the same as the function evaluation
        solver = DifferentialEvolutionSolver(self.quadratic, [(-2, 2)])
        result = solver.solve()
        assert_almost_equal(result.fun, self.quadratic(result.x))
    def test_best_solution_retrieval(self):
        # test that the getter property method for the best solution works.
        solver = DifferentialEvolutionSolver(self.quadratic, [(-2, 2)])
        result = solver.solve()
        assert_equal(result.x, solver.x)
    def test_callback_terminates(self):
        # test that if the callback returns true, then the minimization halts
        bounds = [(0, 2), (0, 2)]
        def callback(param, convergence=0.):
            return True
        result = differential_evolution(rosen, bounds, callback=callback)
        assert_string_equal(result.message,
                            'callback function requested stop early '
                            'by returning True')
    def test_args_tuple_is_passed(self):
        # test that the args tuple is passed to the cost function properly.
        bounds = [(-10, 10)]
        args = (1., 2., 3.)
        def quadratic(x, *args):
            if type(args) != tuple:
                raise ValueError('args should be a tuple')
            return args[0] + args[1] * x + args[2] * x**2.
        result = differential_evolution(quadratic,
                                        bounds,
                                        args=args,
                                        polish=True)
        assert_almost_equal(result.fun, 2 / 3.)
    def test_init_with_invalid_strategy(self):
        # test that passing an invalid strategy raises ValueError
        func = rosen
        bounds = [(-3, 3)]
        self.assertRaises(ValueError,
                          differential_evolution,
                          func,
                          bounds,
                          strategy='abc')
    def test_bounds_checking(self):
        # test that the bounds checking works
        func = rosen
        bounds = [(-3, None)]
        self.assertRaises(ValueError,
                          differential_evolution,
                          func,
                          bounds)
        bounds = [(-3)]
        self.assertRaises(ValueError,
                          differential_evolution,
                          func,
                          bounds)
        bounds = [(-3, 3), (3, 4, 5)]
        self.assertRaises(ValueError,
                          differential_evolution,
                          func,
                          bounds)
    def test_select_samples(self):
        # select_samples should return 5 separate random numbers.
        limits = np.arange(12., dtype='float64').reshape(2, 6)
        bounds = list(zip(limits[0, :], limits[1, :]))
        solver = DifferentialEvolutionSolver(None, bounds, popsize=1)
        candidate = 0
        r1, r2, r3, r4, r5 = solver._select_samples(candidate, 5)
        assert_equal(
            len(np.unique(np.array([candidate, r1, r2, r3, r4, r5]))), 6)
    def test_maxiter_stops_solve(self):
        # test that if the maximum number of iterations is exceeded
        # the solver stops.
        solver = DifferentialEvolutionSolver(rosen, self.bounds, maxiter=1)
        result = solver.solve()
        assert_equal(result.success, False)
        assert_equal(result.message,
                     'Maximum number of iterations has been exceeded.')
    def test_maxfun_stops_solve(self):
        # test that if the maximum number of function evaluations is exceeded
        # during initialisation the solver stops
        solver = DifferentialEvolutionSolver(rosen, self.bounds, maxfun=1,
                                             polish=False)
        result = solver.solve()
        assert_equal(result.nfev, 2)
        assert_equal(result.success, False)
        assert_equal(result.message,
                     'Maximum number of function evaluations has '
                     'been exceeded.')
        # test that if the maximum number of function evaluations is exceeded
        # during the actual minimisation, then the solver stops.
        # Have to turn polishing off, as this will still occur even if maxfun
        # is reached. For popsize=5 and len(bounds)=2, then there are only 10
        # function evaluations during initialisation.
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             popsize=5,
                                             polish=False,
                                             maxfun=40)
        result = solver.solve()
        assert_equal(result.nfev, 41)
        assert_equal(result.success, False)
        assert_equal(result.message,
                     'Maximum number of function evaluations has '
                     'been exceeded.')
    def test_quadratic(self):
        # test the quadratic function from object
        solver = DifferentialEvolutionSolver(self.quadratic,
                                             [(-100, 100)],
                                             tol=0.02)
        solver.solve()
        assert_equal(np.argmin(solver.population_energies), 0)
    def test_quadratic_from_diff_ev(self):
        # test the quadratic function from differential_evolution function
        differential_evolution(self.quadratic,
                               [(-100, 100)],
                               tol=0.02)
    def test_seed_gives_repeatability(self):
        result = differential_evolution(self.quadratic,
                                        [(-100, 100)],
                                        polish=False,
                                        seed=1,
                                        tol=0.5)
        result2 = differential_evolution(self.quadratic,
                                         [(-100, 100)],
                                         polish=False,
                                         seed=1,
                                         tol=0.5)
        assert_equal(result.x, result2.x)
    def test_exp_runs(self):
        # test whether exponential mutation loop runs
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='best1exp',
                                             maxiter=1)
        solver.solve()
    def test__make_random_gen(self):
        # If seed is None, return the RandomState singleton used by np.random.
        # If seed is an int, return a new RandomState instance seeded with seed.
        # If seed is already a RandomState instance, return it.
        # Otherwise raise ValueError.
        rsi = _differentialevolution._make_random_gen(1)
        assert_equal(type(rsi), np.random.RandomState)
        rsi = _differentialevolution._make_random_gen(rsi)
        assert_equal(type(rsi), np.random.RandomState)
        rsi = _differentialevolution._make_random_gen(None)
        assert_equal(type(rsi), np.random.RandomState)
        self.assertRaises(
            ValueError, _differentialevolution._make_random_gen, 'a')
    def test_gh_4511_regression(self):
        # This modification of the differential evolution docstring example
        # uses a custom popsize that had triggered an off-by-one error.
        # Because we do not care about solving the optimization problem in
        # this test, we use maxiter=1 to reduce the testing time.
        # The return value is intentionally unused; not raising is the test.
        bounds = [(-5, 5), (-5, 5)]
        result = differential_evolution(rosen, bounds, popsize=1815, maxiter=1)
    def test_calculate_population_energies(self):
        # if popsize is 2 then the overall generation has size (4,)
        solver = DifferentialEvolutionSolver(rosen, self.bounds, popsize=2)
        solver._calculate_population_energies()
        assert_equal(np.argmin(solver.population_energies), 0)
        # initial calculation of the energies should require 4 nfev.
        assert_equal(solver._nfev, 4)
    def test_iteration(self):
        # test that DifferentialEvolutionSolver is iterable
        # if popsize is 2 then the overall generation has size (4,)
        solver = DifferentialEvolutionSolver(rosen, self.bounds, popsize=2,
                                             maxfun=8)
        x, fun = next(solver)
        assert_equal(np.size(x, 0), 2)
        # 4 nfev are required for initial calculation of energies, 4 nfev are
        # required for the evolution of the 4 population members.
        assert_equal(solver._nfev, 8)
        # the next generation should halt because it exceeds maxfun
        assert_raises(StopIteration, next, solver)
        # check a proper minimisation can be done by an iterable solver
        solver = DifferentialEvolutionSolver(rosen, self.bounds)
        for i, soln in enumerate(solver):
            x_current, fun_current = soln
            # need to have this otherwise the solver would never stop.
            if i == 1000:
                break
        assert_almost_equal(fun_current, 0)
    def test_convergence(self):
        solver = DifferentialEvolutionSolver(rosen, self.bounds, tol=0.2,
                                             polish=False)
        solver.solve()
        assert_(solver.convergence < 0.2)
    def test_maxiter_none_GH5731(self):
        # Pre 0.17 the previous default for maxiter and maxfun was None.
        # the numerical defaults are now 1000 and np.inf. However, some scripts
        # will still supply None for both of those, this will raise a TypeError
        # in the solve method.
        solver = DifferentialEvolutionSolver(rosen, self.bounds, maxiter=None,
                                             maxfun=None)
        solver.solve()
    def test_population_initiation(self):
        # test the different modes of population initiation
        # init must be either 'latinhypercube' or 'random'
        # raising ValueError is something else is passed in
        assert_raises(ValueError,
                      DifferentialEvolutionSolver,
                      *(rosen, self.bounds),
                      **{'init': 'rubbish'})
        solver = DifferentialEvolutionSolver(rosen, self.bounds)
        # check that population initiation:
        # 1) resets _nfev to 0
        # 2) all population energies are np.inf
        solver.init_population_random()
        assert_equal(solver._nfev, 0)
        assert_(np.all(np.isinf(solver.population_energies)))
        solver.init_population_lhs()
        assert_equal(solver._nfev, 0)
        assert_(np.all(np.isinf(solver.population_energies)))
# Allow the test module to be run directly as a script.
if __name__ == '__main__':
    run_module_suite()
|
diorcety/translate | refs/heads/master | translate/lang/__init__.py | 4 | # -*- coding: utf-8 -*-
#
# Copyright 2007 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Classes that represent languages and provide language-specific information.
All classes inherit from the parent class called :class:`common`.
The type of data includes:
- Language codes
- Language name
- Plurals
- Punctuation transformation
- etc.
"""
|
unseenlaser/python-for-android | refs/heads/master | python-modules/twisted/twisted/web/iweb.py | 53 | # -*- test-case-name: twisted.web.test -*-
# Copyright (c) 2008-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Interface definitions for L{twisted.web}.
@var UNKNOWN_LENGTH: An opaque object which may be used as the value of
L{IBodyProducer.length} to indicate that the length of the entity
body is not known in advance.
"""
from zope.interface import Interface, Attribute
from twisted.internet.interfaces import IPushProducer
from twisted.cred.credentials import IUsernameDigestHash
class IRequest(Interface):
    """
    An HTTP request.
    @since: 9.0
    """
    method = Attribute("A C{str} giving the HTTP method that was used.")
    uri = Attribute(
        "A C{str} giving the full encoded URI which was requested (including "
        "query arguments).")
    path = Attribute(
        "A C{str} giving the encoded query path of the request URI.")
    args = Attribute(
        "A mapping of decoded query argument names as C{str} to "
        "corresponding query argument values as C{list}s of C{str}. "
        "For example, for a URI with C{'foo=bar&foo=baz&quux=spam'} "
        "for its query part, C{args} will be C{{'foo': ['bar', 'baz'], "
        "'quux': ['spam']}}.")
    received_headers = Attribute(
        "Backwards-compatibility access to C{requestHeaders}. Use "
        "C{requestHeaders} instead. C{received_headers} behaves mostly "
        "like a C{dict} and does not provide access to all header values.")
    requestHeaders = Attribute(
        "A L{http_headers.Headers} instance giving all received HTTP request "
        "headers.")
    headers = Attribute(
        "Backwards-compatibility access to C{responseHeaders}. Use"
        "C{responseHeaders} instead. C{headers} behaves mostly like a "
        "C{dict} and does not provide access to all header values nor "
        "does it allow multiple values for one header to be set.")
    responseHeaders = Attribute(
        "A L{http_headers.Headers} instance holding all HTTP response "
        "headers to be sent.")
    def getHeader(key):
        """
        Get an HTTP request header.
        @type key: C{str}
        @param key: The name of the header to get the value of.
        @rtype: C{str} or C{NoneType}
        @return: The value of the specified header, or C{None} if that header
            was not present in the request.
        """
    def getCookie(key):
        """
        Get a cookie that was sent from the network.
        """
    def getAllHeaders():
        """
        Return dictionary mapping the names of all received headers to the last
        value received for each.
        Since this method does not return all header information,
        C{requestHeaders.getAllRawHeaders()} may be preferred.
        """
    def getRequestHostname():
        """
        Get the hostname that the user passed in to the request.
        This will either use the Host: header (if it is available) or the
        host we are listening on if the header is unavailable.
        @returns: the requested hostname
        @rtype: C{str}
        """
    def getHost():
        """
        Get my originally requesting transport's host.
        @return: An L{IAddress}.
        """
    def getClientIP():
        """
        Return the IP address of the client who submitted this request.
        @returns: the client IP address or C{None} if the request was submitted
            over a transport where IP addresses do not make sense.
        @rtype: C{str} or L{NoneType}
        """
    def getClient():
        """
        Return the hostname of the IP address of the client who submitted this
        request, if possible.
        This method is B{deprecated}. See L{getClientIP} instead.
        @rtype: L{NoneType} or L{str}
        @return: The canonical hostname of the client, as determined by
            performing a name lookup on the IP address of the client.
        """
    def getUser():
        """
        Return the HTTP user sent with this request, if any.
        If no user was supplied, return the empty string.
        @returns: the HTTP user, if any
        @rtype: C{str}
        """
    def getPassword():
        """
        Return the HTTP password sent with this request, if any.
        If no password was supplied, return the empty string.
        @returns: the HTTP password, if any
        @rtype: C{str}
        """
    def isSecure():
        """
        Return True if this request is using a secure transport.
        Normally this method returns True if this request's HTTPChannel
        instance is using a transport that implements ISSLTransport.
        This will also return True if setHost() has been called
        with ssl=True.
        @returns: True if this request is secure
        @rtype: C{bool}
        """
    def getSession(sessionInterface=None):
        """
        Look up the session associated with this request or create a new one if
        there is not one.
        @return: The L{Session} instance identified by the session cookie in
            the request, or the C{sessionInterface} component of that session
            if C{sessionInterface} is specified.
        """
    def URLPath():
        """
        @return: A L{URLPath} instance which identifies the URL for which this
            request is.
        """
    def prePathURL():
        """
        @return: At any time during resource traversal, a L{str} giving an
            absolute URL to the most nested resource which has yet been
            reached.
        """
    def rememberRootURL():
        """
        Remember the currently-processed part of the URL for later
        recalling.
        """
    def getRootURL():
        """
        Get a previously-remembered URL.
        """
    # Methods for outgoing response
    def finish():
        """
        Indicate that the response to this request is complete.
        """
    def write(data):
        """
        Write some data to the body of the response to this request. Response
        headers are written the first time this method is called, after which
        new response headers may not be added.
        """
    def addCookie(k, v, expires=None, domain=None, path=None, max_age=None, comment=None, secure=None):
        """
        Set an outgoing HTTP cookie.
        In general, you should consider using sessions instead of cookies, see
        L{twisted.web.server.Request.getSession} and the
        L{twisted.web.server.Session} class for details.
        """
    def setResponseCode(code, message=None):
        """
        Set the HTTP response code.
        """
    def setHeader(k, v):
        """
        Set an HTTP response header. Overrides any previously set values for
        this header.
        @type k: C{str}
        @param k: The name of the header for which to set the value.
        @type v: C{str}
        @param v: The value to set for the named header.
        """
    def redirect(url):
        """
        Utility function that does a redirect.
        The request should have finish() called after this.
        """
    def setLastModified(when):
        """
        Set the C{Last-Modified} time for the response to this request.
        If I am called more than once, I ignore attempts to set Last-Modified
        earlier, only replacing the Last-Modified time if it is to a later
        value.
        If I am a conditional request, I may modify my response code to
        L{NOT_MODIFIED} if appropriate for the time given.
        @param when: The last time the resource being returned was modified, in
            seconds since the epoch.
        @type when: C{int}, C{long} or C{float}
        @return: If I am a C{If-Modified-Since} conditional request and the
            time given is not newer than the condition, I return
            L{http.CACHED<CACHED>} to indicate that you should write no body.
            Otherwise, I return a false value.
        """
    def setETag(etag):
        """
        Set an C{entity tag} for the outgoing response.
        That's "entity tag" as in the HTTP/1.1 C{ETag} header, "used for
        comparing two or more entities from the same requested resource."
        If I am a conditional request, I may modify my response code to
        L{NOT_MODIFIED} or L{PRECONDITION_FAILED}, if appropriate for the tag
        given.
        @param etag: The entity tag for the resource being returned.
        @type etag: C{str}
        @return: If I am a C{If-None-Match} conditional request and the tag
            matches one in the request, I return L{http.CACHED<CACHED>} to
            indicate that you should write no body. Otherwise, I return a
            false value.
        """
    def setHost(host, port, ssl=0):
        """
        Change the host and port the request thinks it's using.
        This method is useful for working with reverse HTTP proxies (e.g. both
        Squid and Apache's mod_proxy can do this), when the address the HTTP
        client is using is different than the one we're listening on.
        For example, Apache may be listening on https://www.example.com, and
        then forwarding requests to http://localhost:8080, but we don't want
        HTML produced by Twisted to say 'http://localhost:8080', they should
        say 'https://www.example.com', so we do::
            request.setHost('www.example.com', 443, ssl=1)
        """
class ICredentialFactory(Interface):
    """
    A credential factory defines a way to generate a particular kind of
    authentication challenge and a way to interpret the responses to these
    challenges. It creates L{ICredentials} providers from responses. These
    objects will be used with L{twisted.cred} to authenticate and authorize
    requests.
    """
    scheme = Attribute(
        "A C{str} giving the name of the authentication scheme with which "
        "this factory is associated. For example, C{'basic'} or C{'digest'}.")
    def getChallenge(request):
        """
        Generate a new challenge to be sent to a client.
        @type request: L{twisted.web.http.Request}
        @param request: The request whose response will include this
            challenge.
        @rtype: C{dict}
        @return: A mapping from C{str} challenge fields to associated C{str}
            values.
        """
    def decode(response, request):
        """
        Create a credentials object from the given response.
        @type response: C{str}
        @param response: scheme specific response string
        @type request: L{twisted.web.http.Request}
        @param request: The request being processed (from which the response
            was taken).
        @raise twisted.cred.error.LoginFailed: If the response is invalid.
        @rtype: L{twisted.cred.credentials.ICredentials} provider
        @return: The credentials represented by the given response.
        """
class IBodyProducer(IPushProducer):
    """
    Objects which provide L{IBodyProducer} write bytes to an object which
    provides L{IConsumer} by calling its C{write} method repeatedly.
    L{IBodyProducer} providers may start producing as soon as they have
    an L{IConsumer} provider. That is, they should not wait for a
    C{resumeProducing} call to begin writing data.
    L{IConsumer.unregisterProducer} must not be called. Instead, the
    L{Deferred} returned from C{startProducing} must be fired when all bytes
    have been written.
    L{IConsumer.write} may synchronously invoke any of C{pauseProducing},
    C{resumeProducing}, or C{stopProducing}. These methods must be implemented
    with this in mind.
    @since: 9.0
    """
    # Despite the restrictions above and the additional requirements of
    # stopProducing documented below, this interface still needs to be an
    # IPushProducer subclass. Providers of it will be passed to IConsumer
    # providers which only know about IPushProducer and IPullProducer, not
    # about this interface. This interface needs to remain close enough to one
    # of those interfaces for consumers to work with it.
    length = Attribute(
        """
        C{length} is a C{int} indicating how many bytes in total this
        L{IBodyProducer} will write to the consumer or L{UNKNOWN_LENGTH}
        if this is not known in advance.
        """)
    def startProducing(consumer):
        """
        Start producing to the given L{IConsumer} provider.
        @return: A L{Deferred} which fires with C{None} when all bytes have
            been produced or with a L{Failure} if there is any problem before
            all bytes have been produced.
        """
    def stopProducing():
        """
        In addition to the standard behavior of L{IProducer.stopProducing}
        (stop producing data), make sure the L{Deferred} returned by
        C{startProducing} is never fired.
        """
# Opaque sentinel used as the value of IBodyProducer.length when the length
# of the entity body is not known in advance.
UNKNOWN_LENGTH = u"twisted.web.iweb.UNKNOWN_LENGTH"
__all__ = [
    "IUsernameDigestHash", "ICredentialFactory", "IRequest",
    "IBodyProducer",
    "UNKNOWN_LENGTH"]
|
yalcinozhabes/pythonJDFTx | refs/heads/master | setup.py | 1 | # adapted from http://wiki.cython.org/PackageHierarchy
from __future__ import print_function
import sys, os, shutil, site
import multiprocessing
import subprocess as sb
import tempfile as tmp
import mpi4py
from distutils.core import setup
from distutils.extension import Extension
from distutils import log
from Cython.Distutils import build_ext
from Cython.Build import cythonize
# Use parallel compilation on this number of cores.
nthreads = int(os.getenv('COMPILE_NTHREADS', multiprocessing.cpu_count() ))
isRoot = os.geteuid() == 0 # Do we have root privileges?
class inTempFolder:
    """Context manager that runs the enclosed block inside a fresh
    temporary folder.

    On entry a temporary directory is created and made the current
    working directory; the tuple ``(tmpdir, originalDir)`` is returned so
    callers can still refer back to where they started.  On exit the
    original working directory is restored and the temporary folder is
    removed.
    """
    def __enter__(self):
        self.originalDir = os.getcwd()
        self.tmpdir = tmp.mkdtemp()
        os.chdir(self.tmpdir)
        return self.tmpdir, self.originalDir
    def __exit__(self, type, value, traceback):
        # BUG FIX: leave the folder *before* deleting it.  The original
        # code removed the tree while it was still the process cwd, which
        # leaves the process sitting in a deleted directory (and can fail
        # outright on some platforms).
        os.chdir(self.originalDir)
        shutil.rmtree(self.tmpdir)
def installJDFTx(isRoot=False, enableGPU=False):
    """Build libjdftx (and optionally libjdftx_gpu) and return its folder.

    If a previous per-user build already exists under ``site.USER_BASE``
    it is reused.  Otherwise the JDFTx sources are configured and built in
    a throw-away temp folder, and the resulting shared libraries (plus the
    pseudopotentials, first time only) are moved into either
    ``/usr/local/jdftx`` (root) or ``<USER_BASE>/jdftx`` (non-root).

    Returns the library directory to pass to the linker, or ``""`` when a
    system-wide symlink into /usr/lib could be created.
    """
    # Reuse a previous per-user build when the needed libraries exist.
    if os.path.exists(os.path.join(site.USER_BASE, "jdftx/libjdftx.so")):
        if (not enableGPU) or \
           os.path.exists(os.path.join(site.USER_BASE, "jdftx/libjdftx_gpu.so")):
            return os.path.join(site.USER_BASE, "jdftx")
    with inTempFolder() as (jdftxDir, pythonJDFTxDir):
        log.info("JDFTx Compilation:")
        log.info("Running cmake...")
        jdftxCodeDir = os.path.join(pythonJDFTxDir, "jdftx")
        if enableGPU:
            sb.check_call(["cmake", "-D", "EnableCUDA=yes",
                           jdftxCodeDir])
        else:
            # BUG FIX: the CPU-only branch used to run
            # ["cmake", "-D", jdftxCodeDir]; a bare -D with no VAR=VALUE
            # makes cmake abort before configuring anything.
            sb.check_call(["cmake", jdftxCodeDir])
        log.info("Running make. This takes a few minutes.")
        sb.check_call(["make", "-j%d" % nthreads])
        if isRoot:
            jdftxLibDir = "/usr/local/jdftx"
        else:
            jdftxLibDir = os.path.join(site.USER_BASE, "jdftx")
        if not os.path.exists(jdftxLibDir):
            # makedirs (not mkdir): USER_BASE itself may not exist yet.
            os.makedirs(jdftxLibDir)
        shutil.move("libjdftx.so", jdftxLibDir)
        if enableGPU:
            shutil.move("libjdftx_gpu.so", jdftxLibDir)
        if not os.path.exists(os.path.join(site.USER_BASE, "jdftx/pseudopotentials")):
            shutil.move("pseudopotentials", jdftxLibDir)
        # Try to expose the library system-wide; without write access to
        # /usr/lib this raises OSError and we fall back to returning the
        # private library directory (non-root path relies on this).
        try:
            os.symlink("/usr/local/jdftx/libjdftx.so",
                       "/usr/lib/libjdftx.so")
            if enableGPU:
                os.symlink("/usr/local/jdftx/libjdftx_gpu.so",
                           "/usr/lib/libjdftx_gpu.so")
            return ""
        except OSError:
            return jdftxLibDir
def make_extension(ext_name, ext_libraries=(), is_directory=False, enableGPU=0):
    """Build a Cython ``Extension`` for *ext_name*, ensuring libjdftx exists.

    The linker is probed for an installed libjdftx (and libjdftx_gpu when
    *enableGPU* is set); if the probe fails, :func:`installJDFTx` builds
    the library first.  ``ext_name`` is a dotted module name; when
    *is_directory* is true, its ``__init__.pyx`` is compiled instead.
    """
    try:
        # BUG FIX: the probe used open("/dev/null") — a read-only, leaked
        # file handle is useless as a stderr target.  os.devnull opened
        # for writing is both correct and portable, and the context
        # manager closes it again.
        with open(os.devnull, "w") as devnull:
            sb.check_call(["ld", "-ljdftx"], stderr=devnull)
            if enableGPU:
                sb.check_call(["ld", "-ljdftx_gpu"])
        jdftxLibDir = ""
    except sb.CalledProcessError:
        jdftxLibDir = installJDFTx(isRoot, enableGPU)
    jdftxIncDirs = ["jdftx", ".", mpi4py.get_include()]
    if enableGPU: # adds only the default directory for now
        jdftxIncDirs.append("/usr/local/cuda/include")
    ext_path = ext_name
    if is_directory:
        ext_path += ".__init__"
    return Extension(
        ext_name,
        [ext_path.replace(".", os.path.sep) + ".pyx"],
        include_dirs=jdftxIncDirs,
        language="c++",
        libraries=ext_libraries,
        library_dirs=[jdftxLibDir],
        runtime_library_dirs=[jdftxLibDir],
        extra_compile_args=['-std=c++0x', '-O3', '-DMPI_ENABLED'] +
                           ['-DGPU_ENABLED'] * enableGPU,
        #depends=["jdftx/libjdftx.so"],
        )
def writeTargetToSourceCode(target, baseFName="JDFTxCalcBase.pyx"):
    """Generate ``JDFTxCalc<target>.pyx`` from the base template file.

    The leading block of ``#`` comment lines in *baseFName* is skipped
    (together with the first non-comment line, which terminates that
    block); the remainder is written out with every ``{TARGET}``
    placeholder replaced by *target*.
    """
    outName = "JDFTxCalc{0}.pyx".format(target)
    with open(baseFName) as template, open(outName, "w") as out:
        out.write("# Generated by the setup script from " + baseFName + "\n")
        # Consume the template-specific comment header: every line that
        # starts with '#', plus the first line that does not.
        line = template.readline()
        while line[0] == '#':
            line = template.readline()
        out.write(template.read().format(TARGET=target))
def main():
    """Setup entry point: build the Cython extensions and run distutils.

    Recognizes a custom ``--GPU`` command-line flag which additionally
    generates and compiles the CUDA-enabled extension.
    """
    enableGPU = (len(sys.argv) >= 2 and "--GPU" in sys.argv)
    # Remove our custom flag so distutils does not choke on it.
    if enableGPU: sys.argv.pop(sys.argv.index("--GPU"))
    extensions = []
    if enableGPU:
        # Generate JDFTxCalcGPU.pyx from the base template, then wrap it.
        writeTargetToSourceCode(target = 'GPU')
        extensions.append(
            make_extension("JDFTxCalcGPU", ["jdftx_gpu"], enableGPU=True,
                           ),
            )
    # The CPU extension is always built.
    writeTargetToSourceCode(target = 'CPU')
    extensions.append(
        make_extension("JDFTxCalcCPU", ["jdftx"]),
        )
    for e in extensions:
        e.cython_directives = {"boundscheck": False,
                               "wraparound": False,
                               "infer_types": True}
    # Compile with the MPI compiler wrappers reported by mpi4py.
    mpiCompilers = mpi4py.get_config()
    os.environ['CC'] = mpiCompilers['mpicc']
    os.environ['CXX'] = mpiCompilers['mpicxx']
    pyVersion = sys.version_info[0]
    extensions = cythonize(extensions, nthreads=nthreads,
                           compiler_directives = {'language_level': pyVersion})
    setup(**{
        "name": "pythonJDFTx",
#        "packages": [
#            "core",
#            "electronic",
#            "includes",
#            "fluid",
#            ],
        "py_modules":["ElectronicMinimize"],
        "ext_modules": extensions,
        "cmdclass": {'build_ext': build_ext},
        })
if __name__ == "__main__":
main()
|
genomoncology/related | refs/heads/master | src/related/validators.py | 1 | from attr import attr, attributes
import re
@attributes(repr=False, slots=True)
class _CompositeValidator(object):
    # attrs-style validator that chains several validators together.
    # `validators` is an iterable of callables with the standard
    # (inst, attr, value) validator signature.
    validators = attr()
    def __call__(self, inst, attr, value):
        # Run every wrapped validator in order; the first failure raises.
        # NOTE: the `attr` parameter (the attrs Attribute being validated)
        # shadows the module-level `attr` import within this method.
        for validator in self.validators:
            validator(inst, attr, value)
    def __repr__(self):
        return (
            "<composite validator for validators {!r}>".format(self.validators)
        )
def composite(*validators):
    """Return a validator that runs each of *validators* in order.

    Intended for use as the ``validator=`` argument of an attrs attribute
    when more than one check must be applied to the same value.
    """
    return _CompositeValidator(validators)
@attributes(repr=False, slots=True)
class _RegexValidator(object):
    # attrs-style validator that checks a value against a regular
    # expression pattern handed to re.match.
    regex = attr()
    def __call__(self, inst, attr, value):
        # re.match anchors at the *start* of the string only; a value that
        # merely contains a match somewhere later is rejected.
        if not re.match(self.regex, value):
            raise TypeError(
                "'{name}' must match {regex!r} (got {value!r}).".format(
                    name=attr.name, regex=self.regex, value=value), attr,
                self.regex, value)
    def __repr__(self):
        return (
            "<regex validator for {!r}>".format(self.regex)
        )
def regex(match_string):
    """Return a validator requiring the value to match *match_string*.

    The check uses ``re.match``, i.e. the pattern is anchored at the start
    of the value; a ``TypeError`` is raised on mismatch.

    (The previous docstring was copy-pasted from :func:`composite` and
    described composing validators, which this function does not do.)
    """
    return _RegexValidator(match_string)
|
bmwiedemann/linuxcnc-mirror | refs/heads/master | configs/sim/gmoccapy/gmoccapy_plasma/plasma.py | 7 | #!/usr/bin/env python
# -*- coding:UTF-8 -*-
"""
This file will control some options of the gmoccapy plasma screen
and demonstrats at the same time the possibilities you have introducing
your own handler files and functions to that screen, showing the
possibilities to modify the layout and behavior
Copyright 2013 Norbert Schechner
nieson@web.de
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import hal_glib # needed to make our own hal pins
import hal # needed to make our own hal pins
from gladevcp.persistence import IniFile # we use this one to save the states of the widgets on shut down and restart
from gladevcp.persistence import widget_defaults
from gladevcp.persistence import select_widgets
import gtk
from gmoccapy import preferences
from gmoccapy import getiniinfo
class PlasmaClass:
def __init__(self, halcomp, builder, useropts):
self.builder = builder
self.halcomp = halcomp
self.defaults = { IniFile.vars : { "thcspeedval" : 15.0 ,
"thcspeedmax" : 20.0 ,
"thcspeedmin" : 1.0 ,
"thcspeedincr" : 1.0 ,
"cutgapval" : 4.0 ,
"cutgapmax" : 10.0 ,
"cutgapmin" : 0.1 ,
"cutgapincr" : 0.1 ,
"g0gapval" : 45.0 ,
"g0gapmax" : 55.0 ,
"g0gapmin" : 0.5 ,
"g0gapincr" : 0.5 ,
"pierceutostart" : True ,
"piercegapval" : 5.0 ,
"piercegapmax" : 12.0 ,
"piercegapmin" : 2.0 ,
"piercegapincr" : 0.5 ,
"piercedelayval" : 0.5 ,
"piercedelaymax" : 10.0 ,
"piercedelaymin" : 0.01 ,
"piercedelayincr" : 0.01 ,
"enableheightlock" : False,
"chlthresholdval" : 60.0 ,
"chlthresholdmax" : 100.0,
"chlthresholdmin" : 10.0 ,
"chlthresholdincr" : 10.0 ,
"thctargetvoltval" : 100.0,
"thctargetvoltmax" : 255.0,
"thctargetvoltmin" : 55.0 ,
"thctargetvoltincr" : 5.0 ,
},
IniFile.widgets: widget_defaults(select_widgets([self.builder.get_object("hal-btn-THC"),
], hal_only = True, output_only = True)),
}
get_ini_info = getiniinfo.GetIniInfo()
prefs = preferences.preferences(get_ini_info.get_preference_file_path())
theme_name = prefs.getpref("gtk_theme", "Follow System Theme", str)
if theme_name == "Follow System Theme":
theme_name = gtk.settings_get_default().get_property("gtk-theme-name")
gtk.settings_get_default().set_string_property("gtk-theme-name", theme_name, "")
self.ini_filename = __name__ + ".var"
self.ini = IniFile(self.ini_filename, self.defaults, self.builder)
self.ini.restore_state(self)
# lets make our pins
self.THC_speed = hal_glib.GPin(halcomp.newpin("THC-Speed", hal.HAL_FLOAT, hal.HAL_OUT))
self.cut_gap = hal_glib.GPin(halcomp.newpin("Cut-Gap", hal.HAL_FLOAT, hal.HAL_OUT))
self.g0_gap = hal_glib.GPin(halcomp.newpin("G0-Gap", hal.HAL_FLOAT, hal.HAL_OUT))
self.pierce_deley = hal_glib.GPin(halcomp.newpin("Pierce-Delay", hal.HAL_FLOAT, hal.HAL_OUT))
self.pierce_gap = hal_glib.GPin(halcomp.newpin("Pierce-Gap", hal.HAL_FLOAT, hal.HAL_OUT))
self.target_voltage = hal_glib.GPin(halcomp.newpin("Target-Voltage", hal.HAL_FLOAT, hal.HAL_OUT))
# get all widgets and connect them
self.lbl_prog_volt = self.builder.get_object("lbl_prog_volt")
self.lbl_cut_speed = self.builder.get_object("lbl_cut_speed")
self.lbl_cut_gap = self.builder.get_object("lbl_cut_gap")
self.lbl_g0_gap = self.builder.get_object("lbl_g0_gap")
self.lbl_pierce_gap = self.builder.get_object("lbl_pierce_gap")
self.lbl_pierce_delay = self.builder.get_object("lbl_pierce_delay")
self.btn_THC_speed_minus = self.builder.get_object("btn_THC_speed_minus")
self.btn_THC_speed_minus.connect("pressed", self.on_btn_THC_speed_pressed, -1)
self.btn_THC_speed_plus = self.builder.get_object("btn_THC_speed_plus")
self.btn_THC_speed_plus.connect("pressed", self.on_btn_THC_speed_pressed, 1)
self.adj_THC_speed = self.builder.get_object("adj_THC_speed")
self.adj_THC_speed.connect("value_changed", self.on_adj_THC_speed_value_changed)
self.adj_THC_speed.upper = self.thcspeedmax
self.adj_THC_speed.lower = self.thcspeedmin
self.adj_THC_speed.set_value(self.thcspeedval)
self.tbl_cutting = self.builder.get_object("tbl_cutting")
self.tbl_cutting.connect("destroy", self._on_destroy)
self.btn_cut_gap_minus = self.builder.get_object("btn_cut_gap_minus")
self.btn_cut_gap_minus.connect("pressed", self.on_btn_cut_gap_pressed, -1)
self.btn_cut_gap_plus = self.builder.get_object("btn_cut_gap_plus")
self.btn_cut_gap_plus.connect("pressed", self.on_btn_cut_gap_pressed, 1)
self.adj_cut_gap = self.builder.get_object("adj_cut_gap")
self.adj_cut_gap.connect("value_changed", self.on_adj_cut_gap_value_changed)
self.adj_cut_gap.upper = self.cutgapmax
self.adj_cut_gap.lower = self.cutgapmin
self.adj_cut_gap.set_value(self.cutgapval)
self.btn_g0_minus = self.builder.get_object("btn_g0_minus")
self.btn_g0_minus.connect("pressed", self.on_btn_g0_pressed, -1)
self.btn_g0_plus = self.builder.get_object("btn_g0_plus")
self.btn_g0_plus.connect("pressed", self.on_btn_g0_pressed, 1)
self.adj_G0_gap = self.builder.get_object("adj_G0_gap")
self.adj_G0_gap.connect("value_changed", self.on_adj_G0_gap_value_changed)
self.adj_G0_gap.upper = self.g0gapmax
self.adj_G0_gap.lower = self.g0gapmin
self.adj_G0_gap.set_value(self.g0gapval)
self.Piercing_autostart = self.builder.get_object("Piercing-autostart")
self.Piercing_autostart.connect("toggled", self.on_Piercing_autostart_toggled)
self.Piercing_autostart.set_active(self.pierceutostart)
self.btn_pierce_gap_minus = self.builder.get_object("btn_pierce_gap_minus")
self.btn_pierce_gap_minus.connect("pressed", self.on_btn_pierce_gap_pressed, -1)
self.btn_pierce_gap_plus = self.builder.get_object("btn_pierce_gap_plus")
self.btn_pierce_gap_plus.connect("pressed", self.on_btn_pierce_gap_pressed, 1)
self.adj_pierce_gap = self.builder.get_object("adj_pierce_gap")
self.adj_pierce_gap.connect("value_changed", self.on_adj_pierce_gap_value_changed)
self.adj_pierce_gap.upper = self.piercegapmax
self.adj_pierce_gap.lower = self.piercegapmin
self.adj_pierce_gap.set_value(self.piercegapval)
self.btn_pierce_delay_minus = self.builder.get_object("btn_pierce_delay_minus")
self.btn_pierce_delay_minus.connect("pressed", self.on_btn_pierce_delay_pressed, -1)
self.btn_pierce_delay_plus = self.builder.get_object("btn_pierce_delay_plus")
self.btn_pierce_delay_plus.connect("pressed", self.on_btn_pierce_delay_pressed, 1)
self.adj_pierce_delay = self.builder.get_object("adj_pierce_delay")
self.adj_pierce_delay.connect("value_changed", self.on_adj_pierce_delay_value_changed)
self.adj_pierce_delay.upper = self.piercedelaymax
self.adj_pierce_delay.lower = self.piercedelaymin
self.adj_pierce_delay.set_value(self.piercedelayval)
self.enable_HeightLock = self.builder.get_object("enable-HeightLock")
self.enable_HeightLock.connect("toggled", self.on_enable_HeightLock_toggled)
self.enable_HeightLock.set_active(self.enableheightlock)
self.adj_CHL_threshold = self.builder.get_object("adj_CHL_threshold")
self.adj_CHL_threshold.connect("value_changed", self.on_adj_CHL_threshold_value_changed)
self.adj_CHL_threshold.upper = self.chlthresholdmax
self.adj_CHL_threshold.lower = self.chlthresholdmin
self.adj_CHL_threshold.set_value(self.chlthresholdval)
self.btn_THC_target_minus = self.builder.get_object("btn_THC_target_minus")
self.btn_THC_target_minus.connect("pressed", self.on_btn_THC_target_pressed, -1)
self.btn_THC_target_plus = self.builder.get_object("btn_THC_target_plus")
self.btn_THC_target_plus.connect("pressed", self.on_btn_THC_target_pressed, 1)
self.adj_THC_Voltage = self.builder.get_object("adj_THC_Voltage")
self.adj_THC_Voltage.connect("value_changed", self.on_adj_THC_Voltage_value_changed)
self.adj_THC_Voltage.upper = self.thctargetvoltmax
self.adj_THC_Voltage.lower = self.thctargetvoltmin
self.adj_THC_Voltage.set_value(self.thctargetvoltval)
def _on_destroy(self, obj, data = None):
self.ini.save_state(self)
# What to do on button pres events?
def on_btn_THC_speed_pressed(self, widget, dir):
increment = self.thcspeedincr * dir
self.thcspeedval = self.adj_THC_speed.get_value() + increment
self.adj_THC_speed.set_value(self.thcspeedval)
def on_btn_cut_gap_pressed(self, widget, dir):
increment = self.cutgapincr * dir
self.cutgapval = self.adj_cut_gap.get_value() + increment
self.adj_cut_gap.set_value(self.cutgapval)
def on_btn_g0_pressed(self, widget, dir):
increment = self.g0gapincr * dir
self.g0gapval = self.adj_G0_gap.get_value() + increment
self.adj_G0_gap.set_value(self.g0gapval)
def on_btn_pierce_gap_pressed(self, widget, dir):
increment = self.piercegapincr * dir
self.piercegapval = self.adj_pierce_gap.get_value() + increment
self.adj_pierce_gap.set_value(self.piercegapval)
def on_btn_pierce_delay_pressed(self, widget, dir):
increment = self.piercedelayincr * dir
self.piercedelayval = self.adj_pierce_delay.get_value() + increment
self.adj_pierce_delay.set_value(self.piercedelayval)
def on_btn_THC_target_pressed(self, widget, dir):
increment = self.thctargetvoltincr * dir
self.thctargetvoltval = self.adj_THC_Voltage.get_value() + increment
self.adj_THC_Voltage.set_value(self.thctargetvoltval)
# and the behavior of the adjustments to control max and min values
def on_adj_THC_speed_value_changed(self, widget, data = None):
if widget.get_value() >= widget.upper:
self.btn_THC_speed_plus.set_sensitive(False)
elif widget.get_value() <= widget.lower:
self.btn_THC_speed_minus.set_sensitive(False)
else:
self.btn_THC_speed_plus.set_sensitive(True)
self.btn_THC_speed_minus.set_sensitive(True)
self.halcomp["THC-Speed"] = widget.get_value()
self.lbl_cut_speed.set_label("%.1f" % (widget.get_value()))
def on_adj_cut_gap_value_changed(self, widget, data = None):
if widget.get_value() >= widget.upper:
self.btn_cut_gap_plus.set_sensitive(False)
elif widget.get_value() <= widget.lower:
self.btn_cut_gap_minus.set_sensitive(False)
else:
self.btn_cut_gap_plus.set_sensitive(True)
self.btn_cut_gap_minus.set_sensitive(True)
self.halcomp["Cut-Gap"] = widget.get_value()
self.lbl_cut_gap.set_label("%.3f" % (widget.get_value()))
def on_adj_G0_gap_value_changed(self, widget, data = None):
if widget.get_value() >= widget.upper:
self.btn_g0_plus.set_sensitive(False)
elif widget.get_value() <= widget.lower:
self.btn_g0_minus.set_sensitive(False)
else:
self.btn_g0_plus.set_sensitive(True)
self.btn_g0_minus.set_sensitive(True)
self.halcomp["G0-Gap"] = widget.get_value()
self.lbl_g0_gap.set_label("%.3f" % (widget.get_value()))
def on_adj_pierce_gap_value_changed(self, widget, data = None):
if widget.get_value() >= widget.upper:
self.btn_pierce_gap_plus.set_sensitive(False)
elif widget.get_value() <= widget.lower:
self.btn_pierce_gap_minus.set_sensitive(False)
else:
self.btn_pierce_gap_plus.set_sensitive(True)
self.btn_pierce_gap_minus.set_sensitive(True)
self.halcomp["Pierce-Gap"] = widget.get_value()
self.lbl_pierce_gap.set_label("%.3f" % (widget.get_value()))
def on_adj_pierce_delay_value_changed(self, widget, data = None):
if widget.get_value() >= widget.upper:
self.btn_pierce_delay_plus.set_sensitive(False)
elif widget.get_value() <= widget.lower:
self.btn_pierce_delay_minus.set_sensitive(False)
else:
self.btn_pierce_delay_plus.set_sensitive(True)
self.btn_pierce_delay_minus.set_sensitive(True)
self.halcomp["Pierce-Delay"] = widget.get_value()
self.lbl_pierce_delay.set_label("%.2f" % (widget.get_value()))
def on_adj_CHL_threshold_value_changed(self, widget, data = None):
self.chlthresholdval = widget.get_value()
def on_adj_THC_Voltage_value_changed(self, widget, data = None):
if widget.get_value() >= widget.upper:
self.btn_THC_target_plus.set_sensitive(False)
elif widget.get_value() <= widget.lower:
self.btn_THC_target_minus.set_sensitive(False)
else:
self.btn_THC_target_plus.set_sensitive(True)
self.btn_THC_target_minus.set_sensitive(True)
self.halcomp["Target-Voltage"] = widget.get_value()
self.lbl_prog_volt.set_label("%d" % (widget.get_value()))
def on_Piercing_autostart_toggled(self, widget, data = None):
self.pierceutostart = widget.get_active()
def on_enable_HeightLock_toggled(self, widget, data = None):
self.enableheightlock = widget.get_active()
def get_handlers(halcomp, builder, useropts):
return[PlasmaClass(halcomp, builder, useropts)]
|
magcius/sweettooth | refs/heads/master | sweettooth/ratings/views.py | 1 |
import json
from django.core.urlresolvers import reverse
from django.contrib import comments
from django.contrib.messages import info
from django.shortcuts import redirect
from django.utils.dateformat import format as format_date
from extensions import models
from decorators import ajax_view, model_view
from utils import gravatar_url
def comment_done(request):
    """Post-comment redirect target.

    Django's comments framework redirects here after a successful post,
    passing the new comment's pk in the ``c`` query parameter; flash a
    thank-you message and send the user back to the commented object.
    """
    pk = request.GET['c']
    comment = comments.get_model().objects.get(pk=pk)
    info(request, "Thank you for your comment")
    return redirect(comment.get_content_object_url())
def comment_details(request, comment):
    """Serialize one comment into a JSON-friendly dict for the ratings UI.

    Includes the commenter's gravatar URL, whether the commenter is the
    extension's own author, the comment body, author link, and the submit
    date in both ISO and display formats.
    """
    extension = comment.content_object
    gravatar = gravatar_url(request, comment.email)
    # Flag comments written by the extension's creator so the UI can
    # highlight them.
    is_extension_creator = (comment.user == extension.creator)
    details = dict(gravatar = gravatar,
                   is_extension_creator = is_extension_creator,
                   comment = comment.comment,
                   author = dict(username=comment.user.username,
                                 url=reverse('auth-profile', kwargs=dict(user=comment.user.username))),
                   date = dict(timestamp = comment.submit_date.isoformat(),
                               standard = format_date(comment.submit_date, 'F j, Y')))
    # A rating of -1 appears to mean "no rating given" — only expose
    # non-negative ratings (TODO confirm against the comment model).
    if comment.rating > -1:
        details['rating'] = comment.rating
    return details
@ajax_view
def get_comments(request):
    """AJAX endpoint returning serialized comments for one extension.

    Query parameters: ``pk`` (extension primary key, required) and
    optional ``all`` (JSON boolean; when false/absent only the five most
    recent comments are returned).
    """
    extension = models.Extension.objects.get(pk=request.GET['pk'])
    show_all = json.loads(request.GET.get('all', 'false'))
    comment_list = comments.get_model().objects.for_model(extension)
    # Newest first.
    comment_list = comment_list.order_by('-submit_date')
    if not show_all:
        comment_list = comment_list[:5]
    return [comment_details(request, comment) for comment in comment_list]
|
luosch/leetcode | refs/heads/master | python/Minimum Path Sum.py | 1 | class Solution(object):
def minPathSum(self, grid):
m = len(grid)
n = len(grid[0])
dp = [[0 for _ in range(n)] for _ in range(m)]
dp[0][0] = grid[0][0]
for i in range(1, m):
dp[i][0] = dp[i - 1][0] + grid[i][0]
for i in range(1, n):
dp[0][i] = dp[0][i - 1] + grid[0][i]
for i in range(1, m):
for j in range(1, n):
dp[i][j] = min(dp[i - 1][j], dp[i][j - 1]) + grid[i][j]
return dp[-1][-1]
|
thecodinghub/news-for-good | refs/heads/master | news/Lib/site-packages/pip/_vendor/requests/packages/chardet/latin1prober.py | 1777 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe
from .compat import wrap_ord
FREQ_CAT_NUM = 4
UDF = 0 # undefined
OTH = 1 # other
ASC = 2 # ascii capital letter
ASS = 3 # ascii small letter
ACV = 4 # accent capital vowel
ACO = 5 # accent capital other
ASV = 6 # accent small vowel
ASO = 7 # accent small other
CLASS_NUM = 8 # total classes
Latin1_CharToClass = (
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F
OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57
ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F
OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77
ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F
OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87
OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F
UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97
OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF
ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7
ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF
ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7
ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF
ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7
ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF
ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7
ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF
)
# 0 : illegal
# 1 : very unlikely
# 2 : normal
# 3 : very likely
Latin1ClassModel = (
# UDF OTH ASC ASS ACV ACO ASV ASO
0, 0, 0, 0, 0, 0, 0, 0, # UDF
0, 3, 3, 3, 3, 3, 3, 3, # OTH
0, 3, 3, 3, 3, 3, 3, 3, # ASC
0, 3, 3, 3, 1, 1, 3, 3, # ASS
0, 3, 3, 3, 1, 2, 1, 2, # ACV
0, 3, 3, 3, 3, 3, 3, 3, # ACO
0, 3, 1, 3, 1, 1, 1, 3, # ASV
0, 3, 1, 3, 1, 1, 3, 3, # ASO
)
class Latin1Prober(CharSetProber):
    """Heuristic prober for Latin-1 / windows-1252 text.

    Classifies each byte with ``Latin1_CharToClass`` and scores every
    consecutive class pair against ``Latin1ClassModel``, tallying how
    plausible the observed transitions are.
    """
    def __init__(self):
        CharSetProber.__init__(self)
        self.reset()
    def reset(self):
        # Class of the previously seen byte (OTH before any input).
        self._mLastCharClass = OTH
        # _mFreqCounter[i]: count of transitions whose likelihood
        # category was i (0=illegal .. 3=very likely).
        self._mFreqCounter = [0] * FREQ_CAT_NUM
        CharSetProber.reset(self)
    def get_charset_name(self):
        return "windows-1252"
    def feed(self, aBuf):
        aBuf = self.filter_with_english_letters(aBuf)
        for c in aBuf:
            charClass = Latin1_CharToClass[wrap_ord(c)]
            # Likelihood of the (previous class -> current class) pair.
            freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM)
                                    + charClass]
            if freq == 0:
                # Illegal transition: this buffer cannot be Latin-1.
                self._mState = eNotMe
                break
            self._mFreqCounter[freq] += 1
            self._mLastCharClass = charClass
        return self.get_state()
    def get_confidence(self):
        if self.get_state() == eNotMe:
            return 0.01
        total = sum(self._mFreqCounter)
        if total < 0.01:
            confidence = 0.0
        else:
            # "Very likely" transitions raise confidence; "very unlikely"
            # ones are penalized 20x.
            confidence = ((self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0)
                          / total)
        if confidence < 0.0:
            confidence = 0.0
        # lower the confidence of latin1 so that other more accurate
        # detector can take priority.
        confidence = confidence * 0.73
        return confidence
|
marty331/jakesclock | refs/heads/master | flask/lib/python2.7/site-packages/wtforms/widgets/core.py | 93 | from __future__ import unicode_literals
try:
from html import escape
except ImportError:
from cgi import escape
from wtforms.compat import text_type, iteritems
__all__ = (
'CheckboxInput', 'FileInput', 'HiddenInput', 'ListWidget', 'PasswordInput',
'RadioInput', 'Select', 'SubmitInput', 'TableWidget', 'TextArea',
'TextInput', 'Option'
)
def html_params(**kwargs):
    """
    Generate HTML attribute syntax from inputted keyword arguments.

    The output value is sorted by the passed keys, to provide consistent output
    each time this function is called with the same parameters. Because of the
    frequent use of the normally reserved keywords `class` and `for`, suffixing
    these with an underscore will allow them to be used. A key prefixed with
    ``data_`` is emitted as an HTML5 ``data-`` attribute (only the first
    underscore is converted).

    In addition, the values ``True`` and ``False`` are special:

    * ``attr=True`` generates the HTML compact output of a boolean attribute,
      e.g. ``checked=True`` will generate simply ``checked``
    * ``attr=`False`` will be ignored and generate no output.

    >>> html_params(name='text1', id='f', class_='text')
    'class="text" id="f" name="text1"'
    >>> html_params(checked=True, readonly=False, name="text1", abc="hello")
    'abc="hello" checked name="text1"'
    """
    params = []
    for k, v in sorted(iteritems(kwargs)):
        if k in ('class_', 'class__', 'for_'):
            # Strip the escaping underscore from reserved-word keys.
            k = k[:-1]
        elif k.startswith('data_'):
            k = k.replace('_', '-', 1)
        if v is True:
            params.append(k)
        elif v is False:
            pass
        else:
            params.append('%s="%s"' % (text_type(k), escape(text_type(v), quote=True)))
    return ' '.join(params)
class HTMLString(text_type):
"""
This is an "HTML safe string" class that is returned by WTForms widgets.
For the most part, HTMLString acts like a normal unicode string, except
in that it has a `__html__` method. This method is invoked by a compatible
auto-escaping HTML framework to get the HTML-safe version of a string.
Usage::
HTMLString('<input type="text" value="hello">')
"""
def __html__(self):
"""
Give an HTML-safe string.
This method actually returns itself, because it's assumed that
whatever you give to HTMLString is a string with any unsafe values
already escaped. This lets auto-escaping template frameworks
know that this string is safe for HTML rendering.
"""
return self
class ListWidget(object):
"""
Renders a list of fields as a `ul` or `ol` list.
This is used for fields which encapsulate many inner fields as subfields.
The widget will try to iterate the field to get access to the subfields and
call them to render them.
If `prefix_label` is set, the subfield's label is printed before the field,
otherwise afterwards. The latter is useful for iterating radios or
checkboxes.
"""
def __init__(self, html_tag='ul', prefix_label=True):
assert html_tag in ('ol', 'ul')
self.html_tag = html_tag
self.prefix_label = prefix_label
def __call__(self, field, **kwargs):
kwargs.setdefault('id', field.id)
html = ['<%s %s>' % (self.html_tag, html_params(**kwargs))]
for subfield in field:
if self.prefix_label:
html.append('<li>%s %s</li>' % (subfield.label, subfield()))
else:
html.append('<li>%s %s</li>' % (subfield(), subfield.label))
html.append('</%s>' % self.html_tag)
return HTMLString(''.join(html))
class TableWidget(object):
"""
Renders a list of fields as a set of table rows with th/td pairs.
If `with_table_tag` is True, then an enclosing <table> is placed around the
rows.
Hidden fields will not be displayed with a row, instead the field will be
pushed into a subsequent table row to ensure XHTML validity. Hidden fields
at the end of the field list will appear outside the table.
"""
def __init__(self, with_table_tag=True):
self.with_table_tag = with_table_tag
def __call__(self, field, **kwargs):
html = []
if self.with_table_tag:
kwargs.setdefault('id', field.id)
html.append('<table %s>' % html_params(**kwargs))
hidden = ''
for subfield in field:
if subfield.type == 'HiddenField':
hidden += text_type(subfield)
else:
html.append('<tr><th>%s</th><td>%s%s</td></tr>' % (text_type(subfield.label), hidden, text_type(subfield)))
hidden = ''
if self.with_table_tag:
html.append('</table>')
if hidden:
html.append(hidden)
return HTMLString(''.join(html))
class Input(object):
"""
Render a basic ``<input>`` field.
This is used as the basis for most of the other input fields.
By default, the `_value()` method will be called upon the associated field
to provide the ``value=`` HTML attribute.
"""
html_params = staticmethod(html_params)
def __init__(self, input_type=None):
if input_type is not None:
self.input_type = input_type
def __call__(self, field, **kwargs):
kwargs.setdefault('id', field.id)
kwargs.setdefault('type', self.input_type)
if 'value' not in kwargs:
kwargs['value'] = field._value()
return HTMLString('<input %s>' % self.html_params(name=field.name, **kwargs))
class TextInput(Input):
"""
Render a single-line text input.
"""
input_type = 'text'
class PasswordInput(Input):
"""
Render a password input.
For security purposes, this field will not reproduce the value on a form
submit by default. To have the value filled in, set `hide_value` to
`False`.
"""
input_type = 'password'
def __init__(self, hide_value=True):
self.hide_value = hide_value
def __call__(self, field, **kwargs):
if self.hide_value:
kwargs['value'] = ''
return super(PasswordInput, self).__call__(field, **kwargs)
class HiddenInput(Input):
"""
Render a hidden input.
"""
input_type = 'hidden'
class CheckboxInput(Input):
"""
Render a checkbox.
The ``checked`` HTML attribute is set if the field's data is a non-false value.
"""
input_type = 'checkbox'
def __call__(self, field, **kwargs):
if getattr(field, 'checked', field.data):
kwargs['checked'] = True
return super(CheckboxInput, self).__call__(field, **kwargs)
class RadioInput(Input):
"""
Render a single radio button.
This widget is most commonly used in conjunction with ListWidget or some
other listing, as singular radio buttons are not very useful.
"""
input_type = 'radio'
def __call__(self, field, **kwargs):
if field.checked:
kwargs['checked'] = True
return super(RadioInput, self).__call__(field, **kwargs)
class FileInput(object):
"""
Renders a file input chooser field.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('id', field.id)
return HTMLString('<input %s>' % html_params(name=field.name, type='file', **kwargs))
class SubmitInput(Input):
"""
Renders a submit button.
The field's label is used as the text of the submit button instead of the
data on the field.
"""
input_type = 'submit'
def __call__(self, field, **kwargs):
kwargs.setdefault('value', field.label.text)
return super(SubmitInput, self).__call__(field, **kwargs)
class TextArea(object):
"""
Renders a multi-line text area.
`rows` and `cols` ought to be passed as keyword args when rendering.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('id', field.id)
return HTMLString('<textarea %s>%s</textarea>' % (
html_params(name=field.name, **kwargs),
escape(text_type(field._value()), quote=False)
))
class Select(object):
    """
    Renders a select field.

    If `multiple` is True, then the `size` property should be specified on
    rendering to make the field useful.

    The field must provide an `iter_choices()` method which the widget will
    call on rendering; this method must yield tuples of
    `(value, label, selected)`.
    """

    def __init__(self, multiple=False):
        self.multiple = multiple

    def __call__(self, field, **kwargs):
        kwargs.setdefault('id', field.id)
        if self.multiple:
            kwargs['multiple'] = True
        parts = ['<select %s>' % html_params(name=field.name, **kwargs)]
        parts.extend(
            self.render_option(val, label, selected)
            for val, label, selected in field.iter_choices()
        )
        parts.append('</select>')
        return HTMLString(''.join(parts))

    @classmethod
    def render_option(cls, value, label, selected, **kwargs):
        if value is True:
            # Handle the special case of a 'True' value.
            value = text_type(value)
        options = dict(kwargs, value=value)
        if selected:
            options['selected'] = True
        rendered_label = escape(text_type(label), quote=False)
        return HTMLString(
            '<option %s>%s</option>' % (html_params(**options), rendered_label))
class Option(object):
    """
    Renders the individual option from a select field.

    This is just a convenience for various custom rendering situations, and an
    option by itself does not constitute an entire field.
    """

    def __call__(self, field, **kwargs):
        value = field._value()
        return Select.render_option(value, field.label.text, field.checked,
                                    **kwargs)
|
ponyorm/pony | refs/heads/orm | pony/orm/examples/compositekeys.py | 2 | from __future__ import absolute_import
from datetime import date
from pony.orm.core import *
# Pony ORM example schema exercising composite primary/foreign keys.
db = Database('sqlite', 'complex.sqlite', create_db=True)
# Student group, identified by (department, year, speciality).
class Group(db.Entity):
    dept = Required('Department')
    year = Required(int)
    spec = Required(int)
    students = Set('Student')
    courses = Set('Course')
    # NOTE(review): explicit FK column names for Lesson's composite PK;
    # confirm 'dt' matches the column generated for Lesson.date.
    lessons = Set('Lesson', columns=['building', 'number', 'dt'])
    PrimaryKey(dept, year, spec)
class Department(db.Entity):
    number = PrimaryKey(int)
    faculty = Required('Faculty')
    name = Required(str)
    groups = Set(Group)
    teachers = Set('Teacher')
class Faculty(db.Entity):
    number = PrimaryKey(int)
    name = Required(str)
    depts = Set(Department)
# A student's identity is only unique within their group.
class Student(db.Entity):
    name = Required(str)
    group = Required(Group)
    dob = Optional(date)
    grades = Set('Grade')
    PrimaryKey(name, group)
class Grade(db.Entity):
    # FK spans Student's composite PK (name + the group's three key columns).
    student = Required(Student, columns=['student_name', 'dept', 'year', 'spec'])
    task = Required('Task')
    date = Required(date)
    value = Required(int)
    PrimaryKey(student, task)
class Task(db.Entity):
    course = Required('Course')
    type = Required(str)
    number = Required(int)
    descr = Optional(str)
    grades = Set(Grade)
    PrimaryKey(course, type, number)
class Course(db.Entity):
    subject = Required('Subject')
    semester = Required(int)
    groups = Set(Group)
    tasks = Set(Task)
    lessons = Set('Lesson')
    teachers = Set('Teacher')
    PrimaryKey(subject, semester)
class Subject(db.Entity):
    name = PrimaryKey(str)
    descr = Optional(str)
    courses = Set(Course)
class Room(db.Entity):
    building = Required(str)
    number = Required(str)
    floor = Optional(int)
    schedules = Set('Lesson')
    PrimaryKey(building, number)
class Teacher(db.Entity):
    dept = Required(Department)
    name = Required(str)
    courses = Set(Course)
    lessons = Set('Lesson')
# Mapped to table 'Schedule'; a room hosts one lesson per date, and a
# teacher gives at most one lesson per date (secondary composite key).
class Lesson(db.Entity):
    _table_ = 'Schedule'
    groups = Set(Group)
    course = Required(Course)
    room = Required(Room)
    teacher = Required(Teacher)
    date = Required(date)
    PrimaryKey(room, date)
    composite_key(teacher, date)
# Create the SQLite tables for the entities declared above.
db.generate_mapping(create_tables=True)
def test_queries():
    """Smoke-test query translation over the composite-key schema.

    Each query is materialised with [:] and the result discarded; the point
    is that translation and execution of these joins does not raise.
    """
    select(grade for grade in Grade if grade.task.type == 'Lab')[:]
    select(grade for grade in Grade if grade.task.descr.startswith('Intermediate'))[:]
    select(grade for grade in Grade if grade.task.course.semester == 2)[:]
    select(grade for grade in Grade if grade.task.course.subject.name == 'Math')[:]
    select(grade for grade in Grade if 'elementary' in grade.task.course.subject.descr.lower())[:]
    select(grade for grade in Grade if 'elementary' in grade.task.course.subject.descr.lower() and grade.task.descr.startswith('Intermediate'))[:]
    select(grade for grade in Grade if grade.task.descr.startswith('Intermediate') and 'elementary' in grade.task.course.subject.descr.lower())[:]
    select(s for s in Student if s.group.dept.faculty.name == 'Abc')[:]
    select(g for g in Group if avg(g.students.grades.value) > 4)[:]
    select(g for g in Group if avg(g.students.grades.value) > 4 and max(g.students.grades.date) < date(2011, 3, 2))[:]
    select(g for g in Group if '4-A' in g.lessons.room.number)[:]
    select(g for g in Group if 1 in g.lessons.room.floor)[:]
    select(t for t in Teacher if t not in t.courses.groups.lessons.teacher)[:]
# Echo the SQL generated for any queries executed after this point.
sql_debug(True)
|
Mega-DatA-Lab/mxnet | refs/heads/master | tools/bandwidth/test_measure.py | 46 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
test measure.py
"""
from measure import run
import subprocess
import logging
def get_gpus():
    """Return a comma-separated string of GPU indices, or '' if none found.

    Runs ``nvidia-smi -L`` and counts the listed devices. Any failure to run
    the tool (binary missing, or a non-zero exit status) is treated as
    "no GPUs available" rather than crashing the test driver.
    """
    try:
        output = subprocess.check_output(["nvidia-smi", "-L"], universal_newlines=True)
    except (OSError, subprocess.CalledProcessError):
        # Fixes: the original bound the output to a local named `re`
        # (shadowing the stdlib regex module name) and let a failing
        # nvidia-smi (CalledProcessError) propagate instead of meaning
        # "no GPUs".
        return ''
    gpus = [line for line in output.split('\n') if 'GPU' in line]
    return ','.join(str(i) for i in range(len(gpus)))
def test_measure(**kwargs):
    """Run one bandwidth-measurement configuration and sanity-check it.

    Expects exactly one result whose reported error is negligible.
    """
    logging.info(kwargs)
    results = run(image_shape='3,224,224', num_classes=1000,
                  num_layers=50, disp_batches=2, num_batches=2,
                  test_results=1, **kwargs)
    assert len(results) == 1
    assert results[0].error < 1e-4
if __name__ == '__main__':
    gpus = get_gpus()
    # BUG FIX: `gpus is not ''` tested object identity against a literal
    # (unreliable, and a SyntaxWarning on modern Pythons); an empty string
    # means no GPUs were detected, so compare by equality.
    assert gpus != ''
    test_measure(gpus=gpus, network='alexnet', optimizer=None, kv_store='device')
    test_measure(gpus=gpus, network='resnet', optimizer='sgd', kv_store='device')
    test_measure(gpus=gpus, network='inception-bn', optimizer=None, kv_store='local')
    test_measure(gpus=gpus, network='resnet', optimizer=None, kv_store='local')
    test_measure(gpus=gpus, network='resnet', optimizer='sgd', kv_store='local')
|
eaplatanios/tensorflow | refs/heads/master | tensorflow/python/framework/op_def_registry.py | 196 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Global registry for OpDefs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.framework import op_def_pb2
_registered_ops = {}
def register_op_list(op_list):
  """Register all the ops in an op_def_pb2.OpList."""
  if not isinstance(op_list, op_def_pb2.OpList):
    raise TypeError("%s is %s, not an op_def_pb2.OpList" %
                    (op_list, type(op_list)))
  for op_def in op_list.op:
    existing = _registered_ops.get(op_def.name)
    if existing is None:
      _registered_ops[op_def.name] = op_def
    else:
      # Re-registration must be idempotent: the definitions have to match.
      assert existing == op_def
def get_registered_ops():
  """Returns a dictionary mapping names to OpDefs.

  Note: this is the live module-level registry, not a copy; mutations by the
  caller are visible to subsequent registrations.
  """
  return _registered_ops
|
metglobal/django-exchange | refs/heads/master | exchange/__init__.py | 1 | __version__ = '0.8.0'
# Distribution metadata for the django-exchange package.
__author__ = 'Metglobal'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013 Metglobal'
|
Huskerboy/startbootstrap-freelancer | refs/heads/master | freelancer_env/Lib/site-packages/gunicorn/workers/gaiohttp.py | 101 | # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import sys
# The aiohttp worker requires Python 3.3+ (asyncio); fail fast at import
# time with a clear message instead of an obscure ImportError later.
if sys.version_info >= (3, 3):
    try:
        import aiohttp # NOQA
    except ImportError:
        raise RuntimeError("You need aiohttp installed to use this worker.")
    else:
        # Import deferred so the friendlier error above wins when aiohttp
        # is missing.
        from gunicorn.workers._gaiohttp import AiohttpWorker
        __all__ = ['AiohttpWorker']
else:
    raise RuntimeError("You need Python >= 3.3 to use the asyncio worker")
|
jagguli/intellij-community | refs/heads/master | python/lib/Lib/site-packages/django/conf/locale/sr/formats.py | 655 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Output formats (Django date-format syntax) for the Serbian locale.
DATE_FORMAT = 'j. F Y.'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y. H:i'
YEAR_MONTH_FORMAT = 'F Y.'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.m.Y.'
SHORT_DATETIME_FORMAT = 'j.m.Y. H:i'
FIRST_DAY_OF_WEEK = 1  # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.'
    '%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.'
    '%Y-%m-%d', # '2006-10-25'
    # '%d. %b %y.', '%d. %B %y.', # '25. Oct 06.', '25. October 06.'
    # '%d. %b \'%y.', '%d. %B \'%y.', # '25. Oct '06.', '25. October '06.'
    # '%d. %b %Y.', '%d. %B %Y.', # '25. Oct 2006.', '25. October 2006.'
)
TIME_INPUT_FORMATS = (
    '%H:%M:%S', # '14:30:59'
    '%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
    '%d.%m.%Y. %H:%M:%S', # '25.10.2006. 14:30:59'
    '%d.%m.%Y. %H:%M', # '25.10.2006. 14:30'
    '%d.%m.%Y.', # '25.10.2006.'
    '%d.%m.%y. %H:%M:%S', # '25.10.06. 14:30:59'
    '%d.%m.%y. %H:%M', # '25.10.06. 14:30'
    '%d.%m.%y.', # '25.10.06.'
    '%d. %m. %Y. %H:%M:%S', # '25. 10. 2006. 14:30:59'
    '%d. %m. %Y. %H:%M', # '25. 10. 2006. 14:30'
    '%d. %m. %Y.', # '25. 10. 2006.'
    '%d. %m. %y. %H:%M:%S', # '25. 10. 06. 14:30:59'
    '%d. %m. %y. %H:%M', # '25. 10. 06. 14:30'
    '%d. %m. %y.', # '25. 10. 06.'
    '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M', # '2006-10-25 14:30'
    '%Y-%m-%d', # '2006-10-25'
)
# Number formatting: '1.234,56' style grouping.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
kernc/networkx | refs/heads/master | networkx/algorithms/centrality/__init__.py | 45 | from .betweenness import *
from .betweenness_subset import *
from .closeness import *
from .communicability_alg import *
from .current_flow_closeness import *
from .current_flow_betweenness import *
from .current_flow_betweenness_subset import *
from .degree_alg import *
from .dispersion import *
from .eigenvector import *
from .harmonic import *
from .katz import *
from .load import *
|
xcu/conference-central | refs/heads/master | Lesson_2/001_Hello_Endpoints_Solutions/helloworld_api.py | 48 | """Hello World API implemented using Google Cloud Endpoints.
Contains declarations of endpoint, endpoint methods,
as well as the ProtoRPC message class and container required
for endpoint method definition.
"""
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
# If the request contains path or querystring arguments,
# you cannot use a simple Message class.
# Instead, you must use a ResourceContainer class
REQUEST_CONTAINER = endpoints.ResourceContainer(
    message_types.VoidMessage,
    name=messages.StringField(1),
)
# Both arguments arrive as path/querystring fields; no body message is used.
REQUEST_GREETING_CONTAINER = endpoints.ResourceContainer(
    period=messages.StringField(1),
    name=messages.StringField(2),
)
# protorpc package name used for the generated API surface.
package = 'Hello'
class Hello(messages.Message):
    """Response message carrying a single greeting string."""
    # Field 1: the greeting text returned to the client.
    greeting = messages.StringField(1)
@endpoints.api(name='helloworldendpoints', version='v1')
class HelloWorldApi(remote.Service):
    """Helloworld API v1."""
    # GET /sayHello -> fixed greeting, no arguments.
    @endpoints.method(message_types.VoidMessage, Hello,
      path = "sayHello", http_method='GET', name = "sayHello")
    def say_hello(self, request):
        return Hello(greeting="Hello World")
    # GET /sayHelloByName?name=... -> personalised greeting.
    @endpoints.method(REQUEST_CONTAINER, Hello,
      path = "sayHelloByName", http_method='GET', name = "sayHelloByName")
    def say_hello_by_name(self, request):
        greet = "Hello {}".format(request.name)
        return Hello(greeting=greet)
    # GET /greetByPeriod?period=...&name=... -> e.g. "Good morning Alice".
    @endpoints.method(REQUEST_GREETING_CONTAINER, Hello,
      path = "greetByPeriod", http_method='GET', name = "greetByPeriod")
    def greet_by_period(self, request):
        greet = "Good {} {}".format(request.period, request.name)
        return Hello(greeting=greet)
# WSGI application object that serves the endpoints API.
APPLICATION = endpoints.api_server([HelloWorldApi])
|
wpgallih/servo | refs/heads/master | tests/wpt/web-platform-tests/XMLHttpRequest/resources/conditional.py | 205 | def main(request, response):
tag = request.GET.first("tag", None)
match = request.headers.get("If-None-Match", None)
date = request.GET.first("date", "")
modified = request.headers.get("If-Modified-Since", None)
if tag:
response.headers.set("ETag", '"%s"' % tag)
elif date:
response.headers.set("Last-Modified", date)
if ((match is not None and match == tag) or
(modified is not None and modified == date)):
response.status = (304, "SUPERCOOL")
return ""
else:
response.headers.set("Content-Type", "text/plain")
return "MAYBE NOT"
|
KitKatXperience/platform_external_chromium_org | refs/heads/kk | tools/PRESUBMIT.py | 35 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for bisect trybot.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
import imp
def _ExamineBisectConfigFile(input_api, output_api):
  """Return the path of a modified, non-empty bisect config, or None.

  When the change touches run-bisect-perf-regression.cfg, the file is loaded
  as a module (from the current working directory) and reported if any value
  in its `config` dict is set, or if it cannot be parsed at all.
  """
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith('run-bisect-perf-regression.cfg'):
      continue
    try:
      cfg_file = imp.load_source('config', 'run-bisect-perf-regression.cfg')
      # Python 2 dict iteration; any truthy value means the file was edited.
      for k, v in cfg_file.config.iteritems():
        if v:
          return f.LocalPath()
    except (IOError, AttributeError, TypeError):
      # Unreadable/malformed config is treated the same as a modified one.
      return f.LocalPath()
  return None
def _CheckNoChangesToBisectConfigFile(input_api, output_api):
  """Produce a presubmit error if the bisect config file was modified."""
  bad_path = _ExamineBisectConfigFile(input_api, output_api)
  if not bad_path:
    return []
  return [output_api.PresubmitError(
      'The bisection config file should only contain a config dict with '
      'empty fields. Changes to this file should never be submitted.',
      items=[bad_path])]
def CommonChecks(input_api, output_api):
  """Checks shared by the upload and commit presubmit hooks."""
  return list(_CheckNoChangesToBisectConfigFile(input_api, output_api))
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook invoked when a change is uploaded for review."""
  return CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook invoked when a change is committed."""
  return CommonChecks(input_api, output_api)
|
kenlist/chromium_tools_gyp | refs/heads/master | test/rules/src/copy-file.py | 600 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys

# Copy the file at argv[1] to argv[2] and exit 0.
# Fixes: the original leaked both file handles, and read in text mode ('r')
# while writing in binary mode ('wb') -- which corrupts binary data on
# Windows under Python 2 and raises TypeError under Python 3.
with open(sys.argv[1], 'rb') as infile:
    contents = infile.read()
with open(sys.argv[2], 'wb') as outfile:
    outfile.write(contents)
sys.exit(0)
|
dfang/odoo | refs/heads/10.0 | addons/hw_posbox_upgrade/__manifest__.py | 22 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Odoo addon manifest dict, read by the module loader at install time.
{
    'name': 'PosBox Software Upgrader',
    'category': 'Point of Sale',
    'website': 'https://www.odoo.com/page/point-of-sale',
    'sequence': 6,
    'summary': 'Allows to remotely upgrade the PosBox software',
    'description': """
PosBox Software Upgrader
========================
This module allows to remotely upgrade the PosBox software to a
new version. This module is specific to the PosBox setup and environment
and should not be installed on regular openerp servers.
""",
    'depends': ['hw_proxy'],
    # Not installable on regular servers; PosBox-image specific.
    'installable': False,
}
|
PyCQA/pylint | refs/heads/main | pylint/testutils/lint_module_test.py | 1 | # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/LICENSE
import csv
import operator
import platform
import sys
from collections import Counter
from io import StringIO
from typing import Dict, List, Optional, Tuple
import pytest
from _pytest.config import Config
from pylint import checkers
from pylint.lint import PyLinter
from pylint.testutils.constants import _EXPECTED_RE, _OPERATORS, UPDATE_OPTION
from pylint.testutils.functional_test_file import (
FunctionalTestFile,
NoFileError,
parse_python_version,
)
from pylint.testutils.output_line import OutputLine
from pylint.testutils.reporter_for_tests import FunctionalTestReporter
from pylint.utils import utils
class LintModuleTest:
    """Run pylint on one functional-test file and compare emitted messages
    against the expectations embedded in the test's source and .txt files."""
    maxDiff = None
    def __init__(self, test_file: FunctionalTestFile, config: Optional[Config] = None):
        """Build a PyLinter wired to a test reporter and load per-test options."""
        _test_reporter = FunctionalTestReporter()
        self._linter = PyLinter()
        self._linter.set_reporter(_test_reporter)
        self._linter.config.persistent = 0
        checkers.initialize(self._linter)
        # Meta-messages about suppression would pollute the expected output.
        self._linter.disable("suppressed-message")
        self._linter.disable("locally-disabled")
        self._linter.disable("useless-suppression")
        try:
            self._linter.read_config_file(test_file.option_file)
            if self._linter.cfgfile_parser.has_option("MASTER", "load-plugins"):
                plugins = utils._splitstrip(
                    self._linter.cfgfile_parser.get("MASTER", "load-plugins")
                )
                self._linter.load_plugin_modules(plugins)
            self._linter.load_config_file()
        except NoFileError:
            # No per-test rc file: run with the defaults configured above.
            pass
        self._test_file = test_file
        self._config = config
    def setUp(self):
        """Skip the test when version/implementation/platform requirements fail."""
        if self._should_be_skipped_due_to_version():
            pytest.skip(
                "Test cannot run with Python %s."
                % sys.version.split(" ", maxsplit=1)[0]
            )
        missing = []
        for requirement in self._test_file.options["requires"]:
            try:
                __import__(requirement)
            except ImportError:
                missing.append(requirement)
        if missing:
            pytest.skip("Requires %s to be present." % ",".join(missing))
        except_implementations = self._test_file.options["except_implementations"]
        if except_implementations:
            implementations = [i.strip() for i in except_implementations.split(",")]
            if platform.python_implementation() in implementations:
                msg = "Test cannot run with Python implementation %r"
                pytest.skip(msg % platform.python_implementation())
        excluded_platforms = self._test_file.options["exclude_platforms"]
        if excluded_platforms:
            platforms = [p.strip() for p in excluded_platforms.split(",")]
            if sys.platform.lower() in platforms:
                pytest.skip("Test cannot run on platform %r" % sys.platform)
    def runTest(self):
        """unittest-style entry point; delegates to _runTest."""
        self._runTest()
    def _should_be_skipped_due_to_version(self):
        """True when the running Python is outside the test's declared range."""
        return (
            sys.version_info < self._test_file.options["min_pyver"]
            or sys.version_info > self._test_file.options["max_pyver"]
        )
    def __str__(self):
        return f"{self._test_file.base} ({self.__class__.__module__}.{self.__class__.__name__})"
    @staticmethod
    def get_expected_messages(stream):
        """Parses a file and get expected messages.

        :param stream: File-like input stream.
        :type stream: enumerable
        :returns: A dict mapping line,msg-symbol tuples to the count on this line.
        :rtype: dict
        """
        messages = Counter()
        for i, line in enumerate(stream):
            match = _EXPECTED_RE.search(line)
            if match is None:
                continue
            line = match.group("line")
            if line is None:
                # No explicit line: the annotation applies to its own line.
                line = i + 1
            elif line.startswith("+") or line.startswith("-"):
                # Relative offset from the annotation's line.
                line = i + 1 + int(line)
            else:
                line = int(line)
            version = match.group("version")
            op = match.group("op")
            if version:
                required = parse_python_version(version)
                # Skip expectations gated on a Python version we don't match.
                if not _OPERATORS[op](sys.version_info, required):
                    continue
            for msg_id in match.group("msgs").split(","):
                messages[line, msg_id.strip()] += 1
        return messages
    @staticmethod
    def multiset_difference(
        expected_entries: Counter, actual_entries: Counter
    ) -> Tuple[Counter, Dict[str, int]]:
        """Takes two multisets and compares them.

        A multiset is a dict with the cardinality of the key as the value."""
        missing = expected_entries.copy()
        missing.subtract(actual_entries)
        unexpected = {}
        for key, value in list(missing.items()):
            if value <= 0:
                missing.pop(key)
            if value < 0:
                # More actual than expected occurrences for this key.
                unexpected[key] = -value
        return missing, unexpected
    # pylint: disable=consider-using-with
    def _open_expected_file(self):
        """Open the expected-output file; missing file means empty output."""
        try:
            return open(self._test_file.expected_output)
        except FileNotFoundError:
            return StringIO("")
    # pylint: disable=consider-using-with
    def _open_source_file(self):
        """Open the test source with the encoding the test name implies."""
        if self._test_file.base == "invalid_encoded_data":
            return open(self._test_file.source)
        if "latin1" in self._test_file.base:
            return open(self._test_file.source, encoding="latin1")
        return open(self._test_file.source, encoding="utf8")
    def _get_expected(self):
        """Return (expected message counts, expected OutputLine list)."""
        with self._open_source_file() as f:
            expected_msgs = self.get_expected_messages(f)
        if not expected_msgs:
            return Counter(), []
        with self._open_expected_file() as f:
            expected_output_lines = [
                OutputLine.from_csv(row) for row in csv.reader(f, "test")
            ]
        return expected_msgs, expected_output_lines
    def _get_actual(self):
        """Return (emitted message counts, emitted OutputLine list)."""
        messages = self._linter.reporter.messages
        messages.sort(key=lambda m: (m.line, m.symbol, m.msg))
        received_msgs = Counter()
        received_output_lines = []
        for msg in messages:
            assert (
                msg.symbol != "fatal"
            ), f"Pylint analysis failed because of '{msg.msg}'"
            received_msgs[msg.line, msg.symbol] += 1
            received_output_lines.append(OutputLine.from_msg(msg))
        return received_msgs, received_output_lines
    def _runTest(self):
        """Lint the test file and compare expected vs. actual messages/output."""
        __tracebackhide__ = True  # pylint: disable=unused-variable
        modules_to_check = [self._test_file.source]
        self._linter.check(modules_to_check)
        expected_messages, expected_output = self._get_expected()
        actual_messages, actual_output = self._get_actual()
        assert (
            expected_messages == actual_messages
        ), self.error_msg_for_unequal_messages(
            actual_messages, expected_messages, actual_output
        )
        self._check_output_text(expected_messages, expected_output, actual_output)
    def error_msg_for_unequal_messages(
        self, actual_messages, expected_messages, actual_output: List[OutputLine]
    ):
        """Build the assertion message listing missing/unexpected messages."""
        msg = ['Wrong results for file "%s":' % (self._test_file.base)]
        missing, unexpected = self.multiset_difference(
            expected_messages, actual_messages
        )
        if missing:
            msg.append("\nExpected in testdata:")
            msg.extend(" %3d: %s" % msg for msg in sorted(missing))
        if unexpected:
            msg.append("\nUnexpected in testdata:")
            msg.extend(" %3d: %s" % msg for msg in sorted(unexpected))  # type: ignore
        error_msg = "\n".join(msg)
        if self._config and self._config.getoption("verbose") > 0:
            error_msg += "\n\nActual pylint output for this file:\n"
            error_msg += "\n".join(str(o) for o in actual_output)
        return error_msg
    def error_msg_for_unequal_output(self, expected_lines, received_lines) -> str:
        """Build the assertion message for mismatching .txt output lines."""
        missing = set(expected_lines) - set(received_lines)
        unexpected = set(received_lines) - set(expected_lines)
        error_msg = (
            f"Wrong output for '{self._test_file.base}.txt':\n"
            "You can update the expected output automatically with: '"
            f"python tests/test_functional.py {UPDATE_OPTION} -k "
            f'"test_functional[{self._test_file.base}]"\'\n\n'
        )
        sort_by_line_number = operator.attrgetter("lineno")
        if missing:
            error_msg += "\n- Missing lines:\n"
            for line in sorted(missing, key=sort_by_line_number):
                error_msg += f"{line}\n"
        if unexpected:
            error_msg += "\n- Unexpected lines:\n"
            for line in sorted(unexpected, key=sort_by_line_number):
                error_msg += f"{line}\n"
        return error_msg
    def _check_output_text(self, _, expected_output, actual_output):
        """This is a function because we want to be able to update the text in LintModuleOutputUpdate"""
        assert expected_output == actual_output, self.error_msg_for_unequal_output(
            expected_output, actual_output
        )
|
walnutgeek/slinck | refs/heads/master | scent.py | 1 | import os
from sniffer.api import *
@file_validator
def js_files(filename):
    """Select the watched file types that should trigger the test runner."""
    # str.endswith accepts a tuple of suffixes: one call instead of an
    # or-chain of three.
    return filename.endswith(('.js', '.yaml', '.json'))
@runnable
def execute_mocha(*args):
    """Run the mocha suite; on success, deploy slinck.js into the OnTimer tree."""
    # NOTE(review): `nose` is imported but never used -- possibly a leftover
    # dependency check; confirm before removing.
    import nose
    if 0 == os.system('cd mocha ; mocha .'):
        vars = { 's' : 'slinck.js',
                 'd' : '~/git/OnTimer/ontimer/web' };
        os.system( 'rm -f {d}/{s}; cp app/{s} {d}/; chmod 444 {d}/{s}'.format(**vars) );
        return True
    return False
|
felliott/osf.io | refs/heads/develop | api_tests/providers/registrations/views/test_registration_provider_licenses.py | 14 | import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import RegistrationProviderFactory
from api_tests.providers.mixins import ProviderLicensesViewTestBaseMixin
class TestRegistrationProviderLicenses(ProviderLicensesViewTestBaseMixin):
    """Run the shared provider-licenses view tests against registration providers."""
    # Factory consumed by the mixin's fixtures to build the provider under test.
    provider_class = RegistrationProviderFactory

    @pytest.fixture()
    def url(self, provider):
        """Licenses list endpoint for the given registration provider."""
        return '/{}providers/registrations/{}/licenses/'.format(
            API_BASE, provider._id)
|
isida/4 | refs/heads/master | plugins/redirect.py | 2 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------------- #
# #
# Plugin for iSida Jabber Bot #
# Copyright (C) diSabler <dsy@dsy.name> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
# --------------------------------------------------------------------------- #
# Re-run the command, forcing its output into a private chat message.
def to_private(type, room, nick, text): raw_redirect('chat', room, nick, text)
# Re-run the command, forcing its output into the groupchat.
def to_public(type, room, nick, text): raw_redirect('groupchat', room, nick, text)
def raw_redirect(type, room, nick, text):
	# Re-dispatch the command with the delivery type overridden.
	# get_level/get_xnick/com_parser are provided by the bot core at runtime.
	access_mode,jid = get_level(room,nick)
	com_parser(access_mode, get_xnick(room), type, room, nick, text, jid)
# Plugin command table consumed by the bot core:
# (access level, command name, handler, arg mode, help text).
# NOTE(review): `global` at module level is a no-op; kept for plugin convention.
global execute
execute = [(3, 'private', to_private, 2, 'Redirect command output in private.'),
	(3, 'public', to_public, 2, 'Redirect command output in groupchat.')]
|
ProyectosVariados/NMFdocuments | refs/heads/master | auxiliar.py | 1 | #! /usr/bin/python
# -*- coding: utf-8 -*-
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import TextConverter
from pdfminer.layout import LAParams
from pdfminer.pdfpage import PDFPage
from cStringIO import StringIO
import numpy as np
import os
import unicodedata
def convertir_pdf_a_txt(path):
    '''
    Extract the text of a PDF document using pdfminer (Python 2 code).
    INPUT: path, e.g. 'documento_prueba.pdf' or an absolute path.
    OUTPUT: the extracted text as a UTF-8 encoded byte string.
    '''
    rsrcmgr = PDFResourceManager()
    retstr = StringIO()
    codec = 'utf-8'
    laparams = LAParams()
    device = TextConverter(rsrcmgr, retstr, codec=codec, laparams=laparams)
    # Python 2 builtin `file`; handle is closed explicitly below.
    fp = file(path, 'rb')
    interpreter = PDFPageInterpreter(rsrcmgr, device)
    password = ""
    maxpages = 0
    caching = True
    pagenos=set()
    # Feed every page through the interpreter; the converter accumulates
    # the text into `retstr`.
    for pagina in PDFPage.get_pages(fp, pagenos, maxpages=maxpages, password=password,caching=caching, check_extractable=True):
        interpreter.process_page(pagina)
    texto = retstr.getvalue()
    fp.close()
    device.close()
    retstr.close()
    return texto
def contar_palabras(diccionario, texto):
    '''
    Return a flat list with the number of occurrences in `texto` of each
    word in `diccionario` (a list of lists of words), in row-major order.
    '''
    # A nested comprehension replaces the index-based double loop; order of
    # the counts is identical to the original (row by row).
    return [texto.count(palabra) for fila in diccionario for palabra in fila]
def dar_formato(texto):
    '''
    Return `texto` lower-cased and with accented letters reduced to their
    base form (NFD-decompose, then drop combining marks).

    Expects a UTF-8 encoded byte string.
    '''
    decomposed = unicodedata.normalize('NFD', texto.decode('utf-8'))
    sin_acentos = ''.join(
        c for c in decomposed if unicodedata.category(c) != 'Mn'
    )
    return sin_acentos.lower()
def sacar_documentos(path, categorias):
    '''
    Walk each category folder under `path` and collect the PDF file names
    found there; returns one list of names per category.
    '''
    documentos = [[] for i in categorias]
    for i in range(len(categorias)):
        directorio = os.walk(path+'/'+categorias[i])
        for root, dirs, files in directorio:
            for fichero in files:
                (nombreFichero, extension) = os.path.splitext(fichero)
                if(extension == ".pdf"):
                    documentos[i].append(nombreFichero+extension)
        # Python 2 print statement; reports the category index, not its name.
        print "Documentos de categoria %s extraidos satisfactoriamente" % (i)
    return documentos
def generar_matriz_V(path, diccionario, documentos, categorias):
    # Build the term-frequency matrix V (words x documents): each appended
    # row is the word-count vector of one converted PDF; transposed at the end
    # so documents become columns.
    matriz = []
    for i in range(len(documentos)):
        # NOTE(review): `categorias[i-1]` prints the PREVIOUS category name on
        # the first iteration -- confirm the index is intended.
        print "Comenzando con los documentos de %s" % (categorias[i-1])
        for j in range(1,len(documentos[i])+1):
            texto = convertir_pdf_a_txt('/'.join([path,categorias[i],documentos[i][j-1]]))
            texto = dar_formato(texto)
            matriz.append(contar_palabras(diccionario, texto))
            print "Convertido y contado documento %i de %i" % (len(documentos[i])*i+j,len(documentos)*len(documentos[i]))
    return np.matrix(matriz).T
def hacer_matriz_lista(H):
    '''Convert a 2-D numpy matrix/array into a plain list of its columns.'''
    filas, columnas = np.shape(H)
    # Column-major nested comprehension replaces the append-based double loop.
    return [[H.item((j, i)) for j in range(filas)] for i in range(columnas)]
def comparar(Hp, He, eti, compa='euc'):
    '''
    k-nearest-neighbour classification of the items of Hp against He.

    INPUT: Hp, He -- lists of feature vectors (see hacer_matriz_lista);
    eti -- training labels aligned with He; compa -- distance to use
    ('euc', 'man', 'min' with r=20, or 'max').
    OUTPUT: list with the predicted label (mode of the neighbours) per item.
    '''
    # One distance row per item to classify.
    distancias = [[] for i in range(len(Hp))]
    # Distance from every item to every training sample.
    for i in range(len(Hp)):
        for j in range(len(He)):
            if compa == 'euc':
                distancias[i].append(d_euclidean(Hp[i], He[j]))
            elif compa == 'man':
                distancias[i].append(d_manhattan(Hp[i], He[j]))
            elif compa == 'min':
                distancias[i].append(d_minkowski(Hp[i], He[j],20))
            elif compa == 'max':
                distancias[i].append(d_maxDist(Hp[i], He[j]))
    # For each item, the labels of its nearest neighbours.
    etiquetas_obtenidas = [[] for i in range(len(Hp))]
    k = 11
    for i in range(len(distancias)):
        buffere = 0
        # NOTE(review): `<=` collects k+1 neighbours, and visited entries are
        # overwritten with the -1 sentinel (mutating `distancias`) -- confirm
        # both are intended.
        while buffere <= k:
            menor_indice = 0
            for j in range(len(distancias[i])):
                if distancias[i][j] < distancias[i][menor_indice] and distancias[i][j] >= 0:
                    menor_indice = j
            distancias[i][menor_indice] = -1
            etiquetas_obtenidas[i].append(eti[menor_indice])
            buffere+=1
    # Majority vote among the collected neighbour labels.
    etiquetas_obtenidas_bueno = [moda(etiquetas_obtenidas[i]) for i in range(len(etiquetas_obtenidas))]
    return etiquetas_obtenidas_bueno
def sacar_mal(t, p):
    '''Return the number of positions where label lists `t` and `p` agree.'''
    # bools sum as ints; indexing via range(len(t)) preserves the original
    # behaviour of raising IndexError when p is shorter than t.
    return sum(t[i] == p[i] for i in range(len(t)))
def moda(lista):
    '''
    Return the mode of `lista`.

    Caveats (behaviour kept as-is): the list is sorted IN PLACE, so the
    caller's list is mutated; if no element repeats, -1 is returned; ties
    are broken in favour of the run encountered later (the larger value
    after sorting), because of the `>=` comparison.
    '''
    rep = 0
    cont = 0
    moda = -1
    lista.sort()
    for i in range(len(lista)-1):
        if lista[i] == lista[i+1]:
            cont+=1
            if cont>=rep:
                rep=cont
                moda=lista[i]
        else:
            # Run of equal values ended: restart the run counter.
            cont = 0
    return moda
def d_euclidean(v1, v2):
    '''Euclidean (L2) distance between two equal-length vectors.'''
    # Indexing over range(len(v1)) mirrors the original's length semantics.
    return np.sqrt(sum((v1[i] - v2[i]) ** 2 for i in range(len(v1))))
def d_manhattan(v1, v2):
    '''Manhattan (L1) distance between two equal-length vectors.'''
    return sum(abs(v1[i] - v2[i]) for i in range(len(v1)))
def d_minkowski(v1, v2, r):
    '''Minkowski distance of order `r` between two equal-length vectors.'''
    total = sum(abs(v1[i] - v2[i]) ** r for i in range(len(v1)))
    return pow(total, 1.0 / r)
def d_maxDist(v1, v2):
    '''Chebyshev (L-infinity) distance: the largest coordinate difference.'''
    # max() over a generator replaces the manual running-maximum loop.
    # Note: empty input now raises ValueError instead of IndexError.
    return max(abs(v1[i] - v2[i]) for i in range(len(v1)))
|
OpenTreeOfLife/germinator | refs/heads/master | trees_report/taxa_in_synthesis.py | 1 | # Some class and method names borrowed from peyotl/nexson_proxy.py
import sys, os
from org.opentreeoflife.taxa import Taxonomy
def write_ids(tree, dest):
    """Write the id of every identified taxon in `tree` to `dest`, one per line."""
    with open(dest, 'w') as outfile:
        outfile.writelines(
            '%s\n' % node.id for node in tree.taxa() if node.id != None
        )
def load_tree(path):
    """Load an OTT taxonomy from `path` and report how many ids it contains."""
    # Jython: Taxonomy comes from the Open Tree of Life Java taxonomy library.
    tree = Taxonomy.getTaxonomy(path, 'ott')
    count = 0
    for id in tree.allIds():
        count += 1
    # Python 2 print statement (this script runs under Jython 2.x).
    print count, 'ids'
    return tree
# CLI: argv[1] = taxonomy path, argv[2] = output file for the id list.
write_ids(load_tree(sys.argv[1]), sys.argv[2])
|
miloszz/DIRAC | refs/heads/integration | FrameworkSystem/private/monitoring/__init__.py | 38 | # $HeadURL$
__RCSID__ = "$Id$" |
fullmooninu/messy | refs/heads/master | roundrobin2.py | 1 | '''round robin tournament matches generator'''
import random
# demo code
# Demo input; the odd count exercises the 'Day off' padding in fixtures().
teams = ["Team1", "Team2", "Team3", "Team4", "Team5"]
def fixtures(teams):
    '''Round-robin scheduler: return one team ordering per round.

    NOTE: when the team count is odd, the placeholder 'Day off' is appended
    to `teams` IN PLACE (the demo code below relies on that side effect).
    '''
    if len(teams) % 2:
        teams.append('Day off')  # fake opponent so everyone plays each round
    actual = list(teams)
    rondas = []
    for _ in range(len(teams) - 1):
        rondas.append(actual)
        # classic rotation: first team stays put, everyone else shifts one
        actual = [actual[0], actual[-1]] + actual[1:-1]
    return rondas
# Demo: build the schedule, shuffle the round order, then pair the teams.
fixtures1 = fixtures(teams)
random.shuffle(fixtures1)
print('fixtures1')
print(fixtures1)
# Turn each round's ordering into matches: first plays last, second plays
# second-to-last, ...  NOTE: the pops empty out the lists inside fixtures1.
fixtures2 = []
for turn in fixtures1:
    newTurn = []
    while len(turn) > 0:
        newTurn.append([turn.pop(0),turn.pop(-1)])
    fixtures2.append(newTurn)
print('fixtures2')
print(fixtures2)
def printout(team):
    '''Print `team`'s match in every round (reads the module-global
    fixtures2), flipping the pair so the requested team is shown first.'''
    for ronda in fixtures2:
        for pareja in ronda:
            if team not in pareja:
                continue
            print(pareja if team == pareja[0] else pareja[::-1])
            break
for i in range(len(teams)):
print(teams[i])
printout(teams[i])
|
zhimingxie/grpc | refs/heads/master | tools/distrib/python/submit.py | 14 | #!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import shutil
import subprocess

# Command-line interface.
parser = argparse.ArgumentParser(
    description='Submit the package to a PyPI repository.')
parser.add_argument(
    '--repository', '-r', metavar='r', type=str, default='pypi',
    help='The repository to push the package to. '
    'Ensure the value appears in your .pypirc file. '
    'Defaults to "pypi".'
)
parser.add_argument(
    '--identity', '-i', metavar='i', type=str,
    help='GPG identity to sign the files with.'
)
parser.add_argument(
    '--username', '-u', metavar='u', type=str,
    help='Username to authenticate with the repository. Not needed if you have '
    'configured your .pypirc to include your username.'
)
parser.add_argument(
    '--password', '-p', metavar='p', type=str,
    help='Password to authenticate with the repository. Not needed if you have '
    'configured your .pypirc to include your password.'
)
parser.add_argument(
    '--bdist', '-b', action='store_true',
    help='Generate a binary distribution (wheel) for the current OS.'
)
parser.add_argument(
    '--dist-args', type=str,
    help='Additional arguments to pass to the *dist setup.py command.'
)
args = parser.parse_args()

# Move to the root directory of Python GRPC.
pkgdir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                      '../../../')
# Remove previous distributions; they somehow confuse twine.
# Fix: ignore_errors=True replaces a bare `except:` clause, which also
# swallowed SystemExit/KeyboardInterrupt; a missing dist/ directory is the
# only failure expected (and safe to ignore) here.
shutil.rmtree(os.path.join(pkgdir, 'dist/'), ignore_errors=True)

# Build the Cython C files before producing the distribution.
build_env = os.environ.copy()
build_env['GRPC_PYTHON_BUILD_WITH_CYTHON'] = "1"
cmd = ['python', 'setup.py', 'build_ext', '--inplace']
subprocess.call(cmd, cwd=pkgdir, env=build_env)

# Produce the source or wheel distribution, then push it with twine.
if args.bdist:
    cmd = ['python', 'setup.py', 'bdist_wheel']
else:
    cmd = ['python', 'setup.py', 'sdist']
if args.dist_args:
    cmd += args.dist_args.split()
subprocess.call(cmd, cwd=pkgdir)
cmd = ['twine', 'upload', '-r', args.repository]
if args.identity is not None:
    cmd.extend(['-i', args.identity])
if args.username is not None:
    cmd.extend(['-u', args.username])
if args.password is not None:
    cmd.extend(['-p', args.password])
cmd.append('dist/*')
subprocess.call(cmd, cwd=pkgdir)
|
0x90sled/catapult | refs/heads/master | third_party/gsutil/third_party/boto/tests/unit/cloudfront/test_invalidation_list.py | 114 | #!/usr/bin/env python
import random
import string
from tests.compat import unittest, mock
import boto
RESPONSE_TEMPLATE = r"""
<InvalidationList>
<Marker/>
<NextMarker>%(next_marker)s</NextMarker>
<MaxItems>%(max_items)s</MaxItems>
<IsTruncated>%(is_truncated)s</IsTruncated>
%(inval_summaries)s
</InvalidationList>
"""
INVAL_SUMMARY_TEMPLATE = r"""
<InvalidationSummary>
<Id>%(cfid)s</Id>
<Status>%(status)s</Status>
</InvalidationSummary>
"""
class CFInvalidationListTest(unittest.TestCase):
    """Pagination tests for CloudFront invalidation listings.

    No network traffic is generated: ``make_request`` is replaced with a
    mock that serves canned XML pages built from the templates above.
    """
    # Marker attribute used by the test runner to select cloudfront tests.
    cloudfront = True
    def setUp(self):
        # Dummy credentials; requests never leave the process.
        self.cf = boto.connect_cloudfront('aws.aws_access_key_id',
                                          'aws.aws_secret_access_key')
    def _get_random_id(self, length=14):
        # Random ASCII string standing in for a CloudFront invalidation id.
        return ''.join([random.choice(string.ascii_letters) for i in
                        range(length)])
    def _group_iter(self, iterator, n):
        # Yield successive chunks of size n; the final chunk may be shorter.
        accumulator = []
        for item in iterator:
            accumulator.append(item)
            if len(accumulator) == n:
                yield accumulator
                accumulator = []
        if len(accumulator) != 0:
            yield accumulator
    def _get_mock_responses(self, num, max_items):
        # Build one mock HTTP response per page of `max_items` invalidation
        # ids (CloudFront caps a page at 100).  Every page except the last
        # has IsTruncated=true and NextMarker set to its last id.
        max_items = min(max_items, 100)
        cfid_groups = list(self._group_iter([self._get_random_id() for i in
                                             range(num)], max_items))
        cfg = dict(status='Completed', max_items=max_items, next_marker='')
        responses = []
        is_truncated = 'true'
        for i, group in enumerate(cfid_groups):
            next_marker = group[-1]
            if (i + 1) == len(cfid_groups):
                # Last page: signal the end of the listing.
                is_truncated = 'false'
                next_marker = ''
            invals = ''
            cfg.update(dict(next_marker=next_marker,
                            is_truncated=is_truncated))
            for cfid in group:
                cfg.update(dict(cfid=cfid))
                invals += INVAL_SUMMARY_TEMPLATE % cfg
            cfg.update(dict(inval_summaries=invals))
            mock_response = mock.Mock()
            mock_response.read.return_value = (RESPONSE_TEMPLATE % cfg).encode('utf-8')
            mock_response.status = 200
            responses.append(mock_response)
        return responses
    def test_manual_pagination(self, num_invals=30, max_items=4):
        """
        Test that paginating manually works properly
        """
        self.assertGreater(num_invals, max_items)
        responses = self._get_mock_responses(num=num_invals,
                                             max_items=max_items)
        self.cf.make_request = mock.Mock(side_effect=responses)
        # First page, then follow next_marker until IsTruncated is false.
        ir = self.cf.get_invalidation_requests('dist-id-here',
                                               max_items=max_items)
        all_invals = list(ir)
        self.assertEqual(len(all_invals), max_items)
        while ir.is_truncated:
            ir = self.cf.get_invalidation_requests('dist-id-here',
                                                   marker=ir.next_marker,
                                                   max_items=max_items)
            invals = list(ir)
            self.assertLessEqual(len(invals), max_items)
            all_invals.extend(invals)
            remainder = num_invals % max_items
            if remainder != 0:
                self.assertEqual(len(invals), remainder)
        self.assertEqual(len(all_invals), num_invals)
    def test_auto_pagination(self, num_invals=1024):
        """
        Test that auto-pagination works properly
        """
        # CloudFront's server-side page cap; iteration must transparently
        # fetch every page while the cache only ever holds one.
        max_items = 100
        self.assertGreaterEqual(num_invals, max_items)
        responses = self._get_mock_responses(num=num_invals,
                                             max_items=max_items)
        self.cf.make_request = mock.Mock(side_effect=responses)
        ir = self.cf.get_invalidation_requests('dist-id-here')
        self.assertEqual(len(ir._inval_cache), max_items)
        self.assertEqual(len(list(ir)), num_invals)
if __name__ == '__main__':
unittest.main()
|
pexip/os-foolscap | refs/heads/master | foolscap/api.py | 1 |
# application code should import all names from here instead of from
# __init__.py . Use code like this:
#
# from foolscap.api import Tub
#
# This will make it easier to rearrange Foolscap's internals in the future.
# Anything you might import from outside foolscap.api is subject to movement
# in new releases.
from foolscap._version import verstr as __version__
# here are the primary entry points
from foolscap.pb import Tub, UnauthenticatedTub
# names we import so that others can reach them as foolscap.api.foo
from foolscap.remoteinterface import RemoteInterface
from foolscap.referenceable import Referenceable, SturdyRef
from foolscap.copyable import Copyable, RemoteCopy, registerRemoteCopy
from foolscap.copyable import registerCopier, registerRemoteCopyFactory
from foolscap.ipb import DeadReferenceError
from foolscap.tokens import BananaError
from foolscap.schema import StringConstraint, IntegerConstraint, \
ListOf, TupleOf, SetOf, DictOf, ChoiceOf, Any
from foolscap.storage import serialize, unserialize
from foolscap.tokens import Violation, RemoteException
from foolscap.eventual import eventually, fireEventually, flushEventualQueue
from foolscap.logging import app_versions
# hush pyflakes
_unused = [
__version__,
Tub, UnauthenticatedTub,
RemoteInterface,
Referenceable, SturdyRef,
Copyable, RemoteCopy, registerRemoteCopy,
registerCopier, registerRemoteCopyFactory,
DeadReferenceError,
BananaError,
StringConstraint, IntegerConstraint,
ListOf, TupleOf, SetOf, DictOf, ChoiceOf, Any,
serialize, unserialize,
Violation, RemoteException,
eventually, fireEventually, flushEventualQueue,
app_versions,
]
del _unused
|
pepetreshere/odoo | refs/heads/patch-2 | addons/point_of_sale/models/account_journal.py | 13 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (C) 2004-2008 PC Solutions (<http://pcsol.be>). All Rights Reserved
from odoo import fields, models, api
class AccountJournal(models.Model):
    """account.journal extension: expose the POS payment methods backed by this journal."""
    _inherit = 'account.journal'
    # One2many inverse of pos.payment.method.cash_journal_id: all POS payment
    # methods that use this journal as their cash journal.
    pos_payment_method_ids = fields.One2many('pos.payment.method', 'cash_journal_id', string='Point of Sale Payment Methods')
|
Tejal011089/huntercamp_erpnext | refs/heads/develop | erpnext/stock/report/supplier_wise_sales_analytics/supplier_wise_sales_analytics.py | 75 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt
def execute(filters=None):
	"""Report entry point: return (columns, data) rows of consumed/delivered
	qty and value per stock item, with the suppliers it was purchased from.

	Stock Entries that are plain material transfers are excluded, since they
	move stock without consuming it.
	"""
	columns = get_columns(filters)
	consumed_details = get_consumed_details(filters)
	supplier_details = get_suppliers_details(filters)
	material_transfer_vouchers = get_material_transfer_vouchers()
	data = []
	for item_code, suppliers in supplier_details.items():
		consumed_qty = consumed_amount = delivered_qty = delivered_amount = 0.0
		total_qty = total_amount = 0.0
		if consumed_details.get(item_code):
			for cd in consumed_details.get(item_code):
				if (cd.voucher_no not in material_transfer_vouchers):
					# Ledger rows are negative for outgoing stock; abs() turns
					# them into positive consumed/delivered figures.
					if cd.voucher_type=="Delivery Note":
						delivered_qty += abs(flt(cd.actual_qty))
						delivered_amount += abs(flt(cd.stock_value_difference))
					elif cd.voucher_type!="Delivery Note":
						consumed_qty += abs(flt(cd.actual_qty))
						consumed_amount += abs(flt(cd.stock_value_difference))
			if consumed_qty or consumed_amount or delivered_qty or delivered_amount:
				total_qty += delivered_qty + consumed_qty
				total_amount += delivered_amount + consumed_amount
				# `cd` here is the last ledger row of the loop above; its
				# item fields (name/description/uom) are presumably identical
				# across all rows of this item_code -- TODO confirm.
				row = [cd.item_code, cd.item_name, cd.description, cd.stock_uom, \
					consumed_qty, consumed_amount, delivered_qty, delivered_amount, \
					total_qty, total_amount, list(set(suppliers))]
				data.append(row)
	return columns, data
def get_columns(filters):
	"""Return the report column definitions (`filters` is accepted for the
	standard report signature but not used)."""
	return [
		_("Item") + ":Link/Item:100",
		_("Item Name") + "::100",
		_("Description") + "::150",
		_("UOM") + ":Link/UOM:90",
		_("Consumed Qty") + ":Float:110",
		_("Consumed Amount") + ":Currency:130",
		_("Delivered Qty") + ":Float:110",
		_("Delivered Amount") + ":Currency:130",
		_("Total Qty") + ":Float:110",
		_("Total Amount") + ":Currency:130",
		_("Supplier(s)") + "::250",
	]
def get_conditions(filters):
	"""Return (sql_condition, values) restricting to the posting-date range.

	Both 'from_date' and 'to_date' must be present in `filters`; otherwise
	no condition is applied and ("", []) is returned.
	"""
	if not (filters.get('from_date') and filters.get('to_date')):
		return "", []
	return ("and sle.posting_date>=%s and sle.posting_date<=%s",
		[filters.get('from_date'), filters.get('to_date')])
def get_consumed_details(filters):
	"""Map item_code -> list of outgoing (actual_qty < 0) stock ledger rows,
	each row joined with the item's name/description/uom."""
	conditions, values = get_conditions(filters)
	rows = frappe.db.sql("""select sle.item_code, i.item_name, i.description,
		i.stock_uom, sle.actual_qty, sle.stock_value_difference,
		sle.voucher_no, sle.voucher_type
		from `tabStock Ledger Entry` sle, `tabItem` i
		where sle.item_code=i.name and sle.actual_qty < 0 %s""" % conditions,
		values, as_dict=1)
	consumed_details = {}
	for row in rows:
		consumed_details.setdefault(row.item_code, []).append(row)
	return consumed_details
def get_suppliers_details(filters):
	"""Map each stock item to the list of suppliers it was received from.

	Suppliers come from submitted Purchase Receipts.  When a 'supplier'
	filter is given, items never supplied by that supplier are removed.
	"""
	item_supplier_map = {}
	supplier = filters.get('supplier')
	for d in frappe.db.sql("""select pr.supplier, pri.item_code from
		`tabPurchase Receipt` pr, `tabPurchase Receipt Item` pri
		where pr.name=pri.parent and pr.docstatus=1 and
		pri.item_code=(select name from `tabItem` where
			is_stock_item=1 and name=pri.item_code)""", as_dict=1):
		item_supplier_map.setdefault(d.item_code, []).append(d.supplier)
	if supplier:
		# Fix: iterate over a snapshot.  Deleting from the dict while
		# iterating dict.items() raises RuntimeError on Python 3; the old
		# code only worked because Python 2 .items() returned a list.
		for item_code, suppliers in list(item_supplier_map.items()):
			if supplier not in suppliers:
				del item_supplier_map[item_code]
	return item_supplier_map
def get_material_transfer_vouchers():
	# Names of submitted (docstatus=1) Stock Entries that merely move stock
	# between warehouses; execute() excludes their ledger rows so transfers
	# are not reported as consumption.
	return frappe.db.sql_list("""select name from `tabStock Entry` where
		purpose='Material Transfer' and docstatus=1""")
|
Acehaidrey/incubator-airflow | refs/heads/master | tests/plugins/test_plugins_manager.py | 1 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest import mock
from airflow.hooks.base_hook import BaseHook
from airflow.plugins_manager import AirflowPlugin
from airflow.www import app as application
from tests.test_utils.mock_plugins import mock_plugin_manager
class TestPluginsRBAC(unittest.TestCase):
    """Checks that plugin-provided AppBuilder views, menu links and Flask
    blueprints are wired into the webserver application."""
    def setUp(self):
        # Testing Flask app; its appbuilder has already loaded the plugins
        # from tests/plugins/test_plugin.py.
        self.app = application.create_app(testing=True)
        self.appbuilder = self.app.appbuilder  # pylint: disable=no-member
    def test_flaskappbuilder_views(self):
        """The plugin's AppBuilder view is registered and gets a menu entry."""
        from tests.plugins.test_plugin import v_appbuilder_package
        appbuilder_class_name = str(v_appbuilder_package['view'].__class__.__name__)
        plugin_views = [
            view for view in self.appbuilder.baseviews if view.blueprint.name == appbuilder_class_name
        ]
        self.assertTrue(len(plugin_views) == 1)
        # view should have a menu item matching category of v_appbuilder_package
        links = [
            menu_item
            for menu_item in self.appbuilder.menu.menu
            if menu_item.name == v_appbuilder_package['category']
        ]
        self.assertTrue(len(links) == 1)
        # menu link should also have a link matching the name of the package.
        link = links[0]
        self.assertEqual(link.name, v_appbuilder_package['category'])
        self.assertEqual(link.childs[0].name, v_appbuilder_package['name'])
    def test_flaskappbuilder_menu_links(self):
        """The plugin's menu item shows up under its category with its name."""
        from tests.plugins.test_plugin import appbuilder_mitem
        # menu item should exist matching appbuilder_mitem
        links = [
            menu_item
            for menu_item in self.appbuilder.menu.menu
            if menu_item.name == appbuilder_mitem['category']
        ]
        self.assertTrue(len(links) == 1)
        # menu link should also have a link matching the name of the package.
        link = links[0]
        self.assertEqual(link.name, appbuilder_mitem['category'])
        self.assertEqual(link.childs[0].name, appbuilder_mitem['name'])
    def test_app_blueprints(self):
        """The plugin's Flask blueprint is registered on the app."""
        from tests.plugins.test_plugin import bp
        # Blueprint should be present in the app
        self.assertTrue('test_plugin' in self.app.blueprints)
        self.assertEqual(self.app.blueprints['test_plugin'].name, bp.name)
    @mock.patch('airflow.plugins_manager.pkg_resources.iter_entry_points')
    def test_entrypoint_plugin_errors_dont_raise_exceptions(self, mock_ep_plugins):
        """
        Test that Airflow does not raise an Error if there is any Exception because of the
        Plugin.
        """
        from airflow.plugins_manager import import_errors, load_entrypoint_plugins
        # Entrypoint whose load() blows up, simulating a broken plugin.
        mock_entrypoint = mock.Mock()
        mock_entrypoint.name = 'test-entrypoint'
        mock_entrypoint.module_name = 'test.plugins.test_plugins_manager'
        mock_entrypoint.load.side_effect = Exception('Version Conflict')
        mock_ep_plugins.return_value = [mock_entrypoint]
        with self.assertLogs("airflow.plugins_manager", level="ERROR") as log_output:
            load_entrypoint_plugins()
            received_logs = log_output.output[0]
            # Assert Traceback is shown too
            assert "Traceback (most recent call last):" in received_logs
            assert "Version Conflict" in received_logs
            assert "Failed to import plugin test-entrypoint" in received_logs
        # The failure is recorded, not raised.
        assert ("test.plugins.test_plugins_manager", "Version Conflict") in import_errors.items()
class TestPluginsManager(unittest.TestCase):
    """Checks for plugin loading and the deprecated-interface warnings."""
    def test_should_load_plugins_from_property(self):
        """Hooks exposed through a plugin *property* are still registered."""
        class AirflowTestPropertyPlugin(AirflowPlugin):
            name = "test_property_plugin"
            @property
            def hooks(self):
                class TestPropertyHook(BaseHook):
                    pass
                return [TestPropertyHook]
        with mock_plugin_manager(plugins=[AirflowTestPropertyPlugin()]):
            from airflow import plugins_manager
            plugins_manager.ensure_plugins_loaded()
            self.assertIn('AirflowTestPropertyPlugin', str(plugins_manager.plugins))
            self.assertIn("TestPropertyHook", str(plugins_manager.registered_hooks))
    def test_should_warning_about_incompatible_plugins(self):
        """Flask-Admin style plugins each trigger a compatibility warning."""
        class AirflowAdminViewsPlugin(AirflowPlugin):
            name = "test_admin_views_plugin"
            admin_views = [mock.MagicMock()]
        class AirflowAdminMenuLinksPlugin(AirflowPlugin):
            name = "test_menu_links_plugin"
            menu_links = [mock.MagicMock()]
        with mock_plugin_manager(plugins=[AirflowAdminViewsPlugin(), AirflowAdminMenuLinksPlugin()]):
            from airflow import plugins_manager
            # one warning per plugin using the legacy flask-admin interface
            with self.assertLogs(plugins_manager.log) as cm:
                plugins_manager.initialize_web_ui_plugins()
            self.assertEqual(
                cm.output,
                [
                    'WARNING:airflow.plugins_manager:Plugin \'test_admin_views_plugin\' may not be '
                    'compatible with the current Airflow version. Please contact the author of '
                    'the plugin.',
                    'WARNING:airflow.plugins_manager:Plugin \'test_menu_links_plugin\' may not be '
                    'compatible with the current Airflow version. Please contact the author of '
                    'the plugin.',
                ],
            )
    def test_should_not_warning_about_fab_plugins(self):
        """Pure AppBuilder plugins must not produce any warning."""
        class AirflowAdminViewsPlugin(AirflowPlugin):
            name = "test_admin_views_plugin"
            appbuilder_views = [mock.MagicMock()]
        class AirflowAdminMenuLinksPlugin(AirflowPlugin):
            name = "test_menu_links_plugin"
            appbuilder_menu_items = [mock.MagicMock()]
        with mock_plugin_manager(plugins=[AirflowAdminViewsPlugin(), AirflowAdminMenuLinksPlugin()]):
            from airflow import plugins_manager
            # no log records expected: assertLogs raising AssertionError
            # is the success condition here
            with self.assertRaises(AssertionError), self.assertLogs(plugins_manager.log):
                plugins_manager.initialize_web_ui_plugins()
    def test_should_not_warning_about_fab_and_flask_admin_plugins(self):
        """Plugins providing both interfaces must not produce any warning."""
        class AirflowAdminViewsPlugin(AirflowPlugin):
            name = "test_admin_views_plugin"
            admin_views = [mock.MagicMock()]
            appbuilder_views = [mock.MagicMock()]
        class AirflowAdminMenuLinksPlugin(AirflowPlugin):
            name = "test_menu_links_plugin"
            menu_links = [mock.MagicMock()]
            appbuilder_menu_items = [mock.MagicMock()]
        with mock_plugin_manager(plugins=[AirflowAdminViewsPlugin(), AirflowAdminMenuLinksPlugin()]):
            from airflow import plugins_manager
            # no log records expected: assertLogs raising AssertionError
            # is the success condition here
            with self.assertRaises(AssertionError), self.assertLogs(plugins_manager.log):
                plugins_manager.initialize_web_ui_plugins()
class TestPluginsDirectorySource(unittest.TestCase):
    """Checks PluginsDirectorySource path and display formatting."""
    def test_should_return_correct_path_name(self):
        from airflow import plugins_manager
        src = plugins_manager.PluginsDirectorySource(__file__)
        # Plain path, str() form and HTML form must all use the file name
        # relative to the plugins folder.
        self.assertEqual("test_plugins_manager.py", src.path)
        self.assertEqual("$PLUGINS_FOLDER/test_plugins_manager.py", str(src))
        self.assertEqual("<em>$PLUGINS_FOLDER/</em>test_plugins_manager.py", src.__html__())
class TestEntryPointSource(unittest.TestCase):
    """Checks EntryPointSource description formatting."""
    @mock.patch('airflow.plugins_manager.pkg_resources.iter_entry_points')
    def test_should_return_correct_source_details(self, mock_ep_plugins):
        from airflow import plugins_manager

        # Fake entrypoint carrying a distribution string.
        entrypoint = mock.Mock()
        entrypoint.name = 'test-entrypoint-plugin'
        entrypoint.module_name = 'module_name_plugin'
        entrypoint.dist = 'test-entrypoint-plugin==1.0.0'
        mock_ep_plugins.return_value = [entrypoint]
        plugins_manager.load_entrypoint_plugins()

        # str() and __html__() both prefix the distribution spec.
        source = plugins_manager.EntryPointSource(entrypoint)
        self.assertEqual(str(entrypoint), source.entrypoint)
        self.assertEqual("test-entrypoint-plugin==1.0.0: " + str(entrypoint), str(source))
        self.assertEqual("<em>test-entrypoint-plugin==1.0.0:</em> " + str(entrypoint), source.__html__())
|
zjutjsj1004/third | refs/heads/master | boost/tools/build/src/tools/notfile.py | 10 | # Status: ported.
# Base revision: 64429.
#
# Copyright (c) 2005-2010 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
import b2.build.type as type
import b2.build.generators as generators
import b2.build.virtual_target as virtual_target
import b2.build.toolset as toolset
import b2.build.targets as targets
from b2.manager import get_manager
from b2.util import bjam_signature
type.register("NOTFILE_MAIN")
class NotfileGenerator(generators.Generator):
    """Generator for NOTFILE_MAIN targets.

    Wraps the action named in the <action> property into a virtual target
    that runs a command but produces no file on disk.
    """
    def run(self, project, name, ps, sources):
        """Create and register the NotFileTarget for this main target.

        Fix: removed a stray `pass` statement that was dead code left at
        the top of the body from the Jam-to-Python port.
        """
        # '@rule-name' means "invoke that rule"; anything else is a literal
        # command executed through the generic notfile.run action.
        action_name = ps.get('action')[0]
        if action_name[0] == '@':
            action = virtual_target.Action(get_manager(), sources, action_name[1:], ps)
        else:
            action = virtual_target.Action(get_manager(), sources, "notfile.run", ps)

        return [get_manager().virtual_targets().register(
            virtual_target.NotFileTarget(name, project, action))]
generators.register(NotfileGenerator("notfile.main", False, [], ["NOTFILE_MAIN"]))
toolset.flags("notfile.run", "ACTION", [], ["<action>"])
get_manager().engine().register_action("notfile.run", "$(ACTION)")
@bjam_signature((["target_name"], ["action"], ["sources", "*"], ["requirements", "*"],
    ["default_build", "*"]))
def notfile(target_name, action, sources, requirements, default_build):
    # Project rule: declare a NOTFILE_MAIN target whose "build" just runs
    # `action`.  The action is carried to NotfileGenerator.run through the
    # <action> requirement.  NOTE: appends to the caller's `requirements`.
    requirements.append("<action>" + action)

    return targets.create_typed_metatarget(target_name, "NOTFILE_MAIN", sources, requirements,
                                           default_build, [])
get_manager().projects().add_rule("notfile", notfile)
|
hn8841182/20150623-test02 | refs/heads/master | static/Brython3.1.0-20150301-090019/Lib/_testcapi.py | 742 |
CHAR_MAX = 127
CHAR_MIN = -128
DBL_MAX = 1.7976931348623157e+308
DBL_MIN = 2.2250738585072014e-308
FLT_MAX = 3.4028234663852886e+38
FLT_MIN = 1.1754943508222875e-38
INT_MAX = 2147483647
INT_MIN = -2147483648
LLONG_MAX = 9223372036854775807
LLONG_MIN = -9223372036854775808
LONG_MAX = 2147483647
LONG_MIN = -2147483648
PY_SSIZE_T_MAX = 2147483647
PY_SSIZE_T_MIN = -2147483648
SHRT_MAX = 32767
SHRT_MIN = -32768
SIZEOF_PYGC_HEAD = 16
UCHAR_MAX = 255
UINT_MAX = 4294967295
ULLONG_MAX = 18446744073709551615
ULONG_MAX = 4294967295
USHRT_MAX = 65535
__loader__ = "<_frozen_importlib.ExtensionFileLoader object at 0x00C98DD0>"
def _pending_threadfunc(*args,**kw):
pass
class _test_structmembersType(object):
pass
def _test_thread_state(*args,**kw):
pass
def argparsing(*args,**kw):
pass
def code_newempty(*args,**kw):
pass
def codec_incrementaldecoder(*args,**kw):
pass
def codec_incrementalencoder(*args,**kw):
pass
def crash_no_current_thread(*args,**kw):
pass
class error(Exception):
pass
def exception_print(*args,**kw):
pass
def getargs_B(*args,**kw):
pass
def getargs_H(*args,**kw):
pass
def getargs_I(*args,**kw):
pass
def getargs_K(*args,**kw):
pass
def getargs_L(*args,**kw):
pass
def getargs_Z(*args,**kw):
pass
def getargs_Z_hash(*args,**kw):
pass
def getargs_b(*args,**kw):
pass
def getargs_c(*args,**kw):
pass
def getargs_h(*args,**kw):
pass
def getargs_i(*args,**kw):
pass
def getargs_k(*args,**kw):
pass
def getargs_keyword_only(*args,**kw):
pass
def getargs_keywords(*args,**kw):
pass
def getargs_l(*args,**kw):
pass
def getargs_n(*args,**kw):
pass
def getargs_p(*args,**kw):
pass
def getargs_s(*args,**kw):
pass
def getargs_s_hash(*args,**kw):
pass
def getargs_s_star(*args,**kw):
pass
def getargs_tuple(*args,**kw):
pass
def getargs_u(*args,**kw):
pass
def getargs_u_hash(*args,**kw):
pass
def getargs_w_star(*args,**kw):
pass
def getargs_y(*args,**kw):
pass
def getargs_y_hash(*args,**kw):
pass
def getargs_y_star(*args,**kw):
pass
def getargs_z(*args,**kw):
pass
def getargs_z_hash(*args,**kw):
pass
def getargs_z_star(*args,**kw):
pass
class instancemethod(object):
pass
def make_exception_with_doc(*args,**kw):
pass
def make_memoryview_from_NULL_pointer(*args,**kw):
pass
def parse_tuple_and_keywords(*args,**kw):
pass
def pytime_object_to_time_t(*args,**kw):
pass
def pytime_object_to_timespec(*args,**kw):
pass
def pytime_object_to_timeval(*args,**kw):
pass
def raise_exception(*args,**kw):
pass
def raise_memoryerror(*args,**kw):
pass
def run_in_subinterp(*args,**kw):
pass
def set_exc_info(*args,**kw):
pass
def test_L_code(*args,**kw):
pass
def test_Z_code(*args,**kw):
pass
def test_capsule(*args,**kw):
pass
def test_config(*args,**kw):
pass
def test_datetime_capi(*args,**kw):
pass
def test_dict_iteration(*args,**kw):
pass
def test_empty_argparse(*args,**kw):
pass
def test_k_code(*args,**kw):
pass
def test_lazy_hash_inheritance(*args,**kw):
pass
def test_list_api(*args,**kw):
pass
def test_long_and_overflow(*args,**kw):
pass
def test_long_api(*args,**kw):
pass
def test_long_as_double(*args,**kw):
pass
def test_long_as_size_t(*args,**kw):
pass
def test_long_long_and_overflow(*args,**kw):
pass
def test_long_numbits(*args,**kw):
pass
def test_longlong_api(*args,**kw):
pass
def test_null_strings(*args,**kw):
pass
def test_s_code(*args,**kw):
pass
def test_string_from_format(*args,**kw):
pass
def test_string_to_double(*args,**kw):
pass
def test_u_code(*args,**kw):
pass
def test_unicode_compare_with_ascii(*args,**kw):
pass
def test_widechar(*args,**kw):
pass
def test_with_docstring(*args,**kw):
"""This is a pretty normal docstring."""
pass
def traceback_print(*args,**kw):
pass
def unicode_aswidechar(*args,**kw):
pass
def unicode_aswidecharstring(*args,**kw):
pass
def unicode_encodedecimal(*args,**kw):
pass
def unicode_transformdecimaltoascii(*args,**kw):
pass
|
hgdeoro/py-arduino | refs/heads/master | scripts/github-md2html.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8
import BaseHTTPServer
import codecs
import markdown
import os.path
import SimpleHTTPServer
import sys
class H(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
print self.path
if self.path.endswith('.md'):
fs_path = self.path[1:]
input_file = codecs.open(fs_path, mode="r", encoding="utf8")
mdtext = input_file.read()
input_file.close()
html = markdown.Markdown().convert(mdtext)
self.end_headers()
self.wfile.write("""<html><head>
<link href="/scripts/github-md2html_bundle_github.css" media="screen" rel="stylesheet" type="text/css" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
</head><body class="logged_in page-blob linux env-production">
<div class="subnavd" id="main">
<div class="site">
<div class="slider">
<div class="frames">
<div class="frame frame-center">
<div id="files">
<div class="file">
<div class="blob instapaper_body">
<div class="wikistyle">
""")
html_string = html.encode('utf-8')
html_string = str(html_string)
self.wfile.write(html_string)
self.wfile.write("""
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</body></html>
""")
elif self.path.startswith('/hgdeoro/py-arduino-proxy/raw/'):
# /hgdeoro/py-arduino-proxy/raw/<ANY_BRANCH>/
splitted = self.path.split('/')
# '' $ hgdeoro $ py-arduino-proxy $ raw $ <ANY_BRANCH> $ many $ paths $ elements
splitted = splitted[5:]
self.path = '/' + '/'.join(splitted)
SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
else:
SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
def main():
if not os.path.exists('scripts/github-md2html_bundle_github.css'):
print "ERROR: Couldn't find 'scripts/github-md2html_bundle_github.css'."
print "Maybe you aren't running this scripts from the base directory of the project..."
sys.exit(1)
httpd = BaseHTTPServer.HTTPServer(('', 8055), H)
print "Listeting on http://localhost:8055/"
httpd.serve_forever()
if __name__ == '__main__':
main()
|
glatard/nipype | refs/heads/master | nipype/interfaces/fsl/tests/test_auto_Randomise.py | 9 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.fsl.model import Randomise
def test_Randomise_inputs():
input_map = dict(args=dict(argstr='%s',
),
base_name=dict(argstr='-o "%s"',
position=1,
usedefault=True,
),
c_thresh=dict(argstr='-c %.2f',
),
cm_thresh=dict(argstr='-C %.2f',
),
demean=dict(argstr='-D',
),
design_mat=dict(argstr='-d %s',
position=2,
),
environ=dict(nohash=True,
usedefault=True,
),
f_c_thresh=dict(argstr='-F %.2f',
),
f_cm_thresh=dict(argstr='-S %.2f',
),
f_only=dict(argstr='--f_only',
),
fcon=dict(argstr='-f %s',
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
in_file=dict(argstr='-i %s',
mandatory=True,
position=0,
),
mask=dict(argstr='-m %s',
),
num_perm=dict(argstr='-n %d',
),
one_sample_group_mean=dict(argstr='-1',
),
output_type=dict(),
p_vec_n_dist_files=dict(argstr='-P',
),
raw_stats_imgs=dict(argstr='-R',
),
seed=dict(argstr='--seed=%d',
),
show_info_parallel_mode=dict(argstr='-Q',
),
show_total_perms=dict(argstr='-q',
),
tcon=dict(argstr='-t %s',
position=3,
),
terminal_output=dict(nohash=True,
),
tfce=dict(argstr='-T',
),
tfce2D=dict(argstr='--T2',
),
tfce_C=dict(argstr='--tfce_C=%.2f',
),
tfce_E=dict(argstr='--tfce_E=%.2f',
),
tfce_H=dict(argstr='--tfce_H=%.2f',
),
var_smooth=dict(argstr='-v %d',
),
vox_p_values=dict(argstr='-x',
),
x_block_labels=dict(argstr='-e %s',
),
)
inputs = Randomise.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_Randomise_outputs():
    """Check the trait metadata declared on the Randomise output spec."""
    # All outputs of ``randomise`` are declared without extra trait metadata.
    output_map = dict(
        f_corrected_p_files=dict(),
        f_p_files=dict(),
        fstat_files=dict(),
        t_corrected_p_files=dict(),
        t_p_files=dict(),
        tstat_files=dict(),
    )
    spec = Randomise.output_spec()
    for trait_name, metadata in output_map.items():
        for meta_key, expected in metadata.items():
            yield assert_equal, getattr(spec.traits()[trait_name], meta_key), expected
|
aodag/skype4py | refs/heads/master | Skype4Py/lang/fr.py | 23 | apiAttachAvailable = u'API disponible'
# French (fr) localization strings for Skype4Py: each module-level name
# maps an internal enumeration value to its French display string.
# api*: API attachment status strings.
apiAttachNotAvailable = u'Indisponible'
apiAttachPendingAuthorization = u'Autorisation en attente'
apiAttachRefused = u'Refus\xe9'
apiAttachSuccess = u'Connexion r\xe9ussie'
apiAttachUnknown = u'Inconnu'
# bud*: contact ("buddy") relationship status strings.
budDeletedFriend = u'Supprim\xe9 de la liste d\u2019amis'
budFriend = u'Ami'
budNeverBeenFriend = u"N'a jamais \xe9t\xe9 ajout\xe9 \xe0 la liste d\u2019amis"
budPendingAuthorization = u'Autorisation en attente'
budUnknown = u'Inconnu'
# cfr*: call failure reason strings.
cfrBlockedByRecipient = u'Appel bloqu\xe9 par le destinataire'
cfrMiscError = u'Erreurs diverses'
cfrNoCommonCodec = u'Aucun codec en commun'
cfrNoProxyFound = u'Aucun proxy trouv\xe9'
cfrNotAuthorizedByRecipient = u'Utilisateur actuel non autoris\xe9 par le destinataire'
cfrRecipientNotFriend = u'Destinataire n\u2019est pas un ami'
cfrRemoteDeviceError = u'Erreur E/S audio distante'
cfrSessionTerminated = u'Session termin\xe9e'
cfrSoundIOError = u'Erreur E/S son'
cfrSoundRecordingError = u'Erreur d\u2019enregistrement du son'
cfrUnknown = u'Inconnu'
cfrUserDoesNotExist = u'Utilisateur/n\xb0 de t\xe9l\xe9phone inexistant'
cfrUserIsOffline = u'Il/Elle est D\xe9connect\xe9(e)'
# chs*: chat status strings.
chsAllCalls = u'Ancien dialogue'
chsDialog = u'Dialogue'
chsIncomingCalls = u'Attente multi acceptation'
chsLegacyDialog = u'Ancien dialogue'
chsMissedCalls = u'Dialogue'
chsMultiNeedAccept = u'Attente multi acceptation'
chsMultiSubscribed = u'Multi abonn\xe9s'
chsOutgoingCalls = u'Multi abonn\xe9s'
chsUnknown = u'Inconnu'
chsUnsubscribed = u'D\xe9sabonn\xe9'
# cls*: call status strings.
clsBusy = u'Occup\xe9'
clsCancelled = u'Annul\xe9'
clsEarlyMedia = u'Lecture flux m\xe9dia (Early Media)'
clsFailed = u"D\xe9sol\xe9, l'appel a \xe9chou\xe9 !"
clsFinished = u'Termin\xe9'
clsInProgress = u'Appel en cours...'
clsLocalHold = u'En attente locale'
clsMissed = u'Appel en absence'
clsOnHold = u'En attente'
clsRefused = u'Refus\xe9'
clsRemoteHold = u'En attente \xe0 distance'
clsRinging = u'un appel'
clsRouting = u'Routage'
clsTransferred = u'Inconnu'
clsTransferring = u'Inconnu'
clsUnknown = u'Inconnu'
clsUnplaced = u'Jamais plac\xe9'
clsVoicemailBufferingGreeting = u'Buff\xe9risation du message d\u2019accueil'
clsVoicemailCancelled = u'Message vocal annul\xe9'
clsVoicemailFailed = u'Echec du message vocal'
clsVoicemailPlayingGreeting = u'Lecture du message d\u2019accueil'
clsVoicemailRecording = u'Enregistrement sur la boite vocale'
clsVoicemailSent = u'Message vocal envoy\xe9'
clsVoicemailUploading = u'T\xe9l\xe9chargement du message vocal'
# clt*: call type strings.
cltIncomingP2P = u'Appel P2P entrant'
cltIncomingPSTN = u'Appel entrant'
cltOutgoingP2P = u'Appel P2P sortant'
cltOutgoingPSTN = u'Appel sortant'
cltUnknown = u'Inconnu'
# cme*: chat message event strings.
cmeAddedMembers = u'A ajout\xe9 des membres'
cmeCreatedChatWith = u'Cr\xe9\xe9 un dialogue avec'
cmeEmoted = u'Inconnu'
cmeLeft = u'Laiss\xe9'
cmeSaid = u'A dit'
cmeSawMembers = u'A vu des membres'
cmeSetTopic = u'A d\xe9fini un sujet'
cmeUnknown = u'Inconnu'
# cms*: chat message status strings.
cmsRead = u'Lu'
cmsReceived = u'Re\xe7u'
cmsSending = u'Envoi en cours...'
cmsSent = u'Envoy\xe9'
cmsUnknown = u'Inconnu'
# con*: connection status strings.
conConnecting = u'Connexion en cours'
conOffline = u'D\xe9connect\xe9'
conOnline = u'Connect\xe9'
conPausing = u'En pause'
conUnknown = u'Inconnu'
# cus*: current user online status strings.
cusAway = u'Absent'
cusDoNotDisturb = u'Ne pas d\xe9ranger'
cusInvisible = u'Invisible'
cusLoggedOut = u'D\xe9connect\xe9'
cusNotAvailable = u'Indisponible'
cusOffline = u'D\xe9connect\xe9'
cusOnline = u'Connect\xe9'
cusSkypeMe = u'Accessible'
cusUnknown = u'Inconnu'
# cvs*: call video status strings.
cvsBothEnabled = u'Envoi et r\xe9ception vid\xe9o'
cvsNone = u'Pas de vid\xe9o'
cvsReceiveEnabled = u'R\xe9ception vid\xe9o'
cvsSendEnabled = u'Envoi vid\xe9o'
cvsUnknown = u''
# grp*: contact group type strings.
grpAllFriends = u'Tous les amis'
grpAllUsers = u'Tous les utilisateurs'
grpCustomGroup = u'Personnalis\xe9'
grpOnlineFriends = u'Amis en ligne'
grpPendingAuthorizationFriends = u'Autorisation en attente'
grpProposedSharedGroup = u'Proposed Shared Group'
grpRecentlyContactedUsers = u'Utilisateurs r\xe9cemment contact\xe9s'
grpSharedGroup = u'Shared Group'
grpSkypeFriends = u'Amis Skype'
grpSkypeOutFriends = u'Amis SkypeOut'
grpUngroupedFriends = u'Amis sans groupe'
grpUnknown = u'Inconnu'
grpUsersAuthorizedByMe = u'Autoris\xe9 par moi'
grpUsersBlockedByMe = u'Bloqu\xe9 par moi'
grpUsersWaitingMyAuthorization = u'En attente de mon autorisation'
# lea*: chat leave reason strings.
leaAddDeclined = u'Ajout refus\xe9'
leaAddedNotAuthorized = u'La personne ajout\xe9e doit \xeatre autoris\xe9e'
leaAdderNotFriend = u'La personne qui ajoute doit \xeatre un ami'
leaUnknown = u'Inconnu'
leaUnsubscribe = u'D\xe9sabonn\xe9'
leaUserIncapable = u'Utilisateur incapable'
leaUserNotFound = u'Utilisateur introuvable'
# ols*: online status strings.
olsAway = u'Absent'
olsDoNotDisturb = u'Ne pas d\xe9ranger'
olsNotAvailable = u'Indisponible'
olsOffline = u'D\xe9connect\xe9'
olsOnline = u'Connect\xe9'
olsSkypeMe = u'Accessible'
olsSkypeOut = u'SkypeOut'
olsUnknown = u'Inconnu'
# sms*: SMS message/target status strings (left in English here,
# apparently untranslated in this locale).
smsMessageStatusComposing = u'Composing'
smsMessageStatusDelivered = u'Delivered'
smsMessageStatusFailed = u'Failed'
smsMessageStatusRead = u'Read'
smsMessageStatusReceived = u'Received'
smsMessageStatusSendingToServer = u'Sending to Server'
smsMessageStatusSentToServer = u'Sent to Server'
smsMessageStatusSomeTargetsFailed = u'Some Targets Failed'
smsMessageStatusUnknown = u'Unknown'
smsMessageTypeCCRequest = u'Confirmation Code Request'
smsMessageTypeCCSubmit = u'Confirmation Code Submit'
smsMessageTypeIncoming = u'Incoming'
smsMessageTypeOutgoing = u'Outgoing'
smsMessageTypeUnknown = u'Unknown'
smsTargetStatusAcceptable = u'Acceptable'
smsTargetStatusAnalyzing = u'Analyzing'
smsTargetStatusDeliveryFailed = u'Delivery Failed'
smsTargetStatusDeliveryPending = u'Delivery Pending'
smsTargetStatusDeliverySuccessful = u'Delivery Successful'
smsTargetStatusNotRoutable = u'Not Routable'
smsTargetStatusUndefined = u'Undefined'
smsTargetStatusUnknown = u'Unknown'
# usex*: user sex strings.
usexFemale = u'Femme'
usexMale = u'Homme'
usexUnknown = u'Inconnu'
# vmr*: voicemail error strings.
vmrConnectError = u'Erreur de connexion'
vmrFileReadError = u'Erreur de lecture fichier'
vmrFileWriteError = u'Erreur d\u2019\xe9criture fichier'
vmrMiscError = u'Erreurs diverses'
vmrNoError = u'Pas d\u2019erreur'
vmrNoPrivilege = u'Pas de privil\xe8ge Voicemail'
vmrNoVoicemail = u'Aucun message vocal de ce type'
vmrPlaybackError = u'Erreur de lecture'
vmrRecordingError = u'Erreur d\u2019enregistrement'
vmrUnknown = u'Inconnu'
# vms*: voicemail status strings.
vmsBlank = u'Vierge'
vmsBuffering = u'Buff\xe9risation en cours'
vmsDeleting = u'Suppression en cours'
vmsDownloading = u'T\xe9l\xe9chargement en cours'
vmsFailed = u'\xc9chec'
vmsNotDownloaded = u'Non t\xe9l\xe9charg\xe9'
vmsPlayed = u'Lu'
vmsPlaying = u'Lecture en cours'
vmsRecorded = u'Enregistr\xe9'
vmsRecording = u'Enregistrement sur la boite vocale'
vmsUnknown = u'Inconnu'
vmsUnplayed = u'Non lu'
vmsUploaded = u'T\xe9l\xe9charg\xe9'
vmsUploading = u'T\xe9l\xe9chargement en cours'
# vmt*: voicemail type strings.
vmtCustomGreeting = u'Message d\u2019accueil personnalis\xe9'
vmtDefaultGreeting = u'Message d\u2019accueil par d\xe9faut'
vmtIncoming = u'R\xe9ception de message sur la boite vocale'
vmtOutgoing = u'Sortant'
vmtUnknown = u'Inconnu'
# vss*: voice stream status strings.
vssAvailable = u'Disponible'
vssNotAvailable = u'Indisponible'
vssPaused = u'En pause'
vssRejected = u'Rejet\xe9'
vssRunning = u'En cours'
vssStarting = u'D\xe9marrage'
vssStopping = u'En cours d\u2019arr\xeat'
vssUnknown = u'Inconnu'
|
witwall/gyp | refs/heads/master | test/additional-targets/src/dir1/emit.py | 337 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
# Write a fixed marker line to the output file named on the command line.
# The context manager guarantees the handle is closed even if the write
# raises, instead of relying on an explicit close().
with open(sys.argv[1], 'wb') as f:
    f.write('Hello from emit.py\n')
|
larrybradley/astropy | refs/heads/remote-tests | astropy/coordinates/tests/test_angular_separation.py | 8 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the projected separation stuff
"""
import pytest
import numpy as np
from astropy.tests.helper import assert_quantity_allclose as assert_allclose
from astropy import units as u
from astropy.coordinates.builtin_frames import ICRS, FK5, Galactic
from astropy.coordinates import Angle, Distance
# lon1, lat1, lon2, lat2 in degrees
coords = [(1, 0, 0, 0),
          (0, 1, 0, 0),
          (0, 0, 1, 0),
          (0, 0, 0, 1),
          (0, 0, 10, 0),
          (0, 0, 90, 0),
          (0, 0, 180, 0),
          (0, 45, 0, -45),
          (0, 60, 0, -30),
          (-135, -15, 45, 15),
          (100, -89, -80, 89),
          (0, 0, 0, 0),
          (0, 0, 1. / 60., 1. / 60.)]
# Expected angular separation, in degrees, for each entry of ``coords``.
correct_seps = [1, 1, 1, 1, 10, 90, 180, 90, 90, 180, 180, 0,
                0.023570225877234643]
# Absolute tolerance (in degrees) used when comparing separations below.
correctness_margin = 2e-10
def test_angsep():
    """
    Check ``angular_separation`` against the precomputed separations,
    for plain radian floats, Quantities, and Angles alike.
    """
    from astropy.coordinates.angle_utilities import angular_separation

    converters = (np.deg2rad,
                  lambda d: u.Quantity(d, "deg"),
                  lambda d: Angle(d, "deg"))
    for to_unit in converters:
        for (lon1, lat1, lon2, lat2), expected in zip(coords, correct_seps):
            got = angular_separation(to_unit(lon1), to_unit(lat1),
                                     to_unit(lon2), to_unit(lat2))
            assert np.fabs(got - to_unit(expected)) < to_unit(correctness_margin)
def test_fk5_seps():
    """
    `separation` must work on FK5 objects.

    Regression test for github issue #891.
    """
    point_a = FK5(1. * u.deg, 1. * u.deg)
    point_b = FK5(2. * u.deg, 2. * u.deg)
    point_a.separation(point_b)
def test_proj_separations():
    """
    Exercise on-sky `separation`, including its type, value, and the
    disallowed arithmetic between coordinate objects.
    """
    equator = ICRS(ra=0 * u.deg, dec=0 * u.deg)
    one_deg_north = ICRS(ra=0 * u.deg, dec=1 * u.deg)

    sep = one_deg_north.separation(equator)
    # The separation comes back as an Angle of exactly one degree.
    assert isinstance(sep, Angle)
    assert sep.degree == 1
    assert_allclose(sep.arcminute, 60.)

    # Adding/subtracting points on a sphere is ambiguous, so both raise.
    with pytest.raises(TypeError):
        equator + one_deg_north
    with pytest.raises(TypeError):
        equator - one_deg_north

    ngp = Galactic(l=0 * u.degree, b=90 * u.degree)
    ncp = ICRS(ra=0 * u.degree, dec=90 * u.degree)

    # Frame conversion is applied automatically when frames differ, so an
    # explicit transform must give the same separation.
    assert_allclose(ncp.separation(ngp.transform_to(ICRS())).degree,
                    ncp.separation(ngp).degree)

    # Known distance from the north galactic pole to the celestial pole.
    assert_allclose(ncp.separation(ngp.transform_to(ICRS())).degree,
                    62.87174758503201)
def test_3d_separations():
    """
    `separation_3d` must return a Distance with the expected magnitude.
    """
    near = ICRS(ra=1 * u.deg, dec=1 * u.deg, distance=9 * u.kpc)
    far = ICRS(ra=1 * u.deg, dec=1 * u.deg, distance=10 * u.kpc)

    sep3d = far.separation_3d(near)
    assert isinstance(sep3d, Distance)
    # Same direction, 1 kpc apart in radial distance.
    assert_allclose(sep3d - 1 * u.kpc, 0 * u.kpc, atol=1e-12 * u.kpc)
|
zhangpanrobot/myblog | refs/heads/master | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/pygments/lexers/_stan_builtins.py | 292 | # -*- coding: utf-8 -*-
"""
pygments.lexers._stan_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the names of functions for Stan used by
``pygments.lexers.math.StanLexer.
:copyright: Copyright 2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
#: Stan language keywords recognized by the lexer.
KEYWORDS = [
    'else', 'for', 'if', 'in', 'lower', 'lp__', 'print', 'upper', 'while',
]

#: Built-in Stan data types.
TYPES = [
    'corr_matrix', 'cov_matrix', 'int', 'matrix', 'ordered',
    'positive_ordered', 'real', 'row_vector', 'simplex', 'unit_vector',
    'vector',
]
#: Built-in Stan functions, in the order emitted by the generator.
FUNCTIONS = [
    'Phi', 'Phi_approx', 'abs', 'acos', 'acosh', 'asin', 'asinh',
    'atan', 'atan2', 'atanh',
    'bernoulli_cdf', 'bernoulli_log', 'bernoulli_logit_log',
    'bernoulli_rng', 'beta_binomial_cdf', 'beta_binomial_log',
    'beta_binomial_rng', 'beta_cdf', 'beta_log', 'beta_rng',
    'binary_log_loss', 'binomial_cdf', 'binomial_coefficient_log',
    'binomial_log', 'binomial_logit_log', 'binomial_rng', 'block',
    'categorical_log', 'categorical_rng', 'cauchy_cdf', 'cauchy_log',
    'cauchy_rng', 'cbrt', 'ceil', 'chi_square_log', 'chi_square_rng',
    'cholesky_decompose', 'col', 'cols', 'cos', 'cosh', 'crossprod',
    'cumulative_sum', 'determinant', 'diag_matrix', 'diag_post_multiply',
    'diag_pre_multiply', 'diagonal', 'dims', 'dirichlet_log',
    'dirichlet_rng', 'dot_product', 'dot_self', 'double_exponential_log',
    'double_exponential_rng', 'e', 'eigenvalues_sym', 'eigenvectors_sym',
    'epsilon', 'erf', 'erfc', 'exp', 'exp2', 'exp_mod_normal_cdf',
    'exp_mod_normal_log', 'exp_mod_normal_rng', 'expm1',
    'exponential_cdf', 'exponential_log', 'exponential_rng', 'fabs',
    'fdim', 'floor', 'fma', 'fmax', 'fmin', 'fmod', 'gamma_log',
    'gamma_rng', 'gumbel_cdf', 'gumbel_log', 'gumbel_rng',
    'hypergeometric_log', 'hypergeometric_rng', 'hypot', 'if_else',
    'int_step', 'inv_chi_square_cdf', 'inv_chi_square_log',
    'inv_chi_square_rng', 'inv_cloglog', 'inv_gamma_cdf',
    'inv_gamma_log', 'inv_gamma_rng', 'inv_logit', 'inv_wishart_log',
    'inv_wishart_rng', 'inverse', 'lbeta', 'lgamma',
    'lkj_corr_cholesky_log', 'lkj_corr_cholesky_rng', 'lkj_corr_log',
    'lkj_corr_rng', 'lkj_cov_log', 'lmgamma', 'log', 'log10', 'log1m',
    'log1m_inv_logit', 'log1p', 'log1p_exp', 'log2', 'log_determinant',
    'log_inv_logit', 'log_sum_exp', 'logistic_cdf', 'logistic_log',
    'logistic_rng', 'logit', 'lognormal_cdf', 'lognormal_log',
    'lognormal_rng', 'max', 'mdivide_left_tri_low',
    'mdivide_right_tri_low', 'mean', 'min', 'multi_normal_cholesky_log',
    'multi_normal_log', 'multi_normal_prec_log', 'multi_normal_rng',
    'multi_student_t_log', 'multi_student_t_rng', 'multinomial_cdf',
    'multinomial_log', 'multinomial_rng', 'multiply_log',
    'multiply_lower_tri_self_transpose', 'neg_binomial_cdf',
    'neg_binomial_log', 'neg_binomial_rng', 'negative_epsilon',
    'negative_infinity', 'normal_cdf', 'normal_log', 'normal_rng',
    'not_a_number', 'ordered_logistic_log', 'ordered_logistic_rng',
    'owens_t', 'pareto_cdf', 'pareto_log', 'pareto_rng', 'pi',
    'poisson_cdf', 'poisson_log', 'poisson_log_log', 'poisson_rng',
    'positive_infinity', 'pow', 'prod', 'rep_array', 'rep_matrix',
    'rep_row_vector', 'rep_vector', 'round', 'row', 'rows',
    'scaled_inv_chi_square_cdf', 'scaled_inv_chi_square_log',
    'scaled_inv_chi_square_rng', 'sd', 'sin', 'singular_values', 'sinh',
    'size', 'skew_normal_cdf', 'skew_normal_log', 'skew_normal_rng',
    'softmax', 'sqrt', 'sqrt2', 'square', 'step', 'student_t_cdf',
    'student_t_log', 'student_t_rng', 'sum', 'tan', 'tanh',
    'tcrossprod', 'tgamma', 'trace', 'trunc', 'uniform_log',
    'uniform_rng', 'variance', 'weibull_cdf', 'weibull_log',
    'weibull_rng', 'wishart_log', 'wishart_rng',
]
#: Stan distribution names usable after the sampling operator ``~``.
DISTRIBUTIONS = [
    'bernoulli', 'bernoulli_logit', 'beta', 'beta_binomial', 'binomial',
    'binomial_coefficient', 'binomial_logit', 'categorical', 'cauchy',
    'chi_square', 'dirichlet', 'double_exponential', 'exp_mod_normal',
    'exponential', 'gamma', 'gumbel', 'hypergeometric', 'inv_chi_square',
    'inv_gamma', 'inv_wishart', 'lkj_corr', 'lkj_corr_cholesky',
    'lkj_cov', 'logistic', 'lognormal', 'multi_normal',
    'multi_normal_cholesky', 'multi_normal_prec', 'multi_student_t',
    'multinomial', 'multiply', 'neg_binomial', 'normal',
    'ordered_logistic', 'pareto', 'poisson', 'poisson_log',
    'scaled_inv_chi_square', 'skew_normal', 'student_t', 'uniform',
    'weibull', 'wishart',
]
#: Words reserved by Stan (largely the C++ reserved words, plus a few
#: Stan-specific ones such as 'repeat', 'then' and 'until').  The list is
#: only ever used for membership-style highlighting, so the duplicate
#: 'false' and 'true' entries present in the generated original have been
#: removed; order is otherwise preserved.
RESERVED = [
    'alignas', 'alignof', 'and', 'and_eq', 'asm', 'auto', 'bitand',
    'bitor', 'bool', 'break', 'case', 'catch', 'char', 'char16_t',
    'char32_t', 'class', 'compl', 'const', 'const_cast', 'constexpr',
    'continue', 'decltype', 'default', 'delete', 'do', 'double',
    'dynamic_cast', 'enum', 'explicit', 'export', 'extern', 'false',
    'float', 'friend', 'goto', 'inline', 'int', 'long', 'mutable',
    'namespace', 'new', 'noexcept', 'not', 'not_eq', 'nullptr',
    'operator', 'or', 'or_eq', 'private', 'protected', 'public',
    'register', 'reinterpret_cast', 'repeat', 'return', 'short',
    'signed', 'sizeof', 'static', 'static_assert', 'static_cast',
    'struct', 'switch', 'template', 'then', 'this', 'thread_local',
    'throw', 'true', 'try', 'typedef', 'typeid', 'typename', 'union',
    'unsigned', 'until', 'using', 'virtual', 'void', 'volatile',
    'wchar_t', 'xor', 'xor_eq',
]
|
mpharrigan/mdtraj | refs/heads/master | mdtraj/tests/test_arc.py | 8 | ##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2013 Stanford University and the Authors
#
# Authors: Lee-Ping Wang
# Contributors: Robert McGibbon
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
import tempfile, os
import numpy as np
import mdtraj as md
from mdtraj.formats import ArcTrajectoryFile, arc
from mdtraj.formats import PDBTrajectoryFile
from mdtraj.testing import get_fn, eq
def test_read_0():
    """Coordinates read from the ARC file must match the reference PDB."""
    with ArcTrajectoryFile(get_fn('4waters.arc')) as arcfile:
        xyz, leng, ang = arcfile.read()
    with PDBTrajectoryFile(get_fn('4waters.pdb')) as pdbfile:
        reference = pdbfile.positions
    eq(xyz, reference, decimal=3)
def test_read_arctraj():
    """Loading an ARC file with or without an explicit topology must give
    identical coordinates."""
    with_top = md.load(get_fn('nitrogen.arc'), top=get_fn('nitrogen.pdb'))
    without_top = md.load(get_fn('nitrogen.arc'))
    eq(with_top.xyz, without_top.xyz)
|
AdaptiveApplications/carnegie | refs/heads/master | tarc_bus_locator_client/protobuf-2.5.0/gtest/test/gtest_shuffle_test.py | 3023 | #!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
# Filter passed to --gtest_filter in the "filtered" scenarios below.
TEST_FILTER = 'A*.A:A*.B:C*'
# Test lists populated once by CalculateTestLists() and shared by every
# test method; each holds the tests run under one flag/env scenario.
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
  """Returns the flag that makes Google Test run disabled tests too."""
  return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
  """Returns a --gtest_filter flag selecting *test_filter*."""
  flag = '--gtest_filter=%s' % (test_filter,)
  return flag
def RepeatFlag(n):
  """Returns the --gtest_repeat flag requesting *n* iterations."""
  flag = '--gtest_repeat=%s' % (n,)
  return flag
def ShuffleFlag():
  """Returns the flag that turns on test shuffling."""
  return '--gtest_shuffle'
def RandomSeedFlag(n):
  """Returns the --gtest_random_seed flag carrying seed *n*."""
  flag = '--gtest_random_seed=%s' % (n,)
  return flag
def RunAndReturnOutput(extra_env, args):
  """Runs the test program with *extra_env* merged into the current
  environment and returns its captured output."""
  env = os.environ.copy()
  env.update(extra_env)
  return gtest_test_utils.Subprocess([COMMAND] + args, env=env).output
def GetTestsForAllIterations(extra_env, args):
  """Runs the test program and returns a list of test lists.

  Args:
    extra_env: a map from environment variables to their values
    args: command line flags to pass to gtest_shuffle_test_

  Returns:
    A list where the i-th element is the list of tests run in the i-th
    test iteration.
  """
  iterations = []
  for raw_line in RunAndReturnOutput(extra_env, args).split('\n'):
    stripped = raw_line.strip()
    if raw_line.startswith('----'):
      # A dashed line marks the start of a new iteration's test list.
      tests = []
      iterations.append(tests)
    elif stripped:
      tests.append(stripped)  # 'TestCaseName.TestName'
  return iterations
def GetTestCases(tests):
  """Returns a list of test cases in the given full test names.

  Args:
    tests: a list of full test names in 'TestCaseName.TestName' form.

  Returns:
    The distinct test case names from 'tests', in order of first
    appearance.  All duplicates are removed, not just consecutive ones
    (the original docstring claimed only consecutive duplicates were
    removed, which did not match the code's behavior).
  """
  seen = set()
  test_cases = []
  for test in tests:
    test_case = test.split('.')[0]
    # O(1) set membership instead of scanning the result list each time.
    if test_case not in seen:
      seen.add(test_case)
      test_cases.append(test_case)
  return test_cases
def CalculateTestLists():
  """Populates the module-level test lists, one scenario per list.

  The test binary is run at most once per scenario: lists that are
  already non-empty are left untouched, so repeated calls are cheap.
  """
  if not ALL_TESTS:
    ALL_TESTS[:] = GetTestsForAllIterations(
        {}, [AlsoRunDisabledTestsFlag()])[0]
  if not ACTIVE_TESTS:
    ACTIVE_TESTS[:] = GetTestsForAllIterations({}, [])[0]
  if not FILTERED_TESTS:
    FILTERED_TESTS[:] = GetTestsForAllIterations(
        {}, [FilterFlag(TEST_FILTER)])[0]
  if not SHARDED_TESTS:
    SHARDED_TESTS[:] = GetTestsForAllIterations(
        {TOTAL_SHARDS_ENV_VAR: '3', SHARD_INDEX_ENV_VAR: '1'}, [])[0]
  if not SHUFFLED_ALL_TESTS:
    SHUFFLED_ALL_TESTS[:] = GetTestsForAllIterations(
        {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0]
  if not SHUFFLED_ACTIVE_TESTS:
    SHUFFLED_ACTIVE_TESTS[:] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])[0]
  if not SHUFFLED_FILTERED_TESTS:
    SHUFFLED_FILTERED_TESTS[:] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0]
  if not SHUFFLED_SHARDED_TESTS:
    SHUFFLED_SHARDED_TESTS[:] = GetTestsForAllIterations(
        {TOTAL_SHARDS_ENV_VAR: '3', SHARD_INDEX_ENV_VAR: '1'},
        [ShuffleFlag(), RandomSeedFlag(1)])[0]
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
  """Tests test shuffling.

  Each test compares the test lists captured by CalculateTestLists()
  under the shuffled and unshuffled scenarios.
  """
  def setUp(self):
    # Runs the test binary under each scenario on first use; results are
    # cached in the module-level lists, so only the first test pays the cost.
    CalculateTestLists()
  def testShufflePreservesNumberOfTests(self):
    # Shuffling must not add or drop tests in any scenario.
    self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
    self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
    self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
    self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
  def testShuffleChangesTestOrder(self):
    # With shuffling on, the run order must differ from the default order.
    self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
    self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
    self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
                 SHUFFLED_FILTERED_TESTS)
    self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
                 SHUFFLED_SHARDED_TESTS)
  def testShuffleChangesTestCaseOrder(self):
    # The order of whole test cases must change too, not just tests.
    self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
                 GetTestCases(SHUFFLED_ALL_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
        GetTestCases(SHUFFLED_ACTIVE_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
        GetTestCases(SHUFFLED_FILTERED_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
        GetTestCases(SHUFFLED_SHARDED_TESTS))
  def testShuffleDoesNotRepeatTest(self):
    # Every test must appear exactly once in each shuffled list.
    for test in SHUFFLED_ALL_TESTS:
      self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
                       '%s appears more than once' % (test,))
  def testShuffleDoesNotCreateNewTest(self):
    # A shuffled list must be a permutation of the unshuffled one:
    # nothing new...
    for test in SHUFFLED_ALL_TESTS:
      self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
  def testShuffleIncludesAllTests(self):
    # ...and nothing missing.
    for test in ALL_TESTS:
      self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
    for test in ACTIVE_TESTS:
      self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
    for test in FILTERED_TESTS:
      self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
    for test in SHARDED_TESTS:
      self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
  def testShuffleLeavesDeathTestsAtFront(self):
    # Death tests must all run before any non-death test, even shuffled.
    non_death_test_found = False
    for test in SHUFFLED_ACTIVE_TESTS:
      if 'DeathTest.' in test:
        self.assert_(not non_death_test_found,
                     '%s appears after a non-death test' % (test,))
      else:
        non_death_test_found = True
  def _VerifyTestCasesDoNotInterleave(self, tests):
    # Helper: asserts each test case's tests are contiguous in `tests`.
    test_cases = []
    for test in tests:
      [test_case, _] = test.split('.')
      if test_cases and test_cases[-1] != test_case:
        test_cases.append(test_case)
        self.assertEqual(1, test_cases.count(test_case),
                         'Test case %s is not grouped together in %s' %
                         (test_case, tests))
  def testShuffleDoesNotInterleaveTestCases(self):
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)
  def testShuffleRestoresOrderAfterEachIteration(self):
    # Get the test lists in all 3 iterations, using random seed 1, 2,
    # and 3 respectively.  Google Test picks a different seed in each
    # iteration, and this test depends on the current implementation
    # picking successive numbers.  This dependency is not ideal, but
    # makes the test much easier to write.
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
    # Make sure running the tests with random seed 1 gets the same
    # order as in iteration 1 above.
    [tests_with_seed1] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])
    self.assertEqual(tests_in_iteration1, tests_with_seed1)
    # Make sure running the tests with random seed 2 gets the same
    # order as in iteration 2 above.  Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 2.
    [tests_with_seed2] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(2)])
    self.assertEqual(tests_in_iteration2, tests_with_seed2)
    # Make sure running the tests with random seed 3 gets the same
    # order as in iteration 3 above.  Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 3.
    [tests_with_seed3] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(3)])
    self.assertEqual(tests_in_iteration3, tests_with_seed3)
  def testShuffleGeneratesNewOrderInEachIteration(self):
    # Each of the three repeated iterations must use a fresh order.
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
    self.assert_(tests_in_iteration1 != tests_in_iteration2,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration1 != tests_in_iteration3,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration2 != tests_in_iteration3,
                 tests_in_iteration2)
  def testShuffleShardedTestsPreservesPartition(self):
    # If we run M tests on N shards, the same M tests should be run in
    # total, regardless of the random seeds used by the shards.
    [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '0'},
                                        [ShuffleFlag(), RandomSeedFlag(1)])
    [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '1'},
                                        [ShuffleFlag(), RandomSeedFlag(20)])
    [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '2'},
                                        [ShuffleFlag(), RandomSeedFlag(25)])
    sorted_sharded_tests = tests1 + tests2 + tests3
    sorted_sharded_tests.sort()
    sorted_active_tests = []
    sorted_active_tests.extend(ACTIVE_TESTS)
    sorted_active_tests.sort()
    self.assertEqual(sorted_active_tests, sorted_sharded_tests)
if __name__ == '__main__':
  # Run the unit tests via Google Test's Python test harness.
  gtest_test_utils.Main()
|
moijes12/oh-mainline | refs/heads/master | vendor/packages/Django/django/contrib/gis/gdal/base.py | 224 | from ctypes import c_void_p
from django.contrib.gis.gdal.error import GDALException
from django.utils import six
class GDALBase(object):
    """
    Base object for GDAL objects that has a pointer access property
    that controls access to the underlying C pointer.
    """
    # The underlying C pointer starts out NULL until one is assigned.
    _ptr = None

    # ctypes pointer class accepted by the `ptr` setter.
    ptr_type = c_void_p

    def _get_ptr(self):
        # Refuse to hand out an invalid (NULL) pointer -- passing NULL
        # pointers to GDAL routines is very bad.
        if not self._ptr:
            raise GDALException('GDAL %s pointer no longer valid.' % self.__class__.__name__)
        return self._ptr

    def _set_ptr(self, ptr):
        # Accept raw integers (wrapped in ptr_type), instances of the
        # compatible pointer type, or None (NULL); reject anything else.
        if isinstance(ptr, six.integer_types):
            self._ptr = self.ptr_type(ptr)
        elif ptr is None or isinstance(ptr, self.ptr_type):
            self._ptr = ptr
        else:
            raise TypeError('Incompatible pointer type')

    # Pointer access property.
    ptr = property(_get_ptr, _set_ptr)
|
hsorby/fifa_world_cup_2014 | refs/heads/master | languages/ro.py | 162 | # coding: utf8
{
'!=': '!=',
'!langcode!': 'ro',
'!langname!': 'Română',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" (actualizează) este o expresie opțională precum "câmp1=\'valoare_nouă\'". Nu puteți actualiza sau șterge rezultatele unui JOIN',
'%(nrows)s records found': '%(nrows)s înregistrări găsite',
'%d days ago': '%d days ago',
'%d weeks ago': '%d weeks ago',
'%s %%{row} deleted': '%s linii șterse',
'%s %%{row} updated': '%s linii actualizate',
'%s selected': '%s selectat(e)',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(something like "it-it")': '(ceva ce seamănă cu "it-it")',
'1 day ago': '1 day ago',
'1 week ago': '1 week ago',
'<': '<',
'<=': '<=',
'=': '=',
'>': '>',
'>=': '>=',
'A new version of web2py is available': 'O nouă versiune de web2py este disponibilă',
'A new version of web2py is available: %s': 'O nouă versiune de web2py este disponibilă: %s',
'About': 'Despre',
'about': 'despre',
'About application': 'Despre aplicație',
'Access Control': 'Control acces',
'Add': 'Adaugă',
'additional code for your application': 'cod suplimentar pentru aplicația dvs.',
'admin disabled because no admin password': 'administrare dezactivată deoarece parola de administrator nu a fost furnizată',
'admin disabled because not supported on google app engine': 'administrare dezactivată deoarece funcționalitatea nu e suportat pe Google App Engine',
'admin disabled because unable to access password file': 'administrare dezactivată deoarece nu există acces la fișierul cu parole',
'Admin is disabled because insecure channel': 'Adminstrarea este dezactivată deoarece conexiunea nu este sigură',
'Admin is disabled because unsecure channel': 'Administrarea este dezactivată deoarece conexiunea nu este securizată',
'Administration': 'Administrare',
'Administrative Interface': 'Interfață administrare',
'Administrator Password:': 'Parolă administrator:',
'Ajax Recipes': 'Rețete Ajax',
'And': 'Și',
'and rename it (required):': 'și renumiți (obligatoriu):',
'and rename it:': ' și renumiți:',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'appadmin dezactivat deoarece conexiunea nu e sigură',
'application "%s" uninstalled': 'aplicația "%s" a fost dezinstalată',
'application compiled': 'aplicația a fost compilată',
'application is compiled and cannot be designed': 'aplicația este compilată și nu poate fi editată',
'Are you sure you want to delete file "%s"?': 'Sigur ștergeți fișierul "%s"?',
'Are you sure you want to delete this object?': 'Sigur ștergeți acest obiect?',
'Are you sure you want to uninstall application "%s"': 'Sigur dezinstalați aplicația "%s"',
'Are you sure you want to uninstall application "%s"?': 'Sigur dezinstalați aplicația "%s"?',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENȚIE: Nu vă puteți conecta decât utilizând o conexiune securizată (HTTPS) sau rulând aplicația pe computerul local.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENȚIE: Nu puteți efectua mai multe teste o dată deoarece lansarea în execuție a mai multor subpocese nu este sigură.',
'ATTENTION: you cannot edit the running application!': 'ATENȚIE: nu puteți edita o aplicație în curs de execuție!',
'Authentication': 'Autentificare',
'Available Databases and Tables': 'Baze de date și tabele disponibile',
'Back': 'Înapoi',
'Buy this book': 'Cumpără această carte',
'Cache': 'Cache',
'cache': 'cache',
'Cache Keys': 'Chei cache',
'cache, errors and sessions cleaned': 'cache, erori și sesiuni golite',
'Cannot be empty': 'Nu poate fi vid',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Compilare imposibilă: aplicația conține erori. Debogați aplicația și încercați din nou.',
'cannot create file': 'fișier imposibil de creat',
'cannot upload file "%(filename)s"': 'imposibil de încărcat fișierul "%(filename)s"',
'Change Password': 'Schimbare parolă',
'Change password': 'Schimbare parolă',
'change password': 'schimbare parolă',
'check all': 'coșați tot',
'Check to delete': 'Coșați pentru a șterge',
'clean': 'golire',
'Clear': 'Golește',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'click to check for upgrades': 'Clic pentru a verifica dacă există upgrade-uri',
'Client IP': 'IP client',
'Community': 'Comunitate',
'compile': 'compilare',
'compiled application removed': 'aplicația compilată a fost ștearsă',
'Components and Plugins': 'Componente și plugin-uri',
'contains': 'conține',
'Controller': 'Controlor',
'Controllers': 'Controlori',
'controllers': 'controlori',
'Copyright': 'Drepturi de autor',
'create file with filename:': 'crează fișier cu numele:',
'Create new application': 'Creați aplicație nouă',
'create new application:': 'crează aplicație nouă:',
'crontab': 'crontab',
'Current request': 'Cerere curentă',
'Current response': 'Răspuns curent',
'Current session': 'Sesiune curentă',
'currently saved or': 'în prezent salvat sau',
'customize me!': 'Personalizează-mă!',
'data uploaded': 'date încărcate',
'Database': 'bază de date',
'Database %s select': 'selectare bază de date %s',
'database administration': 'administrare bază de date',
'Date and Time': 'Data și ora',
'db': 'db',
'DB Model': 'Model bază de date',
'defines tables': 'definire tabele',
'Delete': 'Șterge',
'delete': 'șterge',
'delete all checked': 'șterge tot ce e coșat',
'Delete:': 'Șterge:',
'Demo': 'Demo',
'Deploy on Google App Engine': 'Instalare pe Google App Engine',
'Deployment Recipes': 'Rețete de instalare',
'Description': 'Descriere',
'design': 'design',
'DESIGN': 'DESIGN',
'Design for': 'Design pentru',
'DISK': 'DISK',
'Disk Cache Keys': 'Chei cache de disc',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentație',
"Don't know what to do?": 'Nu știți ce să faceți?',
'done!': 'gata!',
'Download': 'Descărcare',
'E-mail': 'E-mail',
'E-mail invalid': 'E-mail invalid',
'edit': 'editare',
'EDIT': 'EDITARE',
'Edit': 'Editare',
'Edit application': 'Editare aplicație',
'edit controller': 'editare controlor',
'Edit current record': 'Editare înregistrare curentă',
'Edit Profile': 'Editare profil',
'edit profile': 'editare profil',
'Edit This App': 'Editați această aplicație',
'Editing file': 'Editare fișier',
'Editing file "%s"': 'Editare fișier "%s"',
'Email and SMS': 'E-mail și SMS',
'enter a number between %(min)g and %(max)g': 'introduceți un număr între %(min)g și %(max)g',
'enter an integer between %(min)g and %(max)g': 'introduceți un întreg între %(min)g și %(max)g',
'Error logs for "%(app)s"': 'Log erori pentru "%(app)s"',
'errors': 'erori',
'Errors': 'Erori',
'Export': 'Export',
'export as csv file': 'exportă ca fișier csv',
'exposes': 'expune',
'extends': 'extinde',
'failed to reload module': 'reîncarcare modul nereușită',
'False': 'Neadevărat',
'FAQ': 'Întrebări frecvente',
'file "%(filename)s" created': 'fișier "%(filename)s" creat',
'file "%(filename)s" deleted': 'fișier "%(filename)s" șters',
'file "%(filename)s" uploaded': 'fișier "%(filename)s" încărcat',
'file "%(filename)s" was not deleted': 'fișierul "%(filename)s" n-a fost șters',
'file "%s" of %s restored': 'fișier "%s" de %s restaurat',
'file changed on disk': 'fișier modificat pe disc',
'file does not exist': 'fișier inexistent',
'file saved on %(time)s': 'fișier salvat %(time)s',
'file saved on %s': 'fișier salvat pe %s',
'First name': 'Prenume',
'Forbidden': 'Interzis',
'Forms and Validators': 'Formulare și validatori',
'Free Applications': 'Aplicații gratuite',
'Functions with no doctests will result in [passed] tests.': 'Funcțiile fără doctests vor genera teste [trecute].',
'Group %(group_id)s created': 'Grup %(group_id)s creat',
'Group ID': 'ID grup',
'Group uniquely assigned to user %(id)s': 'Grup asociat în mod unic utilizatorului %(id)s',
'Groups': 'Grupuri',
'Hello World': 'Salutare lume',
'help': 'ajutor',
'Home': 'Acasă',
'How did you get here?': 'Cum ați ajuns aici?',
'htmledit': 'editare html',
'import': 'import',
'Import/Export': 'Import/Export',
'includes': 'include',
'Index': 'Index',
'insert new': 'adaugă nou',
'insert new %s': 'adaugă nou %s',
'Installed applications': 'Aplicații instalate',
'internal error': 'eroare internă',
'Internal State': 'Stare internă',
'Introduction': 'Introducere',
'Invalid action': 'Acțiune invalidă',
'Invalid email': 'E-mail invalid',
'invalid password': 'parolă invalidă',
'Invalid password': 'Parolă invalidă',
'Invalid Query': 'Interogare invalidă',
'invalid request': 'cerere invalidă',
'invalid ticket': 'tichet invalid',
'Key': 'Key',
'language file "%(filename)s" created/updated': 'fișier de limbă "%(filename)s" creat/actualizat',
'Language files (static strings) updated': 'Fișierele de limbă (șirurile statice de caractere) actualizate',
'languages': 'limbi',
'Languages': 'Limbi',
'languages updated': 'limbi actualizate',
'Last name': 'Nume',
'Last saved on:': 'Ultima salvare:',
'Layout': 'Șablon',
'Layout Plugins': 'Șablon plugin-uri',
'Layouts': 'Șabloane',
'License for': 'Licență pentru',
'Live Chat': 'Chat live',
'loading...': 'încarc...',
'Logged in': 'Logat',
'Logged out': 'Delogat',
'Login': 'Autentificare',
'login': 'autentificare',
'Login to the Administrative Interface': 'Logare interfață de administrare',
'logout': 'ieșire',
'Logout': 'Ieșire',
'Lost Password': 'Parolă pierdută',
'Lost password?': 'Parolă pierdută?',
'Main Menu': 'Meniu principal',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Model meniu',
'merge': 'unește',
'Models': 'Modele',
'models': 'modele',
'Modules': 'Module',
'modules': 'module',
'My Sites': 'Site-urile mele',
'Name': 'Nume',
'New': 'Nou',
'new application "%s" created': 'aplicația nouă "%s" a fost creată',
'New password': 'Parola nouă',
'New Record': 'Înregistrare nouă',
'new record inserted': 'înregistrare nouă adăugată',
'next 100 rows': 'următoarele 100 de linii',
'NO': 'NU',
'No databases in this application': 'Aplicație fără bază de date',
'Object or table name': 'Obiect sau nume de tabel',
'Old password': 'Parola veche',
'Online examples': 'Exemple online',
'Or': 'Sau',
'or import from csv file': 'sau importă din fișier csv',
'or provide application url:': 'sau furnizează adresă url:',
'Origin': 'Origine',
'Original/Translation': 'Original/Traducere',
'Other Plugins': 'Alte plugin-uri',
'Other Recipes': 'Alte rețete',
'Overview': 'Prezentare de ansamblu',
'pack all': 'împachetează toate',
'pack compiled': 'pachet compilat',
'Password': 'Parola',
"Password fields don't match": 'Câmpurile de parolă nu se potrivesc',
'Peeking at file': 'Vizualizare fișier',
'please input your password again': 'introduceți parola din nou',
'Plugins': 'Plugin-uri',
'Powered by': 'Pus în mișcare de',
'Preface': 'Prefață',
'previous 100 rows': '100 de linii anterioare',
'Profile': 'Profil',
'Python': 'Python',
'Query': 'Interogare',
'Query:': 'Interogare:',
'Quick Examples': 'Exemple rapide',
'RAM': 'RAM',
'RAM Cache Keys': 'Chei cache RAM',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Rețete',
'Record': 'înregistrare',
'record does not exist': 'înregistrare inexistentă',
'Record ID': 'ID înregistrare',
'Record id': 'id înregistrare',
'register': 'înregistrare',
'Register': 'Înregistrare',
'Registration identifier': 'Identificator de autentificare',
'Registration key': 'Cheie înregistrare',
'Registration successful': 'Autentificare reușită',
'Remember me (for 30 days)': 'Ține-mă minte (timp de 30 de zile)',
'remove compiled': 'șterge compilate',
'Request reset password': 'Cerere resetare parolă',
'Reset Password key': 'Cheie restare parolă',
'Resolve Conflict file': 'Fișier rezolvare conflict',
'restore': 'restaurare',
'revert': 'revenire',
'Role': 'Rol',
'Rows in Table': 'Linii în tabel',
'Rows selected': 'Linii selectate',
'save': 'salvare',
'Save profile': 'Salvează profil',
'Saved file hash:': 'Hash fișier salvat:',
'Search': 'Căutare',
'Semantic': 'Semantică',
'Services': 'Servicii',
'session expired': 'sesiune expirată',
'shell': 'line de commandă',
'site': 'site',
'Size of cache:': 'Size of cache:',
'some files could not be removed': 'anumite fișiere n-au putut fi șterse',
'starts with': 'începe cu',
'state': 'stare',
'static': 'static',
'Static files': 'Fișiere statice',
'Statistics': 'Statistics',
'Stylesheet': 'Foaie de stiluri',
'Submit': 'Înregistrează',
'submit': 'submit',
'Support': 'Suport',
'Sure you want to delete this object?': 'Sigur ștergeți acest obiect?',
'Table': 'tabel',
'Table name': 'Nume tabel',
'test': 'test',
'Testing application': 'Testare aplicație',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Interogarea (query)" este o condiție de tipul "db.tabel1.câmp1==\'valoare\'". Ceva de genul "db.tabel1.câmp1==db.tabel2.câmp2" generează un JOIN SQL.',
'the application logic, each URL path is mapped in one exposed function in the controller': 'logica aplicației, fiecare rută URL este mapată într-o funcție expusă de controlor',
'The Core': 'Nucleul',
'the data representation, define database tables and sets': 'reprezentarea datelor, definește tabelele bazei de date și seturile (de date)',
'The output of the file is a dictionary that was rendered by the view %s': 'Fișierul produce un dicționar care a fost prelucrat de vederea %s',
'the presentations layer, views are also known as templates': 'nivelul de prezentare, vederile sunt de asemenea numite și șabloane',
'The Views': 'Vederile',
'There are no controllers': 'Nu există controlori',
'There are no models': 'Nu există modele',
'There are no modules': 'Nu există module',
'There are no static files': 'Nu există fișiere statice',
'There are no translators, only default language is supported': 'Nu există traduceri, doar limba implicită este suportată',
'There are no views': 'Nu există vederi',
'these files are served without processing, your images go here': 'aceste fișiere sunt servite fără procesare, imaginea se plasează acolo',
'This App': 'Această aplicație',
'This is a copy of the scaffolding application': 'Aceasta este o copie a aplicației schelet',
'This is the %(filename)s template': 'Aceasta este șablonul fișierului %(filename)s',
'Ticket': 'Tichet',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Moment în timp (timestamp)',
'to previous version.': 'la versiunea anterioară.',
'too short': 'prea scurt',
'translation strings for the application': 'șiruri de caractere folosite la traducerea aplicației',
'True': 'Adevărat',
'try': 'încearcă',
'try something like': 'încearcă ceva de genul',
'Twitter': 'Twitter',
'Unable to check for upgrades': 'Imposibil de verificat dacă există actualizări',
'unable to create application "%s"': 'imposibil de creat aplicația "%s"',
'unable to delete file "%(filename)s"': 'imposibil de șters fișierul "%(filename)s"',
'Unable to download': 'Imposibil de descărcat',
'Unable to download app': 'Imposibil de descărcat aplicația',
'unable to parse csv file': 'imposibil de analizat fișierul csv',
'unable to uninstall "%s"': 'imposibil de dezinstalat "%s"',
'uncheck all': 'decoșează tot',
'uninstall': 'dezinstalează',
'update': 'actualizează',
'update all languages': 'actualizează toate limbile',
'Update:': 'Actualizare:',
'upload application:': 'incarcă aplicația:',
'Upload existing application': 'Încarcă aplicația existentă',
'upload file:': 'încarcă fișier:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Folosiți (...)&(...) pentru AND, (...)|(...) pentru OR, și ~(...) pentru NOT, pentru a crea interogări complexe.',
'User %(id)s Logged-in': 'Utilizator %(id)s autentificat',
'User %(id)s Logged-out': 'Utilizator %(id)s delogat',
'User %(id)s Password changed': 'Parola utilizatorului %(id)s a fost schimbată',
'User %(id)s Password reset': 'Resetare parola utilizator %(id)s',
'User %(id)s Profile updated': 'Profil utilizator %(id)s actualizat',
'User %(id)s Registered': 'Utilizator %(id)s înregistrat',
'User ID': 'ID utilizator',
'value already in database or empty': 'Valoare existentă în baza de date sau vidă',
'Verify Password': 'Verifică parola',
'versioning': 'versiuni',
'Videos': 'Video-uri',
'View': 'Vedere',
'view': 'vedere',
'Views': 'Vederi',
'views': 'vederi',
'web2py is up to date': 'web2py este la zi',
'web2py Recent Tweets': 'Ultimele tweet-uri web2py',
'Welcome': 'Bine ați venit',
'Welcome %s': 'Bine ați venit %s',
'Welcome to web2py': 'Bun venit la web2py',
'Welcome to web2py!': 'Bun venit la web2py!',
'Which called the function %s located in the file %s': 'Care a apelat funcția %s prezentă în fișierul %s',
'YES': 'DA',
'You are successfully running web2py': 'Rulați cu succes web2py',
'You can modify this application and adapt it to your needs': 'Puteți modifica și adapta aplicația nevoilor dvs.',
'You visited the url %s': 'Ați vizitat adresa %s',
}
|
arjunmenon/dokku | refs/heads/master | tests/apps/python-flask/hello.py | 236 | import os
from flask import Flask
# WSGI application object; Flask uses __name__ to locate app resources.
app = Flask(__name__)

@app.route('/')
def hello():
    # Root endpoint: returns a fixed marker string identifying this
    # sample python/flask app.
    return 'python/flask'
|
siggame/siggame-vote | refs/heads/master | vote/accounts/urls.py | 1 | from django.conf.urls.defaults import patterns, url
from django.contrib.auth.views import logout
from .views import LoginStageOneView, LoginStageTwoView
# URL routes for the accounts app: a two-stage login flow plus logout.
urlpatterns = patterns(
    '',
    # First stage of the login flow.
    url('^login/$', LoginStageOneView.as_view(), name="login"),
    # Second stage of the login flow.
    url('^login/next/', LoginStageTwoView.as_view(), name="login2"),
    # Logout redirects off-site afterwards via next_page.
    url('^logout/$', logout, {"next_page": "http://siggame.github.com"}, name="logout"),
)
|
EndyKaufman/django-postgres-angularjs-blog | refs/heads/master | app/manager/properties/validator.py | 1 | # -*- coding: utf-8 -*-
from project import helpers
import resource
def create(request):
    """Validate a properties-create request.

    Returns a ``(payload, http_status, ok)`` triple; ``ok`` is True only
    when the request carries data, a name, a superuser, and the name is
    not already taken.
    """
    data = request.DATA
    if data is False:
        return {'code': 'no_data'}, 404, False

    data = helpers.set_null_values_if_not_exist(data, resource.get_fields())
    name = data['name']
    if name is None:
        return {'code': 'properties/no_name'}, 404, False

    user = helpers.get_user(request)
    if not user or not request.user.is_superuser:
        return {'code': 'no_access'}, 404, False
    # NOTE(review): unreachable -- ``not user`` above already returns when
    # user is None; kept in case the intended check order differs.
    if user is None:
        return {'code': 'account/not_active'}, 404, False

    # BUG FIX: keep the lookup result in separate names -- the original
    # rebound ``data`` to resource.get_item_by_name()'s payload and then
    # reported ``data['name']`` from that payload, which need not contain
    # the requested name at all.
    _payload, _code, item = resource.get_item_by_name(request, name)
    if item is not False:
        return {'code': 'properties/exists', 'values': [name]}, 404, False

    return {'code': 'ok'}, 200, True
def apply_on_site(request):
    """Validate an apply-on-site request (superuser only).

    Returns a ``(payload, http_status, ok)`` triple, matching the other
    validators in this module. (Fixes the copy-pasted "Update record"
    docstring; the unused ``data = request.DATA`` read was also dropped.)
    """
    user = helpers.get_user(request)
    if not user or not request.user.is_superuser:
        return {'code': 'no_access'}, 404, False
    # NOTE(review): unreachable -- ``not user`` above already returns when
    # user is None; kept for parity with the other validators.
    if user is None:
        return {'code': 'account/not_active'}, 404, False

    return {'code': 'ok'}, 200, True
def update(request, properties_id):
    """Validate a properties-update request for record *properties_id*.

    Returns a ``(payload, http_status, ok)`` triple; ``ok`` is True only
    when the request is well-formed, made by a superuser, and the new
    name does not collide with a *different* existing record.
    """
    data = request.DATA
    if data is False:
        return {'code': 'no_data'}, 404, False

    data = helpers.set_null_values_if_not_exist(data, resource.get_fields())
    name = data['name']
    if name is None:
        return {'code': 'properties/no_name'}, 404, False

    user = helpers.get_user(request)
    if not user or not request.user.is_superuser:
        return {'code': 'no_access'}, 404, False
    # NOTE(review): unreachable -- ``not user`` above already returns when
    # user is None; kept in case the intended check order differs.
    if user is None:
        return {'code': 'account/not_active'}, 404, False

    # Keep the lookup result in separate names so the validated payload is
    # not clobbered (same fix as in create()).
    _payload, _code, item = resource.get_item_by_name(request, name)
    if (item is not False) and (int(item.id) != int(properties_id)):
        # BUG FIX: the original reported data['text'] here -- a key this
        # validator never checks and the payload need not contain; report
        # the conflicting name, consistent with create().
        return {'code': 'properties/exists', 'values': [name]}, 404, False

    return {'code': 'ok'}, 200, True
def delete(request):
    """Validate a properties-delete request (superuser only).

    Returns a ``(payload, http_status, ok)`` triple. (Fixes the
    copy-pasted "Update record" docstring.)
    """
    data = request.DATA
    if data is False:
        return {'code': 'no_data'}, 404, False

    user = helpers.get_user(request)
    if not user or not request.user.is_superuser:
        return {'code': 'no_access'}, 404, False
    # NOTE(review): unreachable -- ``not user`` above already returns when
    # user is None; kept for parity with the other validators.
    if user is None:
        return {'code': 'account/not_active'}, 404, False

    return {'code': 'ok'}, 200, True
|
Buguin/Mandarin | refs/heads/master | toolkit/common/folder_tools.py | 1 | # -*- codeing:utf-8 -*-
# __author__ = 'Buguin'
import os
def get_folder_size(dir_path):
    """Return the total size, in bytes, of all files under *dir_path*.

    :param dir_path: The path of the folder, like ``D:\\Temp``
    :return: Sum of the sizes of every regular file in the tree;
             directory entries themselves are not counted.
    """
    total = 0
    for root, _dirs, file_names in os.walk(dir_path):
        for file_name in file_names:
            total += os.path.getsize(os.path.join(root, file_name))
    return total
def get_folder_status(fodler_path):
    """Return a status code describing the folder at *fodler_path*.

    :param fodler_path: The path of the folder, like D:\Temp
    :return: 1 -- the folder does not exist;
             2 -- the folder exists but is (nearly) empty (< 30 bytes);
             3 -- the folder exists and is not empty.
    """
    if not os.path.exists(fodler_path):
        print("This size of repo is not existed")
        return 1

    print("This size of repo is existed")
    print(float(get_folder_size(fodler_path)))
    # The size is re-queried for the report prints, exactly as before.
    if float(get_folder_size(fodler_path)) < 30:
        print("This size of repo is ", get_folder_size(fodler_path) / 1024 / 1024)
        return 2
    print("This size of repo is ", get_folder_size(fodler_path) / 1024 / 1024)
    return 3
def get_folder_path(fodler_name):
    """Return the prefix of the current working directory up to and
    including the component named *fodler_name*.

    NOTE(review): splits on a literal backslash, so this only works for
    Windows-style cwd paths; raises ValueError when *fodler_name* is not
    a component of os.getcwd() -- confirm callers guarantee both.
    """
    folder_path = os.getcwd()
    path_list = folder_path.split('\\')
    # Keep everything up to and including the named component.
    num = path_list.index(fodler_name)
    path_list = path_list[:num + 1]
    folder_path = '\\'.join(path_list)
    return folder_path
|
450W16/MODACT | refs/heads/master | src/characters/__init__.py | 1 | __all__ = ["basic_enemy", "biggie", "enemy", "player", "tracy", "transformed", "directions", "ghost", "monkey", "wolf"]
|
mou4e/zirconium | refs/heads/master | tools/telemetry/telemetry/core/local_server_unittest.py | 21 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import BaseHTTPServer
import SimpleHTTPServer
from telemetry.core import local_server
from telemetry.unittest_util import tab_test_case
class SimpleLocalServerBackendRequestHandler(
    SimpleHTTPServer.SimpleHTTPRequestHandler):
  """Request handler that answers every GET with a fixed HTML page."""

  def do_GET(self):
    # The body text ("hello world") is what the unit test below asserts on.
    msg = """<!DOCTYPE html>
<html>
<body>
hello world
</body>
"""
    self.send_response(200)
    self.send_header('Content-Type', 'text/html')
    self.send_header('Content-Length', len(msg))
    self.end_headers()
    self.wfile.write(msg)

  def log_request(self, code='-', size='-'):
    # Silence the default per-request stderr logging.
    pass
class SimpleLocalServerBackend(BaseHTTPServer.HTTPServer,
                               local_server.LocalServerBackend):
  """Minimal HTTP backend used to exercise the local_server machinery."""

  def __init__(self):
    # Port 0 lets the OS pick a free port; it is reported back via
    # StartAndGetNamedPorts below.
    BaseHTTPServer.HTTPServer.__init__(
        self, ('127.0.0.1', 0), SimpleLocalServerBackendRequestHandler)
    local_server.LocalServerBackend.__init__(self)

  def StartAndGetNamedPorts(self, args):
    # The args dict must round-trip from GetBackendStartupArgs().
    assert 'hello' in args
    assert args['hello'] == 'world'
    return [local_server.NamedPort('http', self.server_address[1])]

  def ServeForever(self):
    self.serve_forever()
class SimpleLocalServer(local_server.LocalServer):
  """Front-end wrapper pairing the test backend with the browser."""

  def __init__(self):
    super(SimpleLocalServer, self).__init__(SimpleLocalServerBackend)

  def GetBackendStartupArgs(self):
    # Checked verbatim by SimpleLocalServerBackend.StartAndGetNamedPorts.
    return {'hello': 'world'}

  @property
  def url(self):
    # Root URL of the served page, via the browser-side forwarder.
    return self.forwarder.url + '/'
class LocalServerUnittest(tab_test_case.TabTestCase):
  """Exercises starting, using, closing and restarting a local server."""

  @classmethod
  def setUpClass(cls):
    super(LocalServerUnittest, cls).setUpClass()
    cls._server = SimpleLocalServer()
    cls._browser.StartLocalServer(cls._server)

  def testLocalServer(self):
    # The started server must be registered with the browser, and the
    # served page body must match what the handler emits.
    self.assertTrue(self._server in self._browser.local_servers)
    self._tab.Navigate(self._server.url)
    self._tab.WaitForDocumentReadyStateToBeComplete()
    body_text = self._tab.EvaluateJavaScript('document.body.textContent')
    body_text = body_text.strip()
    self.assertEquals('hello world', body_text)

  def testStartingAndRestarting(self):
    # Starting a second server of the same class while the first is
    # running must fail ...
    server2 = SimpleLocalServer()
    self.assertRaises(Exception,
                      lambda: self._browser.StartLocalServer(server2))

    # ... but succeeds once the first one has been closed.
    self._server.Close()
    self.assertTrue(self._server not in self._browser.local_servers)

    self._browser.StartLocalServer(server2)
|
glneo/gnuradio | refs/heads/master | gr-qtgui/examples/pyqt_time_c.py | 38 | #!/usr/bin/env python
#
# Copyright 2011,2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr
from gnuradio import blocks
import sys
try:
from gnuradio import qtgui
from PyQt4 import QtGui, QtCore
import sip
except ImportError:
sys.stderr.write("Error: Program requires PyQt4 and gr-qtgui.\n")
sys.exit(1)
try:
from gnuradio import analog
except ImportError:
sys.stderr.write("Error: Program requires gr-analog.\n")
sys.exit(1)
try:
from gnuradio import channels
except ImportError:
sys.stderr.write("Error: Program requires gr-channels.\n")
sys.exit(1)
class dialog_box(QtGui.QWidget):
    """Top-level window laying out the plot display next to the controls."""

    def __init__(self, display, control):
        QtGui.QWidget.__init__(self, None)
        self.setWindowTitle('PyQt Test GUI')

        # Left-to-right layout: plot widget (stretch factor 1) on the
        # left, control panel on the right.
        layout = QtGui.QBoxLayout(QtGui.QBoxLayout.LeftToRight, self)
        layout.addWidget(display, 1)
        layout.addWidget(control)
        self.boxlayout = layout

        self.resize(800, 500)
class control_box(QtGui.QWidget):
    """Control panel with line edits for the two signals' frequency and
    amplitude, wired to the attached gr signal sources."""

    def __init__(self, parent=None):
        QtGui.QWidget.__init__(self, parent)
        self.setWindowTitle('Control Panel')
        self.setToolTip('Control the signals')
        QtGui.QToolTip.setFont(QtGui.QFont('OldEnglish', 10))
        self.layout = QtGui.QFormLayout(self)
        # Control the first signal
        self.freq1Edit = QtGui.QLineEdit(self)
        self.freq1Edit.setMinimumWidth(100)
        self.layout.addRow("Signal 1 Frequency:", self.freq1Edit)
        # editingFinished fires on Enter or focus loss.
        self.connect(self.freq1Edit, QtCore.SIGNAL("editingFinished()"),
                     self.freq1EditText)
        self.amp1Edit = QtGui.QLineEdit(self)
        self.amp1Edit.setMinimumWidth(100)
        self.layout.addRow("Signal 1 Amplitude:", self.amp1Edit)
        self.connect(self.amp1Edit, QtCore.SIGNAL("editingFinished()"),
                     self.amp1EditText)
        # Control the second signal
        self.freq2Edit = QtGui.QLineEdit(self)
        self.freq2Edit.setMinimumWidth(100)
        self.layout.addRow("Signal 2 Frequency:", self.freq2Edit)
        self.connect(self.freq2Edit, QtCore.SIGNAL("editingFinished()"),
                     self.freq2EditText)
        self.amp2Edit = QtGui.QLineEdit(self)
        self.amp2Edit.setMinimumWidth(100)
        self.layout.addRow("Signal 2 Amplitude:", self.amp2Edit)
        self.connect(self.amp2Edit, QtCore.SIGNAL("editingFinished()"),
                     self.amp2EditText)
        self.quit = QtGui.QPushButton('Close', self)
        self.quit.setMinimumWidth(100)
        self.layout.addWidget(self.quit)
        self.connect(self.quit, QtCore.SIGNAL('clicked()'),
                     QtGui.qApp, QtCore.SLOT('quit()'))

    def attach_signal1(self, signal):
        """Bind signal source 1 and seed its edit boxes from its state."""
        self.signal1 = signal
        self.freq1Edit.setText(QtCore.QString("%1").arg(self.signal1.frequency()))
        self.amp1Edit.setText(QtCore.QString("%1").arg(self.signal1.amplitude()))

    def attach_signal2(self, signal):
        """Bind signal source 2 and seed its edit boxes from its state."""
        self.signal2 = signal
        self.freq2Edit.setText(QtCore.QString("%1").arg(self.signal2.frequency()))
        self.amp2Edit.setText(QtCore.QString("%1").arg(self.signal2.amplitude()))

    def freq1EditText(self):
        """Push the edited frequency to signal 1; ignore non-numeric input."""
        try:
            newfreq = float(self.freq1Edit.text())
            self.signal1.set_frequency(newfreq)
        except ValueError:
            print "Bad frequency value entered"

    def amp1EditText(self):
        """Push the edited amplitude to signal 1; ignore non-numeric input."""
        try:
            newamp = float(self.amp1Edit.text())
            self.signal1.set_amplitude(newamp)
        except ValueError:
            print "Bad amplitude value entered"

    def freq2EditText(self):
        """Push the edited frequency to signal 2; ignore non-numeric input."""
        try:
            newfreq = float(self.freq2Edit.text())
            self.signal2.set_frequency(newfreq)
        except ValueError:
            print "Bad frequency value entered"

    def amp2EditText(self):
        """Push the edited amplitude to signal 2; ignore non-numeric input."""
        try:
            newamp = float(self.amp2Edit.text())
            self.signal2.set_amplitude(newamp)
        except ValueError:
            print "Bad amplitude value entered"
class my_top_block(gr.top_block):
    """Flowgraph: two complex sine sources summed, passed through a noisy
    channel model and a throttle into a qtgui time sink, with a Qt control
    panel for the sources' frequency/amplitude."""

    def __init__(self):
        gr.top_block.__init__(self)
        # Sample rate, the two tone frequencies, and points per plot frame.
        Rs = 8000
        f1 = 100
        f2 = 200
        npts = 2048
        self.qapp = QtGui.QApplication(sys.argv)
        # Apply GNU Radio's dark Qt stylesheet to the whole application.
        ss = open(gr.prefix() + '/share/gnuradio/themes/dark.qss')
        sstext = ss.read()
        ss.close()
        self.qapp.setStyleSheet(sstext)
        src1 = analog.sig_source_c(Rs, analog.GR_SIN_WAVE, f1, 0.1, 0)
        src2 = analog.sig_source_c(Rs, analog.GR_SIN_WAVE, f2, 0.1, 0)
        src = blocks.add_cc()
        channel = channels.channel_model(0.01)
        # Throttle keeps this non-hardware flowgraph from free-running.
        thr = blocks.throttle(gr.sizeof_gr_complex, 100*npts)
        self.snk1 = qtgui.time_sink_c(npts, Rs,
                                      "Complex Time Example", 1)
        self.connect(src1, (src,0))
        self.connect(src2, (src,1))
        self.connect(src, channel, thr, (self.snk1, 0))
        #self.connect(src1, (self.snk1, 1))
        #self.connect(src2, (self.snk1, 2))
        self.ctrl_win = control_box()
        self.ctrl_win.attach_signal1(src1)
        self.ctrl_win.attach_signal2(src2)
        # Get the reference pointer to the SpectrumDisplayForm QWidget
        pyQt = self.snk1.pyqwidget()
        # Wrap the pointer as a PyQt SIP object
        # This can now be manipulated as a PyQt4.QtGui.QWidget
        pyWin = sip.wrapinstance(pyQt, QtGui.QWidget)
        # Example of using signal/slot to set the title of a curve
        pyWin.connect(pyWin, QtCore.SIGNAL("setLineLabel(int, QString)"),
                      pyWin, QtCore.SLOT("setLineLabel(int, QString)"))
        pyWin.emit(QtCore.SIGNAL("setLineLabel(int, QString)"), 0, "Re{sum}")
        # Curve labels can also be set directly on the sink.
        self.snk1.set_line_label(1, "Im{Sum}")
        #self.snk1.set_line_label(2, "Re{src1}")
        #self.snk1.set_line_label(3, "Im{src1}")
        #self.snk1.set_line_label(4, "Re{src2}")
        #self.snk1.set_line_label(5, "Im{src2}")
        # Can also set the color of a curve
        #self.snk1.set_color(5, "blue")
        self.snk1.set_update_time(0.5)
        #pyWin.show()
        # Compose the plot and the control panel into the main window.
        self.main_box = dialog_box(pyWin, self.ctrl_win)
        self.main_box.show()
# Build the flowgraph, run it alongside the Qt event loop, then stop.
if __name__ == "__main__":
    tb = my_top_block();
    tb.start()
    tb.qapp.exec_()
    tb.stop()
|
jisqyv/p2pool | refs/heads/master | p2pool/util/skiplist.py | 278 | from p2pool.util import math, memoize
class SkipList(object):
    """Skip-list-like search structure over a hash-linked chain.

    Subclasses provide the traversal hooks used below: initial_solution(),
    judge(), apply_delta(), combine_deltas(), previous() and get_delta()
    -- none of them are defined here. Each chain position gets a randomly
    chosen skip height (geometric with parameter *p*) and a list of
    (jump-target, accumulated-delta) shortcuts, built lazily as queries
    walk the chain.
    """

    def __init__(self, p=0.5):
        # p: parameter of the geometric distribution for skip heights.
        self.p = p
        # skips maps position -> (skip_length, [(jump_target, delta), ...]).
        self.skips = {}

    def forget_item(self, item):
        """Drop any cached shortcut data for *item* (e.g. when it changes)."""
        self.skips.pop(item, None)

    @memoize.memoize_with_backing(memoize.LRUDict(5))
    def __call__(self, start, *args):
        """Walk backwards from *start*, accumulating deltas into a solution
        until judge() reports it complete (== 0), building higher-level
        shortcut links as a side effect. Results are LRU-memoized.
        """
        updates = {}
        pos = start
        sol = self.initial_solution(start, args)
        if self.judge(sol, args) == 0:
            return self.finalize(sol, args)
        while True:
            # Lazily create the level-0 link: one step back with its delta.
            if pos not in self.skips:
                self.skips[pos] = math.geometric(self.p), [(self.previous(pos), self.get_delta(pos))]
            skip_length, skip = self.skips[pos]
            # fill previous updates: pending level-i links from earlier
            # nodes now terminate at this node with their accumulated delta.
            for i in xrange(skip_length):
                if i in updates:
                    that_hash, delta = updates.pop(i)
                    x, y = self.skips[that_hash]
                    assert len(y) == i
                    y.append((pos, delta))
            # put desired skip nodes in updates: levels this node is tall
            # enough for but does not have links for yet.
            for i in xrange(len(skip), skip_length):
                updates[i] = pos, None
            #if skip_length + 1 in updates:
            #    updates[skip_length + 1] = self.combine(updates[skip_length + 1], updates[skip_length])
            # Try the longest jump first; fall through to shorter ones.
            for jump, delta in reversed(skip):
                sol_if = self.apply_delta(sol, delta, args)
                decision = self.judge(sol_if, args)
                #print pos, sol, jump, delta, sol_if, decision
                if decision == 0:
                    return self.finalize(sol_if, args)
                elif decision < 0:
                    # Jump does not overshoot -- take it.
                    sol = sol_if
                    break
            else:
                # No link usable; judge() never returned <= 0.
                raise AssertionError()
            sol = sol_if
            pos = jump
            # XXX could be better by combining updates
            # Fold the taken jump's delta into every pending link.
            for x in updates:
                updates[x] = updates[x][0], self.combine_deltas(updates[x][1], delta) if updates[x][1] is not None else delta

    def finalize(self, sol, args):
        # Default: return the accumulated solution unchanged; subclasses
        # may post-process.
        return sol
|
Sumith1896/sympy | refs/heads/master | sympy/parsing/__init__.py | 135 | """Used for translating a string into a SymPy expression. """
|
pinkavaj/gnuradio | refs/heads/master | gr-fec/python/fec/qa_depuncture.py | 47 | #!/usr/bin/env python
#
# Copyright 2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
import fec_swig as fec
import blocks_swig as blocks
from collections import deque
class test_depuncture (gr_unittest.TestCase):
    """QA for fec.depuncture_bb.

    The depuncture block re-inserts a fill symbol at the punctured
    positions of the stream, according to a puncture pattern of
    `puncsize` bits, an optional `delay` (pattern rotation) and the
    fill symbol `sym`.
    """

    def depuncture_setup(self):
        """Compute self.expected: a pure-Python model of what
        depuncture_bb should emit for self.src_data given the current
        puncsize/puncpat/delay/sym/puncholes settings."""
        # Expand the pattern into a bit list (MSB first), then rotate by delay.
        pattern = [(self.puncpat >> (self.puncsize - 1 - i)) & 1
                   for i in range(self.puncsize)]
        d = deque(pattern)
        d.rotate(self.delay)
        _puncpat = list(d)

        # Each group of (puncsize - puncholes) input items expands to
        # puncsize output items, with self.sym at the punctured positions.
        # // keeps the original Python 2 integer-division behavior.
        self.expected = []
        k = 0
        for _ in range(len(self.src_data) // (self.puncsize - self.puncholes)):
            for i in range(self.puncsize):
                if _puncpat[i] == 1:
                    self.expected.append(self.src_data[k])
                    k += 1
                else:
                    self.expected.append(self.sym)

    def setUp(self):
        # 2000 repetitions of 0..63: enough data to exercise steady state.
        self.src_data = 2000*range(64)
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def _run_flowgraph(self, op):
        """Connect src_data -> op -> sink, run to completion, and return
        the sink contents as a list of ints (shared by most tests)."""
        src = blocks.vector_source_b(self.src_data)
        dst = blocks.vector_sink_b()
        self.tb.connect(src, op, dst)
        self.tb.run()
        return [int(x) for x in dst.data()]

    def test_000(self):
        # Normal operation of the depuncture block.
        self.puncsize = 8
        self.puncpat = 0xEF
        self.delay = 0
        self.sym = 0
        self.puncholes = 1
        self.depuncture_setup()

        op = fec.depuncture_bb(self.puncsize, self.puncpat,
                               self.delay, self.sym)
        self.assertEqual(self.expected, self._run_flowgraph(op))

    def test_001(self):
        # Normal operation with a delayed (rotated) puncture pattern.
        self.puncsize = 8
        self.puncpat = 0xEF
        self.delay = 1
        self.sym = 0
        self.puncholes = 1
        self.depuncture_setup()

        op = fec.depuncture_bb(self.puncsize, self.puncpat,
                               self.delay, self.sym)
        self.assertEqual(self.expected, self._run_flowgraph(op))

    def test_002(self):
        # Puncture pattern defined with more bits than puncsize.
        self.puncsize = 4
        self.puncpat = 0x5555
        self.delay = 0
        self.sym = 0
        self.puncholes = 2
        self.depuncture_setup()

        op = fec.depuncture_bb(self.puncsize, self.puncpat,
                               self.delay, self.sym)
        self.assertEqual(self.expected, self._run_flowgraph(op))

    def test_003(self):
        # A puncture pattern with more bits than puncsize, combined with a
        # delay, must behave exactly like the correctly-sized pattern.
        # (The Python model above doesn't cover this case, so compare two
        # block instances against each other instead.)
        self.puncsize = 4
        self.puncpat0 = 0x5555  # too many bits set
        self.puncpat1 = 0x55    # num bits = puncsize
        self.delay = 1
        self.sym = 0

        src = blocks.vector_source_b(self.src_data)
        op0 = fec.depuncture_bb(self.puncsize, self.puncpat0,
                                self.delay, self.sym)
        op1 = fec.depuncture_bb(self.puncsize, self.puncpat1,
                                self.delay, self.sym)
        dst0 = blocks.vector_sink_b()
        dst1 = blocks.vector_sink_b()
        self.tb.connect(src, op0, dst0)
        self.tb.connect(src, op1, dst1)
        self.tb.run()

        dst_data0 = [int(x) for x in dst0.data()]
        dst_data1 = [int(x) for x in dst1.data()]
        self.assertEqual(dst_data1, dst_data0)

    def test_004(self):
        # When the fill symbol is not specified it defaults to 127.
        self.puncsize = 8
        self.puncpat = 0xEF
        self.delay = 0
        self.sym = 127
        self.puncholes = 1
        self.depuncture_setup()

        op = fec.depuncture_bb(self.puncsize, self.puncpat,
                               self.delay)
        self.assertEqual(self.expected, self._run_flowgraph(op))
if __name__ == '__main__':
    # Run this QA suite under GNU Radio's unittest wrapper; results are
    # also written to the named XML file.
    gr_unittest.run(test_depuncture, "test_depuncture.xml")
|
yeyanchao/calibre | refs/heads/master | src/calibre/gui2/ebook_download.py | 5 | # -*- coding: utf-8 -*-
from __future__ import (unicode_literals, division, absolute_import, print_function)
__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'
import os
import shutil
from contextlib import closing
from mechanize import MozillaCookieJar
from calibre import browser, get_download_filename
from calibre.ebooks import BOOK_EXTENSIONS
from calibre.gui2 import Dispatcher
from calibre.gui2.threaded_jobs import ThreadedJob
from calibre.ptempfile import PersistentTemporaryFile
from calibre.utils.filenames import ascii_filename
class EbookDownload(object):
    """Threaded-job callable that downloads an ebook, optionally adds it
    to the calibre library, and/or saves a copy to `save_loc`.

    The log/abort/notifications parameters are part of the ThreadedJob
    calling convention and are unused here.  The class keeps no instance
    state, so a single instance can serve all jobs.
    """

    def __call__(self, gui, cookie_file=None, url='', filename='',
                 save_loc='', add_to_lib=True, tags=(), log=None,
                 abort=None, notifications=None):
        dfilename = ''
        # Note: no `except Exception as e: raise e` here -- on Python 2
        # that truncates the traceback; try/finally re-raises unchanged.
        try:
            dfilename = self._download(cookie_file, url, filename, save_loc, add_to_lib)
            self._add(dfilename, gui, add_to_lib, tags)
            self._save_as(dfilename, save_loc)
        finally:
            # The download lands in a temporary file; always clean it up,
            # best-effort (it may already be gone).
            try:
                if dfilename:
                    os.remove(dfilename)
            except EnvironmentError:
                pass

    def _download(self, cookie_file, url, filename, save_loc, add_to_lib):
        """Fetch `url` into a persistent temporary file and return its path.

        Returns '' when neither saving nor adding to the library was
        requested.  Raises if no URL was given.
        """
        dfilename = ''
        if not url:
            raise Exception(_('No file specified to download.'))
        if not save_loc and not add_to_lib:
            # Nothing to do.
            return dfilename

        if not filename:
            filename = get_download_filename(url, cookie_file)
            # Cap the stem at 60 chars and make it filesystem-safe.
            filename, ext = os.path.splitext(filename)
            filename = filename[:60] + ext
            filename = ascii_filename(filename)

        br = browser()
        if cookie_file:
            cj = MozillaCookieJar()
            cj.load(cookie_file)
            br.set_cookiejar(cj)
        with closing(br.open(url)) as r:
            tf = PersistentTemporaryFile(suffix=filename)
            tf.write(r.read())
            dfilename = tf.name

        return dfilename

    def _add(self, filename, gui, add_to_lib, tags):
        """Add the downloaded file to the calibre library, if requested."""
        if not add_to_lib or not filename:
            return
        ext = os.path.splitext(filename)[1][1:].lower()
        if ext not in BOOK_EXTENSIONS:
            raise Exception(_('Not a supported ebook format.'))

        from calibre.ebooks.metadata.meta import get_metadata
        with open(filename, 'rb') as f:
            mi = get_metadata(f, ext, force_read_metadata=True)
        mi.tags.extend(tags)

        model = gui.library_view.model()
        book_id = model.db.create_book_entry(mi)
        model.db.add_format_with_hooks(book_id, ext.upper(), filename, index_is_id=True)
        model.books_added(1)
        model.count_changed()

    def _save_as(self, dfilename, save_loc):
        """Copy the downloaded file to the user-chosen location, if any."""
        if not save_loc or not dfilename:
            return
        shutil.copy(dfilename, save_loc)
# Shared job callable: EbookDownload holds no instance state, so one
# instance serves every ThreadedJob.
gui_ebook_download = EbookDownload()
def start_ebook_download(callback, job_manager, gui, cookie_file=None, url='',
                         filename='', save_loc='', add_to_lib=True, tags=()):
    """Queue a threaded download job for `url` and register `callback`
    to be invoked when it completes."""
    # Parentheses around the conditional are required: `%` binds tighter
    # than `if/else`, so the unparenthesized original dropped the
    # 'Downloading' prefix whenever filename was empty.
    description = _('Downloading %s') % (filename.decode('utf-8', 'ignore')
                                         if filename else
                                         url.decode('utf-8', 'ignore'))
    job = ThreadedJob('ebook_download', description, gui_ebook_download,
                      (gui, cookie_file, url, filename, save_loc, add_to_lib, tags),
                      {}, callback, max_concurrent_count=2, killable=False)
    job_manager.run_threaded_job(job)
class EbookDownloadMixin(object):
    """Mixin for the main GUI window providing ebook-download actions."""

    def download_ebook(self, url='', cookie_file=None, filename='',
                       save_loc='', add_to_lib=True, tags=()):
        """Start a threaded download of `url` and show a status message."""
        if tags:
            # Accept either a list of tags or a comma-separated string.
            if isinstance(tags, basestring):
                tags = tags.split(',')
        start_ebook_download(Dispatcher(self.downloaded_ebook), self.job_manager,
                             self, cookie_file, url, filename, save_loc,
                             add_to_lib, tags)
        # Parentheses around the conditional are required: `+` binds tighter
        # than `if/else`, so the unparenthesized original showed the bare URL
        # without the 'Downloading' prefix whenever filename was empty.
        self.status_bar.show_message(_('Downloading') + ' ' +
                                     (filename.decode('utf-8', 'ignore')
                                      if filename else
                                      url.decode('utf-8', 'ignore')), 3000)

    def downloaded_ebook(self, job):
        """Completion callback: report a failure dialog or a short
        'finished' status message."""
        if job.failed:
            self.job_exception(job, dialog_title=_('Failed to download ebook'))
            return
        self.status_bar.show_message(job.description + ' ' + _('finished'), 5000)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.