| repo_name (stringlengths 5–100) | ref (stringlengths 12–67) | path (stringlengths 4–244) | copies (stringlengths 1–8) | content (stringlengths 0–1.05M, ⌀ allowed) |
|---|---|---|---|---|
yamt/tempest
|
refs/heads/master
|
tempest/api/compute/servers/test_server_rescue_negative.py
|
11
|
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tempest_lib.common.utils import data_utils
from tempest_lib import exceptions as lib_exc
import testtools

from tempest.api.compute import base
from tempest import config
from tempest import test

CONF = config.CONF


class ServerRescueNegativeTestJSON(base.BaseV2ComputeTest):

    @classmethod
    def skip_checks(cls):
        super(ServerRescueNegativeTestJSON, cls).skip_checks()
        if not CONF.compute_feature_enabled.rescue:
            msg = "Server rescue not available."
            raise cls.skipException(msg)

    @classmethod
    def setup_credentials(cls):
        cls.set_network_resources(network=True, subnet=True, router=True)
        super(ServerRescueNegativeTestJSON, cls).setup_credentials()

    @classmethod
    def resource_setup(cls):
        super(ServerRescueNegativeTestJSON, cls).resource_setup()
        cls.device = CONF.compute.volume_device_name

        # Server for negative tests
        server = cls.create_test_server(wait_until='BUILD')
        resc_server = cls.create_test_server(wait_until='ACTIVE')
        cls.server_id = server['id']
        cls.password = server['adminPass']
        cls.rescue_id = resc_server['id']
        rescue_password = resc_server['adminPass']

        cls.servers_client.rescue_server(
            cls.rescue_id, adminPass=rescue_password)
        cls.servers_client.wait_for_server_status(cls.rescue_id, 'RESCUE')
        cls.servers_client.wait_for_server_status(cls.server_id, 'ACTIVE')

    def _create_volume(self):
        volume = self.volumes_extensions_client.create_volume(
            CONF.volume.volume_size, display_name=data_utils.rand_name(
                self.__class__.__name__ + '_volume'))
        self.addCleanup(self.delete_volume, volume['id'])
        self.volumes_extensions_client.wait_for_volume_status(
            volume['id'], 'available')
        return volume

    def _detach(self, server_id, volume_id):
        self.servers_client.detach_volume(server_id, volume_id)
        self.volumes_extensions_client.wait_for_volume_status(volume_id,
                                                              'available')

    def _unrescue(self, server_id):
        self.servers_client.unrescue_server(server_id)
        self.servers_client.wait_for_server_status(server_id, 'ACTIVE')

    def _unpause(self, server_id):
        self.servers_client.unpause_server(server_id)
        self.servers_client.wait_for_server_status(server_id, 'ACTIVE')

    @test.idempotent_id('cc3a883f-43c0-4fb6-a9bb-5579d64984ed')
    @testtools.skipUnless(CONF.compute_feature_enabled.pause,
                          'Pause is not available.')
    @test.attr(type=['negative'])
    def test_rescue_paused_instance(self):
        # Rescue a paused server
        self.servers_client.pause_server(self.server_id)
        self.addCleanup(self._unpause, self.server_id)
        self.servers_client.wait_for_server_status(self.server_id, 'PAUSED')
        self.assertRaises(lib_exc.Conflict,
                          self.servers_client.rescue_server,
                          self.server_id)

    @test.attr(type=['negative'])
    @test.idempotent_id('db22b618-f157-4566-a317-1b6d467a8094')
    def test_rescued_vm_reboot(self):
        self.assertRaises(lib_exc.Conflict, self.servers_client.reboot,
                          self.rescue_id, 'HARD')

    @test.attr(type=['negative'])
    @test.idempotent_id('6dfc0a55-3a77-4564-a144-1587b7971dde')
    def test_rescue_non_existent_server(self):
        # Rescue a non-existing server
        non_existent_server = data_utils.rand_uuid()
        self.assertRaises(lib_exc.NotFound,
                          self.servers_client.rescue_server,
                          non_existent_server)

    @test.attr(type=['negative'])
    @test.idempotent_id('70cdb8a1-89f8-437d-9448-8844fd82bf46')
    def test_rescued_vm_rebuild(self):
        self.assertRaises(lib_exc.Conflict,
                          self.servers_client.rebuild,
                          self.rescue_id,
                          self.image_ref_alt)

    @test.idempotent_id('d0ccac79-0091-4cf4-a1ce-26162d0cc55f')
    @test.services('volume')
    @test.attr(type=['negative'])
    def test_rescued_vm_attach_volume(self):
        volume = self._create_volume()

        # Rescue the server
        self.servers_client.rescue_server(self.server_id,
                                          adminPass=self.password)
        self.servers_client.wait_for_server_status(self.server_id, 'RESCUE')
        self.addCleanup(self._unrescue, self.server_id)

        # Attach the volume to the server
        self.assertRaises(lib_exc.Conflict,
                          self.servers_client.attach_volume,
                          self.server_id,
                          volume['id'],
                          device='/dev/%s' % self.device)

    @test.idempotent_id('f56e465b-fe10-48bf-b75d-646cda3a8bc9')
    @test.services('volume')
    @test.attr(type=['negative'])
    def test_rescued_vm_detach_volume(self):
        volume = self._create_volume()

        # Attach the volume to the server
        self.servers_client.attach_volume(self.server_id,
                                          volume['id'],
                                          device='/dev/%s' % self.device)
        self.volumes_extensions_client.wait_for_volume_status(
            volume['id'], 'in-use')

        # Rescue the server
        self.servers_client.rescue_server(self.server_id,
                                          adminPass=self.password)
        self.servers_client.wait_for_server_status(self.server_id, 'RESCUE')
        # addCleanup is a LIFO queue
        self.addCleanup(self._detach, self.server_id, volume['id'])
        self.addCleanup(self._unrescue, self.server_id)

        # Detach the volume from the server expecting failure
        self.assertRaises(lib_exc.Conflict,
                          self.servers_client.detach_volume,
                          self.server_id,
                          volume['id'])
|
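The tempest tests above combine two reusable patterns: compensating cleanups registered with addCleanup (executed LIFO once the test body finishes) and assertRaises to pin down the exact failure an API must return. A minimal, self-contained sketch of the pattern with plain unittest; the FakeServer class and Conflict exception are invented stand-ins for the real compute API:

```python
import unittest


class Conflict(Exception):
    """Stand-in for the 409 error a real compute API would raise."""


class FakeServer(object):
    """Hypothetical server whose state machine forbids rescue while paused."""

    def __init__(self):
        self.state = 'ACTIVE'

    def pause(self):
        self.state = 'PAUSED'

    def unpause(self):
        self.state = 'ACTIVE'

    def rescue(self):
        if self.state != 'ACTIVE':
            raise Conflict('cannot rescue a %s server' % self.state)
        self.state = 'RESCUE'


class RescueNegativeTest(unittest.TestCase):
    def test_rescue_paused_server(self):
        server = FakeServer()
        server.pause()
        # Cleanups run LIFO after the test body, mirroring tempest's usage.
        self.addCleanup(server.unpause)
        self.assertRaises(Conflict, server.rescue)


if __name__ == '__main__':
    unittest.main()
```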
jcoady9/python-for-android
|
refs/heads/master
|
python-build/python-libs/gdata/tests/gdata_tests/docs_test.py
|
89
|
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__author__ = ('api.jfisher (Jeff Fisher), '
              'api.eric@google.com (Eric Bidelman)')

import unittest
from gdata import test_data
import gdata.docs


class DocumentListEntryTest(unittest.TestCase):

  def setUp(self):
    self.dl_entry = gdata.docs.DocumentListEntryFromString(
        test_data.DOCUMENT_LIST_ENTRY)

  def testToAndFromStringWithData(self):
    entry = gdata.docs.DocumentListEntryFromString(str(self.dl_entry))
    self.assertEqual(entry.author[0].name.text, 'test.user')
    self.assertEqual(entry.author[0].email.text, 'test.user@gmail.com')
    self.assertEqual(entry.GetDocumentType(), 'spreadsheet')
    self.assertEqual(entry.id.text,
                     'http://docs.google.com/feeds/documents/private/full/' +
                     'spreadsheet%3Asupercalifragilisticexpealidocious')
    self.assertEqual(entry.title.text, 'Test Spreadsheet')
    self.assertEqual(entry.resourceId.text,
                     'spreadsheet:supercalifragilisticexpealidocious')
    self.assertEqual(entry.lastModifiedBy.name.text, 'test.user')
    self.assertEqual(entry.lastModifiedBy.email.text, 'test.user@gmail.com')
    self.assertEqual(entry.lastViewed.text, '2009-03-05T07:48:21.493Z')
    self.assertEqual(entry.writersCanInvite.value, 'true')


class DocumentListFeedTest(unittest.TestCase):

  def setUp(self):
    self.dl_feed = gdata.docs.DocumentListFeedFromString(
        test_data.DOCUMENT_LIST_FEED)

  def testToAndFromString(self):
    self.assert_(len(self.dl_feed.entry) == 2)
    for an_entry in self.dl_feed.entry:
      self.assert_(isinstance(an_entry, gdata.docs.DocumentListEntry))
    new_dl_feed = gdata.docs.DocumentListFeedFromString(str(self.dl_feed))
    for an_entry in new_dl_feed.entry:
      self.assert_(isinstance(an_entry, gdata.docs.DocumentListEntry))

  def testConvertActualData(self):
    for an_entry in self.dl_feed.entry:
      self.assertEqual(an_entry.author[0].name.text, 'test.user')
      self.assertEqual(an_entry.author[0].email.text, 'test.user@gmail.com')
      self.assertEqual(an_entry.lastModifiedBy.name.text, 'test.user')
      self.assertEqual(an_entry.lastModifiedBy.email.text,
                       'test.user@gmail.com')
      self.assertEqual(an_entry.lastViewed.text, '2009-03-05T07:48:21.493Z')
      if an_entry.GetDocumentType() == 'spreadsheet':
        self.assertEqual(an_entry.title.text, 'Test Spreadsheet')
        self.assertEqual(an_entry.writersCanInvite.value, 'true')
      elif an_entry.GetDocumentType() == 'document':
        self.assertEqual(an_entry.title.text, 'Test Document')
        self.assertEqual(an_entry.writersCanInvite.value, 'false')

  def testLinkFinderFindsLinks(self):
    for entry in self.dl_feed.entry:
      # All Document List entries should have a self link
      self.assert_(entry.GetSelfLink() is not None)
      # All Document List entries should have an HTML link
      self.assert_(entry.GetHtmlLink() is not None)
      self.assert_(entry.feedLink.href is not None)


class DocumentListAclEntryTest(unittest.TestCase):

  def setUp(self):
    self.acl_entry = gdata.docs.DocumentListAclEntryFromString(
        test_data.DOCUMENT_LIST_ACL_ENTRY)

  def testToAndFromString(self):
    self.assert_(isinstance(self.acl_entry, gdata.docs.DocumentListAclEntry))
    self.assert_(isinstance(self.acl_entry.role, gdata.docs.Role))
    self.assert_(isinstance(self.acl_entry.scope, gdata.docs.Scope))
    self.assertEqual(self.acl_entry.scope.value, 'user@gmail.com')
    self.assertEqual(self.acl_entry.scope.type, 'user')
    self.assertEqual(self.acl_entry.role.value, 'writer')

    acl_entry_str = str(self.acl_entry)
    new_acl_entry = gdata.docs.DocumentListAclEntryFromString(acl_entry_str)
    self.assert_(isinstance(new_acl_entry, gdata.docs.DocumentListAclEntry))
    self.assert_(isinstance(new_acl_entry.role, gdata.docs.Role))
    self.assert_(isinstance(new_acl_entry.scope, gdata.docs.Scope))
    self.assertEqual(new_acl_entry.scope.value, self.acl_entry.scope.value)
    self.assertEqual(new_acl_entry.scope.type, self.acl_entry.scope.type)
    self.assertEqual(new_acl_entry.role.value, self.acl_entry.role.value)

  def testCreateNewAclEntry(self):
    cat = gdata.atom.Category(
        term='http://schemas.google.com/acl/2007#accessRule',
        scheme='http://schemas.google.com/g/2005#kind')
    acl_entry = gdata.docs.DocumentListAclEntry(category=[cat])
    acl_entry.scope = gdata.docs.Scope(value='user@gmail.com', type='user')
    acl_entry.role = gdata.docs.Role(value='writer')
    self.assert_(isinstance(acl_entry, gdata.docs.DocumentListAclEntry))
    self.assert_(isinstance(acl_entry.role, gdata.docs.Role))
    self.assert_(isinstance(acl_entry.scope, gdata.docs.Scope))
    self.assertEqual(acl_entry.scope.value, 'user@gmail.com')
    self.assertEqual(acl_entry.scope.type, 'user')
    self.assertEqual(acl_entry.role.value, 'writer')


class DocumentListAclFeedTest(unittest.TestCase):

  def setUp(self):
    self.feed = gdata.docs.DocumentListAclFeedFromString(
        test_data.DOCUMENT_LIST_ACL_FEED)

  def testToAndFromString(self):
    for entry in self.feed.entry:
      self.assert_(isinstance(entry, gdata.docs.DocumentListAclEntry))
    feed = gdata.docs.DocumentListAclFeedFromString(str(self.feed))
    for entry in feed.entry:
      self.assert_(isinstance(entry, gdata.docs.DocumentListAclEntry))

  def testConvertActualData(self):
    entries = self.feed.entry
    self.assert_(len(entries) == 2)
    self.assertEqual(entries[0].title.text,
                     'Document Permission - user@gmail.com')
    self.assertEqual(entries[0].role.value, 'owner')
    self.assertEqual(entries[0].scope.type, 'user')
    self.assertEqual(entries[0].scope.value, 'user@gmail.com')
    self.assert_(entries[0].GetSelfLink() is not None)
    self.assert_(entries[0].GetEditLink() is not None)
    self.assertEqual(entries[1].title.text,
                     'Document Permission - user2@google.com')
    self.assertEqual(entries[1].role.value, 'writer')
    self.assertEqual(entries[1].scope.type, 'domain')
    self.assertEqual(entries[1].scope.value, 'google.com')
    self.assert_(entries[1].GetSelfLink() is not None)
    self.assert_(entries[1].GetEditLink() is not None)


if __name__ == '__main__':
  unittest.main()
|
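A recurring idea in the gdata tests above is round-trip checking: parse a canned payload, serialize it back to a string, re-parse it, and assert that every field survived. A minimal sketch of the same idea using the standard library's ElementTree; the XML snippet and tag names are invented for illustration:

```python
import unittest
import xml.etree.ElementTree as ET

SAMPLE = '<entry><title>Test Spreadsheet</title><author>test.user</author></entry>'


class RoundTripTest(unittest.TestCase):
    def test_to_and_from_string(self):
        entry = ET.fromstring(SAMPLE)
        # Serialize and re-parse; field values must survive the round trip.
        reparsed = ET.fromstring(ET.tostring(entry))
        self.assertEqual(reparsed.findtext('title'), 'Test Spreadsheet')
        self.assertEqual(reparsed.findtext('author'), 'test.user')


if __name__ == '__main__':
    unittest.main()
```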
google/makani
|
refs/heads/master
|
analysis/util/validate_flight.py
|
1
|
#!/usr/bin/python
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

r"""A module for detecting a wing crash in an HDF5 log file.

Determine if a wing crashed during flight, failed to complete
a flight as expected, or exceeded any safety criteria.

E.g.:
  analysis/util/validate_flight.py a.h5
"""

import sys

import gflags
import h5py
from makani.config import mconfig
from makani.control import control_types
from makani.lib.python import c_helpers
from makani.lib.python.h5_utils import numpy_utils
import numpy

_FLIGHT_MODE_HELPER = c_helpers.EnumHelper('FlightMode', control_types)

FLAGS = gflags.FLAGS


class FlightError(Exception):
  pass


def CheckOverTension(monitor_params, sim_telem):
  """Tests that the tether tension stays below upper limits."""
  max_tension = monitor_params['tether']['tension']['very_high']
  tether_force = sim_telem['tether']['end_force_g']
  log_max_tension = numpy.max(numpy_utils.Vec3Norm(tether_force))
  print ('Max tension in flight was %3.1f kN (limit %3.1f kN).'
         % (log_max_tension / 1000.0, max_tension / 1000.0))
  if log_max_tension > max_tension:
    raise FlightError('Over tension (%3.1f > %3.1f [kN]).' %
                      (log_max_tension / 1000.0, max_tension / 1000.0))


def CheckMinAltitude(system_params, sim_telem):
  """Checks that the wing does not penetrate the ground."""
  # TODO: The 8 m threshold is arbitrarily selected to avoid indicating
  # a crash when perching. This check doesn't eliminate all crashes. Come up
  # with a better way to check this, possibly considering wing orientation.
  min_altitude = 8.0
  log_min_altitude = (system_params['ground_frame']['ground_z']
                      - numpy.max(sim_telem['wing']['Xg']['z']))
  print ('Min altitude in flight was %4.2f m (limit %4.2f m)'
         % (log_min_altitude, min_altitude))
  if log_min_altitude < min_altitude:
    raise FlightError('Possible crash detected (%4.2f < %4.2f [m])' %
                      (log_min_altitude, min_altitude))


def CheckFlightModeProgression(control_telem):
  """Verify that all of the required modes are reached in order."""
  required_modes = [control_types.kFlightModePerched,
                    control_types.kFlightModeHoverAscend,
                    control_types.kFlightModeHoverPrepTransformGsUp,
                    control_types.kFlightModeHoverTransformGsUp,
                    control_types.kFlightModeHoverFullLength,
                    control_types.kFlightModeHoverAccel,
                    control_types.kFlightModeTransIn,
                    control_types.kFlightModeCrosswindNormal,
                    control_types.kFlightModeCrosswindPrepTransOut,
                    control_types.kFlightModeHoverTransOut,
                    control_types.kFlightModeHoverPrepTransformGsDown,
                    control_types.kFlightModeHoverTransformGsDown,
                    control_types.kFlightModeHoverReelIn,
                    control_types.kFlightModeHoverDescend]
  min_index = 0
  for mode in required_modes:
    mode_index = (numpy.argmax(control_telem['flight_mode'][min_index:] == mode)
                  + min_index)
    if control_telem['flight_mode'][mode_index] != mode:
      raise FlightError('Failed to reach mode %s in sequence.' %
                        _FLIGHT_MODE_HELPER.ShortName(mode))
    print 'Reached flight mode %s' % _FLIGHT_MODE_HELPER.ShortName(mode)
    min_index = mode_index


def _GetControlTelemetryCrosswindIndexes(control_telem):
  return numpy.where(numpy.logical_or(
      control_telem['flight_mode'] == control_types.kFlightModeCrosswindNormal,
      control_telem['flight_mode'] ==
      control_types.kFlightModeCrosswindPrepTransOut))[0]


def CheckCrosswindMinAltitude(system_params, control_telem, sim_telem):
  """Detect if wing got below 1.5 wingspans in crosswind."""
  crosswind_control_inds = _GetControlTelemetryCrosswindIndexes(control_telem)
  crosswind_start_time = control_telem['time'][crosswind_control_inds[0]]
  crosswind_end_time = control_telem['time'][crosswind_control_inds[-1]]
  crosswind_sim_inds = numpy.where(numpy.logical_and(
      crosswind_start_time <= sim_telem['time'],
      sim_telem['time'] <= crosswind_end_time))
  crosswind_xg_z = (
      sim_telem['wing']['Xg']['z'][crosswind_sim_inds])
  min_altitude = system_params['wing']['b'] * 1.5
  log_min_altitude = (system_params['ground_frame']['ground_z'] -
                      numpy.max(crosswind_xg_z))
  print ('Min altitude in crosswind was %4.2f m (limit %4.2f m).'
         % (log_min_altitude, min_altitude))
  if log_min_altitude < min_altitude:
    raise FlightError('Wing min altitude was %4.2f in crosswind (min = %4.2f).'
                      % (log_min_altitude, min_altitude))


def RunFlightChecks(log_file):
  """Run flight checks on an H5 log file."""
  system_params = log_file['parameters']['system_params']
  simulator = log_file['messages']['kAioNodeSimulator']
  sim_telem = simulator['kMessageTypeSimTelemetry']['message']
  controller = log_file['messages']['kAioNodeControllerA']
  control_telem = controller['kMessageTypeControlDebug']['message']
  monitor_params = mconfig.MakeParams('common.monitor.monitor_params')

  CheckOverTension(monitor_params, sim_telem)
  CheckMinAltitude(system_params, sim_telem)
  CheckFlightModeProgression(control_telem)
  CheckCrosswindMinAltitude(system_params, control_telem, sim_telem)


def _ShowUsageAndExit():
  print '\nUsage: %s <log_file>\n%s' % (sys.argv[0], FLAGS)
  sys.exit(1)


def main(argv):
  # Parse flags.
  try:
    argv = FLAGS(argv)
  except gflags.FlagsError, e:
    print 'Error: %s' % e
    _ShowUsageAndExit()
  if len(argv) != 2:
    _ShowUsageAndExit()

  # Load data (may exit with IOError).
  with h5py.File(argv[1], 'r') as log_file:
    RunFlightChecks(log_file)


if __name__ == '__main__':
  main(sys.argv)
|
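CheckFlightModeProgression above relies on a compact numpy idiom: numpy.argmax over a boolean array returns the index of the first True, so scanning from a moving min_index verifies that values occur in the required order. The subtlety is that argmax also returns 0 when there is no match at all, which is why the code re-checks the value at the returned index. A self-contained sketch of that idiom:

```python
import numpy


def check_progression(sequence, required):
    """Raise ValueError unless `required` values appear in order in `sequence`."""
    sequence = numpy.asarray(sequence)
    min_index = 0
    for value in required:
        # argmax over a boolean array yields the first True; it also yields 0
        # when there is no match, so the explicit equality check is required.
        index = numpy.argmax(sequence[min_index:] == value) + min_index
        if sequence[index] != value:
            raise ValueError('value %r not reached in order' % value)
        min_index = index


check_progression([1, 1, 2, 3, 3, 4], required=[1, 2, 4])   # passes
# check_progression([1, 3, 2], required=[2, 3])             # would raise
```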
jvkops/django
|
refs/heads/master
|
django/contrib/sessions/management/commands/clearsessions.py
|
729
|
from importlib import import_module

from django.conf import settings
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = (
        "Can be run as a cronjob or directly to clean out expired sessions "
        "(only with the database backend at the moment)."
    )

    def handle(self, **options):
        engine = import_module(settings.SESSION_ENGINE)
        try:
            engine.SessionStore.clear_expired()
        except NotImplementedError:
            self.stderr.write("Session engine '%s' doesn't support clearing "
                              "expired sessions.\n" % settings.SESSION_ENGINE)
|
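The management command above illustrates Django's string-configured backend pattern: the engine is named by a dotted module path in settings, resolved at runtime with importlib.import_module, and a backend signals an unsupported operation with NotImplementedError. A standalone sketch of the dispatch; the stdlib 'json' module stands in for a real engine path so the snippet runs, and clear_expired is a hypothetical hook:

```python
from importlib import import_module

# A dotted module path, as it would appear in settings.SESSION_ENGINE.
# A real value would be e.g. 'django.contrib.sessions.backends.db'.
ENGINE_PATH = 'json'

engine = import_module(ENGINE_PATH)
try:
    engine.clear_expired()  # hypothetical backend hook
except (NotImplementedError, AttributeError):
    print("Engine %r does not support clearing expired sessions." % ENGINE_PATH)
```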
rujiali/cooking4
|
refs/heads/master
|
core/vendor/guzzlehttp/guzzle/docs/conf.py
|
100
|
import sys, os

from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer

lexers['php'] = PhpLexer(startinline=True, linenos=1)
lexers['php-annotations'] = PhpLexer(startinline=True, linenos=1)
primary_domain = 'php'

# -- General configuration -----------------------------------------------------

extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Guzzle'
copyright = u'2012, Michael Dowling'
version = '3.0.0'
release = '3.0.0'
exclude_patterns = ['_build']

# -- Options for HTML output ---------------------------------------------------

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Guzzle documentation"
html_short_title = "Guzzle"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    '**': ['localtoc.html', 'searchbox.html']
}

# Output file base name for HTML help builder.
htmlhelp_basename = 'Guzzledoc'

# -- Guzzle Sphinx theme setup ------------------------------------------------

sys.path.insert(0, '/Users/dowling/projects/guzzle_sphinx_theme')

import guzzle_sphinx_theme

html_translator_class = 'guzzle_sphinx_theme.HTMLTranslator'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_theme = 'guzzle_sphinx_theme'

# Guzzle theme options (see theme.conf for more information)
html_theme_options = {
    "project_nav_name": "Guzzle",
    "github_user": "guzzle",
    "github_repo": "guzzle",
    "disqus_comments_shortname": "guzzle",
    "google_analytics_account": "UA-22752917-1"
}

# -- Options for LaTeX output --------------------------------------------------

latex_elements = {}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'Guzzle.tex', u'Guzzle Documentation',
     u'Michael Dowling', 'manual'),
]

# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'guzzle', u'Guzzle Documentation',
     [u'Michael Dowling'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'Guzzle', u'Guzzle Documentation',
     u'Michael Dowling', 'Guzzle', 'One line description of project.',
     'Miscellaneous'),
]
|
ancafarcas/liveblog
|
refs/heads/master
|
server/liveblog/items/items.py
|
2
|
from bson.objectid import ObjectId
from eve.utils import ParsedRequest
from superdesk.notification import push_notification
from superdesk.utc import utcnow
from superdesk.resource import Resource
from liveblog.common import get_user, update_dates_for
from apps.archive.archive import ArchiveResource, ArchiveService, ArchiveVersionsResource
from superdesk.services import BaseService


class ItemsVersionsResource(ArchiveVersionsResource):
    """
    Resource class for versions of archive_media
    """
    datasource = {
        'source': 'archive' + '_versions'
    }


class ItemsVersionsService(BaseService):
    def get(self, req, lookup):
        if req is None:
            req = ParsedRequest()
        return self.backend.get('archive_versions', req=req, lookup=lookup)


class ItemsResource(ArchiveResource):
    datasource = {
        'source': 'archive',
        'elastic_filter': {'term': {'particular_type': 'item'}},
        'default_sort': [('_updated', -1)]
    }
    item_methods = ['GET', 'PATCH', 'PUT', 'DELETE']
    schema = ArchiveResource.schema
    schema.update(schema)
    schema.update({
        'text': {
            'type': 'string'
        },
        'blog': Resource.rel('blogs', True),
        'particular_type': {
            'type': 'string',
            'allowed': ['post', 'item'],
            'default': 'item'
        },
        'item_type': {
            'type': 'string'
        },
        'meta': {
            'type': 'dict'
        },
        'deleted': {
            'type': 'string'
        },
        'commenter': {
            'type': 'string',
            'minlength': 1,
            'maxlength': 30
        }
    })
    privileges = {'GET': 'posts', 'POST': 'posts', 'PATCH': 'posts', 'DELETE': 'posts'}


class ItemsService(ArchiveService):
    def get(self, req, lookup):
        if req is None:
            req = ParsedRequest()
        docs = super().get(req, lookup)
        return docs

    def on_create(self, docs):
        super().on_create(docs)
        for doc in docs:
            update_dates_for(doc)
            doc['original_creator'] = str(get_user().get('_id'))

    def on_created(self, docs):
        super().on_created(docs)
        push_notification('items', created=1)

    def on_update(self, updates, original):
        super().on_update(updates, original)
        user = get_user()
        updates['versioncreated'] = utcnow()
        updates['version_creator'] = str(user.get('_id'))

    def on_updated(self, updates, original):
        super().on_updated(updates, original)
        push_notification('items', updated=1)

    def on_deleted(self, doc):
        super().on_deleted(doc)
        push_notification('items', deleted=1)


class BlogItemsResource(ArchiveResource):
    url = 'blogs/<regex("[a-f0-9]{24}"):blog_id>/items'
    schema = ItemsResource.schema
    datasource = {
        'source': 'archive',
        'elastic_filter': {'term': {'particular_type': 'item'}},
        'default_sort': [('_updated', -1)]
    }
    resource_methods = ['GET']
    privileges = {'GET': 'posts'}


class BlogItemsService(ArchiveService):
    def get(self, req, lookup):
        if lookup.get('blog_id'):
            lookup['blog'] = ObjectId(lookup['blog_id'])
            del lookup['blog_id']
        return super().get(req, lookup)
|
wpgallih/servo
|
refs/heads/master
|
python/mach/mach/test/test_logger.py
|
128
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, unicode_literals

import logging
import time
import unittest

from mach.logging import StructuredHumanFormatter

from mozunit import main


class DummyLogger(logging.Logger):
    def __init__(self, cb):
        logging.Logger.__init__(self, 'test')

        self._cb = cb

    def handle(self, record):
        self._cb(record)


class TestStructuredHumanFormatter(unittest.TestCase):
    def test_non_ascii_logging(self):
        # Ensures the formatter doesn't choke when non-ASCII characters are
        # present in printed parameters.
        formatter = StructuredHumanFormatter(time.time())

        def on_record(record):
            result = formatter.format(record)
            relevant = result[9:]

            self.assertEqual(relevant, 'Test: s\xe9curit\xe9')

        logger = DummyLogger(on_record)

        value = 's\xe9curit\xe9'
        logger.log(logging.INFO, 'Test: {utf}',
                   extra={'action': 'action', 'params': {'utf': value}})


if __name__ == '__main__':
    main()
|
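DummyLogger above is a handy testing trick: subclass logging.Logger and override handle() so every LogRecord goes to a callback instead of real handlers, letting a test drive a formatter directly. The same trick against a plain stdlib Formatter, as a runnable sketch:

```python
import logging


class CallbackLogger(logging.Logger):
    """Routes every record to a callback instead of attached handlers."""

    def __init__(self, cb):
        logging.Logger.__init__(self, 'test')
        self._cb = cb

    def handle(self, record):
        self._cb(record)


collected = []
logger = CallbackLogger(collected.append)
logger.info('hello %s', 'world')

# The captured LogRecord can now be fed to any formatter under test.
formatter = logging.Formatter('%(levelname)s %(message)s')
assert formatter.format(collected[0]) == 'INFO hello world'
```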
cosmiclattes/TPBviz
|
refs/heads/master
|
torrent/lib/python2.7/site-packages/django/contrib/admin/filters.py
|
101
|
"""
This encapsulates the logic for displaying filters in the Django admin.
Filters are specified in models with the "list_filter" option.
Each filter subclass knows how to display a filter for a field that passes a
certain test -- e.g. being a DateField or ForeignKey.
"""
import datetime
from django.db import models
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.utils.encoding import smart_text, force_text
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.contrib.admin.util import (get_model_from_relation,
reverse_field_path, get_limit_choices_to_from_path, prepare_lookup_value)
from django.contrib.admin.options import IncorrectLookupParameters
class ListFilter(object):
title = None # Human-readable title to appear in the right sidebar.
template = 'admin/filter.html'
def __init__(self, request, params, model, model_admin):
# This dictionary will eventually contain the request's query string
# parameters actually used by this filter.
self.used_parameters = {}
if self.title is None:
raise ImproperlyConfigured(
"The list filter '%s' does not specify "
"a 'title'." % self.__class__.__name__)
def has_output(self):
"""
Returns True if some choices would be output for this filter.
"""
raise NotImplementedError
def choices(self, cl):
"""
Returns choices ready to be output in the template.
"""
raise NotImplementedError
def queryset(self, request, queryset):
"""
Returns the filtered queryset.
"""
raise NotImplementedError
def expected_parameters(self):
"""
Returns the list of parameter names that are expected from the
request's query string and that will be used by this filter.
"""
raise NotImplementedError
class SimpleListFilter(ListFilter):
# The parameter that should be used in the query string for that filter.
parameter_name = None
def __init__(self, request, params, model, model_admin):
super(SimpleListFilter, self).__init__(
request, params, model, model_admin)
if self.parameter_name is None:
raise ImproperlyConfigured(
"The list filter '%s' does not specify "
"a 'parameter_name'." % self.__class__.__name__)
lookup_choices = self.lookups(request, model_admin)
if lookup_choices is None:
lookup_choices = ()
self.lookup_choices = list(lookup_choices)
if self.parameter_name in params:
value = params.pop(self.parameter_name)
self.used_parameters[self.parameter_name] = value
def has_output(self):
return len(self.lookup_choices) > 0
def value(self):
"""
Returns the value (in string format) provided in the request's
query string for this filter, if any. If the value wasn't provided then
returns None.
"""
return self.used_parameters.get(self.parameter_name, None)
def lookups(self, request, model_admin):
"""
Must be overriden to return a list of tuples (value, verbose value)
"""
raise NotImplementedError
def expected_parameters(self):
return [self.parameter_name]
def choices(self, cl):
yield {
'selected': self.value() is None,
'query_string': cl.get_query_string({}, [self.parameter_name]),
'display': _('All'),
}
for lookup, title in self.lookup_choices:
yield {
'selected': self.value() == force_text(lookup),
'query_string': cl.get_query_string({
self.parameter_name: lookup,
}, []),
'display': title,
}
class FieldListFilter(ListFilter):
_field_list_filters = []
_take_priority_index = 0
def __init__(self, field, request, params, model, model_admin, field_path):
self.field = field
self.field_path = field_path
self.title = getattr(field, 'verbose_name', field_path)
super(FieldListFilter, self).__init__(
request, params, model, model_admin)
for p in self.expected_parameters():
if p in params:
value = params.pop(p)
self.used_parameters[p] = prepare_lookup_value(p, value)
def has_output(self):
return True
def queryset(self, request, queryset):
try:
return queryset.filter(**self.used_parameters)
except ValidationError as e:
raise IncorrectLookupParameters(e)
@classmethod
def register(cls, test, list_filter_class, take_priority=False):
if take_priority:
# This is to allow overriding the default filters for certain types
# of fields with some custom filters. The first found in the list
# is used in priority.
cls._field_list_filters.insert(
cls._take_priority_index, (test, list_filter_class))
cls._take_priority_index += 1
else:
cls._field_list_filters.append((test, list_filter_class))
@classmethod
def create(cls, field, request, params, model, model_admin, field_path):
for test, list_filter_class in cls._field_list_filters:
if not test(field):
continue
return list_filter_class(field, request, params,
model, model_admin, field_path=field_path)
class RelatedFieldListFilter(FieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
other_model = get_model_from_relation(field)
if hasattr(field, 'rel'):
rel_name = field.rel.get_related_field().name
else:
rel_name = other_model._meta.pk.name
self.lookup_kwarg = '%s__%s__exact' % (field_path, rel_name)
self.lookup_kwarg_isnull = '%s__isnull' % field_path
self.lookup_val = request.GET.get(self.lookup_kwarg, None)
self.lookup_val_isnull = request.GET.get(
self.lookup_kwarg_isnull, None)
self.lookup_choices = field.get_choices(include_blank=False)
super(RelatedFieldListFilter, self).__init__(
field, request, params, model, model_admin, field_path)
if hasattr(field, 'verbose_name'):
self.lookup_title = field.verbose_name
else:
self.lookup_title = other_model._meta.verbose_name
self.title = self.lookup_title
def has_output(self):
if (isinstance(self.field, models.related.RelatedObject)
and self.field.field.null or hasattr(self.field, 'rel')
and self.field.null):
extra = 1
else:
extra = 0
return len(self.lookup_choices) + extra > 1
def expected_parameters(self):
return [self.lookup_kwarg, self.lookup_kwarg_isnull]
def choices(self, cl):
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
yield {
'selected': self.lookup_val is None and not self.lookup_val_isnull,
'query_string': cl.get_query_string({},
[self.lookup_kwarg, self.lookup_kwarg_isnull]),
'display': _('All'),
}
for pk_val, val in self.lookup_choices:
yield {
'selected': self.lookup_val == smart_text(pk_val),
'query_string': cl.get_query_string({
self.lookup_kwarg: pk_val,
}, [self.lookup_kwarg_isnull]),
'display': val,
}
if (isinstance(self.field, models.related.RelatedObject)
and self.field.field.null or hasattr(self.field, 'rel')
and self.field.null):
yield {
'selected': bool(self.lookup_val_isnull),
'query_string': cl.get_query_string({
self.lookup_kwarg_isnull: 'True',
}, [self.lookup_kwarg]),
'display': EMPTY_CHANGELIST_VALUE,
}
FieldListFilter.register(lambda f: (
bool(f.rel) if hasattr(f, 'rel') else
isinstance(f, models.related.RelatedObject)), RelatedFieldListFilter)
class BooleanFieldListFilter(FieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
self.lookup_kwarg = '%s__exact' % field_path
self.lookup_kwarg2 = '%s__isnull' % field_path
self.lookup_val = request.GET.get(self.lookup_kwarg, None)
self.lookup_val2 = request.GET.get(self.lookup_kwarg2, None)
super(BooleanFieldListFilter, self).__init__(field,
request, params, model, model_admin, field_path)
def expected_parameters(self):
return [self.lookup_kwarg, self.lookup_kwarg2]
def choices(self, cl):
for lookup, title in (
(None, _('All')),
('1', _('Yes')),
('0', _('No'))):
yield {
'selected': self.lookup_val == lookup and not self.lookup_val2,
'query_string': cl.get_query_string({
self.lookup_kwarg: lookup,
}, [self.lookup_kwarg2]),
'display': title,
}
if isinstance(self.field, models.NullBooleanField):
yield {
'selected': self.lookup_val2 == 'True',
'query_string': cl.get_query_string({
self.lookup_kwarg2: 'True',
}, [self.lookup_kwarg]),
'display': _('Unknown'),
}
FieldListFilter.register(lambda f: isinstance(f,
(models.BooleanField, models.NullBooleanField)), BooleanFieldListFilter)
class ChoicesFieldListFilter(FieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
self.lookup_kwarg = '%s__exact' % field_path
self.lookup_val = request.GET.get(self.lookup_kwarg)
super(ChoicesFieldListFilter, self).__init__(
field, request, params, model, model_admin, field_path)
def expected_parameters(self):
return [self.lookup_kwarg]
def choices(self, cl):
yield {
'selected': self.lookup_val is None,
'query_string': cl.get_query_string({}, [self.lookup_kwarg]),
'display': _('All')
}
for lookup, title in self.field.flatchoices:
yield {
'selected': smart_text(lookup) == self.lookup_val,
'query_string': cl.get_query_string({
self.lookup_kwarg: lookup}),
'display': title,
}
FieldListFilter.register(lambda f: bool(f.choices), ChoicesFieldListFilter)
class DateFieldListFilter(FieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
self.field_generic = '%s__' % field_path
self.date_params = dict([(k, v) for k, v in params.items()
if k.startswith(self.field_generic)])
now = timezone.now()
# When time zone support is enabled, convert "now" to the user's time
# zone so Django's definition of "Today" matches what the user expects.
if timezone.is_aware(now):
now = timezone.localtime(now)
if isinstance(field, models.DateTimeField):
today = now.replace(hour=0, minute=0, second=0, microsecond=0)
else: # field is a models.DateField
today = now.date()
tomorrow = today + datetime.timedelta(days=1)
if today.month == 12:
next_month = today.replace(year=today.year + 1, month=1, day=1)
else:
next_month = today.replace(month=today.month + 1, day=1)
next_year = today.replace(year=today.year + 1, month=1, day=1)
self.lookup_kwarg_since = '%s__gte' % field_path
self.lookup_kwarg_until = '%s__lt' % field_path
self.links = (
(_('Any date'), {}),
(_('Today'), {
self.lookup_kwarg_since: str(today),
self.lookup_kwarg_until: str(tomorrow),
}),
(_('Past 7 days'), {
self.lookup_kwarg_since: str(today - datetime.timedelta(days=7)),
self.lookup_kwarg_until: str(tomorrow),
}),
(_('This month'), {
self.lookup_kwarg_since: str(today.replace(day=1)),
self.lookup_kwarg_until: str(next_month),
}),
(_('This year'), {
self.lookup_kwarg_since: str(today.replace(month=1, day=1)),
self.lookup_kwarg_until: str(next_year),
}),
)
super(DateFieldListFilter, self).__init__(
field, request, params, model, model_admin, field_path)
def expected_parameters(self):
return [self.lookup_kwarg_since, self.lookup_kwarg_until]
def choices(self, cl):
for title, param_dict in self.links:
yield {
'selected': self.date_params == param_dict,
'query_string': cl.get_query_string(
param_dict, [self.field_generic]),
'display': title,
}
FieldListFilter.register(
lambda f: isinstance(f, models.DateField), DateFieldListFilter)
# This should be registered last, because it's a last resort. For example,
# if a field is eligible to use the BooleanFieldListFilter, that'd be much
# more appropriate, and the AllValuesFieldListFilter won't get used for it.
class AllValuesFieldListFilter(FieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
self.lookup_kwarg = field_path
self.lookup_kwarg_isnull = '%s__isnull' % field_path
self.lookup_val = request.GET.get(self.lookup_kwarg, None)
self.lookup_val_isnull = request.GET.get(self.lookup_kwarg_isnull,
None)
parent_model, reverse_path = reverse_field_path(model, field_path)
queryset = parent_model._default_manager.all()
# optional feature: limit choices base on existing relationships
# queryset = queryset.complex_filter(
# {'%s__isnull' % reverse_path: False})
limit_choices_to = get_limit_choices_to_from_path(model, field_path)
queryset = queryset.filter(limit_choices_to)
self.lookup_choices = (queryset
.distinct()
.order_by(field.name)
.values_list(field.name, flat=True))
super(AllValuesFieldListFilter, self).__init__(
field, request, params, model, model_admin, field_path)
def expected_parameters(self):
return [self.lookup_kwarg, self.lookup_kwarg_isnull]
def choices(self, cl):
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
yield {
'selected': (self.lookup_val is None
and self.lookup_val_isnull is None),
'query_string': cl.get_query_string({},
[self.lookup_kwarg, self.lookup_kwarg_isnull]),
'display': _('All'),
}
include_none = False
for val in self.lookup_choices:
if val is None:
include_none = True
continue
val = smart_text(val)
yield {
'selected': self.lookup_val == val,
'query_string': cl.get_query_string({
self.lookup_kwarg: val,
}, [self.lookup_kwarg_isnull]),
'display': val,
}
if include_none:
yield {
'selected': bool(self.lookup_val_isnull),
'query_string': cl.get_query_string({
self.lookup_kwarg_isnull: 'True',
}, [self.lookup_kwarg]),
'display': EMPTY_CHANGELIST_VALUE,
}
FieldListFilter.register(lambda f: True, AllValuesFieldListFilter)
|
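FieldListFilter.register above implements an ordered predicate registry: (test, class) pairs are tried in order, the first passing test wins, take_priority lets custom filters jump ahead of the defaults, and a catch-all registered last serves as the fallback. A stripped-down sketch of the same dispatch pattern:

```python
class Registry(object):
    """Ordered (predicate, handler) pairs; the first matching predicate wins."""

    def __init__(self):
        self._entries = []
        self._priority_index = 0

    def register(self, test, handler, take_priority=False):
        if take_priority:
            # Priority entries are consulted before the default ones.
            self._entries.insert(self._priority_index, (test, handler))
            self._priority_index += 1
        else:
            self._entries.append((test, handler))

    def create(self, value):
        for test, handler in self._entries:
            if test(value):
                return handler(value)
        raise LookupError('no handler for %r' % (value,))


registry = Registry()
registry.register(lambda v: isinstance(v, bool), lambda v: 'bool:%s' % v)
registry.register(lambda v: True, lambda v: 'fallback:%r' % v)  # last resort

assert registry.create(True) == 'bool:True'
assert registry.create(3.5) == 'fallback:3.5'
```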
amenonsen/ansible
|
refs/heads/devel
|
test/units/plugins/inventory/test_inventory.py
|
49
|
# Copyright 2015 Abhijit Menon-Sen <ams@2ndQuadrant.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import string
import textwrap

from ansible import constants as C
from units.compat import mock
from units.compat import unittest
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_text
from units.mock.path import mock_unfrackpath_noop

from ansible.inventory.manager import InventoryManager, split_host_pattern

from units.mock.loader import DictDataLoader


class TestInventory(unittest.TestCase):

    patterns = {
        'a': ['a'],
        'a, b': ['a', 'b'],
        'a , b': ['a', 'b'],
        ' a,b ,c[1:2] ': ['a', 'b', 'c[1:2]'],
        '9a01:7f8:191:7701::9': ['9a01:7f8:191:7701::9'],
        '9a01:7f8:191:7701::9,9a01:7f8:191:7701::9': ['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9'],
        '9a01:7f8:191:7701::9,9a01:7f8:191:7701::9,foo': ['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9', 'foo'],
        'foo[1:2]': ['foo[1:2]'],
        'a::b': ['a::b'],
        'a:b': ['a', 'b'],
        ' a : b ': ['a', 'b'],
        'foo:bar:baz[1:2]': ['foo', 'bar', 'baz[1:2]'],
    }

    pattern_lists = [
        [['a'], ['a']],
        [['a', 'b'], ['a', 'b']],
        [['a, b'], ['a', 'b']],
        [['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9,foo'],
         ['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9', 'foo']]
    ]

    # pattern_string: [ ('base_pattern', (a,b)), ['x','y','z'] ]
    # a,b are the bounds of the subscript; x..z are the results of the subscript
    # when applied to string.ascii_letters.
    subscripts = {
        'a': [('a', None), list(string.ascii_letters)],
        'a[0]': [('a', (0, None)), ['a']],
        'a[1]': [('a', (1, None)), ['b']],
        'a[2:3]': [('a', (2, 3)), ['c', 'd']],
        'a[-1]': [('a', (-1, None)), ['Z']],
        'a[-2]': [('a', (-2, None)), ['Y']],
        'a[48:]': [('a', (48, -1)), ['W', 'X', 'Y', 'Z']],
        'a[49:]': [('a', (49, -1)), ['X', 'Y', 'Z']],
        'a[1:]': [('a', (1, -1)), list(string.ascii_letters[1:])],
    }

    ranges_to_expand = {
        'a[1:2]': ['a1', 'a2'],
        'a[1:10:2]': ['a1', 'a3', 'a5', 'a7', 'a9'],
        'a[a:b]': ['aa', 'ab'],
        'a[a:i:3]': ['aa', 'ad', 'ag'],
        'a[a:b][c:d]': ['aac', 'aad', 'abc', 'abd'],
        'a[0:1][2:3]': ['a02', 'a03', 'a12', 'a13'],
        'a[a:b][2:3]': ['aa2', 'aa3', 'ab2', 'ab3'],
    }

    def setUp(self):
        fake_loader = DictDataLoader({})

        self.i = InventoryManager(loader=fake_loader, sources=[None])

    def test_split_patterns(self):
        for p in self.patterns:
            r = self.patterns[p]
            self.assertEqual(r, split_host_pattern(p))

        for p, r in self.pattern_lists:
            self.assertEqual(r, split_host_pattern(p))

    def test_ranges(self):
        for s in self.subscripts:
            r = self.subscripts[s]
            self.assertEqual(r[0], self.i._split_subscript(s))
            self.assertEqual(
                r[1],
                self.i._apply_subscript(
                    list(string.ascii_letters),
                    r[0][1]
                )
            )


class TestInventoryPlugins(unittest.TestCase):

    def test_empty_inventory(self):
        inventory = self._get_inventory('')

        self.assertIn('all', inventory.groups)
        self.assertIn('ungrouped', inventory.groups)
        self.assertFalse(inventory.groups['all'].get_hosts())
        self.assertFalse(inventory.groups['ungrouped'].get_hosts())

    def test_ini(self):
        self._test_default_groups("""
        host1
        host2
        host3
        [servers]
        host3
        host4
        host5
        """)

    def test_ini_explicit_ungrouped(self):
        self._test_default_groups("""
        [ungrouped]
        host1
        host2
        host3
        [servers]
        host3
        host4
        host5
        """)

    def test_ini_variables_stringify(self):
        values = ['string', 'no', 'No', 'false', 'FALSE', [], False, 0]
        inventory_content = "host1 "
        inventory_content += ' '.join(['var%s=%s' % (i, to_text(x)) for i, x in enumerate(values)])
        inventory = self._get_inventory(inventory_content)

        variables = inventory.get_host('host1').vars
        for i in range(len(values)):
            if isinstance(values[i], string_types):
                self.assertIsInstance(variables['var%s' % i], string_types)
            else:
                self.assertIsInstance(variables['var%s' % i], type(values[i]))

    @mock.patch('ansible.inventory.manager.unfrackpath', mock_unfrackpath_noop)
    @mock.patch('os.path.exists', lambda x: True)
    @mock.patch('os.access', lambda x, y: True)
    def test_yaml_inventory(self, filename="test.yaml"):
        inventory_content = {filename: textwrap.dedent("""\
        ---
        all:
            hosts:
                test1:
                test2:
        """)}
        C.INVENTORY_ENABLED = ['yaml']
        fake_loader = DictDataLoader(inventory_content)
        im = InventoryManager(loader=fake_loader, sources=filename)

        self.assertTrue(im._inventory.hosts)
        self.assertIn('test1', im._inventory.hosts)
        self.assertIn('test2', im._inventory.hosts)
        self.assertIn(im._inventory.get_host('test1'), im._inventory.groups['all'].hosts)
        self.assertIn(im._inventory.get_host('test2'), im._inventory.groups['all'].hosts)
        self.assertEqual(len(im._inventory.groups['all'].hosts), 2)
        self.assertIn(im._inventory.get_host('test1'), im._inventory.groups['ungrouped'].hosts)
        self.assertIn(im._inventory.get_host('test2'), im._inventory.groups['ungrouped'].hosts)
        self.assertEqual(len(im._inventory.groups['ungrouped'].hosts), 2)

    def _get_inventory(self, inventory_content):
        fake_loader = DictDataLoader({__file__: inventory_content})

        return InventoryManager(loader=fake_loader, sources=[__file__])

    def _test_default_groups(self, inventory_content):
        inventory = self._get_inventory(inventory_content)

        self.assertIn('all', inventory.groups)
        self.assertIn('ungrouped', inventory.groups)
        all_hosts = set(host.name for host in inventory.groups['all'].get_hosts())
        self.assertEqual(set(['host1', 'host2', 'host3', 'host4', 'host5']), all_hosts)
        ungrouped_hosts = set(host.name for host in inventory.groups['ungrouped'].get_hosts())
        self.assertEqual(set(['host1', 'host2']), ungrouped_hosts)
        servers_hosts = set(host.name for host in inventory.groups['servers'].get_hosts())
        self.assertEqual(set(['host3', 'host4', 'host5']), servers_hosts)
|
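The ranges_to_expand table above exercises Ansible's host-range syntax, where a[1:3] expands to a1, a2, a3 and consecutive brackets combine as a cross product. A small sketch of the numeric single-bracket case (Ansible itself also supports alphabetic ranges and strides):

```python
import re


def expand_numeric_range(pattern):
    """Expand 'name[i:j]' into ['namei', ..., 'namej'] (inclusive bounds)."""
    match = re.fullmatch(r'(.*)\[(\d+):(\d+)\](.*)', pattern)
    if not match:
        return [pattern]
    head, start, stop, tail = match.groups()
    return ['%s%d%s' % (head, i, tail) for i in range(int(start), int(stop) + 1)]


assert expand_numeric_range('a[1:3]') == ['a1', 'a2', 'a3']
assert expand_numeric_range('web') == ['web']
```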
angstwad/ansible
|
refs/heads/devel
|
test/units/parsing/__init__.py
|
7690
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
mhue/scikit-learn
|
refs/heads/master
|
sklearn/ensemble/tests/test_voting_classifier.py
|
37
|
"""Testing for the boost module (sklearn.ensemble.boost)."""
import numpy as np
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import VotingClassifier
from sklearn.grid_search import GridSearchCV
from sklearn import datasets
from sklearn import cross_validation
from sklearn.datasets import make_multilabel_classification
from sklearn.svm import SVC
from sklearn.multiclass import OneVsRestClassifier
# Load the iris dataset and randomly permute it
iris = datasets.load_iris()
X, y = iris.data[:, 1:3], iris.target
def test_majority_label_iris():
"""Check classification by majority label on dataset iris."""
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
clf3 = GaussianNB()
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='hard')
scores = cross_validation.cross_val_score(eclf,
X,
y,
cv=5,
scoring='accuracy')
assert_almost_equal(scores.mean(), 0.95, decimal=2)
def test_tie_situation():
"""Check voting classifier selects smaller class label in tie situation."""
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
eclf = VotingClassifier(estimators=[('lr', clf1), ('rf', clf2)],
voting='hard')
assert_equal(clf1.fit(X, y).predict(X)[73], 2)
assert_equal(clf2.fit(X, y).predict(X)[73], 1)
assert_equal(eclf.fit(X, y).predict(X)[73], 1)
def test_weights_iris():
"""Check classification by average probabilities on dataset iris."""
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
clf3 = GaussianNB()
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='soft',
weights=[1, 2, 10])
scores = cross_validation.cross_val_score(eclf,
X,
y,
cv=5,
scoring='accuracy')
assert_almost_equal(scores.mean(), 0.93, decimal=2)
def test_predict_on_toy_problem():
"""Manually check predicted class labels for toy dataset."""
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
clf3 = GaussianNB()
X = np.array([[-1.1, -1.5],
[-1.2, -1.4],
[-3.4, -2.2],
[1.1, 1.2],
[2.1, 1.4],
[3.1, 2.3]])
y = np.array([1, 1, 1, 2, 2, 2])
assert_equal(all(clf1.fit(X, y).predict(X)), all([1, 1, 1, 2, 2, 2]))
assert_equal(all(clf2.fit(X, y).predict(X)), all([1, 1, 1, 2, 2, 2]))
assert_equal(all(clf3.fit(X, y).predict(X)), all([1, 1, 1, 2, 2, 2]))
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='hard',
weights=[1, 1, 1])
assert_equal(all(eclf.fit(X, y).predict(X)), all([1, 1, 1, 2, 2, 2]))
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='soft',
weights=[1, 1, 1])
assert_equal(all(eclf.fit(X, y).predict(X)), all([1, 1, 1, 2, 2, 2]))
def test_predict_proba_on_toy_problem():
"""Calculate predicted probabilities on toy dataset."""
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
clf3 = GaussianNB()
X = np.array([[-1.1, -1.5], [-1.2, -1.4], [-3.4, -2.2], [1.1, 1.2]])
y = np.array([1, 1, 2, 2])
clf1_res = np.array([[0.59790391, 0.40209609],
[0.57622162, 0.42377838],
[0.50728456, 0.49271544],
[0.40241774, 0.59758226]])
clf2_res = np.array([[0.8, 0.2],
[0.8, 0.2],
[0.2, 0.8],
[0.3, 0.7]])
clf3_res = np.array([[0.9985082, 0.0014918],
[0.99845843, 0.00154157],
[0., 1.],
[0., 1.]])
t00 = (2*clf1_res[0][0] + clf2_res[0][0] + clf3_res[0][0]) / 4
t11 = (2*clf1_res[1][1] + clf2_res[1][1] + clf3_res[1][1]) / 4
t21 = (2*clf1_res[2][1] + clf2_res[2][1] + clf3_res[2][1]) / 4
t31 = (2*clf1_res[3][1] + clf2_res[3][1] + clf3_res[3][1]) / 4
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='soft',
weights=[2, 1, 1])
eclf_res = eclf.fit(X, y).predict_proba(X)
assert_almost_equal(t00, eclf_res[0][0], decimal=1)
assert_almost_equal(t11, eclf_res[1][1], decimal=1)
assert_almost_equal(t21, eclf_res[2][1], decimal=1)
assert_almost_equal(t31, eclf_res[3][1], decimal=1)
try:
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='hard')
eclf.fit(X, y).predict_proba(X)
except AttributeError:
pass
else:
raise AssertionError('AttributeError for voting == "hard"'
' and with predict_proba not raised')
def test_multilabel():
"""Check if error is raised for multilabel classification."""
X, y = make_multilabel_classification(n_classes=2, n_labels=1,
allow_unlabeled=False,
return_indicator=True,
random_state=123)
clf = OneVsRestClassifier(SVC(kernel='linear'))
eclf = VotingClassifier(estimators=[('ovr', clf)], voting='hard')
try:
eclf.fit(X, y)
except NotImplementedError:
return
def test_gridsearch():
"""Check GridSearch support."""
clf1 = LogisticRegression(random_state=1)
clf2 = RandomForestClassifier(random_state=1)
clf3 = GaussianNB()
eclf = VotingClassifier(estimators=[
('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='soft')
params = {'lr__C': [1.0, 100.0],
'rf__n_estimators': [20, 200]}
grid = GridSearchCV(estimator=eclf, param_grid=params, cv=5)
grid.fit(iris.data, iris.target)
expect = [0.953, 0.960, 0.960, 0.953]
scores = [mean_score for params, mean_score, scores in grid.grid_scores_]
for e, s in zip(expect, scores):
assert_almost_equal(e, s, decimal=3)
|
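test_predict_proba_on_toy_problem above hand-computes what soft voting with weights [2, 1, 1] should return: the ensemble probability is the weighted average of each estimator's predict_proba rows. The same arithmetic as a standalone numpy sketch; the probability numbers are illustrative:

```python
import numpy as np

# predict_proba output of three classifiers for one sample: P(class 0), P(class 1).
probas = np.array([[0.6, 0.4],
                   [0.8, 0.2],
                   [1.0, 0.0]])
weights = np.array([2, 1, 1])

# Soft voting: weighted average of the probability vectors.
avg = np.average(probas, axis=0, weights=weights)
print(avg)           # [0.75 0.25]
print(avg.argmax())  # predicted class index: 0
```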
crissmoldovan/tisip
|
refs/heads/master
|
iphone/pjsip/src/tests/pjsua/scripts-sendto/410_fmtp_amrnb_offer_octet_align.py
|
42
|
# $Id: 410_fmtp_amrnb_offer_octet_align.py 3664 2011-07-19 03:42:28Z nanang $
import inc_sip as sip
import inc_sdp as sdp

# Answer for codec AMR should contain fmtp octet-align=1
sdp = \
"""
v=0
o=- 3428650655 3428650655 IN IP4 192.168.1.9
s=pjmedia
c=IN IP4 192.168.1.9
t=0 0
a=X-nat:0
m=audio 4000 RTP/AVP 99 101
a=rtcp:4001 IN IP4 192.168.1.9
a=rtpmap:99 AMR/8000
a=fmtp:99 octet-align=1
a=sendrecv
a=rtpmap:101 telephone-event/8000
a=fmtp:101 0-15
"""

pjsua_args = "--null-audio --auto-answer 200 --add-codec AMR"
extra_headers = ""
include = ["octet-align=1"]  # response must include 'octet-align=1'
exclude = []

sendto_cfg = sip.SendtoCfg("AMR negotiation should respond with fmtp 'octet-align=1'",
                           pjsua_args, sdp, 200,
                           extra_headers=extra_headers,
                           resp_inc=include, resp_exc=exclude)
|
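The script above feeds pjsua a canned SDP offer and checks the response for 'octet-align=1'. The a=fmtp attribute it targets has a simple "key=value;key=value" shape; a small parsing sketch in plain Python, independent of pjsip:

```python
def parse_fmtp(line):
    """Parse 'a=fmtp:<pt> key=val[;key=val...]' into (payload_type, params)."""
    body = line[len('a=fmtp:'):]
    payload_type, _, params = body.partition(' ')
    pairs = (p.partition('=') for p in params.split(';') if p)
    return int(payload_type), {k.strip(): v.strip() for k, _, v in pairs}


pt, params = parse_fmtp('a=fmtp:99 octet-align=1')
assert (pt, params) == (99, {'octet-align': '1'})
```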
kuri65536/python-for-android
|
refs/heads/master
|
python-build/python-libs/xmpppy/xmpp/protocol.py
|
199
|
## protocol.py
##
## Copyright (C) 2003-2005 Alexey "Snake" Nezhdanov
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2, or (at your option)
## any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
# $Id: protocol.py,v 1.60 2009/04/07 11:14:28 snakeru Exp $
"""
Protocol module contains tools that is needed for processing of
xmpp-related data structures.
"""
from simplexml import Node,ustr
import time
NS_ACTIVITY ='http://jabber.org/protocol/activity' # XEP-0108
NS_ADDRESS ='http://jabber.org/protocol/address' # XEP-0033
NS_ADMIN ='http://jabber.org/protocol/admin' # XEP-0133
NS_ADMIN_ADD_USER =NS_ADMIN+'#add-user' # XEP-0133
NS_ADMIN_DELETE_USER =NS_ADMIN+'#delete-user' # XEP-0133
NS_ADMIN_DISABLE_USER =NS_ADMIN+'#disable-user' # XEP-0133
NS_ADMIN_REENABLE_USER =NS_ADMIN+'#reenable-user' # XEP-0133
NS_ADMIN_END_USER_SESSION =NS_ADMIN+'#end-user-session' # XEP-0133
NS_ADMIN_GET_USER_PASSWORD =NS_ADMIN+'#get-user-password' # XEP-0133
NS_ADMIN_CHANGE_USER_PASSWORD =NS_ADMIN+'#change-user-password' # XEP-0133
NS_ADMIN_GET_USER_ROSTER =NS_ADMIN+'#get-user-roster' # XEP-0133
NS_ADMIN_GET_USER_LASTLOGIN =NS_ADMIN+'#get-user-lastlogin' # XEP-0133
NS_ADMIN_USER_STATS =NS_ADMIN+'#user-stats' # XEP-0133
NS_ADMIN_EDIT_BLACKLIST =NS_ADMIN+'#edit-blacklist' # XEP-0133
NS_ADMIN_EDIT_WHITELIST =NS_ADMIN+'#edit-whitelist' # XEP-0133
NS_ADMIN_REGISTERED_USERS_NUM =NS_ADMIN+'#get-registered-users-num' # XEP-0133
NS_ADMIN_DISABLED_USERS_NUM =NS_ADMIN+'#get-disabled-users-num' # XEP-0133
NS_ADMIN_ONLINE_USERS_NUM =NS_ADMIN+'#get-online-users-num' # XEP-0133
NS_ADMIN_ACTIVE_USERS_NUM =NS_ADMIN+'#get-active-users-num' # XEP-0133
NS_ADMIN_IDLE_USERS_NUM =NS_ADMIN+'#get-idle-users-num' # XEP-0133
NS_ADMIN_REGISTERED_USERS_LIST =NS_ADMIN+'#get-registered-users-list' # XEP-0133
NS_ADMIN_DISABLED_USERS_LIST =NS_ADMIN+'#get-disabled-users-list' # XEP-0133
NS_ADMIN_ONLINE_USERS_LIST =NS_ADMIN+'#get-online-users-list' # XEP-0133
NS_ADMIN_ACTIVE_USERS_LIST =NS_ADMIN+'#get-active-users-list' # XEP-0133
NS_ADMIN_IDLE_USERS_LIST =NS_ADMIN+'#get-idle-users-list' # XEP-0133
NS_ADMIN_ANNOUNCE =NS_ADMIN+'#announce' # XEP-0133
NS_ADMIN_SET_MOTD =NS_ADMIN+'#set-motd' # XEP-0133
NS_ADMIN_EDIT_MOTD =NS_ADMIN+'#edit-motd' # XEP-0133
NS_ADMIN_DELETE_MOTD =NS_ADMIN+'#delete-motd' # XEP-0133
NS_ADMIN_SET_WELCOME =NS_ADMIN+'#set-welcome' # XEP-0133
NS_ADMIN_DELETE_WELCOME =NS_ADMIN+'#delete-welcome' # XEP-0133
NS_ADMIN_EDIT_ADMIN =NS_ADMIN+'#edit-admin' # XEP-0133
NS_ADMIN_RESTART =NS_ADMIN+'#restart' # XEP-0133
NS_ADMIN_SHUTDOWN =NS_ADMIN+'#shutdown' # XEP-0133
NS_AGENTS ='jabber:iq:agents' # XEP-0094 (historical)
NS_AMP ='http://jabber.org/protocol/amp' # XEP-0079
NS_AMP_ERRORS =NS_AMP+'#errors' # XEP-0079
NS_AUTH ='jabber:iq:auth' # XEP-0078
NS_AVATAR ='jabber:iq:avatar' # XEP-0008 (historical)
NS_BIND ='urn:ietf:params:xml:ns:xmpp-bind' # RFC 3920
NS_BROWSE ='jabber:iq:browse' # XEP-0011 (historical)
NS_BYTESTREAM ='http://jabber.org/protocol/bytestreams' # XEP-0065
NS_CAPS ='http://jabber.org/protocol/caps' # XEP-0115
NS_CHATSTATES ='http://jabber.org/protocol/chatstates' # XEP-0085
NS_CLIENT ='jabber:client' # RFC 3921
NS_COMMANDS ='http://jabber.org/protocol/commands' # XEP-0050
NS_COMPONENT_ACCEPT ='jabber:component:accept' # XEP-0114
NS_COMPONENT_1 ='http://jabberd.jabberstudio.org/ns/component/1.0' # Jabberd2
NS_COMPRESS ='http://jabber.org/protocol/compress' # XEP-0138
NS_DATA ='jabber:x:data' # XEP-0004
NS_DATA_LAYOUT ='http://jabber.org/protocol/xdata-layout' # XEP-0141
NS_DATA_VALIDATE ='http://jabber.org/protocol/xdata-validate' # XEP-0122
NS_DELAY ='jabber:x:delay' # XEP-0091 (deprecated)
NS_DIALBACK ='jabber:server:dialback' # RFC 3921
NS_DISCO ='http://jabber.org/protocol/disco' # XEP-0030
NS_DISCO_INFO =NS_DISCO+'#info' # XEP-0030
NS_DISCO_ITEMS =NS_DISCO+'#items' # XEP-0030
NS_ENCRYPTED ='jabber:x:encrypted' # XEP-0027
NS_EVENT ='jabber:x:event' # XEP-0022 (deprecated)
NS_FEATURE ='http://jabber.org/protocol/feature-neg' # XEP-0020
NS_FILE ='http://jabber.org/protocol/si/profile/file-transfer' # XEP-0096
NS_GATEWAY ='jabber:iq:gateway' # XEP-0100
NS_GEOLOC ='http://jabber.org/protocol/geoloc' # XEP-0080
NS_GROUPCHAT ='gc-1.0' # XEP-0045
NS_HTTP_BIND ='http://jabber.org/protocol/httpbind' # XEP-0124
NS_IBB ='http://jabber.org/protocol/ibb' # XEP-0047
NS_INVISIBLE ='presence-invisible' # Jabberd2
NS_IQ ='iq' # Jabberd2
NS_LAST ='jabber:iq:last' # XEP-0012
NS_MESSAGE ='message' # Jabberd2
NS_MOOD ='http://jabber.org/protocol/mood' # XEP-0107
NS_MUC ='http://jabber.org/protocol/muc' # XEP-0045
NS_MUC_ADMIN =NS_MUC+'#admin' # XEP-0045
NS_MUC_OWNER =NS_MUC+'#owner' # XEP-0045
NS_MUC_UNIQUE =NS_MUC+'#unique' # XEP-0045
NS_MUC_USER =NS_MUC+'#user' # XEP-0045
NS_MUC_REGISTER =NS_MUC+'#register' # XEP-0045
NS_MUC_REQUEST =NS_MUC+'#request' # XEP-0045
NS_MUC_ROOMCONFIG =NS_MUC+'#roomconfig' # XEP-0045
NS_MUC_ROOMINFO =NS_MUC+'#roominfo' # XEP-0045
NS_MUC_ROOMS =NS_MUC+'#rooms' # XEP-0045
NS_MUC_TRAFIC =NS_MUC+'#traffic' # XEP-0045
NS_NICK ='http://jabber.org/protocol/nick' # XEP-0172
NS_OFFLINE ='http://jabber.org/protocol/offline' # XEP-0013
NS_PHYSLOC ='http://jabber.org/protocol/physloc' # XEP-0112
NS_PRESENCE ='presence' # Jabberd2
NS_PRIVACY ='jabber:iq:privacy' # RFC 3921
NS_PRIVATE ='jabber:iq:private' # XEP-0049
NS_PUBSUB ='http://jabber.org/protocol/pubsub' # XEP-0060
NS_REGISTER ='jabber:iq:register' # XEP-0077
NS_RC ='http://jabber.org/protocol/rc' # XEP-0146
NS_ROSTER ='jabber:iq:roster' # RFC 3921
NS_ROSTERX ='http://jabber.org/protocol/rosterx' # XEP-0144
NS_RPC ='jabber:iq:rpc' # XEP-0009
NS_SASL ='urn:ietf:params:xml:ns:xmpp-sasl' # RFC 3920
NS_SEARCH ='jabber:iq:search' # XEP-0055
NS_SERVER ='jabber:server' # RFC 3921
NS_SESSION ='urn:ietf:params:xml:ns:xmpp-session' # RFC 3921
NS_SI ='http://jabber.org/protocol/si' # XEP-0095
NS_SI_PUB ='http://jabber.org/protocol/sipub' # XEP-0137
NS_SIGNED ='jabber:x:signed' # XEP-0027
NS_STANZAS ='urn:ietf:params:xml:ns:xmpp-stanzas' # RFC 3920
NS_STREAMS ='http://etherx.jabber.org/streams' # RFC 3920
NS_TIME ='jabber:iq:time' # XEP-0090 (deprecated)
NS_TLS ='urn:ietf:params:xml:ns:xmpp-tls' # RFC 3920
NS_VACATION ='http://jabber.org/protocol/vacation' # XEP-0109
NS_VCARD ='vcard-temp' # XEP-0054
NS_VCARD_UPDATE ='vcard-temp:x:update' # XEP-0153
NS_VERSION ='jabber:iq:version' # XEP-0092
NS_WAITINGLIST ='http://jabber.org/protocol/waitinglist' # XEP-0130
NS_XHTML_IM ='http://jabber.org/protocol/xhtml-im' # XEP-0071
NS_XMPP_STREAMS ='urn:ietf:params:xml:ns:xmpp-streams' # RFC 3920
xmpp_stream_error_conditions="""
bad-format --  --  -- The entity has sent XML that cannot be processed.
bad-namespace-prefix --  --  -- The entity has sent a namespace prefix that is unsupported, or has sent no namespace prefix on an element that requires such a prefix.
conflict --  --  -- The server is closing the active stream for this entity because a new stream has been initiated that conflicts with the existing stream.
connection-timeout --  --  -- The entity has not generated any traffic over the stream for some period of time.
host-gone --  --  -- The value of the 'to' attribute provided by the initiating entity in the stream header corresponds to a hostname that is no longer hosted by the server.
host-unknown --  --  -- The value of the 'to' attribute provided by the initiating entity in the stream header does not correspond to a hostname that is hosted by the server.
improper-addressing --  --  -- A stanza sent between two servers lacks a 'to' or 'from' attribute (or the attribute has no value).
internal-server-error --  --  -- The server has experienced a misconfiguration or an otherwise-undefined internal error that prevents it from servicing the stream.
invalid-from -- cancel --  -- The JID or hostname provided in a 'from' address does not match an authorized JID or validated domain negotiated between servers via SASL or dialback, or between a client and a server via authentication and resource authorization.
invalid-id --  --  -- The stream ID or dialback ID is invalid or does not match an ID previously provided.
invalid-namespace --  --  -- The streams namespace name is something other than "http://etherx.jabber.org/streams" or the dialback namespace name is something other than "jabber:server:dialback".
invalid-xml --  --  -- The entity has sent invalid XML over the stream to a server that performs validation.
not-authorized --  --  -- The entity has attempted to send data before the stream has been authenticated, or otherwise is not authorized to perform an action related to stream negotiation.
policy-violation --  --  -- The entity has violated some local service policy.
remote-connection-failed --  --  -- The server is unable to properly connect to a remote resource that is required for authentication or authorization.
resource-constraint --  --  -- The server lacks the system resources necessary to service the stream.
restricted-xml --  --  -- The entity has attempted to send restricted XML features such as a comment, processing instruction, DTD, entity reference, or unescaped character.
see-other-host --  --  -- The server will not provide service to the initiating entity but is redirecting traffic to another host.
system-shutdown --  --  -- The server is being shut down and all active streams are being closed.
undefined-condition --  --  -- The error condition is not one of those defined by the other conditions in this list.
unsupported-encoding --  --  -- The initiating entity has encoded the stream in an encoding that is not supported by the server.
unsupported-stanza-type --  --  -- The initiating entity has sent a first-level child of the stream that is not supported by the server.
unsupported-version --  --  -- The value of the 'version' attribute provided by the initiating entity in the stream header specifies a version of XMPP that is not supported by the server.
xml-not-well-formed --  --  -- The initiating entity has sent XML that is not well-formed."""
xmpp_stanza_error_conditions="""
bad-request -- 400 -- modify -- The sender has sent XML that is malformed or that cannot be processed.
conflict -- 409 -- cancel -- Access cannot be granted because an existing resource or session exists with the same name or address.
feature-not-implemented -- 501 -- cancel -- The feature requested is not implemented by the recipient or server and therefore cannot be processed.
forbidden -- 403 -- auth -- The requesting entity does not possess the required permissions to perform the action.
gone -- 302 -- modify -- The recipient or server can no longer be contacted at this address.
internal-server-error -- 500 -- wait -- The server could not process the stanza because of a misconfiguration or an otherwise-undefined internal server error.
item-not-found -- 404 -- cancel -- The addressed JID or item requested cannot be found.
jid-malformed -- 400 -- modify -- The value of the 'to' attribute in the sender's stanza does not adhere to the syntax defined in Addressing Scheme.
not-acceptable -- 406 -- cancel -- The recipient or server understands the request but is refusing to process it because it does not meet criteria defined by the recipient or server.
not-allowed -- 405 -- cancel -- The recipient or server does not allow any entity to perform the action.
not-authorized -- 401 -- auth -- The sender must provide proper credentials before being allowed to perform the action, or has provided improper credentials.
payment-required -- 402 -- auth -- The requesting entity is not authorized to access the requested service because payment is required.
recipient-unavailable -- 404 -- wait -- The intended recipient is temporarily unavailable.
redirect -- 302 -- modify -- The recipient or server is redirecting requests for this information to another entity.
registration-required -- 407 -- auth -- The requesting entity is not authorized to access the requested service because registration is required.
remote-server-not-found -- 404 -- cancel -- A remote server or service specified as part or all of the JID of the intended recipient does not exist.
remote-server-timeout -- 504 -- wait -- A remote server or service specified as part or all of the JID of the intended recipient could not be contacted within a reasonable amount of time.
resource-constraint -- 500 -- wait -- The server or recipient lacks the system resources necessary to service the request.
service-unavailable -- 503 -- cancel -- The server or recipient does not currently provide the requested service.
subscription-required -- 407 -- auth -- The requesting entity is not authorized to access the requested service because a subscription is required.
undefined-condition -- 500 --  -- The error condition is not one of those defined by the other conditions in this list.
unexpected-request -- 400 -- wait -- The recipient or server understood the request but was not expecting it at this time (e.g., the request was out of order)."""
sasl_error_conditions="""
aborted --  --  -- The receiving entity acknowledges an <abort/> element sent by the initiating entity; sent in reply to the <abort/> element.
incorrect-encoding --  --  -- The data provided by the initiating entity could not be processed because the [BASE64] encoding is incorrect (e.g., because the encoding does not adhere to the definition in Section 3 of [BASE64]); sent in reply to a <response/> element or an <auth/> element with initial response data.
invalid-authzid --  --  -- The authzid provided by the initiating entity is invalid, either because it is incorrectly formatted or because the initiating entity does not have permissions to authorize that ID; sent in reply to a <response/> element or an <auth/> element with initial response data.
invalid-mechanism --  --  -- The initiating entity did not provide a mechanism or requested a mechanism that is not supported by the receiving entity; sent in reply to an <auth/> element.
mechanism-too-weak --  --  -- The mechanism requested by the initiating entity is weaker than server policy permits for that initiating entity; sent in reply to a <response/> element or an <auth/> element with initial response data.
not-authorized --  --  -- The authentication failed because the initiating entity did not provide valid credentials (this includes but is not limited to the case of an unknown username); sent in reply to a <response/> element or an <auth/> element with initial response data.
temporary-auth-failure --  --  -- The authentication failed because of a temporary error condition within the receiving entity; sent in reply to an <auth/> element or <response/> element."""
ERRORS,_errorcodes={},{}
for ns,errname,errpool in [(NS_XMPP_STREAMS,'STREAM',xmpp_stream_error_conditions),
(NS_STANZAS ,'ERR' ,xmpp_stanza_error_conditions),
(NS_SASL ,'SASL' ,sasl_error_conditions)]:
for err in errpool.split('\n')[1:]:
cond,code,typ,text=err.split(' -- ')
name=errname+'_'+cond.upper().replace('-','_')
locals()[name]=ns+' '+cond
ERRORS[ns+' '+cond]=[code,typ,text]
if code: _errorcodes[code]=cond
del ns,errname,errpool,err,cond,code,typ,text
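# Illustrative note (not part of the original module): the loop above generates
# module-level constants of the form '<namespace> <condition>', e.g.
#   ERR_BAD_REQUEST == NS_STANZAS + ' bad-request'
#   ERRORS[ERR_BAD_REQUEST] == ['400', 'modify', 'The sender has sent XML that is malformed or that cannot be processed.']
#   STREAM_HOST_UNKNOWN == NS_XMPP_STREAMS + ' host-unknown'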
def isResultNode(node):
""" Returns true if the node is a positive reply. """
return node and node.getType()=='result'
def isErrorNode(node):
""" Returns true if the node is a negative reply. """
return node and node.getType()=='error'
class NodeProcessed(Exception):
""" Exception that should be raised by handler when the handling should be stopped. """
class StreamError(Exception):
""" Base exception class for stream errors."""
class BadFormat(StreamError): pass
class BadNamespacePrefix(StreamError): pass
class Conflict(StreamError): pass
class ConnectionTimeout(StreamError): pass
class HostGone(StreamError): pass
class HostUnknown(StreamError): pass
class ImproperAddressing(StreamError): pass
class InternalServerError(StreamError): pass
class InvalidFrom(StreamError): pass
class InvalidID(StreamError): pass
class InvalidNamespace(StreamError): pass
class InvalidXML(StreamError): pass
class NotAuthorized(StreamError): pass
class PolicyViolation(StreamError): pass
class RemoteConnectionFailed(StreamError): pass
class ResourceConstraint(StreamError): pass
class RestrictedXML(StreamError): pass
class SeeOtherHost(StreamError): pass
class SystemShutdown(StreamError): pass
class UndefinedCondition(StreamError): pass
class UnsupportedEncoding(StreamError): pass
class UnsupportedStanzaType(StreamError): pass
class UnsupportedVersion(StreamError): pass
class XMLNotWellFormed(StreamError): pass
stream_exceptions = {'bad-format': BadFormat,
'bad-namespace-prefix': BadNamespacePrefix,
'conflict': Conflict,
'connection-timeout': ConnectionTimeout,
'host-gone': HostGone,
'host-unknown': HostUnknown,
'improper-addressing': ImproperAddressing,
'internal-server-error': InternalServerError,
'invalid-from': InvalidFrom,
'invalid-id': InvalidID,
'invalid-namespace': InvalidNamespace,
'invalid-xml': InvalidXML,
'not-authorized': NotAuthorized,
'policy-violation': PolicyViolation,
'remote-connection-failed': RemoteConnectionFailed,
'resource-constraint': ResourceConstraint,
'restricted-xml': RestrictedXML,
'see-other-host': SeeOtherHost,
'system-shutdown': SystemShutdown,
'undefined-condition': UndefinedCondition,
'unsupported-encoding': UnsupportedEncoding,
'unsupported-stanza-type': UnsupportedStanzaType,
'unsupported-version': UnsupportedVersion,
'xml-not-well-formed': XMLNotWellFormed}
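# Sketch of intended usage (an assumption, not in the original source): this mapping
# lets a stream-level error condition received from the wire be raised as a typed exception:
#   raise stream_exceptions.get(condition, StreamError)()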
class JID:
""" JID object. JID can be built from string, modified, compared, serialised into string. """
def __init__(self, jid=None, node='', domain='', resource=''):
""" Constructor. JID can be specified as string (jid argument) or as separate parts.
Examples:
JID('node@domain/resource')
JID(node='node',domain='domain.org')
"""
if not jid and not domain: raise ValueError('JID must contain at least domain name')
elif type(jid)==type(self): self.node,self.domain,self.resource=jid.node,jid.domain,jid.resource
elif domain: self.node,self.domain,self.resource=node,domain,resource
else:
if jid.find('@')+1: self.node,jid=jid.split('@',1)
else: self.node=''
if jid.find('/')+1: self.domain,self.resource=jid.split('/',1)
else: self.domain,self.resource=jid,''
def getNode(self):
""" Return the node part of the JID """
return self.node
def setNode(self,node):
""" Set the node part of the JID to new value. Specify None to remove the node part."""
self.node=node.lower()
def getDomain(self):
""" Return the domain part of the JID """
return self.domain
def setDomain(self,domain):
""" Set the domain part of the JID to new value."""
self.domain=domain.lower()
def getResource(self):
""" Return the resource part of the JID """
return self.resource
def setResource(self,resource):
""" Set the resource part of the JID to new value. Specify None to remove the resource part."""
self.resource=resource
def getStripped(self):
""" Return the bare representation of JID. I.e. string value w/o resource. """
return self.__str__(0)
def __eq__(self, other):
""" Compare the JID to another instance or to string for equality. """
try: other=JID(other)
except ValueError: return 0
return self.resource==other.resource and self.__str__(0) == other.__str__(0)
def __ne__(self, other):
""" Compare the JID to another instance or to string for non-equality. """
return not self.__eq__(other)
def bareMatch(self, other):
""" Compare the node and domain parts of the JID's for equality. """
return self.__str__(0) == JID(other).__str__(0)
def __str__(self,wresource=1):
""" Serialise JID into string. """
if self.node: jid=self.node+'@'+self.domain
else: jid=self.domain
if wresource and self.resource: return jid+'/'+self.resource
return jid
def __hash__(self):
""" Produce hash of the JID, Allows to use JID objects as keys of the dictionary. """
return hash(self.__str__())
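# Illustrative example (not part of the original source): parsing and comparing JIDs.
#   jid = JID('user@example.org/Home')
#   jid.getNode(), jid.getDomain(), jid.getResource()   # ('user', 'example.org', 'Home')
#   jid.getStripped()                                   # 'user@example.org'
#   jid == 'user@example.org'                           # False - resources differ
#   jid.bareMatch('user@example.org/Work')              # True - node and domain match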
class Protocol(Node):
""" A "stanza" object class. Contains methods that are common for presences, iqs and messages. """
def __init__(self, name=None, to=None, typ=None, frm=None, attrs={}, payload=[], timestamp=None, xmlns=None, node=None):
""" Constructor, name is the name of the stanza i.e. 'message' or 'presence' or 'iq'.
to is the value of the 'to' attribute, 'typ' - the 'type' attribute,
frm - the 'from' attribute, attrs - other attributes mapping, payload - same meaning as for simplexml payload definition,
timestamp - the time value that needs to be stamped over stanza
xmlns - namespace of top stanza node
node - parsed or unparsed stanza to be taken as prototype.
"""
if not attrs: attrs={}
if to: attrs['to']=to
if frm: attrs['from']=frm
if typ: attrs['type']=typ
Node.__init__(self, tag=name, attrs=attrs, payload=payload, node=node)
if not node and xmlns: self.setNamespace(xmlns)
if self['to']: self.setTo(self['to'])
if self['from']: self.setFrom(self['from'])
if node and type(self)==type(node) and self.__class__==node.__class__ and self.attrs.has_key('id'): del self.attrs['id']
self.timestamp=None
for x in self.getTags('x',namespace=NS_DELAY):
try:
if not self.getTimestamp() or x.getAttr('stamp')<self.getTimestamp(): self.setTimestamp(x.getAttr('stamp'))
except: pass
if timestamp is not None: self.setTimestamp(timestamp) # To auto-timestamp stanza just pass timestamp=''
def getTo(self):
""" Return value of the 'to' attribute. """
try: return self['to']
except: return None
def getFrom(self):
""" Return value of the 'from' attribute. """
try: return self['from']
except: return None
def getTimestamp(self):
""" Return the timestamp in the 'yyyymmddThhmmss' format. """
return self.timestamp
def getID(self):
""" Return the value of the 'id' attribute. """
return self.getAttr('id')
def setTo(self,val):
""" Set the value of the 'to' attribute. """
self.setAttr('to', JID(val))
def getType(self):
""" Return the value of the 'type' attribute. """
return self.getAttr('type')
def setFrom(self,val):
""" Set the value of the 'from' attribute. """
self.setAttr('from', JID(val))
def setType(self,val):
""" Set the value of the 'type' attribute. """
self.setAttr('type', val)
def setID(self,val):
""" Set the value of the 'id' attribute. """
self.setAttr('id', val)
def getError(self):
""" Return the error-condition (if present) or the textual description of the error (otherwise). """
errtag=self.getTag('error')
if errtag:
for tag in errtag.getChildren():
if tag.getName()!='text': return tag.getName()
return errtag.getData()
def getErrorCode(self):
""" Return the error code. Obsolette. """
return self.getTagAttr('error','code')
def setError(self,error,code=None):
""" Set the error code. Obsolette. Use error-conditions instead. """
if code:
if str(code) in _errorcodes.keys(): error=ErrorNode(_errorcodes[str(code)],text=error)
else: error=ErrorNode(ERR_UNDEFINED_CONDITION,code=code,typ='cancel',text=error)
elif type(error) in [type(''),type(u'')]: error=ErrorNode(error)
self.setType('error')
self.addChild(node=error)
def setTimestamp(self,val=None):
"""Set the timestamp. timestamp should be the yyyymmddThhmmss string."""
if not val: val=time.strftime('%Y%m%dT%H:%M:%S', time.gmtime())
self.timestamp=val
self.setTag('x',{'stamp':self.timestamp},namespace=NS_DELAY)
def getProperties(self):
""" Return the list of namespaces to which belongs the direct childs of element"""
props=[]
for child in self.getChildren():
prop=child.getNamespace()
if prop not in props: props.append(prop)
return props
def __setitem__(self,item,val):
""" Set the item 'item' to the value 'val'."""
if item in ['to','from']: val=JID(val)
return self.setAttr(item,val)
class Message(Protocol):
""" XMPP Message stanza - "push" mechanism."""
def __init__(self, to=None, body=None, typ=None, subject=None, attrs={}, frm=None, payload=[], timestamp=None, xmlns=NS_CLIENT, node=None):
""" Create message object. You can specify recipient, text of message, type of message
any additional attributes, sender of the message, any additional payload (f.e. jabber:x:delay element) and namespace in one go.
Alternatively you can pass in the other XML object as the 'node' parameted to replicate it as message. """
Protocol.__init__(self, 'message', to=to, typ=typ, attrs=attrs, frm=frm, payload=payload, timestamp=timestamp, xmlns=xmlns, node=node)
if body: self.setBody(body)
if subject: self.setSubject(subject)
def getBody(self):
""" Returns text of the message. """
return self.getTagData('body')
def getSubject(self):
""" Returns subject of the message. """
return self.getTagData('subject')
def getThread(self):
""" Returns thread of the message. """
return self.getTagData('thread')
def setBody(self,val):
""" Sets the text of the message. """
self.setTagData('body',val)
def setSubject(self,val):
""" Sets the subject of the message. """
self.setTagData('subject',val)
def setThread(self,val):
""" Sets the thread of the message. """
self.setTagData('thread',val)
def buildReply(self,text=None):
""" Builds and returns another message object with specified text.
The to, from and thread properties of new message are pre-set as reply to this message. """
m=Message(to=self.getFrom(),frm=self.getTo(),body=text)
th=self.getThread()
if th: m.setThread(th)
return m
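# Illustrative example (not part of the original source): composing a chat message, and
# replying to a received one (buildReply swaps the addresses and copies the thread).
#   msg = Message(to='friend@example.org', body='Hello!', typ='chat')
#   reply = incoming_msg.buildReply('Got it')   # 'incoming_msg' stands for a received Message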
class Presence(Protocol):
""" XMPP Presence object."""
def __init__(self, to=None, typ=None, priority=None, show=None, status=None, attrs={}, frm=None, timestamp=None, payload=[], xmlns=NS_CLIENT, node=None):
""" Create presence object. You can specify recipient, type of message, priority, show and status values
any additional attributes, sender of the presence, timestamp, any additional payload (f.e. jabber:x:delay element) and namespace in one go.
Alternatively you can pass in the other XML object as the 'node' parameted to replicate it as presence. """
Protocol.__init__(self, 'presence', to=to, typ=typ, attrs=attrs, frm=frm, payload=payload, timestamp=timestamp, xmlns=xmlns, node=node)
if priority: self.setPriority(priority)
if show: self.setShow(show)
if status: self.setStatus(status)
def getPriority(self):
""" Returns the priority of the presence. """
return self.getTagData('priority')
def getShow(self):
""" Returns the show value of the presence. """
return self.getTagData('show')
def getStatus(self):
""" Returns the status string of the presence. """
return self.getTagData('status')
def setPriority(self,val):
""" Sets the priority of the presence. """
self.setTagData('priority',val)
def setShow(self,val):
""" Sets the show value of the presence. """
self.setTagData('show',val)
def setStatus(self,val):
""" Sets the status string of the presence. """
self.setTagData('status',val)
def _muc_getItemAttr(self,tag,attr):
for xtag in self.getTags('x'):
for child in xtag.getTags(tag):
return child.getAttr(attr)
def _muc_getSubTagDataAttr(self,tag,attr):
for xtag in self.getTags('x'):
for child in xtag.getTags('item'):
for cchild in child.getTags(tag):
return cchild.getData(),cchild.getAttr(attr)
return None,None
def getRole(self):
"""Returns the presence role (for groupchat)"""
return self._muc_getItemAttr('item','role')
def getAffiliation(self):
"""Returns the presence affiliation (for groupchat)"""
return self._muc_getItemAttr('item','affiliation')
def getNick(self):
"""Returns the nick value (for nick change in groupchat)"""
return self._muc_getItemAttr('item','nick')
def getJid(self):
"""Returns the presence jid (for groupchat)"""
return self._muc_getItemAttr('item','jid')
def getReason(self):
"""Returns the reason of the presence (for groupchat)"""
return self._muc_getSubTagDataAttr('reason','')[0]
def getActor(self):
"""Returns the reason of the presence (for groupchat)"""
return self._muc_getSubTagDataAttr('actor','jid')[1]
def getStatusCode(self):
"""Returns the status code of the presence (for groupchat)"""
return self._muc_getItemAttr('status','code')
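# Illustrative example (not part of the original source): an 'away' presence, and the
# MUC helpers above reading the x-payload of a presence received from a conference room.
#   pres = Presence(priority=5, show='away', status='Out to lunch')
#   muc_presence.getRole(), muc_presence.getAffiliation()   # e.g. ('participant', 'member')
#   # 'muc_presence' stands for a presence stanza received from a MUC room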
class Iq(Protocol):
""" XMPP Iq object - get/set dialog mechanism. """
def __init__(self, typ=None, queryNS=None, attrs={}, to=None, frm=None, payload=[], xmlns=NS_CLIENT, node=None):
""" Create Iq object. You can specify type, query namespace
any additional attributes, recipient of the iq, sender of the iq, any additional payload (f.e. jabber:x:data node) and namespace in one go.
Alternatively you can pass in the other XML object as the 'node' parameted to replicate it as an iq. """
Protocol.__init__(self, 'iq', to=to, typ=typ, attrs=attrs, frm=frm, xmlns=xmlns, node=node)
if payload: self.setQueryPayload(payload)
if queryNS: self.setQueryNS(queryNS)
def getQueryNS(self):
""" Return the namespace of the 'query' child element."""
tag=self.getTag('query')
if tag: return tag.getNamespace()
def getQuerynode(self):
""" Return the 'node' attribute value of the 'query' child element."""
return self.getTagAttr('query','node')
def getQueryPayload(self):
""" Return the 'query' child element payload."""
tag=self.getTag('query')
if tag: return tag.getPayload()
def getQueryChildren(self):
""" Return the 'query' child element child nodes."""
tag=self.getTag('query')
if tag: return tag.getChildren()
def setQueryNS(self,namespace):
""" Set the namespace of the 'query' child element."""
self.setTag('query').setNamespace(namespace)
def setQueryPayload(self,payload):
""" Set the 'query' child element payload."""
self.setTag('query').setPayload(payload)
def setQuerynode(self,node):
""" Set the 'node' attribute value of the 'query' child element."""
self.setTagAttr('query','node',node)
def buildReply(self,typ):
""" Builds and returns another Iq object of specified type.
The to, from and query child node of new Iq are pre-set as reply to this Iq. """
iq=Iq(typ,to=self.getFrom(),frm=self.getTo(),attrs={'id':self.getID()})
if self.getTag('query'): iq.setQueryNS(self.getQueryNS())
return iq
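# Illustrative example (not part of the original source): a jabber:iq:version query,
# and the reply skeleton derived from a received iq (same id and query namespace).
#   iq = Iq('get', NS_VERSION, to='example.org')
#   reply = received_iq.buildReply('result')   # 'received_iq' stands for an incoming Iq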
class ErrorNode(Node):
""" XMPP-style error element.
In the case of a stanza error it should be attached to the XMPP stanza.
In the case of stream-level errors it should be used separately. """
def __init__(self,name,code=None,typ=None,text=None):
""" Create new error node object.
Mandatory parameter: name - name of error condition.
Optional parameters: code, typ, text. Used for backwards compatibility with older jabber protocol."""
if ERRORS.has_key(name):
cod,type,txt=ERRORS[name]
ns=name.split()[0]
else: cod,ns,type,txt='500',NS_STANZAS,'cancel',''
if typ: type=typ
if code: cod=code
if text: txt=text
Node.__init__(self,'error',{},[Node(name)])
if type: self.setAttr('type',type)
if not cod: self.setName('stream:error')
if txt: self.addChild(node=Node(ns+' text',{},[txt]))
if cod: self.setAttr('code',cod)
class Error(Protocol):
""" Used to quickly transform received stanza into error reply."""
def __init__(self,node,error,reply=1):
""" Create error reply basing on the received 'node' stanza and the 'error' error condition.
If the 'node' is not the received stanza but locally created ('to' and 'from' fields needs not swapping)
specify the 'reply' argument as false."""
if reply: Protocol.__init__(self,to=node.getFrom(),frm=node.getTo(),node=node)
else: Protocol.__init__(self,node=node)
self.setError(error)
if node.getType()=='error': self.__str__=self.__dupstr__
def __dupstr__(self,dup1=None,dup2=None):
""" Dummy function used as preventor of creating error node in reply to error node.
I.e. you will not be able to serialise "double" error into string.
"""
return ''
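# Illustrative example (not part of the original source): bouncing an unhandled stanza
# back to its sender with a standard condition generated from the tables above.
#   err = Error(received_iq, ERR_FEATURE_NOT_IMPLEMENTED)
#   # str(err) is '' when the source stanza was itself an error (see __dupstr__ above)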
class DataField(Node):
""" This class is used in the DataForm class to describe the single data item.
If you are working with jabber:x:data (XEP-0004, XEP-0068, XEP-0122)
then you will need to work with instances of this class. """
def __init__(self,name=None,value=None,typ=None,required=0,label=None,desc=None,options=[],node=None):
""" Create new data field of specified name,value and type.
Also 'required','desc' and 'options' fields can be set.
Alternatively other XML object can be passed in as the 'node' parameter to replicate it as a new datafield.
"""
Node.__init__(self,'field',node=node)
if name: self.setVar(name)
if type(value) in [list,tuple]: self.setValues(value)
elif value: self.setValue(value)
if typ: self.setType(typ)
elif not typ and not node: self.setType('text-single')
if required: self.setRequired(required)
if label: self.setLabel(label)
if desc: self.setDesc(desc)
if options: self.setOptions(options)
def setRequired(self,req=1):
""" Change the state of the 'required' flag. """
if req: self.setTag('required')
else:
try: self.delChild('required')
except ValueError: return
def isRequired(self):
""" Returns in this field a required one. """
return self.getTag('required')
def setLabel(self,label):
""" Set the label of this field. """
self.setAttr('label',label)
def getLabel(self):
""" Return the label of this field. """
return self.getAttr('label')
def setDesc(self,desc):
""" Set the description of this field. """
self.setTagData('desc',desc)
def getDesc(self):
""" Return the description of this field. """
return self.getTagData('desc')
def setValue(self,val):
""" Set the value of this field. """
self.setTagData('value',val)
def getValue(self):
return self.getTagData('value')
def setValues(self,lst):
""" Set the values of this field as values-list.
Replaces all previous field values! If you need to just add a value - use addValue method."""
while self.getTag('value'): self.delChild('value')
for val in lst: self.addValue(val)
def addValue(self,val):
""" Add one more value to this field. Used in 'get' iq's or such."""
self.addChild('value',{},[val])
def getValues(self):
""" Return the list of values associated with this field."""
ret=[]
for tag in self.getTags('value'): ret.append(tag.getData())
return ret
def getOptions(self):
""" Return label-option pairs list associated with this field."""
ret=[]
for tag in self.getTags('option'): ret.append([tag.getAttr('label'),tag.getTagData('value')])
return ret
def setOptions(self,lst):
""" Set label-option pairs list associated with this field."""
while self.getTag('option'): self.delChild('option')
for opt in lst: self.addOption(opt)
def addOption(self,opt):
""" Add one more label-option pair to this field."""
if type(opt) in [str,unicode]: self.addChild('option').setTagData('value',opt)
else: self.addChild('option',{'label':opt[0]}).setTagData('value',opt[1])
def getType(self):
""" Get type of this field. """
return self.getAttr('type')
def setType(self,val):
""" Set type of this field. """
return self.setAttr('type',val)
def getVar(self):
""" Get 'var' attribute value of this field. """
return self.getAttr('var')
def setVar(self,val):
""" Set 'var' attribute value of this field. """
return self.setAttr('var',val)
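# Illustrative example (not part of the original source): a single-select field with
# label/value option pairs, as used inside a jabber:x:data form.
#   field = DataField('colour', value='red', typ='list-single')
#   field.setOptions([['Red', 'red'], ['Blue', 'blue']])
#   field.getOptions()   # [['Red', 'red'], ['Blue', 'blue']]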
class DataReported(Node):
""" This class is used in the DataForm class to describe the 'reported data field' data items which are used in
'multiple item form results' (as described in XEP-0004).
Represents the fields that will be returned from a search. This information is useful when
you try to use the jabber:iq:search namespace to return dynamic form information.
"""
def __init__(self,node=None):
""" Create new empty 'reported data' field. However, note that, according XEP-0004:
* It MUST contain one or more DataFields.
* Contained DataFields SHOULD possess a 'type' and 'label' attribute in addition to 'var' attribute
* Contained DataFields SHOULD NOT contain a <value/> element.
Alternatively other XML object can be passed in as the 'node' parameter to replicate it as a new
dataitem.
"""
Node.__init__(self,'reported',node=node)
if node:
newkids=[]
for n in self.getChildren():
if n.getName()=='field': newkids.append(DataField(node=n))
else: newkids.append(n)
self.kids=newkids
def getField(self,name):
""" Return the datafield object with name 'name' (if exists). """
return self.getTag('field',attrs={'var':name})
def setField(self,name,typ=None,label=None):
""" Create if nessessary or get the existing datafield object with name 'name' and return it.
If created, attributes 'type' and 'label' are applied to new datafield."""
f=self.getField(name)
if f: return f
return self.addChild(node=DataField(name,None,typ,0,label))
def asDict(self):
""" Represent dataitem as simple dictionary mapping of datafield names to their values."""
ret={}
for field in self.getTags('field'):
name=field.getAttr('var')
typ=field.getType()
if isinstance(typ,(str,unicode)) and typ[-6:]=='-multi':
val=[]
for i in field.getTags('value'): val.append(i.getData())
else: val=field.getTagData('value')
ret[name]=val
if self.getTag('instructions'): ret['instructions']=self.getInstructions()
return ret
def __getitem__(self,name):
""" Simple dictionary interface for getting datafields values by their names."""
item=self.getField(name)
if item: return item.getValue()
raise IndexError('No such field')
def __setitem__(self,name,val):
""" Simple dictionary interface for setting datafields values by their names."""
return self.setField(name).setValue(val)
class DataItem(Node):
""" This class is used in the DataForm class to describe data items which are used in 'multiple
item form results' (as described in XEP-0004).
"""
def __init__(self,node=None):
""" Create new empty data item. However, note that, according XEP-0004, DataItem MUST contain ALL
DataFields described in DataReported.
Alternatively other XML object can be passed in as the 'node' parameter to replicate it as a new
dataitem.
"""
Node.__init__(self,'item',node=node)
if node:
newkids=[]
for n in self.getChildren():
if n.getName()=='field': newkids.append(DataField(node=n))
else: newkids.append(n)
self.kids=newkids
def getField(self,name):
""" Return the datafield object with name 'name' (if exists). """
return self.getTag('field',attrs={'var':name})
def setField(self,name):
""" Create if nessessary or get the existing datafield object with name 'name' and return it. """
f=self.getField(name)
if f: return f
return self.addChild(node=DataField(name))
def asDict(self):
""" Represent dataitem as simple dictionary mapping of datafield names to their values."""
ret={}
for field in self.getTags('field'):
name=field.getAttr('var')
typ=field.getType()
if isinstance(typ,(str,unicode)) and typ[-6:]=='-multi':
val=[]
for i in field.getTags('value'): val.append(i.getData())
else: val=field.getTagData('value')
ret[name]=val
if self.getTag('instructions'): ret['instructions']=self.getInstructions()
return ret
def __getitem__(self,name):
""" Simple dictionary interface for getting datafields values by their names."""
item=self.getField(name)
if item: return item.getValue()
raise IndexError('No such field')
def __setitem__(self,name,val):
""" Simple dictionary interface for setting datafields values by their names."""
return self.setField(name).setValue(val)
class DataForm(Node):
""" DataForm class. Used for manipulating dataforms in XMPP.
Relevant XEPs: 0004, 0068, 0122.
Can be used in disco, pub-sub and many other applications."""
def __init__(self, typ=None, data=[], title=None, node=None):
"""
Create new dataform of type 'typ'; 'data' is the list of DataReported,
DataItem and DataField instances that this dataform contains; 'title'
is the title string.
You can specify the 'node' argument as the other node to be used as
base for constructing this dataform.
title and instructions are optional and SHOULD NOT contain newlines.
Several instructions MAY be present.
'typ' can be one of ('form' | 'submit' | 'cancel' | 'result' )
'typ' of the reply iq can be ( 'result' | 'set' | 'set' | 'result' ) respectively.
A 'cancel' form cannot contain any fields. All other forms contain AT LEAST one field.
'title' MAY be included in forms of type "form" and "result"
"""
Node.__init__(self,'x',node=node)
if node:
newkids=[]
for n in self.getChildren():
if n.getName()=='field': newkids.append(DataField(node=n))
elif n.getName()=='item': newkids.append(DataItem(node=n))
elif n.getName()=='reported': newkids.append(DataReported(node=n))
else: newkids.append(n)
self.kids=newkids
if typ: self.setType(typ)
self.setNamespace(NS_DATA)
if title: self.setTitle(title)
if type(data)==type({}):
newdata=[]
for name in data.keys(): newdata.append(DataField(name,data[name]))
data=newdata
for child in data:
if type(child) in [type(''),type(u'')]: self.addInstructions(child)
elif child.__class__.__name__=='DataField': self.kids.append(child)
elif child.__class__.__name__=='DataItem': self.kids.append(child)
elif child.__class__.__name__=='DataReported': self.kids.append(child)
else: self.kids.append(DataField(node=child))
def getType(self):
""" Return the type of dataform. """
return self.getAttr('type')
def setType(self,typ):
""" Set the type of dataform. """
self.setAttr('type',typ)
def getTitle(self):
""" Return the title of dataform. """
return self.getTagData('title')
def setTitle(self,text):
""" Set the title of dataform. """
self.setTagData('title',text)
def getInstructions(self):
""" Return the instructions of dataform. """
return self.getTagData('instructions')
def setInstructions(self,text):
""" Set the instructions of dataform. """
self.setTagData('instructions',text)
def addInstructions(self,text):
""" Add one more instruction to the dataform. """
self.addChild('instructions',{},[text])
def getField(self,name):
""" Return the datafield object with name 'name' (if exists). """
return self.getTag('field',attrs={'var':name})
def setField(self,name):
""" Create if nessessary or get the existing datafield object with name 'name' and return it. """
f=self.getField(name)
if f: return f
return self.addChild(node=DataField(name))
def asDict(self):
""" Represent dataform as simple dictionary mapping of datafield names to their values."""
ret={}
for field in self.getTags('field'):
name=field.getAttr('var')
typ=field.getType()
if isinstance(typ,(str,unicode)) and typ[-6:]=='-multi':
val=[]
for i in field.getTags('value'): val.append(i.getData())
else: val=field.getTagData('value')
ret[name]=val
if self.getTag('instructions'): ret['instructions']=self.getInstructions()
return ret
def __getitem__(self,name):
""" Simple dictionary interface for getting datafields values by their names."""
item=self.getField(name)
if item: return item.getValue()
raise IndexError('No such field')
def __setitem__(self,name,val):
""" Simple dictionary interface for setting datafields values by their names."""
return self.setField(name).setValue(val)
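# Illustrative example (not part of the original source): filling a 'submit' form
# through the dictionary interface and reading it back as a plain mapping.
#   form = DataForm(typ='submit')
#   form['muc#roomconfig_roomname'] = 'The Place'   # creates the field on demand
#   form.asDict()                                   # {'muc#roomconfig_roomname': 'The Place'}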
|
numerigraphe/odoo
|
refs/heads/8.0
|
addons/stock_dropshipping/stock_dropshipping.py
|
46
|
# coding: utf-8
from openerp import models, api, _
from openerp.exceptions import Warning
class sale_order_line(models.Model):
_inherit = 'sale.order.line'
@api.multi
def _check_routing(self, product, warehouse):
""" skip stock verification if the route goes from supplier to customer
As the product never goes in stock, no need to verify it's availibility
"""
res = super(sale_order_line, self)._check_routing(product, warehouse)
if not res:
for line in self:
for pull_rule in line.route_id.pull_ids:
if (pull_rule.picking_type_id.default_location_src_id.usage == 'supplier' and
pull_rule.picking_type_id.default_location_dest_id.usage == 'customer'):
res = True
break
return res
class purchase_order(models.Model):
_inherit = 'purchase.order'
@api.one
def _check_invoice_policy(self):
if self.invoice_method == 'picking' and self.location_id.usage == 'customer':
for proc in self.order_line.mapped('procurement_ids'):
if proc.sale_line_id.order_id.order_policy == 'picking':
raise Warning(_('In the case of a dropship route, it is not possible to have an invoicing control set on "Based on incoming shipments" and a sale order with an invoice creation on "On Delivery Order"'))
@api.multi
def wkf_confirm_order(self):
""" Raise a warning to forbid to have both purchase and sale invoices
policies at delivery in dropshipping. As it is not implemented.
This check can be disabled setting 'no_invoice_policy_check' in context
"""
if not self.env.context.get('no_invoice_policy_check'):
self._check_invoice_policy()
super(purchase_order, self).wkf_confirm_order()
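# Illustrative note (an assumption based on the docstring above): a caller can bypass
# the invoice-policy check for a single confirmation via the context flag, e.g.
#   po.with_context(no_invoice_policy_check=True).wkf_confirm_order()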
|
nmrugg/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/footyroom.py
|
104
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
class FootyRoomIE(InfoExtractor):
_VALID_URL = r'http://footyroom\.com/(?P<id>[^/]+)'
_TESTS = [{
'url': 'http://footyroom.com/schalke-04-0-2-real-madrid-2015-02/',
'info_dict': {
'id': 'schalke-04-0-2-real-madrid-2015-02',
'title': 'Schalke 04 0 – 2 Real Madrid',
},
'playlist_count': 3,
}, {
'url': 'http://footyroom.com/georgia-0-2-germany-2015-03/',
'info_dict': {
'id': 'georgia-0-2-germany-2015-03',
'title': 'Georgia 0 – 2 Germany',
},
'playlist_count': 1,
}]
def _real_extract(self, url):
playlist_id = self._match_id(url)
webpage = self._download_webpage(url, playlist_id)
playlist = self._parse_json(
self._search_regex(
r'VideoSelector\.load\((\[.+?\])\);', webpage, 'video selector'),
playlist_id)
playlist_title = self._og_search_title(webpage)
entries = []
for video in playlist:
payload = video.get('payload')
if not payload:
continue
playwire_url = self._search_regex(
r'data-config="([^"]+)"', payload,
'playwire url', default=None)
if playwire_url:
entries.append(self.url_result(self._proto_relative_url(
playwire_url, 'http:'), 'Playwire'))
return self.playlist_result(entries, playlist_id, playlist_title)
|
jobovy/tact
|
refs/heads/master
|
aa/genfunc/const_energy_surface.py
|
1
|
import numpy as np
import matplotlib.pyplot as plt
from scipy.integrate import odeint
from matplotlib.ticker import MaxNLocator
import matplotlib.cm as cm
from genfunc_3d import find_actions, check_angle_solution
from scipy.optimize import brentq
import test_potentials as pot
import solver
# For the plot we used YMax = 18, theta_min = 0.05, a 10 by 10 grid, t_max = 10
LM = pot.LMPot()
def rotate_coords(X):
xy = LM.invcoordrot(X[0],X[1])
vxvy = LM.invcoordrot(X[3],X[4])
return np.array([xy[0],xy[1],X[2],vxvy[0],vxvy[1],X[5]])
def unrotate_coords(X):
xy = LM.coordrot(X[0],X[1])
vxvy = LM.coordrot(X[3],X[4])
return np.array([xy[0],xy[1],X[2],vxvy[0],vxvy[1],X[5]])
zlittle = 0.05
YMax = 18.
xymax = LM.coordrot(0.,YMax)
y = np.linspace(0.2,YMax-0.2,50)
E_cons = LM.tot_pot(xymax[0],xymax[1],zlittle)
# print(E_cons)
timeseries=np.linspace(0.,10.,1000)
# timeseries_long=np.linspace(0.,50.,500)
from solver import check_each_direction as ced
from solver import unroll_angles as ua
from genfunc_3d import assess_angmom, eval_mean_error_functions
for i in y:
# i = 8.11111111111
# i = 1.27755102041
xy = LM.coordrot(0.,i)
Phi = LM.tot_pot(xy[0],xy[1],zlittle)
pylittle = 0.01*np.sqrt(2.*(E_cons-Phi))
P = np.sqrt(2.*(E_cons-Phi)-pylittle**2)
for NN,theta in enumerate(np.linspace(0.05,np.pi/2.-0.05,50)):
px = P*np.cos(theta)
pz = P*np.sin(theta)
# px,pz = 116.216249287,200.519902867
# px,pz = 156.389296517, 471.688963618
pxpy = LM.coordrot(px,pylittle)
initial = np.array([xy[0],xy[1],zlittle,pxpy[0],pxpy[1],pz])
results = np.array([rotate_coords(p) for p in odeint(pot.orbit_derivs2,initial,timeseries,args=(LM,),rtol=1e-10,atol=1e-10)])
# results_long = np.array([rotate_coords(p) for p in odeint(pot.orbit_derivs2,initial,timeseries_long,args=(LM,),rtol=1e-5,atol=1e-5)])
# loop = assess_angmom(results_long)
# LL = np.any(loop>0)
# BOX = not LL
L = find_actions(results, timeseries, N_matrix = 6, ifloop=True,ifprint = False)
if L is None:
# note: 'loop' is only set by the commented-out assess_angmom call above, so it is not printed here
print LM.H(initial),i,px,pz
continue
(act,ang,n_vec,toy_aa,para),loop = L
# print(eval_mean_error_functions(act,ang,n_vec,toy_aa,timeseries,withplot=True))
checks,maxgap = ced(n_vec,ua(toy_aa.T[3:].T,np.ones(3)))
TMAX = 1000
counter = 0
while(len(checks)>0 and counter<10):
# print(checks)
if(np.any(maxgap<0.)):
TMAX=TMAX*2.
results = np.array([rotate_coords(p) for p in odeint(pot.orbit_derivs2,initial,np.linspace(0.,10.,TMAX),args=(LM,),rtol=1e-10,atol=1e-10)])
L = find_actions(results, np.linspace(0.,10.,TMAX), N_matrix = 6, ifloop=True,ifprint = False)
print("Finer sampling")
else:
results2 = np.array([rotate_coords(p) for p in odeint(pot.orbit_derivs2,unrotate_coords(results[-1]),timeseries,args=(LM,),rtol=1e-10,atol=1e-10)])
results = np.vstack((results,results2))
print("Longer integration")
# print(results.T[-1])
L = find_actions(results, np.linspace(0.,len(results)/100.,len(results)), N_matrix = 6, ifloop=True,ifprint = False)
# print(len(results))
(act,ang,n_vec,toy_aa,para),loop = L
print(len(results),loop)
checks,maxgap = ced(n_vec,ua(toy_aa.T[3:].T,np.ones(3)))
counter=counter+1
# if(counter==10):
# continue
# print(ang)
minfreq = np.min(np.abs(ang[3:6]))
if(8.*2.*np.pi/minfreq>10.):
print(i,px,pz,"Fewer than 8 fundamental periods")
T = np.linspace(0.,len(results)/100.,len(results))
errors = eval_mean_error_functions(act,ang,n_vec,toy_aa,T,withplot=False)/np.sqrt(len(T))
print LM.H(initial),i,px,pz,act[0],act[1],act[2],' '.join(map(str, loop)),ang[3],ang[4],ang[5], errors[0],errors[1], LM.H(unrotate_coords(results[-1]))
# check_angle_solution(ang,n_vec,toy_aa,timeseries)
# plt.plot(toy_aa.T[3],toy_aa.T[4],'.')
# plt.show()
# plt.plot(toy_aa.T[3],toy_aa.T[5],'.')
# plt.show()
# plt.plot(results.T[0],results.T[1],'.');plt.show()
# F = pot.orbit_integrate(initial,80.,LM)
# results = np.array([rotate_coords(p) for p in F[0]])
# print(len(results))
# plt.plot(results.T[0],results.T[1])
# plt.show()
# timeseries = F[1]
# plt.plot(np.sqrt(results.T[0]**2+results.T[1]**2),results.T[2])
# plt.show()
# timeseries=np.linspace(0.,15.,500)
# results = odeint(pot.orbit_derivs,np.array([1.21,1.,0.6,200.,200.,200.]),timeseries,args=(LM,))
# print LM.H(results[0])
# plt.plot(results.T[0],results.T[1])
# plt.show()
|
kevenli/scrapydd
|
refs/heads/master
|
tests/handlers/test_webui.py
|
1
|
from os import path
from io import BytesIO
from six.moves.urllib.parse import urlencode
from scrapydd.storage import ProjectStorage
from scrapydd.models import session_scope, Project, Spider
from scrapydd.poster.encode import multipart_encode
from scrapydd.schedule import JOB_STATUS_SUCCESS
from ..base import AppTest
class TestDeleteProjectHandler(AppTest):
def test_post(self):
project_name = 'test_project'
self._upload_test_project()
with session_scope() as session:
project = session.query(Project).filter_by(name=project_name).first()
project_storage = ProjectStorage(self._app.settings.get('project_storage_dir'), project)
self.assertTrue(path.exists(project_storage.storage_provider.get_project_eggs_dir(project)))
headers = {'Cookie': "_xsrf=dummy"}
post_data = {'_xsrf': 'dummy'}
res = self.fetch('/projects/%s/delete' % project.id, method="POST", headers=headers,
body=urlencode(post_data))
self.assertEqual(200, res.code)
# do not delete folder
# self.assertFalse(path.exists(project_storage.storage_provider.get_project_eggs_dir(project)))
self.assertEqual(len(project_storage.list_egg_versions()), 0)
self.assertIsNone(session.query(Project).filter_by(name=project_name).first())
self.assertEqual(0, len(session.query(Spider).filter_by(project_id=project.id).all()))
def test_post_with_triggers(self):
project_name = 'test_project'
spider_name = 'error_spider'
self._upload_test_project()
headers = {'Cookie': "_xsrf=dummy"}
with session_scope() as session:
project = session.query(Project)\
.filter_by(name=project_name)\
.first()
spider = list(filter(lambda x: x.name==spider_name, project.spiders))[0]
post_data = {'_xsrf': 'dummy', 'cron': '0 0 0 0 0'}
res = self.fetch('/projects/%s/spiders/%s/triggers' % (project.id,
spider.id),
method='POST',
headers=headers,
body=urlencode(post_data))
self.assertEqual(200, res.code)
post_data = {'_xsrf': 'dummy'}
res = self.fetch('/projects/%s/delete' % project.id,
method="POST",
headers=headers,
body=urlencode(post_data))
self.assertEqual(200, res.code)
class RunSpiderHandlerTest(AppTest):
def init_project(self, project_name):
with session_scope() as session:
project = session.query(Project).filter_by(name=project_name).first()
if project:
self.project_manager.delete_project('', project.id)
self.project = AppTest.init_project()
def test_post_insecure(self):
project_name = 'test_project'
spider_name = 'error_spider'
url = '/projects/%s/spiders/%s/run' % (project_name, spider_name)
res = self.fetch(url, method='POST', body=b'')
self.assertEqual(403, res.code)
def test_post(self):
project_name = 'test_project'
spider_name = 'error_spider'
self.init_project(project_name)
with session_scope() as session:
project = session.query(Project).get(self.project.id)
spider = list(filter(lambda x: x.name == spider_name, project.spiders))[0]
url = '/projects/%s/spiders/%s/run' % (self.project.id, spider.id)
headers = {'Cookie': "_xsrf=dummy"}
post_data = {'_xsrf': 'dummy'}
res = self.fetch(url, method='POST', headers=headers, body=urlencode(post_data))
self.assertEqual(200, res.code)
def test_post_no_project(self):
project_name = 'PROJECT_NOT_EXIST'
spider_name = 'error_spider'
self.init_project(project_name)
url = '/projects/%s/spiders/%s/run' % (project_name, spider_name)
headers = {'Cookie': "_xsrf=dummy"}
post_data = {'_xsrf': 'dummy'}
res = self.fetch(url, method='POST', headers=headers, body=urlencode(post_data))
self.assertEqual(404, res.code)
def test_post_no_spider(self):
project_name = 'test_project'
spider_name = 'SPIDER_NOT_EXIST'
self.init_project(project_name)
url = '/projects/%s/spiders/%s/run' % (project_name, spider_name)
headers = {'Cookie': "_xsrf=dummy"}
post_data = {'_xsrf': 'dummy'}
res = self.fetch(url, method='POST', headers=headers, body=urlencode(post_data))
self.assertEqual(404, res.code)
class ProjectSettingsHandlerTest(AppTest):
def test_get(self):
project_name = 'test_project'
self.init_project()
url = '/projects/%s/settings' % (self.project.id, )
res = self.fetch(url, method='GET')
self.assertEqual(200, res.code)
class UploadProjectTest(AppTest):
def test_get(self):
url = '/uploadproject'
res = self.fetch(url, method='GET')
self.assertEqual(200, res.code)
def test_post(self):
project_name = 'test_project'
post_data = {}
post_data['egg'] = open(path.join(path.dirname(__file__), '..', 'test_project-1.0-py2.7.egg'), 'rb')
post_data['project'] = project_name
post_data['version'] = '1.0'
post_data['_xsrf'] = 'dummy'
datagen, headers = multipart_encode(post_data)
databuffer = b''.join(datagen)
headers['Cookie'] = "_xsrf=dummy"
response = self.fetch('/uploadproject', method='POST', headers=headers, body=databuffer)
self.assertEqual(200, response.code)
with session_scope() as session:
project = session.query(Project).filter_by(name=project_name).first()
self.assertIsNotNone(project)
self.assertEqual(project.name, project_name)
class ItemsFileHandlerTest(AppTest):
def test_get(self):
project = self.init_project()
with session_scope() as session:
project = session.query(Project).get(self.project.id)
spider = list(filter(lambda x: x.name == 'error_spider', project.spiders))[0]
project_id = None
spider_id = None
job_id = None
job = self.scheduler_manager.add_spider_task(session, spider)
items_content = b'{"id":"123"}'
items_file = BytesIO(items_content)
self.scheduler_manager.jobs_running(1, [job.id])
job.status = JOB_STATUS_SUCCESS
self.scheduler_manager.job_finished(job, items_file=items_file)
response = self.fetch('/items/%s/%s/%s.jl' % (project.id, spider.id, job.id))
self.assertEqual(200, response.code)
|
Phoenix-CJ23/stockkernel
|
refs/heads/master
|
tools/perf/scripts/python/sctop.py
|
11180
|
# system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n"
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
|
AmrThabet/CouchPotatoServer
|
refs/heads/master
|
libs/xmpp/dispatcher.py
|
200
|
## dispatcher.py
##
## Copyright (C) 2003-2005 Alexey "Snake" Nezhdanov
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2, or (at your option)
## any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
# $Id: dispatcher.py,v 1.42 2007/05/18 23:18:36 normanr Exp $
"""
Main xmpppy mechanism. Provides library with methods to assign different handlers
to different XMPP stanzas.
Contains one tunable attribute: DefaultTimeout (25 seconds by default). It defines the time that
the Dispatcher.SendAndWaitForResponse method will wait for a reply stanza before giving up.
"""
import simplexml,time,sys
from protocol import *
from client import PlugIn
DefaultTimeout=25
ID=0
class Dispatcher(PlugIn):
""" Ancestor of PlugIn class. Handles XMPP stream, i.e. aware of stream headers.
Can be plugged out/in to restart these headers (used for SASL f.e.). """
def __init__(self):
PlugIn.__init__(self)
self.DBG_LINE='dispatcher'
self.handlers={}
self._expected={}
self._defaultHandler=None
self._pendingExceptions=[]
self._eventHandler=None
self._cycleHandlers=[]
self._exported_methods=[self.Process,self.RegisterHandler,self.RegisterDefaultHandler,\
self.RegisterEventHandler,self.UnregisterCycleHandler,self.RegisterCycleHandler,\
self.RegisterHandlerOnce,self.UnregisterHandler,self.RegisterProtocol,\
self.WaitForResponse,self.SendAndWaitForResponse,self.send,self.disconnect,\
self.SendAndCallForResponse, ]
def dumpHandlers(self):
""" Return set of user-registered callbacks in it's internal format.
Used within the library to carry user handlers set over Dispatcher replugins. """
return self.handlers
def restoreHandlers(self,handlers):
""" Restores user-registered callbacks structure from dump previously obtained via dumpHandlers.
Used within the library to carry user handlers set over Dispatcher replugins. """
self.handlers=handlers
def _init(self):
""" Registers default namespaces/protocols/handlers. Used internally. """
self.RegisterNamespace('unknown')
self.RegisterNamespace(NS_STREAMS)
self.RegisterNamespace(self._owner.defaultNamespace)
self.RegisterProtocol('iq',Iq)
self.RegisterProtocol('presence',Presence)
self.RegisterProtocol('message',Message)
self.RegisterDefaultHandler(self.returnStanzaHandler)
self.RegisterHandler('error',self.streamErrorHandler,xmlns=NS_STREAMS)
def plugin(self, owner):
""" Plug the Dispatcher instance into Client class instance and send initial stream header. Used internally."""
self._init()
for method in self._old_owners_methods:
if method.__name__=='send': self._owner_send=method; break
self._owner.lastErrNode=None
self._owner.lastErr=None
self._owner.lastErrCode=None
self.StreamInit()
def plugout(self):
""" Prepares instance to be destructed. """
self.Stream.dispatch=None
self.Stream.DEBUG=None
self.Stream.features=None
self.Stream.destroy()
def StreamInit(self):
""" Send an initial stream header. """
self.Stream=simplexml.NodeBuilder()
self.Stream._dispatch_depth=2
self.Stream.dispatch=self.dispatch
self.Stream.stream_header_received=self._check_stream_start
self._owner.debug_flags.append(simplexml.DBG_NODEBUILDER)
self.Stream.DEBUG=self._owner.DEBUG
self.Stream.features=None
self._metastream=Node('stream:stream')
self._metastream.setNamespace(self._owner.Namespace)
self._metastream.setAttr('version','1.0')
self._metastream.setAttr('xmlns:stream',NS_STREAMS)
self._metastream.setAttr('to',self._owner.Server)
self._owner.send("<?xml version='1.0'?>%s>"%str(self._metastream)[:-2])
def _check_stream_start(self,ns,tag,attrs):
if ns!=NS_STREAMS or tag!='stream':
raise ValueError('Incorrect stream start: (%s,%s). Terminating.'%(tag,ns))
def Process(self, timeout=0):
""" Check incoming stream for data waiting. If "timeout" is positive - block for as max. this time.
Returns:
1) length of processed data if some data were processed;
2) '0' string if no data were processed but link is alive;
3) 0 (zero) if underlying connection is closed.
Take note that in case of disconnection detect during Process() call
disconnect handlers are called automatically.
"""
for handler in self._cycleHandlers: handler(self)
if len(self._pendingExceptions) > 0:
_pendingException = self._pendingExceptions.pop()
raise _pendingException[0], _pendingException[1], _pendingException[2]
if self._owner.Connection.pending_data(timeout):
try: data=self._owner.Connection.receive()
except IOError: return
self.Stream.Parse(data)
if len(self._pendingExceptions) > 0:
_pendingException = self._pendingExceptions.pop()
raise _pendingException[0], _pendingException[1], _pendingException[2]
if data: return len(data)
return '0' # It means that nothing is received but link is alive.
def RegisterNamespace(self,xmlns,order='info'):
""" Creates internal structures for newly registered namespace.
You can register handlers for this namespace afterwards. By default one namespace
already registered (jabber:client or jabber:component:accept depending on context. """
self.DEBUG('Registering namespace "%s"'%xmlns,order)
self.handlers[xmlns]={}
self.RegisterProtocol('unknown',Protocol,xmlns=xmlns)
self.RegisterProtocol('default',Protocol,xmlns=xmlns)
def RegisterProtocol(self,tag_name,Proto,xmlns=None,order='info'):
""" Used to declare some top-level stanza name to dispatcher.
Needed to start registering handlers for such stanzas.
Iq, message and presence protocols are registered by default. """
if not xmlns: xmlns=self._owner.defaultNamespace
self.DEBUG('Registering protocol "%s" as %s(%s)'%(tag_name,Proto,xmlns), order)
self.handlers[xmlns][tag_name]={type:Proto, 'default':[]}
def RegisterNamespaceHandler(self,xmlns,handler,typ='',ns='', makefirst=0, system=0):
""" Register handler for processing all stanzas for specified namespace. """
self.RegisterHandler('default', handler, typ, ns, xmlns, makefirst, system)
def RegisterHandler(self,name,handler,typ='',ns='',xmlns=None, makefirst=0, system=0):
"""Register user callback as stanzas handler of declared type. Callback must take
(if chained, see later) arguments: dispatcher instance (for replying), incomed
return of previous handlers.
The callback must raise xmpp.NodeProcessed just before return if it want preven
callbacks to be called with the same stanza as argument _and_, more importantly
library from returning stanza to sender with error set (to be enabled in 0.2 ve
Arguments:
"name" - name of stanza. F.e. "iq".
"handler" - user callback.
"typ" - value of stanza's "type" attribute. If not specified any value match
"ns" - namespace of child that stanza must contain.
"chained" - chain together output of several handlers.
"makefirst" - insert handler in the beginning of handlers list instead of
adding it to the end. Note that more common handlers (i.e. w/o "typ" and "
will be called first nevertheless.
"system" - call handler even if NodeProcessed Exception were raised already.
"""
if not xmlns: xmlns=self._owner.defaultNamespace
self.DEBUG('Registering handler %s for "%s" type->%s ns->%s(%s)'%(handler,name,typ,ns,xmlns), 'info')
if not typ and not ns: typ='default'
if not self.handlers.has_key(xmlns): self.RegisterNamespace(xmlns,'warn')
if not self.handlers[xmlns].has_key(name): self.RegisterProtocol(name,Protocol,xmlns,'warn')
if not self.handlers[xmlns][name].has_key(typ+ns): self.handlers[xmlns][name][typ+ns]=[]
if makefirst: self.handlers[xmlns][name][typ+ns].insert(0,{'func':handler,'system':system})
else: self.handlers[xmlns][name][typ+ns].append({'func':handler,'system':system})
def RegisterHandlerOnce(self,name,handler,typ='',ns='',xmlns=None,makefirst=0, system=0):
""" Unregister handler after first call (not implemented yet). """
if not xmlns: xmlns=self._owner.defaultNamespace
self.RegisterHandler(name, handler, typ, ns, xmlns, makefirst, system)
def UnregisterHandler(self,name,handler,typ='',ns='',xmlns=None):
""" Unregister handler. "typ" and "ns" must be specified exactly the same as with registering."""
if not xmlns: xmlns=self._owner.defaultNamespace
if not self.handlers.has_key(xmlns): return
if not typ and not ns: typ='default'
for pack in self.handlers[xmlns][name][typ+ns]:
if handler==pack['func']: break
else: pack=None
try: self.handlers[xmlns][name][typ+ns].remove(pack)
except ValueError: pass
def RegisterDefaultHandler(self,handler):
""" Specify the handler that will be used if no NodeProcessed exception were raised.
This is returnStanzaHandler by default. """
self._defaultHandler=handler
def RegisterEventHandler(self,handler):
""" Register handler that will process events. F.e. "FILERECEIVED" event. """
self._eventHandler=handler
def returnStanzaHandler(self,conn,stanza):
""" Return stanza back to the sender with <feature-not-implemennted/> error set. """
if stanza.getType() in ['get','set']:
conn.send(Error(stanza,ERR_FEATURE_NOT_IMPLEMENTED))
def streamErrorHandler(self,conn,error):
name,text='error',error.getData()
for tag in error.getChildren():
if tag.getNamespace()==NS_XMPP_STREAMS:
if tag.getName()=='text': text=tag.getData()
else: name=tag.getName()
if name in stream_exceptions.keys(): exc=stream_exceptions[name]
else: exc=StreamError
raise exc((name,text))
def RegisterCycleHandler(self,handler):
""" Register handler that will be called on every Dispatcher.Process() call. """
if handler not in self._cycleHandlers: self._cycleHandlers.append(handler)
def UnregisterCycleHandler(self,handler):
""" Unregister handler that will is called on every Dispatcher.Process() call."""
if handler in self._cycleHandlers: self._cycleHandlers.remove(handler)
def Event(self,realm,event,data):
""" Raise some event. Takes three arguments:
1) "realm" - scope of event. Usually a namespace.
2) "event" - the event itself. F.e. "SUCESSFULL SEND".
3) data that comes along with event. Depends on event."""
if self._eventHandler: self._eventHandler(realm,event,data)
def dispatch(self,stanza,session=None,direct=0):
""" Main procedure that performs XMPP stanza recognition and calling apppropriate handlers for it.
Called internally. """
if not session: session=self
session.Stream._mini_dom=None
name=stanza.getName()
if not direct and self._owner._route:
if name == 'route':
if stanza.getAttr('error') == None:
if len(stanza.getChildren()) == 1:
stanza = stanza.getChildren()[0]
name=stanza.getName()
else:
for each in stanza.getChildren():
self.dispatch(each,session,direct=1)
return
elif name == 'presence':
return
elif name in ('features','bind'):
pass
else:
raise UnsupportedStanzaType(name)
if name=='features': session.Stream.features=stanza
xmlns=stanza.getNamespace()
if not self.handlers.has_key(xmlns):
self.DEBUG("Unknown namespace: " + xmlns,'warn')
xmlns='unknown'
if not self.handlers[xmlns].has_key(name):
self.DEBUG("Unknown stanza: " + name,'warn')
name='unknown'
else:
self.DEBUG("Got %s/%s stanza"%(xmlns,name), 'ok')
if stanza.__class__.__name__=='Node': stanza=self.handlers[xmlns][name][type](node=stanza)
typ=stanza.getType()
if not typ: typ=''
stanza.props=stanza.getProperties()
ID=stanza.getID()
session.DEBUG("Dispatching %s stanza with type->%s props->%s id->%s"%(name,typ,stanza.props,ID),'ok')
list=['default'] # we will use all handlers:
if self.handlers[xmlns][name].has_key(typ): list.append(typ) # from very common...
for prop in stanza.props:
if self.handlers[xmlns][name].has_key(prop): list.append(prop)
if typ and self.handlers[xmlns][name].has_key(typ+prop): list.append(typ+prop) # ...to very particular
chain=self.handlers[xmlns]['default']['default']
for key in list:
if key: chain = chain + self.handlers[xmlns][name][key]
output=''
if session._expected.has_key(ID):
user=0
if type(session._expected[ID])==type(()):
cb,args=session._expected[ID]
session.DEBUG("Expected stanza arrived. Callback %s(%s) found!"%(cb,args),'ok')
try: cb(session,stanza,**args)
except Exception, typ:
                    if typ.__class__.__name__!='NodeProcessed': raise
else:
session.DEBUG("Expected stanza arrived!",'ok')
session._expected[ID]=stanza
else: user=1
for handler in chain:
if user or handler['system']:
try:
handler['func'](session,stanza)
except Exception, typ:
                    if typ.__class__.__name__!='NodeProcessed':
self._pendingExceptions.insert(0, sys.exc_info())
return
user=0
if user and self._defaultHandler: self._defaultHandler(session,stanza)
def WaitForResponse(self, ID, timeout=DefaultTimeout):
""" Block and wait until stanza with specific "id" attribute will come.
If no such stanza is arrived within timeout, return None.
If operation failed for some reason then owner's attributes
lastErrNode, lastErr and lastErrCode are set accordingly. """
self._expected[ID]=None
has_timed_out=0
abort_time=time.time() + timeout
self.DEBUG("Waiting for ID:%s with timeout %s..." % (ID,timeout),'wait')
while not self._expected[ID]:
if not self.Process(0.04):
self._owner.lastErr="Disconnect"
return None
if time.time() > abort_time:
self._owner.lastErr="Timeout"
return None
response=self._expected[ID]
del self._expected[ID]
if response.getErrorCode():
self._owner.lastErrNode=response
self._owner.lastErr=response.getError()
self._owner.lastErrCode=response.getErrorCode()
return response
def SendAndWaitForResponse(self, stanza, timeout=DefaultTimeout):
""" Put stanza on the wire and wait for recipient's response to it. """
return self.WaitForResponse(self.send(stanza),timeout)
def SendAndCallForResponse(self, stanza, func, args={}):
""" Put stanza on the wire and call back when recipient replies.
Additional callback arguments can be specified in args. """
self._expected[self.send(stanza)]=(func,args)
def send(self,stanza):
""" Serialise stanza and put it on the wire. Assign an unique ID to it before send.
Returns assigned ID."""
if type(stanza) in [type(''), type(u'')]: return self._owner_send(stanza)
if not isinstance(stanza,Protocol): _ID=None
elif not stanza.getID():
global ID
ID+=1
_ID=`ID`
stanza.setID(_ID)
else: _ID=stanza.getID()
if self._owner._registered_name and not stanza.getAttr('from'): stanza.setAttr('from',self._owner._registered_name)
if self._owner._route and stanza.getName()!='bind':
to=self._owner.Server
if stanza.getTo() and stanza.getTo().getDomain():
to=stanza.getTo().getDomain()
frm=stanza.getFrom()
if frm.getDomain():
frm=frm.getDomain()
route=Protocol('route',to=to,frm=frm,payload=[stanza])
stanza=route
stanza.setNamespace(self._owner.Namespace)
stanza.setParent(self._metastream)
self._owner_send(stanza)
return _ID
def disconnect(self):
""" Send a stream terminator and and handle all incoming stanzas before stream closure. """
self._owner_send('</stream:stream>')
while self.Process(1): pass
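# Usage sketch (illustrative comment, not part of the original module):
# assuming "cl" is an already-connected client whose Dispatcher is plugged in,
# a typical loop registers a handler and polls Process() until the link drops
# (Process() returns a falsy 0 only when the underlying connection is closed):
#
#   def on_message(session, stanza):
#       print 'message from %s' % stanza.getFrom()
#       raise NodeProcessed
#
#   cl.RegisterHandler('message', on_message)
#   while cl.Process(1):
#       pass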
|
DBMandrake/osmc
|
refs/heads/master
|
package/mediacenter-skin-osmc/files/usr/share/kodi/addons/script.module.unidecode/lib/unidecode/x07d.py
|
253
|
data = (
'Ji ', # 0x00
'Cha ', # 0x01
'Zhou ', # 0x02
'Xun ', # 0x03
'Yue ', # 0x04
'Hong ', # 0x05
'Yu ', # 0x06
'He ', # 0x07
'Wan ', # 0x08
'Ren ', # 0x09
'Wen ', # 0x0a
'Wen ', # 0x0b
'Qiu ', # 0x0c
'Na ', # 0x0d
'Zi ', # 0x0e
'Tou ', # 0x0f
'Niu ', # 0x10
'Fou ', # 0x11
'Jie ', # 0x12
'Shu ', # 0x13
'Chun ', # 0x14
'Pi ', # 0x15
'Yin ', # 0x16
'Sha ', # 0x17
'Hong ', # 0x18
'Zhi ', # 0x19
'Ji ', # 0x1a
'Fen ', # 0x1b
'Yun ', # 0x1c
'Ren ', # 0x1d
'Dan ', # 0x1e
'Jin ', # 0x1f
'Su ', # 0x20
'Fang ', # 0x21
'Suo ', # 0x22
'Cui ', # 0x23
'Jiu ', # 0x24
'Zha ', # 0x25
'Kinu ', # 0x26
'Jin ', # 0x27
'Fu ', # 0x28
'Zhi ', # 0x29
'Ci ', # 0x2a
'Zi ', # 0x2b
'Chou ', # 0x2c
'Hong ', # 0x2d
'Zha ', # 0x2e
'Lei ', # 0x2f
'Xi ', # 0x30
'Fu ', # 0x31
'Xie ', # 0x32
'Shen ', # 0x33
'Bei ', # 0x34
'Zhu ', # 0x35
'Qu ', # 0x36
'Ling ', # 0x37
'Zhu ', # 0x38
'Shao ', # 0x39
'Gan ', # 0x3a
'Yang ', # 0x3b
'Fu ', # 0x3c
'Tuo ', # 0x3d
'Zhen ', # 0x3e
'Dai ', # 0x3f
'Zhuo ', # 0x40
'Shi ', # 0x41
'Zhong ', # 0x42
'Xian ', # 0x43
'Zu ', # 0x44
'Jiong ', # 0x45
'Ban ', # 0x46
'Ju ', # 0x47
'Mo ', # 0x48
'Shu ', # 0x49
'Zui ', # 0x4a
'Wata ', # 0x4b
'Jing ', # 0x4c
'Ren ', # 0x4d
'Heng ', # 0x4e
'Xie ', # 0x4f
'Jie ', # 0x50
'Zhu ', # 0x51
'Chou ', # 0x52
'Gua ', # 0x53
'Bai ', # 0x54
'Jue ', # 0x55
'Kuang ', # 0x56
'Hu ', # 0x57
'Ci ', # 0x58
'Geng ', # 0x59
'Geng ', # 0x5a
'Tao ', # 0x5b
'Xie ', # 0x5c
'Ku ', # 0x5d
'Jiao ', # 0x5e
'Quan ', # 0x5f
'Gai ', # 0x60
'Luo ', # 0x61
'Xuan ', # 0x62
'Bing ', # 0x63
'Xian ', # 0x64
'Fu ', # 0x65
'Gei ', # 0x66
'Tong ', # 0x67
'Rong ', # 0x68
'Tiao ', # 0x69
'Yin ', # 0x6a
'Lei ', # 0x6b
'Xie ', # 0x6c
'Quan ', # 0x6d
'Xu ', # 0x6e
'Lun ', # 0x6f
'Die ', # 0x70
'Tong ', # 0x71
'Si ', # 0x72
'Jiang ', # 0x73
'Xiang ', # 0x74
'Hui ', # 0x75
'Jue ', # 0x76
'Zhi ', # 0x77
'Jian ', # 0x78
'Juan ', # 0x79
'Chi ', # 0x7a
'Mian ', # 0x7b
'Zhen ', # 0x7c
'Lu ', # 0x7d
'Cheng ', # 0x7e
'Qiu ', # 0x7f
'Shu ', # 0x80
'Bang ', # 0x81
'Tong ', # 0x82
'Xiao ', # 0x83
'Wan ', # 0x84
'Qin ', # 0x85
'Geng ', # 0x86
'Xiu ', # 0x87
'Ti ', # 0x88
'Xiu ', # 0x89
'Xie ', # 0x8a
'Hong ', # 0x8b
'Xi ', # 0x8c
'Fu ', # 0x8d
'Ting ', # 0x8e
'Sui ', # 0x8f
'Dui ', # 0x90
'Kun ', # 0x91
'Fu ', # 0x92
'Jing ', # 0x93
'Hu ', # 0x94
'Zhi ', # 0x95
'Yan ', # 0x96
'Jiong ', # 0x97
'Feng ', # 0x98
'Ji ', # 0x99
'Sok ', # 0x9a
'Kase ', # 0x9b
'Zong ', # 0x9c
'Lin ', # 0x9d
'Duo ', # 0x9e
'Li ', # 0x9f
'Lu ', # 0xa0
'Liang ', # 0xa1
'Chou ', # 0xa2
'Quan ', # 0xa3
'Shao ', # 0xa4
'Qi ', # 0xa5
'Qi ', # 0xa6
'Zhun ', # 0xa7
'Qi ', # 0xa8
'Wan ', # 0xa9
'Qian ', # 0xaa
'Xian ', # 0xab
'Shou ', # 0xac
'Wei ', # 0xad
'Qi ', # 0xae
'Tao ', # 0xaf
'Wan ', # 0xb0
'Gang ', # 0xb1
'Wang ', # 0xb2
'Beng ', # 0xb3
'Zhui ', # 0xb4
'Cai ', # 0xb5
'Guo ', # 0xb6
'Cui ', # 0xb7
'Lun ', # 0xb8
'Liu ', # 0xb9
'Qi ', # 0xba
'Zhan ', # 0xbb
'Bei ', # 0xbc
'Chuo ', # 0xbd
'Ling ', # 0xbe
'Mian ', # 0xbf
'Qi ', # 0xc0
'Qie ', # 0xc1
'Tan ', # 0xc2
'Zong ', # 0xc3
'Gun ', # 0xc4
'Zou ', # 0xc5
'Yi ', # 0xc6
'Zi ', # 0xc7
'Xing ', # 0xc8
'Liang ', # 0xc9
'Jin ', # 0xca
'Fei ', # 0xcb
'Rui ', # 0xcc
'Min ', # 0xcd
'Yu ', # 0xce
'Zong ', # 0xcf
'Fan ', # 0xd0
'Lu ', # 0xd1
'Xu ', # 0xd2
'Yingl ', # 0xd3
'Zhang ', # 0xd4
'Kasuri ', # 0xd5
'Xu ', # 0xd6
'Xiang ', # 0xd7
'Jian ', # 0xd8
'Ke ', # 0xd9
'Xian ', # 0xda
'Ruan ', # 0xdb
'Mian ', # 0xdc
'Qi ', # 0xdd
'Duan ', # 0xde
'Zhong ', # 0xdf
'Di ', # 0xe0
'Min ', # 0xe1
'Miao ', # 0xe2
'Yuan ', # 0xe3
'Xie ', # 0xe4
'Bao ', # 0xe5
'Si ', # 0xe6
'Qiu ', # 0xe7
'Bian ', # 0xe8
'Huan ', # 0xe9
'Geng ', # 0xea
'Cong ', # 0xeb
'Mian ', # 0xec
'Wei ', # 0xed
'Fu ', # 0xee
'Wei ', # 0xef
'Yu ', # 0xf0
'Gou ', # 0xf1
'Miao ', # 0xf2
'Xie ', # 0xf3
'Lian ', # 0xf4
'Zong ', # 0xf5
'Bian ', # 0xf6
'Yun ', # 0xf7
'Yin ', # 0xf8
'Ti ', # 0xf9
'Gua ', # 0xfa
'Zhi ', # 0xfb
'Yun ', # 0xfc
'Cheng ', # 0xfd
'Chan ', # 0xfe
'Dai ', # 0xff
)
|
ActiveState/code
|
refs/heads/master
|
recipes/Python/128243_Numerical_inversiLaplace_transforms_using_FFT/recipe-128243.py
|
1
|
#################################################################
# Function InvLap(t,omega,sigma,nint), numerically inverts a #
# Laplace transform F(s) into f(t) using the Fast Fourier #
# Transform (FFT) algorithm for a specific time "t", an #
# upper frequency limit "omega", a real parameter "sigma" #
# and the number of integration intervals "nint" . #
# #
# Function F(s) is defined separately as Fs(s) (see code        #
# below). Fs(s) has to be changed accordingly every time the    #
# user wants to invert a different function.                    #
#                                                               #
# I suggest using omega>100 and nint=50*omega. The higher       #
# the values for omega, the more accurate the results will be   #
# in general, but at the expense of longer processing times.    #
# #
# Sigma is a real number which must be a little bigger than     #
# the real part of the rightmost pole of the function F(s). For #
# example, F(s) = 1/s + 1/(s-2) + 1/(s+1) has poles for s=0, #
# s=2 and s=-1. Hence, sigma must be made equal to, say, #
# 2.05 so as to keep all poles at the left of this value. #
# The analytical inverse for this simple function is #
# f(t) = 1 + exp(-t) + exp(2t). For t=1.25, omega=200, #
# nint=10000 and sigma=2.05, the numerical inversion yields #
# f(1.25) ~= 13.456844516, or -0.09% away from the actual #
# analytical result, 13.468998757 (results truncated to 9 #
# decimal places). This is the example used in this code. #
# #
# Creator: Fausto Arinos de Almeida Barbuto (Calgary, Canada) #
# Date: May 18, 2002 #
# E-mail: fausto_barbuto@yahoo.ca #
# #
# Reference: #
# Huddleston, T. and Byrne, P: "Numerical Inversion of #
# Laplace Transforms", University of South Alabama, April #
# 1999 (found at http://www.eng.usouthal.edu/huddleston/ #
# SoftwareSupport/Download/Inversion99.doc) #
# #
# Usage: invoke InvLap(t,omega,sigma,nint), for t>0. #
# #
#################################################################
# We need cmath because F(s) is a function operating on the
# complex argument s = a + bj
from math import ceil
from cmath import *
# *** Driver InvLap function ***
def InvLap(t,omega,sigma,nint):
# Sanity check on some parameters.
omega = ceil(omega)
nint = ceil(nint)
if omega <= 0:
omega = 200
if nint <= 0:
nint = 10000
return (trapezoid(t,omega,sigma,nint))
# *** Function trapezoid computes the numerical inversion. ***
def trapezoid(t,omega,sigma,nint):
sum = 0.0
delta = float(omega)/nint
wi = 0.0
# The for-loop below computes the FFT Inversion Algorithm.
# It is in fact the trapezoidal rule for numerical integration.
for i in range(1,(nint+1)):
witi = complex(0,wi*t)
wf = wi + delta
wfti = complex(0,wf*t)
fi = (exp(witi)*Fs(complex(sigma,wi))).real
ff = (exp(wfti)*Fs(complex(sigma,wf))).real
sum = sum + 0.5*(wf-wi)*(fi+ff)
wi = wf
return ((sum*exp(sigma*t)/pi).real)
# *** The Laplace function F(s) is defined here. ***
def Fs(s):
return (1.0/s + 1.0/(s+1.0) + 1.0/(s-2.0))
# Function InvLap(t,omega,sigma,nint) is invoked.
print InvLap(1.25,200,2.05,10000)
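# Illustrative variation (an assumption-labeled sketch, not part of the
# original recipe): to invert a different transform, only Fs(s) needs to be
# redefined. For F(s) = 1/(s**2 + 1), whose analytical inverse is sin(t) and
# whose poles (+/-j) have real part 0, a sigma slightly above 0 suffices:
#
#   def Fs(s):
#       return 1.0/(s*s + 1.0)
#
#   print InvLap(1.25, 200, 0.05, 10000)  # should approximate sin(1.25) ~= 0.949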
|
atosatto/ansible-minio
|
refs/heads/master
|
molecule/default/tests/test_minio_default.py
|
1
|
import os
import yaml
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.fixture()
def AnsibleDefaults():
with open('../../defaults/main.yml', 'r') as stream:
return yaml.load(stream)
@pytest.mark.parametrize('minio_bin_var', [
'minio_server_bin',
'minio_client_bin',
])
def test_minio_installed(host, AnsibleDefaults, minio_bin_var):
f = host.file(AnsibleDefaults[minio_bin_var])
assert f.exists
assert f.user == 'root'
assert f.group == 'root'
assert oct(f.mode) == '0755'
def test_minio_server_data_directories(host, AnsibleDefaults):
for datadir in AnsibleDefaults['minio_server_datadirs']:
d = host.file(datadir)
assert d.is_directory
assert d.exists
assert d.user == AnsibleDefaults['minio_user']
assert d.group == AnsibleDefaults['minio_group']
assert oct(d.mode) == '0750'
def test_minio_server_webserver(host):
    assert host.socket('tcp://127.0.0.1:9091').is_listening
|
allotria/intellij-community
|
refs/heads/master
|
python/testData/postfix/isNotNone/nonApplicable.py
|
39
|
a = 1.ifnn<caret>
|
Denisolt/Tensorflow_Chat_Bot
|
refs/heads/master
|
local/lib/python2.7/site-packages/tensorflow/python/ops/embedding_ops.py
|
4
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Operations for embeddings."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
def embedding_lookup(params, ids, partition_strategy="mod", name=None,
validate_indices=True, max_norm=None):
"""Looks up `ids` in a list of embedding tensors.
This function is used to perform parallel lookups on the list of
tensors in `params`. It is a generalization of
[`tf.gather()`](../../api_docs/python/array_ops.md#gather), where `params` is
interpreted as a partitioning of a large embedding tensor. `params` may be
a `PartitionedVariable` as returned by using `tf.get_variable()` with a
partitioner.
If `len(params) > 1`, each element `id` of `ids` is partitioned between
the elements of `params` according to the `partition_strategy`.
In all strategies, if the id space does not evenly divide the number of
partitions, each of the first `(max_id + 1) % len(params)` partitions will
be assigned one more id.
If `partition_strategy` is `"mod"`, we assign each id to partition
`p = id % len(params)`. For instance,
13 ids are split across 5 partitions as:
`[[0, 5, 10], [1, 6, 11], [2, 7, 12], [3, 8], [4, 9]]`
If `partition_strategy` is `"div"`, we assign ids to partitions in a
contiguous manner. In this case, 13 ids are split across 5 partitions as:
`[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10], [11, 12]]`
The results of the lookup are concatenated into a dense
tensor. The returned tensor has shape `shape(ids) + shape(params)[1:]`.
Args:
params: A list of tensors with the same type and which can be concatenated
along dimension 0. Alternatively, a `PartitionedVariable`, created by
partitioning along dimension 0. Each element must be appropriately sized
for the given `partition_strategy`.
ids: A `Tensor` with type `int32` or `int64` containing the ids to be looked
up in `params`.
partition_strategy: A string specifying the partitioning strategy, relevant
if `len(params) > 1`. Currently `"div"` and `"mod"` are supported. Default
is `"mod"`.
name: A name for the operation (optional).
validate_indices: Whether or not to validate gather indices.
max_norm: If not None, embedding values are l2-normalized to the value of
max_norm.
Returns:
A `Tensor` with the same type as the tensors in `params`.
Raises:
ValueError: If `params` is empty.
"""
if params is None or params == []: # pylint: disable=g-explicit-bool-comparison
raise ValueError("Need at least one param")
if isinstance(params, variables.PartitionedVariable):
params = list(params) # Iterate to get the underlying Variables.
if not isinstance(params, list):
params = [params]
def maybe_normalize(x):
if max_norm is not None:
if x.get_shape().ndims is not None:
ndims = x.get_shape().ndims
else:
ndims = array_ops.size(array_ops.shape(x))
return clip_ops.clip_by_norm(x, max_norm, axes=list(range(1, ndims)))
return x
with ops.name_scope(name, "embedding_lookup", params + [ids]) as name:
np = len(params) # Number of partitions
params = ops.convert_n_to_tensor_or_indexed_slices(params, name="params")
if np == 1:
with ops.colocate_with(params[0]):
# TODO(apassos): implement the sharded version as well.
if isinstance(params[0], resource_variable_ops.ResourceVariable):
ret = params[0].sparse_read(ids, name=name)
else:
ret = array_ops.gather(params[0], ids, name=name,
validate_indices=validate_indices)
return maybe_normalize(ret)
else:
ids = ops.convert_to_tensor(ids, name="ids")
flat_ids = array_ops.reshape(ids, [-1])
original_indices = math_ops.range(array_ops.size(flat_ids))
# Create p_assignments and set new_ids depending on the strategy.
if partition_strategy == "mod":
p_assignments = flat_ids % np
new_ids = flat_ids // np
elif partition_strategy == "div":
# Compute num_total_ids as the sum of dim-0 of params, then assign to
# partitions based on a constant number of ids per partition. Optimize
# if we already know the full shape statically.
dim_0_size = params[0].get_shape()[0]
for p in xrange(1, np):
dim_0_size += params[p].get_shape()[0]
if dim_0_size.value:
num_total_ids = constant_op.constant(dim_0_size.value, flat_ids.dtype)
else:
dim_0_sizes = []
for p in xrange(np):
if params[p].get_shape()[0].value is not None:
dim_0_sizes.append(params[p].get_shape()[0].value)
else:
with ops.colocate_with(params[p]):
dim_0_sizes.append(array_ops.shape(params[p])[0])
num_total_ids = math_ops.reduce_sum(
math_ops.cast(array_ops.pack(dim_0_sizes), flat_ids.dtype))
ids_per_partition = num_total_ids // np
extras = num_total_ids % np
p_assignments = math_ops.maximum(
flat_ids // (ids_per_partition + 1),
(flat_ids - extras) // ids_per_partition)
# Emulate a conditional using a boolean indicator tensor
is_in_first_extras_partitions = math_ops.cast(
p_assignments < extras, flat_ids.dtype)
new_ids = (
is_in_first_extras_partitions * (
flat_ids % (ids_per_partition + 1)) +
(1 - is_in_first_extras_partitions) * (
(flat_ids - extras) % ids_per_partition))
else:
raise ValueError("Unrecognized partition strategy: " +
partition_strategy)
# Cast partition assignments to int32 for use in dynamic_partition.
# There really should not be more than 2^32 partitions.
p_assignments = math_ops.cast(p_assignments, dtypes.int32)
# Partition list of ids based on assignments into np separate lists
gather_ids = data_flow_ops.dynamic_partition(new_ids, p_assignments, np)
# Similarly, partition the original indices.
pindices = data_flow_ops.dynamic_partition(original_indices,
p_assignments, np)
# Do np separate lookups, finding embeddings for plist[p] in params[p]
partitioned_result = []
for p in xrange(np):
with ops.colocate_with(params[p]):
partitioned_result.append(array_ops.gather(
params[p], gather_ids[p],
validate_indices=validate_indices))
# Stitch these back together
ret = data_flow_ops.dynamic_stitch(pindices, partitioned_result,
name=name)
# Reshape to reverse the flattening of ids.
element_shape = params[0].get_shape()[1:]
for p in params[1:]:
element_shape = element_shape.merge_with(p.get_shape()[1:])
if element_shape.is_fully_defined():
ret = array_ops.reshape(ret, array_ops.concat(0, [
array_ops.shape(ids), element_shape]))
else:
# It's important that we compute params[0].shape on the right device
# to avoid data motion.
with ops.colocate_with(params[0]):
params_shape = array_ops.shape(params[0])
ret = array_ops.reshape(ret, array_ops.concat(0, [
array_ops.shape(ids), array_ops.slice(params_shape, [1], [-1])]))
# output shape = ids.shape + params[*].shape[1:]
# Normally the reshape is sufficient, but setting shape explicitly
# teaches shape inference that params[1:].get_shape() matters.
ret.set_shape(ids.get_shape().concatenate(element_shape))
return maybe_normalize(ret)
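# Illustrative usage sketch (comment only; the variable names are assumptions,
# not from this module): with a single unpartitioned parameter tensor the call
# reduces to a plain gather.
#
#   params = tf.get_variable("emb", [100, 16])   # 100-id vocabulary
#   ids = tf.constant([7, 42, 7])
#   vecs = embedding_lookup(params, ids)         # shape [3, 16]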
def embedding_lookup_sparse(params, sp_ids, sp_weights,
partition_strategy="mod",
name=None,
combiner=None,
max_norm=None):
"""Computes embeddings for the given ids and weights.
This op assumes that there is at least one id for each row in the dense tensor
represented by sp_ids (i.e. there are no rows with empty features), and that
all the indices of sp_ids are in canonical row-major order.
It also assumes that all id values lie in the range [0, p0), where p0
is the sum of the size of params along dimension 0.
Args:
params: A single tensor representing the complete embedding tensor,
or a list of P tensors all of same shape except for the first dimension,
representing sharded embedding tensors. Alternatively, a
`PartitionedVariable`, created by partitioning along dimension 0.
sp_ids: N x M SparseTensor of int64 ids (typically from FeatureValueToId),
where N is typically batch size and M is arbitrary.
sp_weights: either a SparseTensor of float / double weights, or None to
indicate all weights should be taken to be 1. If specified, sp_weights
must have exactly the same shape and indices as sp_ids.
partition_strategy: A string specifying the partitioning strategy, relevant
if `len(params) > 1`. Currently `"div"` and `"mod"` are supported. Default
is `"mod"`. See `tf.nn.embedding_lookup` for more details.
name: Optional name for the op.
combiner: A string specifying the reduction op. Currently "mean", "sqrtn"
and "sum" are supported.
"sum" computes the weighted sum of the embedding results for each row.
"mean" is the weighted sum divided by the total weight.
"sqrtn" is the weighted sum divided by the square root of the sum of the
squares of the weights.
max_norm: If not None, each embedding is normalized to have l2 norm equal
to max_norm before combining.
Returns:
A dense tensor representing the combined embeddings for the
sparse ids. For each row in the dense tensor represented by sp_ids, the op
looks up the embeddings for all ids in that row, multiplies them by the
corresponding weight, and combines these embeddings as specified.
In other words, if
shape(combined params) = [p0, p1, ..., pm]
and
shape(sp_ids) = shape(sp_weights) = [d0, d1, ..., dn]
then
shape(output) = [d0, d1, ..., dn-1, p1, ..., pm].
For instance, if params is a 10x20 matrix, and sp_ids / sp_weights are
[0, 0]: id 1, weight 2.0
[0, 1]: id 3, weight 0.5
[1, 0]: id 0, weight 1.0
[2, 3]: id 1, weight 3.0
with `combiner`="mean", then the output will be a 3x20 matrix where
output[0, :] = (params[1, :] * 2.0 + params[3, :] * 0.5) / (2.0 + 0.5)
output[1, :] = params[0, :] * 1.0
output[2, :] = params[1, :] * 3.0
Raises:
TypeError: If sp_ids is not a SparseTensor, or if sp_weights is neither
None nor SparseTensor.
ValueError: If combiner is not one of {"mean", "sqrtn", "sum"}.
"""
if combiner is None:
logging.warn("The default value of combiner will change from \"mean\" "
"to \"sqrtn\" after 2016/11/01.")
combiner = "mean"
if combiner not in ("mean", "sqrtn", "sum"):
raise ValueError("combiner must be one of 'mean', 'sqrtn' or 'sum'")
if isinstance(params, variables.PartitionedVariable):
params = list(params) # Iterate to get the underlying Variables.
if not isinstance(params, list):
params = [params]
if not isinstance(sp_ids, sparse_tensor.SparseTensor):
raise TypeError("sp_ids must be SparseTensor")
ignore_weights = sp_weights is None
if not ignore_weights:
if not isinstance(sp_weights, sparse_tensor.SparseTensor):
raise TypeError("sp_weights must be either None or SparseTensor")
sp_ids.values.get_shape().assert_is_compatible_with(
sp_weights.values.get_shape())
sp_ids.indices.get_shape().assert_is_compatible_with(
sp_weights.indices.get_shape())
sp_ids.shape.get_shape().assert_is_compatible_with(
sp_weights.shape.get_shape())
# TODO(yleon): Add enhanced node assertions to verify that sp_ids and
# sp_weights have equal indices and shapes.
with ops.name_scope(name, "embedding_lookup_sparse",
params + [sp_ids]) as name:
segment_ids = sp_ids.indices[:, 0]
if segment_ids.dtype != dtypes.int32:
segment_ids = math_ops.cast(segment_ids, dtypes.int32)
ids = sp_ids.values
if ignore_weights:
ids, idx = array_ops.unique(ids)
else:
idx = None
embeddings = embedding_lookup(
params, ids, partition_strategy=partition_strategy, max_norm=max_norm)
if not ignore_weights:
weights = sp_weights.values
if weights.dtype != embeddings.dtype:
weights = math_ops.cast(weights, embeddings.dtype)
# Reshape weights to allow broadcast
ones = array_ops.fill(
array_ops.expand_dims(array_ops.rank(embeddings) - 1, 0), 1)
bcast_weights_shape = array_ops.concat(0, [
array_ops.shape(weights), ones])
orig_weights_shape = weights.get_shape()
weights = array_ops.reshape(weights, bcast_weights_shape)
# Set the weight shape, since after reshaping to bcast_weights_shape,
# the shape becomes None.
if embeddings.get_shape().ndims is not None:
weights.set_shape(orig_weights_shape.concatenate(
[1 for _ in range(embeddings.get_shape().ndims - 1)]))
embeddings *= weights
if combiner == "sum":
embeddings = math_ops.segment_sum(embeddings, segment_ids, name=name)
elif combiner == "mean":
embeddings = math_ops.segment_sum(embeddings, segment_ids)
weight_sum = math_ops.segment_sum(weights, segment_ids)
embeddings = math_ops.div(embeddings, weight_sum, name=name)
elif combiner == "sqrtn":
embeddings = math_ops.segment_sum(embeddings, segment_ids)
weights_squared = math_ops.pow(weights, 2)
weight_sum = math_ops.segment_sum(weights_squared, segment_ids)
weight_sum_sqrt = math_ops.sqrt(weight_sum)
embeddings = math_ops.div(embeddings, weight_sum_sqrt, name=name)
else:
assert False, "Unrecognized combiner"
else:
assert idx is not None
if combiner == "sum":
embeddings = math_ops.sparse_segment_sum(embeddings, idx, segment_ids,
name=name)
elif combiner == "mean":
embeddings = math_ops.sparse_segment_mean(embeddings, idx, segment_ids,
name=name)
elif combiner == "sqrtn":
embeddings = math_ops.sparse_segment_sqrt_n(embeddings, idx,
segment_ids, name=name)
else:
assert False, "Unrecognized combiner"
return embeddings
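# Worked combiner example (illustrative comment, restating the docstring math):
# for one row with ids [1, 3] and weights [2.0, 0.5],
#   "sum"   -> 2.0*params[1] + 0.5*params[3]
#   "mean"  -> (2.0*params[1] + 0.5*params[3]) / (2.0 + 0.5)
#   "sqrtn" -> (2.0*params[1] + 0.5*params[3]) / sqrt(2.0**2 + 0.5**2)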
|
StoicLoofah/sc2reader
|
refs/heads/master
|
sc2reader/events/__init__.py
|
4
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals, division
# Export all events of all types to the package interface
from sc2reader.events import base, game, message, tracker
from sc2reader.events.base import *
from sc2reader.events.game import *
from sc2reader.events.message import *
from sc2reader.events.tracker import *
|
stackforge/networking-mlnx
|
refs/heads/master
|
networking_mlnx/plugins/ml2/drivers/mlnx/agent/config.py
|
2
|
# Copyright 2013 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron.conf.agent import common as config
from oslo_config import cfg
from networking_mlnx._i18n import _
DEFAULT_INTERFACE_MAPPINGS = []
eswitch_opts = [
cfg.ListOpt('physical_interface_mappings',
default=DEFAULT_INTERFACE_MAPPINGS,
help=_("List of <physical_network>:<physical_interface>")),
cfg.StrOpt('daemon_endpoint',
default='tcp://127.0.0.1:60001',
help=_('eswitch daemon end point')),
    cfg.IntOpt('request_timeout', default=3000,
               help=_("The number of milliseconds the agent will wait for a "
                      "response to a request to the daemon.")),
    cfg.IntOpt('retries', default=3,
               help=_("The number of times the agent will retry a request "
                      "to the daemon before giving up")),
    cfg.IntOpt('backoff_rate', default=2,
               help=_("Backoff rate multiplier for the waiting period between "
                      "retries of a request to the daemon, i.e. a value of 2 "
                      "will double the request timeout each retry")),
]
agent_opts = [
cfg.IntOpt('polling_interval', default=2,
help=_("The number of seconds the agent will wait between "
"polling for local device changes.")),
]
cfg.CONF.register_opts(eswitch_opts, "ESWITCH")
cfg.CONF.register_opts(agent_opts, "AGENT")
config.register_agent_state_opts_helper(cfg.CONF)
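# Illustrative configuration sketch (values are assumptions, shown only to
# make the registered options above concrete):
#
#   [ESWITCH]
#   physical_interface_mappings = physnet1:eth2
#   daemon_endpoint = tcp://127.0.0.1:60001
#   request_timeout = 3000
#
#   [AGENT]
#   polling_interval = 2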
|
markmc/rhevm-api
|
refs/heads/master
|
python/lib/rhev/test/test_error.py
|
1
|
#
# This file is part of python-rhev. python-rhev is free software that is
# made available under the MIT license. Consult the file "LICENSE" that
# is distributed together with this file for the exact licensing terms.
#
# python-rhev is copyright (c) 2010 by the python-rhev authors. See the
# file "AUTHORS" for a complete overview.
from rhev import schema
from rhev.object import create
from rhev.error import *
from rhev.test.base import BaseTest
from rhev.test import util
from nose.tools import assert_raises
class TestError(BaseTest):
def test_prepare(self):
dc = schema.new(schema.DataCenter)
dc.name = util.random_name('dc')
dc.storage_type = 'NFS'
dc = self.api.create(dc)
assert dc is not None
self.store.dc = dc
def test_error(self):
error = Error('foo')
assert str(error) == 'foo'
error = Error('foo', arg='value', arg2='value2')
assert str(error) == 'foo'
assert error.arg == 'value'
assert error.arg2 == 'value2'
def test_create(self):
error = create(Error, 'foo')
assert isinstance(error, Error)
assert str(error) == 'foo'
def test_illegal_action(self):
dc = self.store.dc
assert_raises(IllegalAction, self.api.action, dc, 'foo')
def test_finalize(self):
dc = self.store.dc
self.api.delete(dc)
|
scott-abt/junos-interface-utilization
|
refs/heads/master
|
example_creds.py
|
1
|
#!/usr/bin/env python3
'''
Copy this file to mycreds.py and change the details to something that works for
your environment.
'''
default = {"root": "password",}
the_creds = [default, ]
|
ivanbusthomi/inasafe
|
refs/heads/develop
|
safe/definitions/metadata.py
|
8
|
# coding=utf-8
"""Metadata Constants."""
import os
from safe.metadata.property import (
CharacterStringProperty,
DateProperty,
UrlProperty,
DictionaryProperty,
IntegerProperty,
BooleanProperty,
FloatProperty,
ListProperty,
)
__copyright__ = "Copyright 2017, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "info@inasafe.org"
__revision__ = '$Format:%H$'
# XML to python types conversions
TYPE_CONVERSIONS = {
'gco:CharacterString': CharacterStringProperty,
'gco:Date': DateProperty,
'gmd:URL': UrlProperty,
'gco:Dictionary': DictionaryProperty,
'gco:Integer': IntegerProperty,
'gco:Boolean': BooleanProperty,
'gco:Float': FloatProperty,
'gco:List': ListProperty,
}
# XML Namespaces
METADATA_XML_TEMPLATE = os.path.join(
os.path.dirname(__file__),
'..',
'metadata',
'iso_19115_template.xml')
|
Deepakpatle/phantomjs
|
refs/heads/master
|
src/qt/qtwebkit/Tools/TestResultServer/model/datastorefile.py
|
144
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from datetime import datetime
import logging
from google.appengine.ext import db
MAX_DATA_ENTRY_PER_FILE = 10
MAX_ENTRY_LEN = 1000 * 1000
class DataEntry(db.Model):
"""Datastore entry that stores one segmant of file data
(<1000*1000 bytes).
"""
data = db.BlobProperty()
@classmethod
def get(cls, key):
return db.get(key)
def get_data(self, key):
return db.get(key)
class DataStoreFile(db.Model):
"""This class stores file in datastore.
If a file is oversize (>1000*1000 bytes), the file is split into
multiple segments and stored in multiple datastore entries.
"""
name = db.StringProperty()
data_keys = db.ListProperty(db.Key)
# keys to the data store entries that can be reused for new data.
    # If it is empty, create a new DataEntry.
new_data_keys = db.ListProperty(db.Key)
date = db.DateTimeProperty(auto_now_add=True)
data = None
def delete_data(self, keys=None):
if not keys:
keys = self.data_keys
for key in keys:
data_entry = DataEntry.get(key)
if data_entry:
data_entry.delete()
def save_data(self, data):
if not data:
logging.warning("No data to save.")
return False
if len(data) > (MAX_DATA_ENTRY_PER_FILE * MAX_ENTRY_LEN):
logging.error("File too big, can't save to datastore: %dK",
len(data) / 1024)
return False
start = 0
# Use the new_data_keys to store new data. If all new data are saved
# successfully, swap new_data_keys and data_keys so we can reuse the
# data_keys entries in next run. If unable to save new data for any
# reason, only the data pointed by new_data_keys may be corrupted,
# the existing data_keys data remains untouched. The corrupted data
# in new_data_keys will be overwritten in next update.
keys = self.new_data_keys
self.new_data_keys = []
while start < len(data):
if keys:
key = keys[0]
data_entry = DataEntry.get(key)
if not data_entry:
logging.warning("Found key, but no data entry: %s", key)
data_entry = DataEntry()
else:
data_entry = DataEntry()
data_entry.data = db.Blob(data[start: start + MAX_ENTRY_LEN])
try:
data_entry.put()
except Exception, err:
logging.error("Failed to save data store entry: %s", err)
if keys:
self.delete_data(keys)
return False
logging.info("Data saved: %s.", data_entry.key())
self.new_data_keys.append(data_entry.key())
if keys:
keys.pop(0)
start = start + MAX_ENTRY_LEN
if keys:
self.delete_data(keys)
temp_keys = self.data_keys
self.data_keys = self.new_data_keys
self.new_data_keys = temp_keys
self.data = data
return True
def load_data(self):
if not self.data_keys:
logging.warning("No data to load.")
return None
data = []
for key in self.data_keys:
logging.info("Loading data for key: %s.", key)
data_entry = DataEntry.get(key)
if not data_entry:
logging.error("No data found for key: %s.", key)
return None
data.append(data_entry.data)
self.data = "".join(data)
return self.data
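# Sizing sketch (illustrative comment): with MAX_ENTRY_LEN = 1000*1000, a
# 2,500,000-byte payload is split by save_data() into three DataEntry rows
# (1,000,000 + 1,000,000 + 500,000 bytes), well under the
# MAX_DATA_ENTRY_PER_FILE = 10 ceiling it enforces.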
|
robertjacobs/zuros
|
refs/heads/master
|
zuros_control/zuros_emergency_stop/src/emergency_stop.py
|
1
|
#!/usr/bin/env python
# Copyright (c) 2013-2014 ZUYD Research
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author Robert Jacobs/info@rjpjacobs.nl
"""
This checks the scan topic and looks for -inf (minus infinity) values.
If there are too many -infs, there is probably an object nearby -> emergency stop.
"""
import roslib; roslib.load_manifest('zuros_emergency_stop')
import rospy
from sensor_msgs.msg import LaserScan
from std_msgs.msg import Bool
import os
import math
## Class for checking the status of the scan topic depth points
class EmergencyChecker(object):
## Constructor
def __init__(self):
self.nan_count = 0
self.emergency_stop = False
# Create a publisher for the emergency stop topic
self.pub = rospy.Publisher('emergency_stop', Bool)
## Callback method
# Gets called when there is new data in the scan topic
def callback_scan(self, data):
self.inf_count = 0
for r in range (0,640):
#the scan publisher (zuros_depth) publishes -inf values for each point < 0.8
if(math.isinf(data.ranges[r])):
self.inf_count = self.inf_count + 1
if(self.inf_count >= 5 and self.emergency_stop == False):
self.emergency_stop = True
rospy.loginfo("EMERGENCY STOP ISSUED")
else:
if(self.inf_count < 5 and self.emergency_stop == True):
self.emergency_stop = False
rospy.loginfo("EMERGENCY STOP RELEASED")
self.pub.publish(Bool(self.emergency_stop))
## Check if this is a class call or a program call
if __name__ == '__main__':
rospy.init_node('emergency_scanner', anonymous=False)
# Start
emc = EmergencyChecker()
rospy.Subscriber("scan", LaserScan, emc.callback_scan)
# Spin
rospy.spin()
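# Behaviour sketch (illustrative comment): each LaserScan carries 640 range
# readings; once 5 or more of them are -inf (published by zuros_depth for
# points closer than 0.8), callback_scan() latches the stop and publishes
# True on 'emergency_stop' until the -inf count drops below 5 again.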
|
mheap/ansible
|
refs/heads/devel
|
test/integration/targets/wait_for/files/testserver.py
|
222
|
import sys
if __name__ == '__main__':
if sys.version_info[0] >= 3:
import http.server
import socketserver
PORT = int(sys.argv[1])
Handler = http.server.SimpleHTTPRequestHandler
httpd = socketserver.TCPServer(("", PORT), Handler)
httpd.serve_forever()
else:
import mimetypes
mimetypes.init()
mimetypes.add_type('application/json', '.json')
import SimpleHTTPServer
SimpleHTTPServer.test()
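# Usage sketch (illustrative comment): the port is taken from the command
# line, e.g. `python testserver.py 8080`, and the current directory is then
# served over HTTP on both Python 2 and Python 3.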
|
niktre/espressopp
|
refs/heads/master
|
contrib/mpi4py/mpi4py-2.0.0/demo/wrap-swig/test.py
|
54
|
from mpi4py import MPI
import helloworld as hw
null = MPI.COMM_NULL
hw.sayhello(null)
comm = MPI.COMM_WORLD
hw.sayhello(comm)
try:
hw.sayhello(list())
except:
pass
else:
assert 0, "exception not raised"
|
tvtsoft/odoo8
|
refs/heads/master
|
addons/account/models/account_payment.py
|
1
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api, _
from openerp.exceptions import UserError, ValidationError
MAP_INVOICE_TYPE_PARTNER_TYPE = {
'out_invoice': 'customer',
'out_refund': 'customer',
'in_invoice': 'supplier',
'in_refund': 'supplier',
}
# Since invoice amounts are unsigned, this is how we know if money comes in or goes out
MAP_INVOICE_TYPE_PAYMENT_SIGN = {
'out_invoice': 1,
'in_refund': 1,
'in_invoice': -1,
'out_refund': -1,
}
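# Sign-convention sketch (illustrative comment, hypothetical figures):
# registering one out_invoice of 100.0 together with one out_refund of 20.0
# gives total_amount = 100.0*1 + 20.0*(-1) = 80.0, so the register-payments
# wizard below defaults to payment_type 'inbound' with amount abs(80.0).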
class account_payment_method(models.Model):
_name = "account.payment.method"
_description = "Payment Methods"
name = fields.Char(required=True)
code = fields.Char(required=True) # For internal identification
payment_type = fields.Selection([('inbound', 'Inbound'), ('outbound', 'Outbound')], required=True)
class account_abstract_payment(models.AbstractModel):
_name = "account.abstract.payment"
_description = "Contains the logic shared between models which allows to register payments"
payment_type = fields.Selection([('outbound', 'Send Money'), ('inbound', 'Receive Money')], string='Payment Type', required=True)
payment_method_id = fields.Many2one('account.payment.method', string='Payment Type', required=True, oldname="payment_method")
payment_method_code = fields.Char(related='payment_method_id.code',
help="Technical field used to adapt the interface to the payment type selected.")
partner_type = fields.Selection([('customer', 'Customer'), ('supplier', 'Vendor')])
partner_id = fields.Many2one('res.partner', string='Partner')
amount = fields.Monetary(string='Payment Amount', required=True)
currency_id = fields.Many2one('res.currency', string='Currency', required=True, default=lambda self: self.env.user.company_id.currency_id)
payment_date = fields.Date(string='Payment Date', default=fields.Date.context_today, required=True, copy=False)
communication = fields.Char(string='Memo')
journal_id = fields.Many2one('account.journal', string='Payment Method', required=True, domain=[('type', 'in', ('bank', 'cash'))])
company_id = fields.Many2one('res.company', related='journal_id.company_id', string='Company', readonly=True)
hide_payment_method = fields.Boolean(compute='_compute_hide_payment_method',
help="Technical field used to hide the payment method if the selected journal has only one available which is 'manual'")
@api.one
@api.constrains('amount')
def _check_amount(self):
if not self.amount > 0.0:
raise ValidationError('The payment amount must be strictly positive.')
@api.one
@api.depends('payment_type', 'journal_id')
def _compute_hide_payment_method(self):
if not self.journal_id:
self.hide_payment_method = True
return
journal_payment_methods = self.payment_type == 'inbound' and self.journal_id.inbound_payment_method_ids or self.journal_id.outbound_payment_method_ids
self.hide_payment_method = len(journal_payment_methods) == 1 and journal_payment_methods[0].code == 'manual'
@api.onchange('journal_id')
def _onchange_journal(self):
if self.journal_id:
self.currency_id = self.journal_id.currency_id or self.company_id.currency_id
# Set default payment method (we consider the first to be the default one)
payment_methods = self.payment_type == 'inbound' and self.journal_id.inbound_payment_method_ids or self.journal_id.outbound_payment_method_ids
self.payment_method_id = payment_methods and payment_methods[0] or False
# Set payment method domain (restrict to methods enabled for the journal and to selected payment type)
payment_type = self.payment_type in ('outbound', 'transfer') and 'outbound' or 'inbound'
return {'domain': {'payment_method_id': [('payment_type', '=', payment_type), ('id', 'in', payment_methods.ids)]}}
return {}
def _get_invoices(self):
""" Return the invoices of the payment. Must be overridden """
raise NotImplementedError
def _compute_total_invoices_amount(self):
""" Compute the sum of the residual of invoices, expressed in the payment currency """
total = 0
payment_currency = self.currency_id or self.journal_id.currency_id or self.journal_id.company_id.currency_id
for inv in self._get_invoices():
total += inv.residual_signed
if self.company_id and self.company_id.currency_id != payment_currency:
total = self.company_id.currency_id.with_context(date=self.payment_date).compute(total, payment_currency)
return abs(total)
class account_register_payments(models.TransientModel):
_name = "account.register.payments"
_inherit = 'account.abstract.payment'
_description = "Register payments on multiple invoices"
@api.onchange('payment_type')
def _onchange_payment_type(self):
if self.payment_type:
return {'domain': {'payment_method_id': [('payment_type', '=', self.payment_type)]}}
def _get_invoices(self):
return self.env['account.invoice'].browse(self._context.get('active_ids'))
@api.model
def default_get(self, fields):
rec = super(account_register_payments, self).default_get(fields)
context = dict(self._context or {})
active_model = context.get('active_model')
active_ids = context.get('active_ids')
# Checks on context parameters
if not active_model or not active_ids:
raise UserError(_("Programmation error: wizard action executed without active_model or active_ids in context."))
if active_model != 'account.invoice':
raise UserError(_("Programmation error: the expected model for this action is 'account.invoice'. The provided one is '%d'." % active_model))
# Checks on received invoice records
invoices = self.env[active_model].browse(active_ids)
if any(invoice.state != 'open' for invoice in invoices):
raise UserError(_("You can only register payments for open invoices"))
if any(inv.partner_id != invoices[0].partner_id for inv in invoices):
raise UserError(_("In order to pay multiple invoices at once, they must belong to the same partner."))
if any(MAP_INVOICE_TYPE_PARTNER_TYPE[inv.type] != MAP_INVOICE_TYPE_PARTNER_TYPE[invoices[0].type] for inv in invoices):
raise UserError(_("You cannot mix customer invoices and vendor bills in a single payment."))
if any(inv.currency_id != invoices[0].currency_id for inv in invoices):
raise UserError(_("In order to pay multiple invoices at once, they must use the same currency."))
total_amount = sum(inv.residual * MAP_INVOICE_TYPE_PAYMENT_SIGN[inv.type] for inv in invoices)
rec.update({
'amount': abs(total_amount),
'currency_id': invoices[0].currency_id.id,
'payment_type': total_amount > 0 and 'inbound' or 'outbound',
'partner_id': invoices[0].partner_id.id,
'partner_type': MAP_INVOICE_TYPE_PARTNER_TYPE[invoices[0].type],
})
return rec
def get_payment_vals(self):
""" Hook for extension """
return {
'journal_id': self.journal_id.id,
'payment_method_id': self.payment_method_id.id,
'payment_date': self.payment_date,
'communication': self.communication,
'invoice_ids': [(4, inv.id, None) for inv in self._get_invoices()],
'payment_type': self.payment_type,
'amount': self.amount,
'currency_id': self.currency_id.id,
'partner_id': self.partner_id.id,
'partner_type': self.partner_type,
}
@api.multi
def create_payment(self):
payment = self.env['account.payment'].create(self.get_payment_vals())
payment.post()
return {'type': 'ir.actions.act_window_close'}
class account_payment(models.Model):
_name = "account.payment"
_inherit = 'account.abstract.payment'
_description = "Payments"
_order = "payment_date desc, name desc"
@api.one
@api.depends('invoice_ids')
def _get_has_invoices(self):
self.has_invoices = bool(self.invoice_ids)
@api.one
@api.depends('invoice_ids', 'amount', 'payment_date', 'currency_id')
def _compute_payment_difference(self):
if len(self.invoice_ids) == 0:
return
self.payment_difference = self._compute_total_invoices_amount() - self.amount
company_id = fields.Many2one(store=True)
name = fields.Char(readonly=True, copy=False, default="Draft Payment") # The name is attributed upon post()
state = fields.Selection([('draft', 'Draft'), ('posted', 'Posted'), ('sent', 'Sent'), ('reconciled', 'Reconciled')], readonly=True, default='draft', copy=False, string="Status")
payment_type = fields.Selection(selection_add=[('transfer', 'Internal Transfer')])
payment_reference = fields.Char(copy=False, readonly=True, help="Reference of the document used to issue this payment. Eg. check number, file name, etc.")
# Money flows from the journal_id's default_debit_account_id or default_credit_account_id to the destination_account_id
destination_account_id = fields.Many2one('account.account', compute='_compute_destination_account_id', readonly=True)
# For money transfer, money goes from journal_id to a transfer account, then from the transfer account to destination_journal_id
destination_journal_id = fields.Many2one('account.journal', string='Transfer To', domain=[('type', 'in', ('bank', 'cash'))])
invoice_ids = fields.Many2many('account.invoice', 'account_invoice_payment_rel', 'payment_id', 'invoice_id', string="Invoices", copy=False, readonly=True)
    has_invoices = fields.Boolean(compute="_get_has_invoices", help="Technical field used for usability purposes")
payment_difference = fields.Monetary(compute='_compute_payment_difference', readonly=True)
payment_difference_handling = fields.Selection([('open', 'Keep open'), ('reconcile', 'Mark invoice as fully paid')], default='open', string="Payment Difference", copy=False)
writeoff_account_id = fields.Many2one('account.account', string="Difference Account", domain=[('deprecated', '=', False)], copy=False)
# FIXME: ondelete='restrict' not working (eg. cancel a bank statement reconciliation with a payment)
move_line_ids = fields.One2many('account.move.line', 'payment_id', readonly=True, copy=False, ondelete='restrict')
@api.one
@api.depends('invoice_ids', 'payment_type', 'partner_type', 'partner_id')
def _compute_destination_account_id(self):
if self.invoice_ids:
self.destination_account_id = self.invoice_ids[0].account_id.id
elif self.payment_type == 'transfer':
if not self.company_id.transfer_account_id.id:
raise UserError(_('Transfer account not defined on the company.'))
self.destination_account_id = self.company_id.transfer_account_id.id
elif self.partner_id:
if self.partner_type == 'customer':
self.destination_account_id = self.partner_id.property_account_receivable_id.id
else:
self.destination_account_id = self.partner_id.property_account_payable_id.id
@api.onchange('partner_type')
def _onchange_partner_type(self):
# Set partner_id domain
if self.partner_type:
if getattr(self.partner_id, self.partner_type) is False:
self.partner_id = False
return {'domain': {'partner_id': [(self.partner_type, '=', True)]}}
@api.onchange('payment_type')
def _onchange_payment_type(self):
# Set default partner type for the payment type
if self.payment_type == 'inbound':
self.partner_type = 'customer'
elif self.payment_type == 'outbound':
self.partner_type = 'supplier'
# Set payment method domain
res = self._onchange_journal()
if not res.get('domain', {}):
res['domain'] = {}
res['domain']['journal_id'] = self.payment_type == 'inbound' and [('at_least_one_inbound', '=', True)] or [('at_least_one_outbound', '=', True)]
res['domain']['journal_id'].append(('type', 'in', ('bank', 'cash')))
return res
@api.model
def default_get(self, fields):
rec = super(account_payment, self).default_get(fields)
invoice_ids = rec.get('invoice_ids') and rec['invoice_ids'][0][2] or None
if invoice_ids and len(invoice_ids) == 1:
invoice = self.env['account.invoice'].browse(invoice_ids)
rec['communication'] = invoice.reference
rec['currency_id'] = invoice.currency_id.id
rec['payment_type'] = invoice.type in ('out_invoice', 'in_refund') and 'inbound' or 'outbound'
rec['partner_type'] = MAP_INVOICE_TYPE_PARTNER_TYPE[invoice.type]
rec['partner_id'] = invoice.partner_id.id
rec['amount'] = invoice.residual_signed
return rec
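    # --- Illustrative note (not part of the original module) ---
    # When seeded from an invoice, invoice_ids arrives in the defaults as a
    # many2many command list, e.g. [(6, 0, [invoice_id])], which is why
    # rec['invoice_ids'][0][2] above extracts the list of invoice ids.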
def _get_invoices(self):
return self.invoice_ids
@api.model
def create(self, vals):
self._check_communication(vals['payment_method_id'], vals.get('communication', ''))
return super(account_payment, self).create(vals)
def _check_communication(self, payment_method_id, communication):
""" This method is to be overwritten by payment type modules. The method body would look like :
if payment_method_id == self.env.ref('my_module.payment_method_id').id:
try:
communication.decode('ascii')
except UnicodeError:
raise ValidationError(_("The communication cannot contain any special character"))
"""
pass
@api.multi
def button_journal_entries(self):
return {
'name': _('Journal Items'),
'view_type': 'form',
'view_mode': 'tree',
'res_model': 'account.move.line',
'view_id': False,
'type': 'ir.actions.act_window',
'domain': [('payment_id', 'in', self.ids)],
}
@api.multi
def button_invoices(self):
return {
'name': _('Paid Invoices'),
'view_type': 'form',
'view_mode': 'tree',
'res_model': 'account.invoice',
'view_id': False,
'type': 'ir.actions.act_window',
'domain': [('id', 'in', [x.id for x in self.invoice_ids])],
}
@api.multi
def button_dummy(self):
return True
@api.multi
def cancel(self):
for rec in self:
for move in rec.move_line_ids.mapped('move_id'):
if rec.invoice_ids:
move.line_ids.remove_move_reconcile()
move.button_cancel()
move.unlink()
rec.state = 'draft'
@api.multi
def unlink(self):
if any(rec.state != 'draft' for rec in self):
raise UserError(_("You can not delete a payment that is already posted"))
return super(account_payment, self).unlink()
@api.multi
def post(self):
""" Create the journal items for the payment and update the payment's state to 'posted'.
            A journal entry is created, containing an item in the source liquidity account (the selected journal's default_debit_account_id or default_credit_account_id)
            and another in the destination reconcilable account (see _compute_destination_account_id).
            If invoice_ids is not empty, there will be one reconcilable move line per invoice to reconcile with.
If the payment is a transfer, a second journal entry is created in the destination journal to receive money from the transfer account.
"""
for rec in self:
if rec.state != 'draft':
raise UserError(_("Only a draft payment can be posted. Trying to post a payment in state %s.") % rec.state)
if any(inv.state != 'open' for inv in rec.invoice_ids):
raise ValidationError(_("The payment cannot be processed because an invoice of the payment is not open !"))
# Use the right sequence to set the name
if rec.payment_type == 'transfer':
sequence = rec.env.ref('account.sequence_payment_transfer')
else:
if rec.partner_type == 'customer':
if rec.payment_type == 'inbound':
sequence = rec.env.ref('account.sequence_payment_customer_invoice')
if rec.payment_type == 'outbound':
sequence = rec.env.ref('account.sequence_payment_customer_refund')
if rec.partner_type == 'supplier':
if rec.payment_type == 'inbound':
sequence = rec.env.ref('account.sequence_payment_supplier_refund')
if rec.payment_type == 'outbound':
sequence = rec.env.ref('account.sequence_payment_supplier_invoice')
rec.name = sequence.with_context(ir_sequence_date=rec.payment_date).next_by_id()
# Create the journal entry
amount = rec.amount * (rec.payment_type in ('outbound', 'transfer') and 1 or -1)
move = rec._create_payment_entry(amount)
                # In case of a transfer, the first journal entry created moves the money out of the source liquidity account
                # and into the transfer account. Now we move it out of the transfer account and into the destination liquidity account.
if rec.payment_type == 'transfer':
transfer_credit_aml = move.line_ids.filtered(lambda r: r.account_id == rec.company_id.transfer_account_id)
transfer_debit_aml = rec._create_transfer_entry(amount)
(transfer_credit_aml + transfer_debit_aml).reconcile()
rec.state = 'posted'
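    # --- Illustrative usage sketch (not part of the original module) ---
    # A minimal flow for post(), assuming a bank journal and the standard
    # manual inbound payment method are already configured (the names below
    # are assumptions chosen for the example):
    #
    #   payment = env['account.payment'].create({
    #       'payment_type': 'inbound',
    #       'partner_type': 'customer',
    #       'partner_id': partner.id,
    #       'amount': 100.0,
    #       'journal_id': bank_journal.id,
    #       'payment_method_id': env.ref(
    #           'account.account_payment_method_manual_in').id,
    #   })
    #   payment.post()   # creates the journal entry, sets state to 'posted'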
def _create_payment_entry(self, amount):
""" Create a journal entry corresponding to a payment, if the payment references invoice(s) they are reconciled.
Return the journal entry.
"""
aml_obj = self.env['account.move.line'].with_context(check_move_validity=False)
debit, credit, amount_currency = aml_obj.with_context(date=self.payment_date).compute_amount_fields(amount, self.currency_id, self.company_id.currency_id)
move = self.env['account.move'].create(self._get_move_vals())
#Write line corresponding to invoice payment
counterpart_aml_dict = self._get_shared_move_line_vals(debit, credit, amount_currency, move.id, False)
counterpart_aml_dict.update(self._get_counterpart_move_line_vals(self.invoice_ids))
counterpart_aml_dict.update({'currency_id': self.currency_id != self.company_id.currency_id and self.currency_id.id or False})
counterpart_aml = aml_obj.create(counterpart_aml_dict)
#Reconcile with the invoices
if self.payment_difference_handling == 'reconcile':
self.invoice_ids.register_payment(counterpart_aml, self.writeoff_account_id, self.journal_id)
else:
self.invoice_ids.register_payment(counterpart_aml)
#Write counterpart lines
liquidity_aml_dict = self._get_shared_move_line_vals(credit, debit, -amount_currency, move.id, False)
liquidity_aml_dict.update(self._get_liquidity_move_line_vals(-amount))
aml_obj.create(liquidity_aml_dict)
move.post()
return move
def _create_transfer_entry(self, amount):
""" Create the journal entry corresponding to the 'incoming money' part of an internal transfer, return the reconciliable move line
"""
aml_obj = self.env['account.move.line'].with_context(check_move_validity=False)
debit, credit, amount_currency = aml_obj.with_context(date=self.payment_date).compute_amount_fields(amount, self.currency_id, self.company_id.currency_id)
amount_currency = self.destination_journal_id.currency_id and self.currency_id.with_context(date=self.payment_date).compute(amount, self.destination_journal_id.currency_id) or 0
dst_move = self.env['account.move'].create(self._get_move_vals(self.destination_journal_id))
dst_liquidity_aml_dict = self._get_shared_move_line_vals(debit, credit, amount_currency, dst_move.id)
dst_liquidity_aml_dict.update({
'name': _('Transfer from %s') % self.journal_id.name,
'account_id': self.destination_journal_id.default_credit_account_id.id,
'currency_id': self.destination_journal_id.currency_id.id,
'payment_id': self.id,
'journal_id': self.destination_journal_id.id})
aml_obj.create(dst_liquidity_aml_dict)
transfer_debit_aml_dict = self._get_shared_move_line_vals(credit, debit, 0, dst_move.id)
transfer_debit_aml_dict.update({
'name': self.name,
'payment_id': self.id,
'account_id': self.company_id.transfer_account_id.id,
'journal_id': self.destination_journal_id.id})
if self.currency_id != self.company_id.currency_id:
transfer_debit_aml_dict.update({
'currency_id': self.currency_id.id,
'amount_currency': -self.amount,
})
transfer_debit_aml = aml_obj.create(transfer_debit_aml_dict)
dst_move.post()
return transfer_debit_aml
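    # --- Illustrative example (not part of the original module) ---
    # For an internal transfer of 100.0 (company currency) from journal
    # BANK1 to journal BANK2 (names assumed for the example):
    #
    #   Entry 1, journal BANK1:  credit BANK1 liquidity account   100.0
    #                            debit  company transfer account  100.0
    #   Entry 2, journal BANK2:  credit company transfer account  100.0
    #                            debit  BANK2 liquidity account   100.0
    #
    # post() then reconciles the two transfer-account lines together.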
def _get_move_vals(self, journal=None):
""" Return dict to create the payment move
"""
journal = journal or self.journal_id
        if not journal.sequence_id:
            raise UserError(_('Configuration Error! The journal %s does not have a sequence, please specify one.') % journal.name)
        if not journal.sequence_id.active:
            raise UserError(_('Configuration Error! The sequence of journal %s is deactivated.') % journal.name)
name = journal.with_context(ir_sequence_date=self.payment_date).sequence_id.next_by_id()
return {
'name': name,
'date': self.payment_date,
'ref': self.communication or '',
'company_id': self.company_id.id,
'journal_id': journal.id,
}
def _get_shared_move_line_vals(self, debit, credit, amount_currency, move_id, invoice_id=False):
""" Returns values common to both move lines (except for debit, credit and amount_currency which are reversed)
"""
return {
'partner_id': self.payment_type in ('inbound', 'outbound') and self.partner_id.commercial_partner_id.id or False,
'invoice_id': invoice_id and invoice_id.id or False,
'move_id': move_id,
'debit': debit,
'credit': credit,
'amount_currency': amount_currency or False,
}
def _get_counterpart_move_line_vals(self, invoice=False):
if self.payment_type == 'transfer':
name = self.name
else:
name = ''
if self.partner_type == 'customer':
if self.payment_type == 'inbound':
name += _("Customer Payment")
elif self.payment_type == 'outbound':
name += _("Customer Refund")
elif self.partner_type == 'supplier':
if self.payment_type == 'inbound':
name += _("Vendor Refund")
elif self.payment_type == 'outbound':
name += _("Vendor Payment")
        if invoice:
            name += ': ' + ', '.join(inv.number for inv in invoice)
return {
'name': name,
'account_id': self.destination_account_id.id,
'journal_id': self.journal_id.id,
'currency_id': self.currency_id != self.company_id.currency_id and self.currency_id.id or False,
'payment_id': self.id,
}
def _get_liquidity_move_line_vals(self, amount):
name = self.name
if self.payment_type == 'transfer':
name = _('Transfer to %s') % self.destination_journal_id.name
vals = {
'name': name,
'account_id': self.payment_type in ('outbound','transfer') and self.journal_id.default_debit_account_id.id or self.journal_id.default_credit_account_id.id,
'payment_id': self.id,
'journal_id': self.journal_id.id,
'currency_id': self.currency_id != self.company_id.currency_id and self.currency_id.id or False,
}
        # If the journal has a currency specified, the journal item needs to be expressed in this currency
if self.journal_id.currency_id and self.currency_id != self.journal_id.currency_id:
amount = self.currency_id.with_context(date=self.payment_date).compute(amount, self.journal_id.currency_id)
debit, credit, amount_currency = self.env['account.move.line'].with_context(date=self.payment_date).compute_amount_fields(amount, self.journal_id.currency_id, self.company_id.currency_id)
vals.update({
'amount_currency': amount_currency,
'currency_id': self.journal_id.currency_id.id,
})
return vals
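    # --- Illustrative example (not part of the original module) ---
    # Assume a payment of 100.0 EUR on a journal held in USD, with an
    # assumed rate of 1 EUR = 1.10 USD at the payment date: the amount is
    # first converted to 110.0 USD, the journal item carries
    # amount_currency = 110.0 with currency_id = USD, and debit/credit stay
    # expressed in the company currency as usual.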
|
illicitonion/givabit
|
refs/heads/master
|
lib/sdks/google_appengine_1.7.1/google_appengine/lib/django_1_3/django/contrib/messages/tests/base.py
|
152
|
import warnings
from django import http
from django.test import TestCase
from django.conf import settings
from django.utils.translation import ugettext_lazy
from django.utils.unittest import skipIf
from django.contrib.messages import constants, utils, get_level, set_level
from django.contrib.messages.api import MessageFailure
from django.contrib.messages.storage import default_storage, base
from django.contrib.messages.storage.base import Message
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
def skipUnlessAuthIsInstalled(func):
return skipIf(
'django.contrib.auth' not in settings.INSTALLED_APPS,
"django.contrib.auth isn't installed")(func)
def add_level_messages(storage):
"""
Adds 6 messages from different levels (including a custom one) to a storage
instance.
"""
storage.add(constants.INFO, 'A generic info message')
storage.add(29, 'Some custom level')
storage.add(constants.DEBUG, 'A debugging message', extra_tags='extra-tag')
storage.add(constants.WARNING, 'A warning')
storage.add(constants.ERROR, 'An error')
storage.add(constants.SUCCESS, 'This was a triumph.')
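# --- Illustrative note (not part of the original module) ---
# With the default MESSAGE_LEVEL of INFO (20), the DEBUG (10) message above
# is filtered out and only 5 of the 6 messages are recorded; lowering the
# level to 5 keeps all 6, raising it to 30 keeps only WARNING (30) and
# ERROR (40), and a level of 29 keeps the custom 29, WARNING and ERROR.
# These counts are exactly what test_default_level, test_low_level,
# test_high_level and test_settings_level below assert.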
class BaseTest(TestCase):
storage_class = default_storage
restore_settings = ['MESSAGE_LEVEL', 'MESSAGE_TAGS']
urls = 'django.contrib.messages.tests.urls'
levels = {
'debug': constants.DEBUG,
'info': constants.INFO,
'success': constants.SUCCESS,
'warning': constants.WARNING,
'error': constants.ERROR,
}
def setUp(self):
self._remembered_settings = {}
for setting in self.restore_settings:
if hasattr(settings, setting):
self._remembered_settings[setting] = getattr(settings, setting)
delattr(settings._wrapped, setting)
# Backup these manually because we do not want them deleted.
self._middleware_classes = settings.MIDDLEWARE_CLASSES
self._template_context_processors = \
settings.TEMPLATE_CONTEXT_PROCESSORS
self._installed_apps = settings.INSTALLED_APPS
self._message_storage = settings.MESSAGE_STORAGE
settings.MESSAGE_STORAGE = '%s.%s' % (self.storage_class.__module__,
self.storage_class.__name__)
self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
settings.TEMPLATE_DIRS = ()
self.save_warnings_state()
warnings.filterwarnings('ignore', category=DeprecationWarning,
module='django.contrib.auth.models')
def tearDown(self):
for setting in self.restore_settings:
self.restore_setting(setting)
# Restore these manually (see above).
settings.MIDDLEWARE_CLASSES = self._middleware_classes
settings.TEMPLATE_CONTEXT_PROCESSORS = \
self._template_context_processors
settings.INSTALLED_APPS = self._installed_apps
settings.MESSAGE_STORAGE = self._message_storage
settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
self.restore_warnings_state()
def restore_setting(self, setting):
if setting in self._remembered_settings:
value = self._remembered_settings.pop(setting)
setattr(settings, setting, value)
elif hasattr(settings, setting):
delattr(settings._wrapped, setting)
def get_request(self):
return http.HttpRequest()
def get_response(self):
return http.HttpResponse()
def get_storage(self, data=None):
"""
Returns the storage backend, setting its loaded data to the ``data``
argument.
        This method keeps the storage ``_get`` method from being called so
        that other parts of the storage backend can be tested independently
        of the message retrieval logic.
"""
storage = self.storage_class(self.get_request())
storage._loaded_data = data or []
return storage
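    # --- Illustrative note (not part of the original module) ---
    # e.g. get_storage([Message(constants.INFO, 'preloaded')]) returns a
    # backend whose iteration yields the preloaded message without ever
    # touching the real ``_get`` retrieval path.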
def test_add(self):
storage = self.get_storage()
self.assertFalse(storage.added_new)
storage.add(constants.INFO, 'Test message 1')
self.assertTrue(storage.added_new)
storage.add(constants.INFO, 'Test message 2', extra_tags='tag')
self.assertEqual(len(storage), 2)
def test_add_lazy_translation(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, ugettext_lazy('lazy message'))
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 1)
def test_no_update(self):
storage = self.get_storage()
response = self.get_response()
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 0)
def test_add_update(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, 'Test message 1')
storage.add(constants.INFO, 'Test message 1', extra_tags='tag')
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 2)
def test_existing_add_read_update(self):
storage = self.get_existing_storage()
response = self.get_response()
storage.add(constants.INFO, 'Test message 3')
list(storage) # Simulates a read
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 0)
def test_existing_read_add_update(self):
storage = self.get_existing_storage()
response = self.get_response()
list(storage) # Simulates a read
storage.add(constants.INFO, 'Test message 3')
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 1)
def test_full_request_response_cycle(self):
"""
With the message middleware enabled, tests that messages are properly
stored and then retrieved across the full request/redirect/response
cycle.
"""
settings.MESSAGE_LEVEL = constants.DEBUG
data = {
'messages': ['Test message %d' % x for x in xrange(10)],
}
show_url = reverse('django.contrib.messages.tests.urls.show')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('django.contrib.messages.tests.urls.add',
args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertTrue('messages' in response.context)
messages = [Message(self.levels[level], msg) for msg in
data['messages']]
self.assertEqual(list(response.context['messages']), messages)
for msg in data['messages']:
self.assertContains(response, msg)
def test_with_template_response(self):
settings.MESSAGE_LEVEL = constants.DEBUG
data = {
'messages': ['Test message %d' % x for x in xrange(10)],
}
show_url = reverse('django.contrib.messages.tests.urls.show_template_response')
for level in self.levels.keys():
add_url = reverse('django.contrib.messages.tests.urls.add_template_response',
args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertTrue('messages' in response.context)
for msg in data['messages']:
self.assertContains(response, msg)
            # There shouldn't be any messages on a second GET request
response = self.client.get(show_url)
for msg in data['messages']:
self.assertNotContains(response, msg)
def test_multiple_posts(self):
"""
Tests that messages persist properly when multiple POSTs are made
before a GET.
"""
settings.MESSAGE_LEVEL = constants.DEBUG
data = {
'messages': ['Test message %d' % x for x in xrange(10)],
}
show_url = reverse('django.contrib.messages.tests.urls.show')
messages = []
for level in ('debug', 'info', 'success', 'warning', 'error'):
messages.extend([Message(self.levels[level], msg) for msg in
data['messages']])
add_url = reverse('django.contrib.messages.tests.urls.add',
args=(level,))
self.client.post(add_url, data)
response = self.client.get(show_url)
self.assertTrue('messages' in response.context)
self.assertEqual(list(response.context['messages']), messages)
for msg in data['messages']:
self.assertContains(response, msg)
@skipUnlessAuthIsInstalled
def test_middleware_disabled_auth_user(self):
"""
Tests that the messages API successfully falls back to using
user.message_set to store messages directly when the middleware is
disabled.
"""
settings.MESSAGE_LEVEL = constants.DEBUG
user = User.objects.create_user('test', 'test@example.com', 'test')
self.client.login(username='test', password='test')
settings.INSTALLED_APPS = list(settings.INSTALLED_APPS)
settings.INSTALLED_APPS.remove(
'django.contrib.messages',
)
settings.MIDDLEWARE_CLASSES = list(settings.MIDDLEWARE_CLASSES)
settings.MIDDLEWARE_CLASSES.remove(
'django.contrib.messages.middleware.MessageMiddleware',
)
settings.TEMPLATE_CONTEXT_PROCESSORS = \
list(settings.TEMPLATE_CONTEXT_PROCESSORS)
settings.TEMPLATE_CONTEXT_PROCESSORS.remove(
'django.contrib.messages.context_processors.messages',
)
data = {
'messages': ['Test message %d' % x for x in xrange(10)],
}
show_url = reverse('django.contrib.messages.tests.urls.show')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('django.contrib.messages.tests.urls.add',
args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertTrue('messages' in response.context)
context_messages = list(response.context['messages'])
for msg in data['messages']:
self.assertTrue(msg in context_messages)
self.assertContains(response, msg)
def test_middleware_disabled_anon_user(self):
"""
Tests that, when the middleware is disabled and a user is not logged
in, an exception is raised when one attempts to store a message.
"""
settings.MESSAGE_LEVEL = constants.DEBUG
settings.INSTALLED_APPS = list(settings.INSTALLED_APPS)
settings.INSTALLED_APPS.remove(
'django.contrib.messages',
)
settings.MIDDLEWARE_CLASSES = list(settings.MIDDLEWARE_CLASSES)
settings.MIDDLEWARE_CLASSES.remove(
'django.contrib.messages.middleware.MessageMiddleware',
)
settings.TEMPLATE_CONTEXT_PROCESSORS = \
list(settings.TEMPLATE_CONTEXT_PROCESSORS)
settings.TEMPLATE_CONTEXT_PROCESSORS.remove(
'django.contrib.messages.context_processors.messages',
)
data = {
'messages': ['Test message %d' % x for x in xrange(10)],
}
show_url = reverse('django.contrib.messages.tests.urls.show')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('django.contrib.messages.tests.urls.add',
args=(level,))
self.assertRaises(MessageFailure, self.client.post, add_url,
data, follow=True)
def test_middleware_disabled_anon_user_fail_silently(self):
"""
        Tests that, when the middleware is disabled and a user is not logged
        in, an exception is not raised if ``fail_silently=True`` is passed.
"""
settings.MESSAGE_LEVEL = constants.DEBUG
settings.INSTALLED_APPS = list(settings.INSTALLED_APPS)
settings.INSTALLED_APPS.remove(
'django.contrib.messages',
)
settings.MIDDLEWARE_CLASSES = list(settings.MIDDLEWARE_CLASSES)
settings.MIDDLEWARE_CLASSES.remove(
'django.contrib.messages.middleware.MessageMiddleware',
)
settings.TEMPLATE_CONTEXT_PROCESSORS = \
list(settings.TEMPLATE_CONTEXT_PROCESSORS)
settings.TEMPLATE_CONTEXT_PROCESSORS.remove(
'django.contrib.messages.context_processors.messages',
)
data = {
'messages': ['Test message %d' % x for x in xrange(10)],
'fail_silently': True,
}
show_url = reverse('django.contrib.messages.tests.urls.show')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('django.contrib.messages.tests.urls.add',
args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertTrue('messages' in response.context)
self.assertEqual(list(response.context['messages']), [])
def stored_messages_count(self, storage, response):
"""
Returns the number of messages being stored after a
``storage.update()`` call.
"""
        raise NotImplementedError('This method must be implemented by a subclass.')
    def test_get(self):
        raise NotImplementedError('This method must be implemented by a subclass.')
def get_existing_storage(self):
return self.get_storage([Message(constants.INFO, 'Test message 1'),
Message(constants.INFO, 'Test message 2',
extra_tags='tag')])
def test_existing_read(self):
"""
Tests that reading the existing storage doesn't cause the data to be
lost.
"""
storage = self.get_existing_storage()
self.assertFalse(storage.used)
# After iterating the storage engine directly, the used flag is set.
data = list(storage)
self.assertTrue(storage.used)
# The data does not disappear because it has been iterated.
self.assertEqual(data, list(storage))
def test_existing_add(self):
storage = self.get_existing_storage()
self.assertFalse(storage.added_new)
storage.add(constants.INFO, 'Test message 3')
self.assertTrue(storage.added_new)
def test_default_level(self):
# get_level works even with no storage on the request.
request = self.get_request()
self.assertEqual(get_level(request), constants.INFO)
# get_level returns the default level if it hasn't been set.
storage = self.get_storage()
request._messages = storage
self.assertEqual(get_level(request), constants.INFO)
# Only messages of sufficient level get recorded.
add_level_messages(storage)
self.assertEqual(len(storage), 5)
def test_low_level(self):
request = self.get_request()
storage = self.storage_class(request)
request._messages = storage
self.assertTrue(set_level(request, 5))
self.assertEqual(get_level(request), 5)
add_level_messages(storage)
self.assertEqual(len(storage), 6)
def test_high_level(self):
request = self.get_request()
storage = self.storage_class(request)
request._messages = storage
self.assertTrue(set_level(request, 30))
self.assertEqual(get_level(request), 30)
add_level_messages(storage)
self.assertEqual(len(storage), 2)
def test_settings_level(self):
request = self.get_request()
storage = self.storage_class(request)
settings.MESSAGE_LEVEL = 29
self.assertEqual(get_level(request), 29)
add_level_messages(storage)
self.assertEqual(len(storage), 3)
def test_tags(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
tags = [msg.tags for msg in storage]
self.assertEqual(tags,
['info', '', 'extra-tag debug', 'warning', 'error',
'success'])
def test_custom_tags(self):
settings.MESSAGE_TAGS = {
constants.INFO: 'info',
constants.DEBUG: '',
constants.WARNING: '',
constants.ERROR: 'bad',
29: 'custom',
}
# LEVEL_TAGS is a constant defined in the
# django.contrib.messages.storage.base module, so after changing
# settings.MESSAGE_TAGS, we need to update that constant too.
base.LEVEL_TAGS = utils.get_level_tags()
try:
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
tags = [msg.tags for msg in storage]
self.assertEqual(tags,
['info', 'custom', 'extra-tag', '', 'bad', 'success'])
finally:
# Ensure the level tags constant is put back like we found it.
self.restore_setting('MESSAGE_TAGS')
base.LEVEL_TAGS = utils.get_level_tags()
|
restorando/avro
|
refs/heads/trunk
|
lang/py/src/avro/schema.py
|
14
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Contains the Schema classes.
A schema may be one of:
A record, mapping field names to field value data;
An error, equivalent to a record;
An enum, containing one of a small set of symbols;
An array of values, all of the same schema;
A map containing string/value pairs, each of a declared schema;
A union of other schemas;
A fixed sized binary object;
A unicode string;
A sequence of bytes;
A 32-bit signed int;
A 64-bit signed long;
A 32-bit floating-point float;
A 64-bit floating-point double;
A boolean; or
Null.
"""
try:
import json
except ImportError:
import simplejson as json
#
# Constants
#
PRIMITIVE_TYPES = (
'null',
'boolean',
'string',
'bytes',
'int',
'long',
'float',
'double',
)
NAMED_TYPES = (
'fixed',
'enum',
'record',
'error',
)
VALID_TYPES = PRIMITIVE_TYPES + NAMED_TYPES + (
'array',
'map',
'union',
'request',
'error_union'
)
SCHEMA_RESERVED_PROPS = (
'type',
'name',
'namespace',
'fields', # Record
'items', # Array
'size', # Fixed
'symbols', # Enum
'values', # Map
'doc',
)
FIELD_RESERVED_PROPS = (
'default',
'name',
'doc',
'order',
'type',
)
VALID_FIELD_SORT_ORDERS = (
'ascending',
'descending',
'ignore',
)
#
# Exceptions
#
class AvroException(Exception):
pass
class SchemaParseException(AvroException):
pass
#
# Base Classes
#
class Schema(object):
"""Base class for all Schema classes."""
def __init__(self, type, other_props=None):
# Ensure valid ctor args
if not isinstance(type, basestring):
fail_msg = 'Schema type must be a string.'
raise SchemaParseException(fail_msg)
elif type not in VALID_TYPES:
fail_msg = '%s is not a valid type.' % type
raise SchemaParseException(fail_msg)
# add members
if not hasattr(self, '_props'): self._props = {}
self.set_prop('type', type)
self.type = type
self._props.update(other_props or {})
# Read-only properties dict. Printing schemas
# creates JSON properties directly from this dict.
props = property(lambda self: self._props)
# Read-only property dict. Non-reserved properties
other_props = property(lambda self: get_other_props(self._props, SCHEMA_RESERVED_PROPS),
doc="dictionary of non-reserved properties")
# utility functions to manipulate properties dict
def get_prop(self, key):
return self._props.get(key)
def set_prop(self, key, value):
self._props[key] = value
def __str__(self):
return json.dumps(self.to_json())
def to_json(self, names):
"""
Converts the schema object into its AVRO specification representation.
Schema types that have names (records, enums, and fixed) must
be aware of not re-defining schemas that are already listed
in the parameter names.
"""
raise Exception("Must be implemented by subclasses.")
class Name(object):
"""Class to describe Avro name."""
def __init__(self, name_attr, space_attr, default_space):
"""
Formulate full name according to the specification.
@arg name_attr: name value read in schema or None.
@arg space_attr: namespace value read in schema or None.
    @arg default_space: the current default space or None.
"""
# Ensure valid ctor args
if not (isinstance(name_attr, basestring) or (name_attr is None)):
fail_msg = 'Name must be non-empty string or None.'
raise SchemaParseException(fail_msg)
elif name_attr == "":
fail_msg = 'Name must be non-empty string or None.'
raise SchemaParseException(fail_msg)
    if not (isinstance(space_attr, basestring) or (space_attr is None)):
      fail_msg = 'Space must be non-empty string or None.'
      raise SchemaParseException(fail_msg)
    elif space_attr == "":
      fail_msg = 'Space must be non-empty string or None.'
      raise SchemaParseException(fail_msg)
    if not (isinstance(default_space, basestring) or (default_space is None)):
      fail_msg = 'Default space must be non-empty string or None.'
      raise SchemaParseException(fail_msg)
    elif default_space == "":
      fail_msg = 'Default space must be non-empty string or None.'
      raise SchemaParseException(fail_msg)
    self._full = None
    if name_attr is None or name_attr == "":
      return
if (name_attr.find('.') < 0):
if (space_attr is not None) and (space_attr != ""):
self._full = "%s.%s" % (space_attr, name_attr)
else:
if (default_space is not None) and (default_space != ""):
self._full = "%s.%s" % (default_space, name_attr)
else:
self._full = name_attr
else:
self._full = name_attr
def __eq__(self, other):
if not isinstance(other, Name):
return False
return (self.fullname == other.fullname)
fullname = property(lambda self: self._full)
def get_space(self):
"""Back out a namespace from full name."""
if self._full is None:
return None
if (self._full.find('.') > 0):
return self._full.rsplit(".", 1)[0]
else:
return ""
class Names(object):
"""Track name set and default namespace during parsing."""
def __init__(self, default_namespace=None):
self.names = {}
self.default_namespace = default_namespace
def has_name(self, name_attr, space_attr):
test = Name(name_attr, space_attr, self.default_namespace).fullname
return self.names.has_key(test)
def get_name(self, name_attr, space_attr):
test = Name(name_attr, space_attr, self.default_namespace).fullname
if not self.names.has_key(test):
return None
return self.names[test]
  def prune_namespace(self, properties):
    """Given a properties dict, return it with the namespace removed when
    that namespace matches our own default namespace."""
    if self.default_namespace is None:
      # We have no default namespace -- no change.
      return properties
    if 'namespace' not in properties:
      # The properties carry no namespace -- no change.
      return properties
    if properties['namespace'] != self.default_namespace:
      # The namespaces differ -- leave the properties alone.
      return properties
    # Both namespaces match, so the property is redundant; drop it.
    prunable = properties.copy()
    del(prunable['namespace'])
    return prunable
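  # --- Illustrative examples (not part of the original module), with
  # default_namespace == 'org.example':
  #   prune_namespace({'name': 'Foo', 'namespace': 'org.example'})
  #       -> {'name': 'Foo'}
  #   prune_namespace({'name': 'Foo', 'namespace': 'other.ns'})
  #       -> unchanged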
def add_name(self, name_attr, space_attr, new_schema):
"""
Add a new schema object to the name set.
@arg name_attr: name value read in schema
@arg space_attr: namespace value read in schema.
@return: the Name that was just added.
"""
to_add = Name(name_attr, space_attr, self.default_namespace)
if to_add.fullname in VALID_TYPES:
fail_msg = '%s is a reserved type name.' % to_add.fullname
raise SchemaParseException(fail_msg)
elif self.names.has_key(to_add.fullname):
fail_msg = 'The name "%s" is already in use.' % to_add.fullname
raise SchemaParseException(fail_msg)
self.names[to_add.fullname] = new_schema
return to_add
class NamedSchema(Schema):
"""Named Schemas specified in NAMED_TYPES."""
def __init__(self, type, name, namespace=None, names=None, other_props=None):
# Ensure valid ctor args
if not name:
fail_msg = 'Named Schemas must have a non-empty name.'
raise SchemaParseException(fail_msg)
elif not isinstance(name, basestring):
fail_msg = 'The name property must be a string.'
raise SchemaParseException(fail_msg)
elif namespace is not None and not isinstance(namespace, basestring):
fail_msg = 'The namespace property must be a string.'
raise SchemaParseException(fail_msg)
# Call parent ctor
Schema.__init__(self, type, other_props)
# Add class members
new_name = names.add_name(name, namespace, self)
# Store name and namespace as they were read in origin schema
self.set_prop('name', name)
if namespace is not None:
self.set_prop('namespace', new_name.get_space())
# Store full name as calculated from name, namespace
self._fullname = new_name.fullname
def name_ref(self, names):
if self.namespace == names.default_namespace:
return self.name
else:
return self.fullname
# read-only properties
name = property(lambda self: self.get_prop('name'))
namespace = property(lambda self: self.get_prop('namespace'))
fullname = property(lambda self: self._fullname)
class Field(object):
def __init__(self, type, name, has_default, default=None,
               order=None, names=None, doc=None, other_props=None):
# Ensure valid ctor args
if not name:
fail_msg = 'Fields must have a non-empty name.'
raise SchemaParseException(fail_msg)
elif not isinstance(name, basestring):
fail_msg = 'The name property must be a string.'
raise SchemaParseException(fail_msg)
elif order is not None and order not in VALID_FIELD_SORT_ORDERS:
fail_msg = 'The order property %s is not valid.' % order
raise SchemaParseException(fail_msg)
# add members
self._props = {}
self._has_default = has_default
self._props.update(other_props or {})
if (isinstance(type, basestring) and names is not None
and names.has_name(type, None)):
type_schema = names.get_name(type, None)
else:
try:
type_schema = make_avsc_object(type, names)
except Exception, e:
fail_msg = 'Type property "%s" not a valid Avro schema: %s' % (type, e)
raise SchemaParseException(fail_msg)
self.set_prop('type', type_schema)
self.set_prop('name', name)
self.type = type_schema
self.name = name
# TODO(hammer): check to ensure default is valid
if has_default: self.set_prop('default', default)
if order is not None: self.set_prop('order', order)
if doc is not None: self.set_prop('doc', doc)
# read-only properties
default = property(lambda self: self.get_prop('default'))
has_default = property(lambda self: self._has_default)
order = property(lambda self: self.get_prop('order'))
doc = property(lambda self: self.get_prop('doc'))
props = property(lambda self: self._props)
# Read-only property dict. Non-reserved properties
other_props = property(lambda self: get_other_props(self._props, FIELD_RESERVED_PROPS),
doc="dictionary of non-reserved properties")
# utility functions to manipulate properties dict
def get_prop(self, key):
return self._props.get(key)
def set_prop(self, key, value):
self._props[key] = value
def __str__(self):
return json.dumps(self.to_json())
def to_json(self, names=None):
if names is None:
names = Names()
to_dump = self.props.copy()
to_dump['type'] = self.type.to_json(names)
return to_dump
def __eq__(self, that):
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))
#
# Primitive Types
#
class PrimitiveSchema(Schema):
"""Valid primitive types are in PRIMITIVE_TYPES."""
def __init__(self, type, other_props=None):
# Ensure valid ctor args
if type not in PRIMITIVE_TYPES:
raise AvroException("%s is not a valid primitive type." % type)
# Call parent ctor
Schema.__init__(self, type, other_props=other_props)
self.fullname = type
def to_json(self, names=None):
if len(self.props) == 1:
return self.fullname
else:
return self.props
def __eq__(self, that):
return self.props == that.props
#
# Complex Types (non-recursive)
#
class FixedSchema(NamedSchema):
def __init__(self, name, namespace, size, names=None, other_props=None):
# Ensure valid ctor args
if not isinstance(size, int):
fail_msg = 'Fixed Schema requires a valid integer for size property.'
raise AvroException(fail_msg)
# Call parent ctor
NamedSchema.__init__(self, 'fixed', name, namespace, names, other_props)
# Add class members
self.set_prop('size', size)
# read-only properties
size = property(lambda self: self.get_prop('size'))
def to_json(self, names=None):
if names is None:
names = Names()
if self.fullname in names.names:
return self.name_ref(names)
else:
names.names[self.fullname] = self
return names.prune_namespace(self.props)
def __eq__(self, that):
return self.props == that.props
class EnumSchema(NamedSchema):
def __init__(self, name, namespace, symbols, names=None, doc=None, other_props=None):
# Ensure valid ctor args
if not isinstance(symbols, list):
fail_msg = 'Enum Schema requires a JSON array for the symbols property.'
raise AvroException(fail_msg)
elif False in [isinstance(s, basestring) for s in symbols]:
      fail_msg = 'Enum Schema requires all symbols to be JSON strings.'
raise AvroException(fail_msg)
elif len(set(symbols)) < len(symbols):
fail_msg = 'Duplicate symbol: %s' % symbols
raise AvroException(fail_msg)
# Call parent ctor
NamedSchema.__init__(self, 'enum', name, namespace, names, other_props)
# Add class members
self.set_prop('symbols', symbols)
if doc is not None: self.set_prop('doc', doc)
# read-only properties
symbols = property(lambda self: self.get_prop('symbols'))
doc = property(lambda self: self.get_prop('doc'))
def to_json(self, names=None):
if names is None:
names = Names()
if self.fullname in names.names:
return self.name_ref(names)
else:
names.names[self.fullname] = self
return names.prune_namespace(self.props)
def __eq__(self, that):
return self.props == that.props
#
# Complex Types (recursive)
#
class ArraySchema(Schema):
def __init__(self, items, names=None, other_props=None):
# Call parent ctor
Schema.__init__(self, 'array', other_props)
# Add class members
if isinstance(items, basestring) and names.has_name(items, None):
items_schema = names.get_name(items, None)
else:
try:
items_schema = make_avsc_object(items, names)
except SchemaParseException, e:
fail_msg = 'Items schema (%s) not a valid Avro schema: %s (known names: %s)' % (items, e, names.names.keys())
raise SchemaParseException(fail_msg)
self.set_prop('items', items_schema)
# read-only properties
items = property(lambda self: self.get_prop('items'))
def to_json(self, names=None):
if names is None:
names = Names()
to_dump = self.props.copy()
item_schema = self.get_prop('items')
to_dump['items'] = item_schema.to_json(names)
return to_dump
def __eq__(self, that):
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))
class MapSchema(Schema):
def __init__(self, values, names=None, other_props=None):
# Call parent ctor
    Schema.__init__(self, 'map', other_props)
# Add class members
if isinstance(values, basestring) and names.has_name(values, None):
values_schema = names.get_name(values, None)
else:
      try:
        values_schema = make_avsc_object(values, names)
      except Exception, e:
        fail_msg = 'Values schema (%s) not a valid Avro schema: %s (known names: %s)' % (values, e, names.names.keys())
        raise SchemaParseException(fail_msg)
self.set_prop('values', values_schema)
# read-only properties
values = property(lambda self: self.get_prop('values'))
def to_json(self, names=None):
if names is None:
names = Names()
to_dump = self.props.copy()
to_dump['values'] = self.get_prop('values').to_json(names)
return to_dump
def __eq__(self, that):
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))
class UnionSchema(Schema):
"""
names is a dictionary of schema objects
"""
def __init__(self, schemas, names=None):
# Ensure valid ctor args
if not isinstance(schemas, list):
fail_msg = 'Union schema requires a list of schemas.'
raise SchemaParseException(fail_msg)
# Call parent ctor
Schema.__init__(self, 'union')
# Add class members
schema_objects = []
for schema in schemas:
if isinstance(schema, basestring) and names.has_name(schema, None):
new_schema = names.get_name(schema, None)
else:
try:
new_schema = make_avsc_object(schema, names)
except Exception, e:
raise SchemaParseException('Union item must be a valid Avro schema: %s' % str(e))
# check the new schema
if (new_schema.type in VALID_TYPES and new_schema.type not in NAMED_TYPES
and new_schema.type in [schema.type for schema in schema_objects]):
raise SchemaParseException('%s type already in Union' % new_schema.type)
elif new_schema.type == 'union':
raise SchemaParseException('Unions cannot contain other unions.')
else:
schema_objects.append(new_schema)
self._schemas = schema_objects
# read-only properties
schemas = property(lambda self: self._schemas)
def to_json(self, names=None):
if names is None:
names = Names()
to_dump = []
for schema in self.schemas:
to_dump.append(schema.to_json(names))
return to_dump
def __eq__(self, that):
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))
class ErrorUnionSchema(UnionSchema):
def __init__(self, schemas, names=None):
# Prepend "string" to handle system errors
UnionSchema.__init__(self, ['string'] + schemas, names)
def to_json(self, names=None):
if names is None:
names = Names()
to_dump = []
for schema in self.schemas:
# Don't print the system error schema
if schema.type == 'string': continue
to_dump.append(schema.to_json(names))
return to_dump
class RecordSchema(NamedSchema):
@staticmethod
def make_field_objects(field_data, names):
"""We're going to need to make message parameters too."""
field_objects = []
field_names = []
for i, field in enumerate(field_data):
if hasattr(field, 'get') and callable(field.get):
type = field.get('type')
name = field.get('name')
# null values can have a default value of None
has_default = False
default = None
if field.has_key('default'):
has_default = True
default = field.get('default')
order = field.get('order')
doc = field.get('doc')
other_props = get_other_props(field, FIELD_RESERVED_PROPS)
new_field = Field(type, name, has_default, default, order, names, doc,
other_props)
# make sure field name has not been used yet
if new_field.name in field_names:
fail_msg = 'Field name %s already in use.' % new_field.name
raise SchemaParseException(fail_msg)
field_names.append(new_field.name)
else:
raise SchemaParseException('Not a valid field: %s' % field)
field_objects.append(new_field)
return field_objects
def __init__(self, name, namespace, fields, names=None, schema_type='record',
doc=None, other_props=None):
# Ensure valid ctor args
if fields is None:
fail_msg = 'Record schema requires a non-empty fields property.'
raise SchemaParseException(fail_msg)
elif not isinstance(fields, list):
fail_msg = 'Fields property must be a list of Avro schemas.'
raise SchemaParseException(fail_msg)
# Call parent ctor (adds own name to namespace, too)
if schema_type == 'request':
Schema.__init__(self, schema_type, other_props)
else:
NamedSchema.__init__(self, schema_type, name, namespace, names,
other_props)
if schema_type == 'record':
old_default = names.default_namespace
names.default_namespace = Name(name, namespace,
names.default_namespace).get_space()
# Add class members
field_objects = RecordSchema.make_field_objects(fields, names)
self.set_prop('fields', field_objects)
if doc is not None: self.set_prop('doc', doc)
if schema_type == 'record':
names.default_namespace = old_default
# read-only properties
fields = property(lambda self: self.get_prop('fields'))
doc = property(lambda self: self.get_prop('doc'))
@property
def fields_dict(self):
fields_dict = {}
for field in self.fields:
fields_dict[field.name] = field
return fields_dict
def to_json(self, names=None):
if names is None:
names = Names()
# Request records don't have names
if self.type == 'request':
return [ f.to_json(names) for f in self.fields ]
if self.fullname in names.names:
return self.name_ref(names)
else:
names.names[self.fullname] = self
to_dump = names.prune_namespace(self.props.copy())
to_dump['fields'] = [ f.to_json(names) for f in self.fields ]
return to_dump
def __eq__(self, that):
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))
#
# Module Methods
#
def get_other_props(all_props, reserved_props):
  """
  Retrieve the non-reserved properties from a dictionary of properties
  @arg all_props: the full dictionary of properties
  @arg reserved_props: the set of reserved properties to exclude
  """
if hasattr(all_props, 'items') and callable(all_props.items):
return dict([(k,v) for (k,v) in all_props.items() if k not in
reserved_props ])
def make_avsc_object(json_data, names=None):
"""
Build Avro Schema from data parsed out of JSON string.
  @arg names: A Names object (tracks seen names and default space)
  """
  if names is None:
names = Names()
# JSON object (non-union)
if hasattr(json_data, 'get') and callable(json_data.get):
type = json_data.get('type')
other_props = get_other_props(json_data, SCHEMA_RESERVED_PROPS)
if type in PRIMITIVE_TYPES:
return PrimitiveSchema(type, other_props)
elif type in NAMED_TYPES:
name = json_data.get('name')
namespace = json_data.get('namespace', names.default_namespace)
if type == 'fixed':
size = json_data.get('size')
return FixedSchema(name, namespace, size, names, other_props)
elif type == 'enum':
symbols = json_data.get('symbols')
doc = json_data.get('doc')
return EnumSchema(name, namespace, symbols, names, doc, other_props)
elif type in ['record', 'error']:
fields = json_data.get('fields')
doc = json_data.get('doc')
return RecordSchema(name, namespace, fields, names, type, doc, other_props)
else:
raise SchemaParseException('Unknown Named Type: %s' % type)
elif type in VALID_TYPES:
if type == 'array':
items = json_data.get('items')
return ArraySchema(items, names, other_props)
elif type == 'map':
values = json_data.get('values')
return MapSchema(values, names, other_props)
elif type == 'error_union':
declared_errors = json_data.get('declared_errors')
return ErrorUnionSchema(declared_errors, names)
else:
raise SchemaParseException('Unknown Valid Type: %s' % type)
elif type is None:
raise SchemaParseException('No "type" property: %s' % json_data)
else:
raise SchemaParseException('Undefined type: %s' % type)
# JSON array (union)
elif isinstance(json_data, list):
return UnionSchema(json_data, names)
# JSON string (primitive)
elif json_data in PRIMITIVE_TYPES:
return PrimitiveSchema(json_data)
# not for us!
else:
fail_msg = "Could not make an Avro Schema object from %s." % json_data
raise SchemaParseException(fail_msg)
# TODO(hammer): make method for reading from a file?
def parse(json_string):
"""Constructs the Schema from the JSON text."""
# TODO(hammer): preserve stack trace from JSON parse
# parse the JSON
try:
json_data = json.loads(json_string)
except:
raise SchemaParseException('Error parsing JSON: %s' % json_string)
# Initialize the names object
names = Names()
# construct the Avro Schema object
return make_avsc_object(json_data, names)
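# --- Illustrative usage sketch (not part of the original module) ---
# A minimal demonstration of parse(); the schema literal is an assumption
# chosen for the example, not taken from the Avro test suite.
if __name__ == '__main__':
  EXAMPLE_SCHEMA = """
    {"type": "record", "name": "Point", "namespace": "example",
     "fields": [{"name": "x", "type": "int"},
                {"name": "y", "type": ["null", "double"]}]}
  """
  schema = parse(EXAMPLE_SCHEMA)
  print schema                      # serializes back to canonical JSON
  print schema.fields_dict.keys()   # -> ['x', 'y'] (order not guaranteed)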
|
itsvetkov/pyqtgraph
|
refs/heads/develop
|
pyqtgraph/util/mutex.py
|
39
|
# -*- coding: utf-8 -*-
from ..Qt import QtCore
import traceback
class Mutex(QtCore.QMutex):
"""
Subclass of QMutex that provides useful debugging information during
deadlocks--tracebacks are printed for both the code location that is
attempting to lock the mutex as well as the location that has already
acquired the lock.
Also provides __enter__ and __exit__ methods for use in "with" statements.
"""
def __init__(self, *args, **kargs):
if kargs.get('recursive', False):
args = (QtCore.QMutex.Recursive,)
QtCore.QMutex.__init__(self, *args)
self.l = QtCore.QMutex() ## for serializing access to self.tb
self.tb = []
self.debug = True ## True to enable debugging functions
def tryLock(self, timeout=None, id=None):
if timeout is None:
locked = QtCore.QMutex.tryLock(self)
else:
locked = QtCore.QMutex.tryLock(self, timeout)
if self.debug and locked:
self.l.lock()
try:
if id is None:
self.tb.append(''.join(traceback.format_stack()[:-1]))
else:
self.tb.append(" " + str(id))
#print 'trylock', self, len(self.tb)
finally:
self.l.unlock()
return locked
def lock(self, id=None):
c = 0
waitTime = 5000 # in ms
while True:
if self.tryLock(waitTime, id):
break
c += 1
if self.debug:
self.l.lock()
try:
print("Waiting for mutex lock (%0.1f sec). Traceback follows:"
% (c*waitTime/1000.))
traceback.print_stack()
if len(self.tb) > 0:
print("Mutex is currently locked from:\n")
print(self.tb[-1])
else:
print("Mutex is currently locked from [???]")
finally:
self.l.unlock()
#print 'lock', self, len(self.tb)
def unlock(self):
QtCore.QMutex.unlock(self)
if self.debug:
self.l.lock()
try:
#print 'unlock', self, len(self.tb)
if len(self.tb) > 0:
self.tb.pop()
else:
raise Exception("Attempt to unlock mutex before it has been locked")
finally:
self.l.unlock()
def depth(self):
self.l.lock()
n = len(self.tb)
self.l.unlock()
return n
def traceback(self):
self.l.lock()
try:
ret = self.tb[:]
finally:
self.l.unlock()
return ret
def __exit__(self, *args):
self.unlock()
def __enter__(self):
self.lock()
return self
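# --- Illustrative usage sketch (not part of the original module) ---
# Minimal sketch of the two acquisition styles, assuming some shared-state
# function do_work() (an assumption for the example):
#
#   lock = Mutex(recursive=True)
#   with lock:                      # lock() on entry, unlock() on exit
#       do_work()
#
#   if lock.tryLock(timeout=100):   # non-blocking, waits at most 100 ms
#       try:
#           do_work()
#       finally:
#           lock.unlock()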
|
luci/luci-py
|
refs/heads/master
|
client/third_party/google/protobuf/test_messages_proto2_pb2.py
|
4
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/test_messages_proto2.proto
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/test_messages_proto2.proto',
package='protobuf_test_messages.proto2',
syntax='proto2',
serialized_options=b'\n(com.google.protobuf_test_messages.proto2H\001\370\001\001',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n*google/protobuf/test_messages_proto2.proto\x12\x1dprotobuf_test_messages.proto2\"\xfe\x39\n\x12TestAllTypesProto2\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05\x12\x16\n\x0eoptional_int64\x18\x02 \x01(\x03\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x17\n\x0foptional_uint64\x18\x04 \x01(\x04\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_sint64\x18\x06 \x01(\x12\x12\x18\n\x10optional_fixed32\x18\x07 \x01(\x07\x12\x18\n\x10optional_fixed64\x18\x08 \x01(\x06\x12\x19\n\x11optional_sfixed32\x18\t \x01(\x0f\x12\x19\n\x11optional_sfixed64\x18\n \x01(\x10\x12\x16\n\x0eoptional_float\x18\x0b \x01(\x02\x12\x17\n\x0foptional_double\x18\x0c \x01(\x01\x12\x15\n\roptional_bool\x18\r \x01(\x08\x12\x17\n\x0foptional_string\x18\x0e \x01(\t\x12\x16\n\x0eoptional_bytes\x18\x0f \x01(\x0c\x12`\n\x17optional_nested_message\x18\x12 \x01(\x0b\x32?.protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage\x12U\n\x18optional_foreign_message\x18\x13 \x01(\x0b\x32\x33.protobuf_test_messages.proto2.ForeignMessageProto2\x12Z\n\x14optional_nested_enum\x18\x15 \x01(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnum\x12O\n\x15optional_foreign_enum\x18\x16 \x01(\x0e\x32\x30.protobuf_test_messages.proto2.ForeignEnumProto2\x12!\n\x15optional_string_piece\x18\x18 \x01(\tB\x02\x08\x02\x12\x19\n\roptional_cord\x18\x19 \x01(\tB\x02\x08\x01\x12L\n\x11recursive_message\x18\x1b \x01(\x0b\x32\x31.protobuf_test_messages.proto2.TestAllTypesProto2\x12\x16\n\x0erepeated_int32\x18\x1f \x03(\x05\x12\x16\n\x0erepeated_int64\x18 \x03(\x03\x12\x17\n\x0frepeated_uint32\x18! \x03(\r\x12\x17\n\x0frepeated_uint64\x18\" \x03(\x04\x12\x17\n\x0frepeated_sint32\x18# \x03(\x11\x12\x17\n\x0frepeated_sint64\x18$ \x03(\x12\x12\x18\n\x10repeated_fixed32\x18% \x03(\x07\x12\x18\n\x10repeated_fixed64\x18& \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\' \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18( \x03(\x10\x12\x16\n\x0erepeated_float\x18) \x03(\x02\x12\x17\n\x0frepeated_double\x18* \x03(\x01\x12\x15\n\rrepeated_bool\x18+ \x03(\x08\x12\x17\n\x0frepeated_string\x18, \x03(\t\x12\x16\n\x0erepeated_bytes\x18- \x03(\x0c\x12`\n\x17repeated_nested_message\x18\x30 \x03(\x0b\x32?.protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage\x12U\n\x18repeated_foreign_message\x18\x31 \x03(\x0b\x32\x33.protobuf_test_messages.proto2.ForeignMessageProto2\x12Z\n\x14repeated_nested_enum\x18\x33 \x03(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnum\x12O\n\x15repeated_foreign_enum\x18\x34 \x03(\x0e\x32\x30.protobuf_test_messages.proto2.ForeignEnumProto2\x12!\n\x15repeated_string_piece\x18\x36 \x03(\tB\x02\x08\x02\x12\x19\n\rrepeated_cord\x18\x37 \x03(\tB\x02\x08\x01\x12\x18\n\x0cpacked_int32\x18K \x03(\x05\x42\x02\x10\x01\x12\x18\n\x0cpacked_int64\x18L \x03(\x03\x42\x02\x10\x01\x12\x19\n\rpacked_uint32\x18M \x03(\rB\x02\x10\x01\x12\x19\n\rpacked_uint64\x18N \x03(\x04\x42\x02\x10\x01\x12\x19\n\rpacked_sint32\x18O \x03(\x11\x42\x02\x10\x01\x12\x19\n\rpacked_sint64\x18P \x03(\x12\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed32\x18Q \x03(\x07\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed64\x18R \x03(\x06\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed32\x18S \x03(\x0f\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed64\x18T \x03(\x10\x42\x02\x10\x01\x12\x18\n\x0cpacked_float\x18U \x03(\x02\x42\x02\x10\x01\x12\x19\n\rpacked_double\x18V \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0bpacked_bool\x18W \x03(\x08\x42\x02\x10\x01\x12\\\n\x12packed_nested_enum\x18X 
\x03(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnumB\x02\x10\x01\x12\x1a\n\x0eunpacked_int32\x18Y \x03(\x05\x42\x02\x10\x00\x12\x1a\n\x0eunpacked_int64\x18Z \x03(\x03\x42\x02\x10\x00\x12\x1b\n\x0funpacked_uint32\x18[ \x03(\rB\x02\x10\x00\x12\x1b\n\x0funpacked_uint64\x18\\ \x03(\x04\x42\x02\x10\x00\x12\x1b\n\x0funpacked_sint32\x18] \x03(\x11\x42\x02\x10\x00\x12\x1b\n\x0funpacked_sint64\x18^ \x03(\x12\x42\x02\x10\x00\x12\x1c\n\x10unpacked_fixed32\x18_ \x03(\x07\x42\x02\x10\x00\x12\x1c\n\x10unpacked_fixed64\x18` \x03(\x06\x42\x02\x10\x00\x12\x1d\n\x11unpacked_sfixed32\x18\x61 \x03(\x0f\x42\x02\x10\x00\x12\x1d\n\x11unpacked_sfixed64\x18\x62 \x03(\x10\x42\x02\x10\x00\x12\x1a\n\x0eunpacked_float\x18\x63 \x03(\x02\x42\x02\x10\x00\x12\x1b\n\x0funpacked_double\x18\x64 \x03(\x01\x42\x02\x10\x00\x12\x19\n\runpacked_bool\x18\x65 \x03(\x08\x42\x02\x10\x00\x12^\n\x14unpacked_nested_enum\x18\x66 \x03(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnumB\x02\x10\x00\x12]\n\x0fmap_int32_int32\x18\x38 \x03(\x0b\x32\x44.protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32Int32Entry\x12]\n\x0fmap_int64_int64\x18\x39 \x03(\x0b\x32\x44.protobuf_test_messages.proto2.TestAllTypesProto2.MapInt64Int64Entry\x12\x61\n\x11map_uint32_uint32\x18: \x03(\x0b\x32\x46.protobuf_test_messages.proto2.TestAllTypesProto2.MapUint32Uint32Entry\x12\x61\n\x11map_uint64_uint64\x18; \x03(\x0b\x32\x46.protobuf_test_messages.proto2.TestAllTypesProto2.MapUint64Uint64Entry\x12\x61\n\x11map_sint32_sint32\x18< \x03(\x0b\x32\x46.protobuf_test_messages.proto2.TestAllTypesProto2.MapSint32Sint32Entry\x12\x61\n\x11map_sint64_sint64\x18= \x03(\x0b\x32\x46.protobuf_test_messages.proto2.TestAllTypesProto2.MapSint64Sint64Entry\x12\x65\n\x13map_fixed32_fixed32\x18> \x03(\x0b\x32H.protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed32Fixed32Entry\x12\x65\n\x13map_fixed64_fixed64\x18? 
\x03(\x0b\x32H.protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed64Fixed64Entry\x12i\n\x15map_sfixed32_sfixed32\x18@ \x03(\x0b\x32J.protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed32Sfixed32Entry\x12i\n\x15map_sfixed64_sfixed64\x18\x41 \x03(\x0b\x32J.protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed64Sfixed64Entry\x12]\n\x0fmap_int32_float\x18\x42 \x03(\x0b\x32\x44.protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32FloatEntry\x12_\n\x10map_int32_double\x18\x43 \x03(\x0b\x32\x45.protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32DoubleEntry\x12Y\n\rmap_bool_bool\x18\x44 \x03(\x0b\x32\x42.protobuf_test_messages.proto2.TestAllTypesProto2.MapBoolBoolEntry\x12\x61\n\x11map_string_string\x18\x45 \x03(\x0b\x32\x46.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringStringEntry\x12_\n\x10map_string_bytes\x18\x46 \x03(\x0b\x32\x45.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringBytesEntry\x12p\n\x19map_string_nested_message\x18G \x03(\x0b\x32M.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedMessageEntry\x12r\n\x1amap_string_foreign_message\x18H \x03(\x0b\x32N.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignMessageEntry\x12j\n\x16map_string_nested_enum\x18I \x03(\x0b\x32J.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedEnumEntry\x12l\n\x17map_string_foreign_enum\x18J \x03(\x0b\x32K.protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignEnumEntry\x12\x16\n\x0coneof_uint32\x18o \x01(\rH\x00\x12_\n\x14oneof_nested_message\x18p \x01(\x0b\x32?.protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessageH\x00\x12\x16\n\x0coneof_string\x18q \x01(\tH\x00\x12\x15\n\x0boneof_bytes\x18r \x01(\x0cH\x00\x12\x14\n\noneof_bool\x18s \x01(\x08H\x00\x12\x16\n\x0coneof_uint64\x18t \x01(\x04H\x00\x12\x15\n\x0boneof_float\x18u \x01(\x02H\x00\x12\x16\n\x0coneof_double\x18v \x01(\x01H\x00\x12R\n\noneof_enum\x18w \x01(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnumH\x00\x12\x45\n\x04\x64\x61ta\x18\xc9\x01 \x01(\n26.protobuf_test_messages.proto2.TestAllTypesProto2.Data\x12\x13\n\nfieldname1\x18\x91\x03 \x01(\x05\x12\x14\n\x0b\x66ield_name2\x18\x92\x03 \x01(\x05\x12\x15\n\x0c_field_name3\x18\x93\x03 \x01(\x05\x12\x16\n\rfield__name4_\x18\x94\x03 \x01(\x05\x12\x14\n\x0b\x66ield0name5\x18\x95\x03 \x01(\x05\x12\x16\n\rfield_0_name6\x18\x96\x03 \x01(\x05\x12\x13\n\nfieldName7\x18\x97\x03 \x01(\x05\x12\x13\n\nFieldName8\x18\x98\x03 \x01(\x05\x12\x14\n\x0b\x66ield_Name9\x18\x99\x03 \x01(\x05\x12\x15\n\x0c\x46ield_Name10\x18\x9a\x03 \x01(\x05\x12\x15\n\x0c\x46IELD_NAME11\x18\x9b\x03 \x01(\x05\x12\x15\n\x0c\x46IELD_name12\x18\x9c\x03 \x01(\x05\x12\x17\n\x0e__field_name13\x18\x9d\x03 \x01(\x05\x12\x17\n\x0e__Field_name14\x18\x9e\x03 \x01(\x05\x12\x16\n\rfield__name15\x18\x9f\x03 \x01(\x05\x12\x16\n\rfield__Name16\x18\xa0\x03 \x01(\x05\x12\x17\n\x0e\x66ield_name17__\x18\xa1\x03 \x01(\x05\x12\x17\n\x0e\x46ield_name18__\x18\xa2\x03 \x01(\x05\x1a\x62\n\rNestedMessage\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\x46\n\x0b\x63orecursive\x18\x02 \x01(\x0b\x32\x31.protobuf_test_messages.proto2.TestAllTypesProto2\x1a\x34\n\x12MapInt32Int32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x34\n\x12MapInt64Int64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x36\n\x14MapUint32Uint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x1a\x36\n\x14MapUint64Uint64Entry\x12\x0b\n\x03key\x18\x01 
\x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x04:\x02\x38\x01\x1a\x36\n\x14MapSint32Sint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x11:\x02\x38\x01\x1a\x36\n\x14MapSint64Sint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x12\x12\r\n\x05value\x18\x02 \x01(\x12:\x02\x38\x01\x1a\x38\n\x16MapFixed32Fixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x07\x12\r\n\x05value\x18\x02 \x01(\x07:\x02\x38\x01\x1a\x38\n\x16MapFixed64Fixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\r\n\x05value\x18\x02 \x01(\x06:\x02\x38\x01\x1a:\n\x18MapSfixed32Sfixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x0f\x12\r\n\x05value\x18\x02 \x01(\x0f:\x02\x38\x01\x1a:\n\x18MapSfixed64Sfixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x10\x12\r\n\x05value\x18\x02 \x01(\x10:\x02\x38\x01\x1a\x34\n\x12MapInt32FloatEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x1a\x35\n\x13MapInt32DoubleEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\x32\n\x10MapBoolBoolEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\x1a\x36\n\x14MapStringStringEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x35\n\x13MapStringBytesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\x1a~\n\x1bMapStringNestedMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12N\n\x05value\x18\x02 \x01(\x0b\x32?.protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage:\x02\x38\x01\x1as\n\x1cMapStringForeignMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x42\n\x05value\x18\x02 \x01(\x0b\x32\x33.protobuf_test_messages.proto2.ForeignMessageProto2:\x02\x38\x01\x1ax\n\x18MapStringNestedEnumEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12K\n\x05value\x18\x02 \x01(\x0e\x32<.protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnum:\x02\x38\x01\x1am\n\x19MapStringForeignEnumEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12?\n\x05value\x18\x02 \x01(\x0e\x32\x30.protobuf_test_messages.proto2.ForeignEnumProto2:\x02\x38\x01\x1a\x33\n\x04\x44\x61ta\x12\x14\n\x0bgroup_int32\x18\xca\x01 \x01(\x05\x12\x15\n\x0cgroup_uint32\x18\xcb\x01 \x01(\r\x1a!\n\x11MessageSetCorrect*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\x1a\xe0\x01\n\x1bMessageSetCorrectExtension1\x12\x0b\n\x03str\x18\x19 \x01(\t2\xb3\x01\n\x15message_set_extension\x12\x43.protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrect\x18\xf9\xbb^ \x01(\x0b\x32M.protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension1\x1a\xdf\x01\n\x1bMessageSetCorrectExtension2\x12\t\n\x01i\x18\t \x01(\x05\x32\xb4\x01\n\x15message_set_extension\x12\x43.protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrect\x18\x90\xb3\xfc\x01 \x01(\x0b\x32M.protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension2\"9\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x12\x07\n\x03\x42\x41Z\x10\x02\x12\x10\n\x03NEG\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01*\x05\x08x\x10\xc9\x01\x42\r\n\x0boneof_fieldJ\x06\x08\xe8\x07\x10\x90N\"!\n\x14\x46oreignMessageProto2\x12\t\n\x01\x63\x18\x01 \x01(\x05\"\xc1\x02\n\x15UnknownToTestAllTypes\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0foptional_string\x18\xea\x07 \x01(\t\x12L\n\x0enested_message\x18\xeb\x07 \x01(\x0b\x32\x33.protobuf_test_messages.proto2.ForeignMessageProto2\x12Z\n\roptionalgroup\x18\xec\x07 \x01(\n2B.protobuf_test_messages.proto2.UnknownToTestAllTypes.OptionalGroup\x12\x16\n\roptional_bool\x18\xee\x07 
\x01(\x08\x12\x17\n\x0erepeated_int32\x18\xf3\x07 \x03(\x05\x1a\x1a\n\rOptionalGroup\x12\t\n\x01\x61\x18\x01 \x01(\x05*F\n\x11\x46oreignEnumProto2\x12\x0f\n\x0b\x46OREIGN_FOO\x10\x00\x12\x0f\n\x0b\x46OREIGN_BAR\x10\x01\x12\x0f\n\x0b\x46OREIGN_BAZ\x10\x02:J\n\x0f\x65xtension_int32\x12\x31.protobuf_test_messages.proto2.TestAllTypesProto2\x18x \x01(\x05\x42/\n(com.google.protobuf_test_messages.proto2H\x01\xf8\x01\x01'
)
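# End of the serialized FileDescriptorProto blob. Everything below is the
# protoc-generated, hand-unrolled mirror of that blob: one EnumDescriptor /
# Descriptor object per type, whose serialized_start/serialized_end values
# index back into the bytes above.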
_FOREIGNENUMPROTO2 = _descriptor.EnumDescriptor(
name='ForeignEnumProto2',
full_name='protobuf_test_messages.proto2.ForeignEnumProto2',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='FOREIGN_FOO', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FOREIGN_BAR', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FOREIGN_BAZ', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=7861,
serialized_end=7931,
)
_sym_db.RegisterEnumDescriptor(_FOREIGNENUMPROTO2)
ForeignEnumProto2 = enum_type_wrapper.EnumTypeWrapper(_FOREIGNENUMPROTO2)
FOREIGN_FOO = 0
FOREIGN_BAR = 1
FOREIGN_BAZ = 2
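# The EnumTypeWrapper above adds name/number lookups on top of the plain
# integer constants. A minimal usage sketch (illustrative only, not executed
# as part of this generated module):
#   ForeignEnumProto2.Name(FOREIGN_BAR)    # -> 'FOREIGN_BAR'
#   ForeignEnumProto2.Value('FOREIGN_BAZ') # -> 2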
EXTENSION_INT32_FIELD_NUMBER = 120
extension_int32 = _descriptor.FieldDescriptor(
name='extension_int32', full_name='protobuf_test_messages.proto2.extension_int32', index=0,
number=120, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
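# File-scope extension: extends TestAllTypesProto2 with an optional int32 at
# field number 120, inside the message's declared extension range. Its
# containing_type is still None at this point; the generated module attaches
# the extension to the message class further down (not shown in this excerpt).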
_TESTALLTYPESPROTO2_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='FOO', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BAR', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BAZ', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NEG', index=3, number=-1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=7413,
serialized_end=7470,
)
_sym_db.RegisterEnumDescriptor(_TESTALLTYPESPROTO2_NESTEDENUM)
_TESTALLTYPESPROTO2_NESTEDMESSAGE = _descriptor.Descriptor(
name='NestedMessage',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='a', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage.a', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='corecursive', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage.corecursive', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5454,
serialized_end=5552,
)
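# Each map<K, V> field in the .proto compiles to a synthetic nested
# <FieldName>Entry message with exactly two fields, key (number 1) and value
# (number 2). The recurring serialized_options=b'8\001' payload is an encoded
# MessageOptions message, tag 0x38 (field 7, varint) with value 1, i.e.
# map_entry = true. A minimal decoding sketch (illustrative only):
#   from google.protobuf import descriptor_pb2
#   opts = descriptor_pb2.MessageOptions.FromString(b'8\001')
#   assert opts.map_entry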
_TESTALLTYPESPROTO2_MAPINT32INT32ENTRY = _descriptor.Descriptor(
name='MapInt32Int32Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32Int32Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32Int32Entry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32Int32Entry.value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5554,
serialized_end=5606,
)
_TESTALLTYPESPROTO2_MAPINT64INT64ENTRY = _descriptor.Descriptor(
name='MapInt64Int64Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt64Int64Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt64Int64Entry.key', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt64Int64Entry.value', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5608,
serialized_end=5660,
)
_TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY = _descriptor.Descriptor(
name='MapUint32Uint32Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint32Uint32Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint32Uint32Entry.key', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint32Uint32Entry.value', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5662,
serialized_end=5716,
)
_TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY = _descriptor.Descriptor(
name='MapUint64Uint64Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint64Uint64Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint64Uint64Entry.key', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapUint64Uint64Entry.value', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5718,
serialized_end=5772,
)
_TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY = _descriptor.Descriptor(
name='MapSint32Sint32Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint32Sint32Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint32Sint32Entry.key', index=0,
number=1, type=17, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint32Sint32Entry.value', index=1,
number=2, type=17, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5774,
serialized_end=5828,
)
_TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY = _descriptor.Descriptor(
name='MapSint64Sint64Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint64Sint64Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint64Sint64Entry.key', index=0,
number=1, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSint64Sint64Entry.value', index=1,
number=2, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5830,
serialized_end=5884,
)
_TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY = _descriptor.Descriptor(
name='MapFixed32Fixed32Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed32Fixed32Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed32Fixed32Entry.key', index=0,
number=1, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed32Fixed32Entry.value', index=1,
number=2, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5886,
serialized_end=5942,
)
_TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY = _descriptor.Descriptor(
name='MapFixed64Fixed64Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed64Fixed64Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed64Fixed64Entry.key', index=0,
number=1, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed64Fixed64Entry.value', index=1,
number=2, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5944,
serialized_end=6000,
)
_TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY = _descriptor.Descriptor(
name='MapSfixed32Sfixed32Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed32Sfixed32Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed32Sfixed32Entry.key', index=0,
number=1, type=15, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed32Sfixed32Entry.value', index=1,
number=2, type=15, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6002,
serialized_end=6060,
)
_TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY = _descriptor.Descriptor(
name='MapSfixed64Sfixed64Entry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed64Sfixed64Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed64Sfixed64Entry.key', index=0,
number=1, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed64Sfixed64Entry.value', index=1,
number=2, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6062,
serialized_end=6120,
)
_TESTALLTYPESPROTO2_MAPINT32FLOATENTRY = _descriptor.Descriptor(
name='MapInt32FloatEntry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32FloatEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32FloatEntry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32FloatEntry.value', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6122,
serialized_end=6174,
)
_TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY = _descriptor.Descriptor(
name='MapInt32DoubleEntry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32DoubleEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32DoubleEntry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32DoubleEntry.value', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6176,
serialized_end=6229,
)
_TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY = _descriptor.Descriptor(
name='MapBoolBoolEntry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapBoolBoolEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapBoolBoolEntry.key', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapBoolBoolEntry.value', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6231,
serialized_end=6281,
)
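# The string-keyed entries below use default_value=b"".decode('utf-8'), the
# generator's Python 2/3-compatible spelling of an empty unicode string.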
_TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY = _descriptor.Descriptor(
name='MapStringStringEntry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringStringEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringStringEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringStringEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6283,
serialized_end=6337,
)
_TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY = _descriptor.Descriptor(
name='MapStringBytesEntry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringBytesEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringBytesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringBytesEntry.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6339,
serialized_end=6392,
)
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY = _descriptor.Descriptor(
name='MapStringNestedMessageEntry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedMessageEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedMessageEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedMessageEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6394,
serialized_end=6520,
)
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY = _descriptor.Descriptor(
name='MapStringForeignMessageEntry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignMessageEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignMessageEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignMessageEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6522,
serialized_end=6637,
)
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY = _descriptor.Descriptor(
name='MapStringNestedEnumEntry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedEnumEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedEnumEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedEnumEntry.value', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6639,
serialized_end=6759,
)
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY = _descriptor.Descriptor(
name='MapStringForeignEnumEntry',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignEnumEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignEnumEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignEnumEntry.value', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6761,
serialized_end=6870,
)
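# Data is the payload message of the `data` group field (field number 201,
# encoded with the GROUP wire types). Groups are a deprecated proto2
# construct; the group's members (group_int32, group_uint32) live here as an
# ordinary nested message.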
_TESTALLTYPESPROTO2_DATA = _descriptor.Descriptor(
name='Data',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.Data',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='group_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.Data.group_int32', index=0,
number=202, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='group_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.Data.group_uint32', index=1,
number=203, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6872,
serialized_end=6923,
)
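# MessageSetCorrect uses the legacy MessageSet wire format:
# serialized_options=b'\010\001' is an encoded MessageOptions with
# message_set_wire_format = true (field 1, varint), and the message declares
# no regular fields, only the open extension range (4, 2147483647).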
_TESTALLTYPESPROTO2_MESSAGESETCORRECT = _descriptor.Descriptor(
name='MessageSetCorrect',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrect',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'\010\001',
is_extendable=True,
syntax='proto2',
extension_ranges=[(4, 2147483647), ],
oneofs=[
],
serialized_start=6925,
serialized_end=6958,
)
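# The two MessageSetCorrectExtension* messages below each declare a
# message-typed extension of MessageSetCorrect; with MessageSet wire format,
# the extension field numbers (1547769 and 4135312) serve as the type ids on
# the wire. message_type/containing_type are None here and are resolved
# later in the generated module (not shown in this excerpt).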
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1 = _descriptor.Descriptor(
name='MessageSetCorrectExtension1',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension1',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='str', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension1.str', index=0,
number=25, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
_descriptor.FieldDescriptor(
name='message_set_extension', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension1.message_set_extension', index=0,
number=1547769, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6961,
serialized_end=7185,
)
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2 = _descriptor.Descriptor(
name='MessageSetCorrectExtension2',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension2',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='i', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension2.i', index=0,
number=9, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
_descriptor.FieldDescriptor(
name='message_set_extension', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension2.message_set_extension', index=0,
number=4135312, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7188,
serialized_end=7411,
)
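# Top-level descriptor for TestAllTypesProto2, the conformance-test message
# that exercises every proto2 field shape: singular scalars, strings, bytes
# and messages, repeated, packed and unpacked fields, every map type, a
# oneof, a group, plus extension and reserved ranges.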
_TESTALLTYPESPROTO2 = _descriptor.Descriptor(
name='TestAllTypesProto2',
full_name='protobuf_test_messages.proto2.TestAllTypesProto2',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='optional_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_int32', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_int64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_int64', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_uint32', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_uint64', index=3,
number=4, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_sint32', index=4,
number=5, type=17, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_sint64', index=5,
number=6, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_fixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_fixed32', index=6,
number=7, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_fixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_fixed64', index=7,
number=8, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sfixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_sfixed32', index=8,
number=9, type=15, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sfixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_sfixed64', index=9,
number=10, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_float', index=10,
number=11, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_double', index=11,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_bool', index=12,
number=13, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_string', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_string', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_bytes', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_bytes', index=14,
number=15, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_nested_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_nested_message', index=15,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_foreign_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_foreign_message', index=16,
number=19, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_nested_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_nested_enum', index=17,
number=21, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_foreign_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_foreign_enum', index=18,
number=22, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
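# The next two fields carry encoded FieldOptions: b'\010\002' sets
# ctype = STRING_PIECE and b'\010\001' sets ctype = CORD (field 1, varint).
# These are C++ storage hints; the Python runtime treats both as ordinary
# strings.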
_descriptor.FieldDescriptor(
name='optional_string_piece', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_string_piece', index=19,
number=24, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\010\002', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_cord', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.optional_cord', index=20,
number=25, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\010\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='recursive_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.recursive_message', index=21,
number=27, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_int32', index=22,
number=31, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_int64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_int64', index=23,
number=32, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_uint32', index=24,
number=33, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_uint64', index=25,
number=34, type=4, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_sint32', index=26,
number=35, type=17, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_sint64', index=27,
number=36, type=18, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_fixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_fixed32', index=28,
number=37, type=7, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_fixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_fixed64', index=29,
number=38, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sfixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_sfixed32', index=30,
number=39, type=15, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sfixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_sfixed64', index=31,
number=40, type=16, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_float', index=32,
number=41, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_double', index=33,
number=42, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_bool', index=34,
number=43, type=8, cpp_type=7, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_string', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_string', index=35,
number=44, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_bytes', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_bytes', index=36,
number=45, type=12, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_nested_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_nested_message', index=37,
number=48, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_foreign_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_foreign_message', index=38,
number=49, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_nested_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_nested_enum', index=39,
number=51, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_foreign_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_foreign_enum', index=40,
number=52, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_string_piece', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_string_piece', index=41,
number=54, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\010\002', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_cord', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.repeated_cord', index=42,
number=55, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\010\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
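# The packed_* fields below carry serialized_options=b'\020\001', encoded
# FieldOptions with packed = true (field 2, varint). proto2 repeated scalars
# default to unpacked, so [packed = true] is spelled out explicitly; the
# unpacked_* fields in the serialized blob above carry b'\020\000' instead.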
_descriptor.FieldDescriptor(
name='packed_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_int32', index=43,
number=75, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_int64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_int64', index=44,
number=76, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_uint32', index=45,
number=77, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_uint64', index=46,
number=78, type=4, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_sint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_sint32', index=47,
number=79, type=17, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_sint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_sint64', index=48,
number=80, type=18, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_fixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_fixed32', index=49,
number=81, type=7, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_fixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_fixed64', index=50,
number=82, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_sfixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_sfixed32', index=51,
number=83, type=15, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_sfixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_sfixed64', index=52,
number=84, type=16, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_float', index=53,
number=85, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_double', index=54,
number=86, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_bool', index=55,
number=87, type=8, cpp_type=7, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_nested_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.packed_nested_enum', index=56,
number=88, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
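  # Conversely, b'\020\000' on the unpacked_* fields pins an explicit
  # FieldOptions.packed = false.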
_descriptor.FieldDescriptor(
name='unpacked_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_int32', index=57,
number=89, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_int64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_int64', index=58,
number=90, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_uint32', index=59,
number=91, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_uint64', index=60,
number=92, type=4, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_sint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_sint32', index=61,
number=93, type=17, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_sint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_sint64', index=62,
number=94, type=18, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_fixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_fixed32', index=63,
number=95, type=7, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_fixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_fixed64', index=64,
number=96, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_sfixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_sfixed32', index=65,
number=97, type=15, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_sfixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_sfixed64', index=66,
number=98, type=16, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_float', index=67,
number=99, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_double', index=68,
number=100, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_bool', index=69,
number=101, type=8, cpp_type=7, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unpacked_nested_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.unpacked_nested_enum', index=70,
number=102, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
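  # The map_* fields are generated as repeated message fields (type=11,
  # cpp_type=10); each one points at a synthetic *Entry nested type that
  # carries the map_entry option.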
_descriptor.FieldDescriptor(
name='map_int32_int32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_int32_int32', index=71,
number=56, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_int64_int64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_int64_int64', index=72,
number=57, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_uint32_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_uint32_uint32', index=73,
number=58, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_uint64_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_uint64_uint64', index=74,
number=59, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_sint32_sint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_sint32_sint32', index=75,
number=60, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_sint64_sint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_sint64_sint64', index=76,
number=61, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_fixed32_fixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_fixed32_fixed32', index=77,
number=62, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_fixed64_fixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_fixed64_fixed64', index=78,
number=63, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_sfixed32_sfixed32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_sfixed32_sfixed32', index=79,
number=64, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_sfixed64_sfixed64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_sfixed64_sfixed64', index=80,
number=65, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_int32_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_int32_float', index=81,
number=66, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_int32_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_int32_double', index=82,
number=67, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_bool_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_bool_bool', index=83,
number=68, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_string', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_string', index=84,
number=69, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_bytes', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_bytes', index=85,
number=70, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_nested_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_nested_message', index=86,
number=71, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_foreign_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_foreign_message', index=87,
number=72, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_nested_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_nested_enum', index=88,
number=73, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_string_foreign_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.map_string_foreign_enum', index=89,
number=74, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
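  # The oneof_* members are declared as ordinary optional fields (label=1);
  # their membership in 'oneof_field' is wired up after the descriptor is
  # built, below.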
_descriptor.FieldDescriptor(
name='oneof_uint32', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_uint32', index=90,
number=111, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_nested_message', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_nested_message', index=91,
number=112, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_string', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_string', index=92,
number=113, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_bytes', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_bytes', index=93,
number=114, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_bool', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_bool', index=94,
number=115, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_uint64', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_uint64', index=95,
number=116, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_float', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_float', index=96,
number=117, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_double', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_double', index=97,
number=118, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_enum', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_enum', index=98,
number=119, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.data', index=99,
number=201, type=10, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fieldname1', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.fieldname1', index=100,
number=401, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field_name2', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field_name2', index=101,
number=402, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='_field_name3', full_name='protobuf_test_messages.proto2.TestAllTypesProto2._field_name3', index=102,
number=403, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field__name4_', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field__name4_', index=103,
number=404, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field0name5', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field0name5', index=104,
number=405, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field_0_name6', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field_0_name6', index=105,
number=406, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fieldName7', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.fieldName7', index=106,
number=407, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='FieldName8', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.FieldName8', index=107,
number=408, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field_Name9', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field_Name9', index=108,
number=409, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='Field_Name10', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.Field_Name10', index=109,
number=410, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='FIELD_NAME11', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.FIELD_NAME11', index=110,
number=411, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='FIELD_name12', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.FIELD_name12', index=111,
number=412, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='__field_name13', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.__field_name13', index=112,
number=413, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='__Field_name14', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.__Field_name14', index=113,
number=414, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field__name15', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field__name15', index=114,
number=415, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field__Name16', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field__Name16', index=115,
number=416, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='field_name17__', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.field_name17__', index=116,
number=417, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='Field_name18__', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.Field_name18__', index=117,
number=418, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TESTALLTYPESPROTO2_NESTEDMESSAGE, _TESTALLTYPESPROTO2_MAPINT32INT32ENTRY, _TESTALLTYPESPROTO2_MAPINT64INT64ENTRY, _TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY, _TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY, _TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY, _TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY, _TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY, _TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY, _TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY, _TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY, _TESTALLTYPESPROTO2_MAPINT32FLOATENTRY, _TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY, _TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY, _TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY, _TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY, _TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY, _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY, _TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY, _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY, _TESTALLTYPESPROTO2_DATA, _TESTALLTYPESPROTO2_MESSAGESETCORRECT, _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1, _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2, ],
enum_types=[
_TESTALLTYPESPROTO2_NESTEDENUM,
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(120, 201), ],
oneofs=[
_descriptor.OneofDescriptor(
name='oneof_field', full_name='protobuf_test_messages.proto2.TestAllTypesProto2.oneof_field',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=78,
serialized_end=7500,
)
_FOREIGNMESSAGEPROTO2 = _descriptor.Descriptor(
name='ForeignMessageProto2',
full_name='protobuf_test_messages.proto2.ForeignMessageProto2',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='c', full_name='protobuf_test_messages.proto2.ForeignMessageProto2.c', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7502,
serialized_end=7535,
)
_UNKNOWNTOTESTALLTYPES_OPTIONALGROUP = _descriptor.Descriptor(
name='OptionalGroup',
full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.OptionalGroup',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='a', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.OptionalGroup.a', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7833,
serialized_end=7859,
)
_UNKNOWNTOTESTALLTYPES = _descriptor.Descriptor(
name='UnknownToTestAllTypes',
full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='optional_int32', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.optional_int32', index=0,
number=1001, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_string', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.optional_string', index=1,
number=1002, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_message', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.nested_message', index=2,
number=1003, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optionalgroup', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.optionalgroup', index=3,
number=1004, type=10, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_bool', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.optional_bool', index=4,
number=1006, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_int32', full_name='protobuf_test_messages.proto2.UnknownToTestAllTypes.repeated_int32', index=5,
number=1011, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_UNKNOWNTOTESTALLTYPES_OPTIONALGROUP, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7538,
serialized_end=7859,
)
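# Cross-link the descriptors now that they all exist: message_type, enum_type
# and containing_type cannot be filled in at construction time because the
# type graph is cyclic (e.g. NestedMessage.corecursive points back at
# TestAllTypesProto2 itself).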
_TESTALLTYPESPROTO2_NESTEDMESSAGE.fields_by_name['corecursive'].message_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_NESTEDMESSAGE.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPINT32INT32ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPINT64INT64ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPINT32FLOATENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY.fields_by_name['value'].message_type = _TESTALLTYPESPROTO2_NESTEDMESSAGE
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY.fields_by_name['value'].message_type = _FOREIGNMESSAGEPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY.fields_by_name['value'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY.fields_by_name['value'].enum_type = _FOREIGNENUMPROTO2
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_DATA.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MESSAGESETCORRECT.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2.containing_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPESPROTO2_NESTEDMESSAGE
_TESTALLTYPESPROTO2.fields_by_name['optional_foreign_message'].message_type = _FOREIGNMESSAGEPROTO2
_TESTALLTYPESPROTO2.fields_by_name['optional_nested_enum'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
_TESTALLTYPESPROTO2.fields_by_name['optional_foreign_enum'].enum_type = _FOREIGNENUMPROTO2
_TESTALLTYPESPROTO2.fields_by_name['recursive_message'].message_type = _TESTALLTYPESPROTO2
_TESTALLTYPESPROTO2.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPESPROTO2_NESTEDMESSAGE
_TESTALLTYPESPROTO2.fields_by_name['repeated_foreign_message'].message_type = _FOREIGNMESSAGEPROTO2
_TESTALLTYPESPROTO2.fields_by_name['repeated_nested_enum'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
_TESTALLTYPESPROTO2.fields_by_name['repeated_foreign_enum'].enum_type = _FOREIGNENUMPROTO2
_TESTALLTYPESPROTO2.fields_by_name['packed_nested_enum'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
_TESTALLTYPESPROTO2.fields_by_name['unpacked_nested_enum'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
_TESTALLTYPESPROTO2.fields_by_name['map_int32_int32'].message_type = _TESTALLTYPESPROTO2_MAPINT32INT32ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_int64_int64'].message_type = _TESTALLTYPESPROTO2_MAPINT64INT64ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_uint32_uint32'].message_type = _TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_uint64_uint64'].message_type = _TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_sint32_sint32'].message_type = _TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_sint64_sint64'].message_type = _TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_fixed32_fixed32'].message_type = _TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_fixed64_fixed64'].message_type = _TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_sfixed32_sfixed32'].message_type = _TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_sfixed64_sfixed64'].message_type = _TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_int32_float'].message_type = _TESTALLTYPESPROTO2_MAPINT32FLOATENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_int32_double'].message_type = _TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_bool_bool'].message_type = _TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_string'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_bytes'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_nested_message'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_foreign_message'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_nested_enum'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY
_TESTALLTYPESPROTO2.fields_by_name['map_string_foreign_enum'].message_type = _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY
_TESTALLTYPESPROTO2.fields_by_name['oneof_nested_message'].message_type = _TESTALLTYPESPROTO2_NESTEDMESSAGE
_TESTALLTYPESPROTO2.fields_by_name['oneof_enum'].enum_type = _TESTALLTYPESPROTO2_NESTEDENUM
_TESTALLTYPESPROTO2.fields_by_name['data'].message_type = _TESTALLTYPESPROTO2_DATA
_TESTALLTYPESPROTO2_NESTEDENUM.containing_type = _TESTALLTYPESPROTO2
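# Wire up the oneof: each member field is appended to the OneofDescriptor's
# field list and gets its containing_oneof back-reference set.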
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
_TESTALLTYPESPROTO2.fields_by_name['oneof_uint32'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_uint32'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
_TESTALLTYPESPROTO2.fields_by_name['oneof_nested_message'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_nested_message'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
_TESTALLTYPESPROTO2.fields_by_name['oneof_string'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_string'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
_TESTALLTYPESPROTO2.fields_by_name['oneof_bytes'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_bytes'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
_TESTALLTYPESPROTO2.fields_by_name['oneof_bool'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_bool'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
_TESTALLTYPESPROTO2.fields_by_name['oneof_uint64'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_uint64'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
_TESTALLTYPESPROTO2.fields_by_name['oneof_float'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_float'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
_TESTALLTYPESPROTO2.fields_by_name['oneof_double'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_double'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_TESTALLTYPESPROTO2.oneofs_by_name['oneof_field'].fields.append(
_TESTALLTYPESPROTO2.fields_by_name['oneof_enum'])
_TESTALLTYPESPROTO2.fields_by_name['oneof_enum'].containing_oneof = _TESTALLTYPESPROTO2.oneofs_by_name['oneof_field']
_UNKNOWNTOTESTALLTYPES_OPTIONALGROUP.containing_type = _UNKNOWNTOTESTALLTYPES
_UNKNOWNTOTESTALLTYPES.fields_by_name['nested_message'].message_type = _FOREIGNMESSAGEPROTO2
_UNKNOWNTOTESTALLTYPES.fields_by_name['optionalgroup'].message_type = _UNKNOWNTOTESTALLTYPES_OPTIONALGROUP
DESCRIPTOR.message_types_by_name['TestAllTypesProto2'] = _TESTALLTYPESPROTO2
DESCRIPTOR.message_types_by_name['ForeignMessageProto2'] = _FOREIGNMESSAGEPROTO2
DESCRIPTOR.message_types_by_name['UnknownToTestAllTypes'] = _UNKNOWNTOTESTALLTYPES
DESCRIPTOR.enum_types_by_name['ForeignEnumProto2'] = _FOREIGNENUMPROTO2
DESCRIPTOR.extensions_by_name['extension_int32'] = extension_int32
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
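# Build the concrete message classes. GeneratedProtocolMessageType is a
# metaclass that synthesizes a Message subclass from the 'DESCRIPTOR' entry
# in the class dict; nested message types are attached the same way.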
TestAllTypesProto2 = _reflection.GeneratedProtocolMessageType('TestAllTypesProto2', (_message.Message,), {
'NestedMessage' : _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_NESTEDMESSAGE,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.NestedMessage)
})
,
'MapInt32Int32Entry' : _reflection.GeneratedProtocolMessageType('MapInt32Int32Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPINT32INT32ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32Int32Entry)
})
,
'MapInt64Int64Entry' : _reflection.GeneratedProtocolMessageType('MapInt64Int64Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPINT64INT64ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapInt64Int64Entry)
})
,
'MapUint32Uint32Entry' : _reflection.GeneratedProtocolMessageType('MapUint32Uint32Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapUint32Uint32Entry)
})
,
'MapUint64Uint64Entry' : _reflection.GeneratedProtocolMessageType('MapUint64Uint64Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapUint64Uint64Entry)
})
,
'MapSint32Sint32Entry' : _reflection.GeneratedProtocolMessageType('MapSint32Sint32Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapSint32Sint32Entry)
})
,
'MapSint64Sint64Entry' : _reflection.GeneratedProtocolMessageType('MapSint64Sint64Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapSint64Sint64Entry)
})
,
'MapFixed32Fixed32Entry' : _reflection.GeneratedProtocolMessageType('MapFixed32Fixed32Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed32Fixed32Entry)
})
,
'MapFixed64Fixed64Entry' : _reflection.GeneratedProtocolMessageType('MapFixed64Fixed64Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapFixed64Fixed64Entry)
})
,
'MapSfixed32Sfixed32Entry' : _reflection.GeneratedProtocolMessageType('MapSfixed32Sfixed32Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed32Sfixed32Entry)
})
,
'MapSfixed64Sfixed64Entry' : _reflection.GeneratedProtocolMessageType('MapSfixed64Sfixed64Entry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapSfixed64Sfixed64Entry)
})
,
'MapInt32FloatEntry' : _reflection.GeneratedProtocolMessageType('MapInt32FloatEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPINT32FLOATENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32FloatEntry)
})
,
'MapInt32DoubleEntry' : _reflection.GeneratedProtocolMessageType('MapInt32DoubleEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapInt32DoubleEntry)
})
,
'MapBoolBoolEntry' : _reflection.GeneratedProtocolMessageType('MapBoolBoolEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapBoolBoolEntry)
})
,
'MapStringStringEntry' : _reflection.GeneratedProtocolMessageType('MapStringStringEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringStringEntry)
})
,
'MapStringBytesEntry' : _reflection.GeneratedProtocolMessageType('MapStringBytesEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringBytesEntry)
})
,
'MapStringNestedMessageEntry' : _reflection.GeneratedProtocolMessageType('MapStringNestedMessageEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedMessageEntry)
})
,
'MapStringForeignMessageEntry' : _reflection.GeneratedProtocolMessageType('MapStringForeignMessageEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignMessageEntry)
})
,
'MapStringNestedEnumEntry' : _reflection.GeneratedProtocolMessageType('MapStringNestedEnumEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringNestedEnumEntry)
})
,
'MapStringForeignEnumEntry' : _reflection.GeneratedProtocolMessageType('MapStringForeignEnumEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MapStringForeignEnumEntry)
})
,
'Data' : _reflection.GeneratedProtocolMessageType('Data', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_DATA,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.Data)
})
,
'MessageSetCorrect' : _reflection.GeneratedProtocolMessageType('MessageSetCorrect', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MESSAGESETCORRECT,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrect)
})
,
'MessageSetCorrectExtension1' : _reflection.GeneratedProtocolMessageType('MessageSetCorrectExtension1', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension1)
})
,
'MessageSetCorrectExtension2' : _reflection.GeneratedProtocolMessageType('MessageSetCorrectExtension2', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2.MessageSetCorrectExtension2)
})
,
'DESCRIPTOR' : _TESTALLTYPESPROTO2,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.TestAllTypesProto2)
})
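# Register each generated class in the default symbol database so it can be
# looked up later by full name (e.g. via message_factory).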
_sym_db.RegisterMessage(TestAllTypesProto2)
_sym_db.RegisterMessage(TestAllTypesProto2.NestedMessage)
_sym_db.RegisterMessage(TestAllTypesProto2.MapInt32Int32Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapInt64Int64Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapUint32Uint32Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapUint64Uint64Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapSint32Sint32Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapSint64Sint64Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapFixed32Fixed32Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapFixed64Fixed64Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapSfixed32Sfixed32Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapSfixed64Sfixed64Entry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapInt32FloatEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapInt32DoubleEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapBoolBoolEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringStringEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringBytesEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringNestedMessageEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringForeignMessageEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringNestedEnumEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.MapStringForeignEnumEntry)
_sym_db.RegisterMessage(TestAllTypesProto2.Data)
_sym_db.RegisterMessage(TestAllTypesProto2.MessageSetCorrect)
_sym_db.RegisterMessage(TestAllTypesProto2.MessageSetCorrectExtension1)
_sym_db.RegisterMessage(TestAllTypesProto2.MessageSetCorrectExtension2)
ForeignMessageProto2 = _reflection.GeneratedProtocolMessageType('ForeignMessageProto2', (_message.Message,), {
'DESCRIPTOR' : _FOREIGNMESSAGEPROTO2,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.ForeignMessageProto2)
})
_sym_db.RegisterMessage(ForeignMessageProto2)
UnknownToTestAllTypes = _reflection.GeneratedProtocolMessageType('UnknownToTestAllTypes', (_message.Message,), {
'OptionalGroup' : _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), {
'DESCRIPTOR' : _UNKNOWNTOTESTALLTYPES_OPTIONALGROUP,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.UnknownToTestAllTypes.OptionalGroup)
})
,
'DESCRIPTOR' : _UNKNOWNTOTESTALLTYPES,
'__module__' : 'google.protobuf.test_messages_proto2_pb2'
# @@protoc_insertion_point(class_scope:protobuf_test_messages.proto2.UnknownToTestAllTypes)
})
_sym_db.RegisterMessage(UnknownToTestAllTypes)
_sym_db.RegisterMessage(UnknownToTestAllTypes.OptionalGroup)
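# Hook up the extensions: extension_int32 extends the extendable
# TestAllTypesProto2 (whose extension range is 120-200), and the two
# MessageSetCorrectExtension extensions extend MessageSetCorrect.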
TestAllTypesProto2.RegisterExtension(extension_int32)
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1.extensions_by_name['message_set_extension'].message_type = _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1
TestAllTypesProto2.MessageSetCorrect.RegisterExtension(_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION1.extensions_by_name['message_set_extension'])
_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2.extensions_by_name['message_set_extension'].message_type = _TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2
TestAllTypesProto2.MessageSetCorrect.RegisterExtension(_TESTALLTYPESPROTO2_MESSAGESETCORRECTEXTENSION2.extensions_by_name['message_set_extension'])
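# Drop any eagerly-built options objects; GetOptions() will re-decode them on
# demand from the serialized_options bytes (this block is standard protoc
# Python output; the lazy-parse rationale is an assumption).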
DESCRIPTOR._options = None
_TESTALLTYPESPROTO2_MAPINT32INT32ENTRY._options = None
_TESTALLTYPESPROTO2_MAPINT64INT64ENTRY._options = None
_TESTALLTYPESPROTO2_MAPUINT32UINT32ENTRY._options = None
_TESTALLTYPESPROTO2_MAPUINT64UINT64ENTRY._options = None
_TESTALLTYPESPROTO2_MAPSINT32SINT32ENTRY._options = None
_TESTALLTYPESPROTO2_MAPSINT64SINT64ENTRY._options = None
_TESTALLTYPESPROTO2_MAPFIXED32FIXED32ENTRY._options = None
_TESTALLTYPESPROTO2_MAPFIXED64FIXED64ENTRY._options = None
_TESTALLTYPESPROTO2_MAPSFIXED32SFIXED32ENTRY._options = None
_TESTALLTYPESPROTO2_MAPSFIXED64SFIXED64ENTRY._options = None
_TESTALLTYPESPROTO2_MAPINT32FLOATENTRY._options = None
_TESTALLTYPESPROTO2_MAPINT32DOUBLEENTRY._options = None
_TESTALLTYPESPROTO2_MAPBOOLBOOLENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGSTRINGENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGBYTESENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDMESSAGEENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNMESSAGEENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGNESTEDENUMENTRY._options = None
_TESTALLTYPESPROTO2_MAPSTRINGFOREIGNENUMENTRY._options = None
_TESTALLTYPESPROTO2_MESSAGESETCORRECT._options = None
_TESTALLTYPESPROTO2.fields_by_name['optional_string_piece']._options = None
_TESTALLTYPESPROTO2.fields_by_name['optional_cord']._options = None
_TESTALLTYPESPROTO2.fields_by_name['repeated_string_piece']._options = None
_TESTALLTYPESPROTO2.fields_by_name['repeated_cord']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_int32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_int64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_uint32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_uint64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_sint32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_sint64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_fixed32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_fixed64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_sfixed32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_sfixed64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_float']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_double']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_bool']._options = None
_TESTALLTYPESPROTO2.fields_by_name['packed_nested_enum']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_int32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_int64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_uint32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_uint64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_sint32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_sint64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_fixed32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_fixed64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_sfixed32']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_sfixed64']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_float']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_double']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_bool']._options = None
_TESTALLTYPESPROTO2.fields_by_name['unpacked_nested_enum']._options = None
# @@protoc_insertion_point(module_scope)
|
AOSPU/external_chromium_org
|
refs/heads/android-5.0/py3
|
tools/telemetry/telemetry/core/__init__.py
|
115
|
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
|
bhaskar24/ns_3_dev_RARED
|
refs/heads/master
|
src/dsr/bindings/modulegen__gcc_ILP32.py
|
14
| null |
Ingenico-ePayments/connect-sdk-python2
|
refs/heads/master
|
ingenico/connect/sdk/domain/payment/payment_error_response.py
|
2
|
# -*- coding: utf-8 -*-
#
# This class was auto-generated from the API references found at
# https://epayments-api.developer-ingenico.com/s2sapi/v1/
#
from ingenico.connect.sdk.data_object import DataObject
from ingenico.connect.sdk.domain.errors.definitions.api_error import APIError
from ingenico.connect.sdk.domain.payment.definitions.create_payment_result import CreatePaymentResult
class PaymentErrorResponse(DataObject):
__error_id = None
__errors = None
__payment_result = None
@property
def error_id(self):
"""
| Unique reference, for debugging purposes, of this error response
Type: str
"""
return self.__error_id
@error_id.setter
def error_id(self, value):
self.__error_id = value
@property
def errors(self):
"""
| List of one or more errors
Type: list[:class:`ingenico.connect.sdk.domain.errors.definitions.api_error.APIError`]
"""
return self.__errors
@errors.setter
def errors(self, value):
self.__errors = value
@property
def payment_result(self):
"""
| Object that contains details on the created payment in case one has been created
Type: :class:`ingenico.connect.sdk.domain.payment.definitions.create_payment_result.CreatePaymentResult`
"""
return self.__payment_result
@payment_result.setter
def payment_result(self, value):
self.__payment_result = value
def to_dictionary(self):
dictionary = super(PaymentErrorResponse, self).to_dictionary()
if self.error_id is not None:
dictionary['errorId'] = self.error_id
if self.errors is not None:
dictionary['errors'] = []
for element in self.errors:
if element is not None:
dictionary['errors'].append(element.to_dictionary())
if self.payment_result is not None:
dictionary['paymentResult'] = self.payment_result.to_dictionary()
return dictionary
def from_dictionary(self, dictionary):
super(PaymentErrorResponse, self).from_dictionary(dictionary)
if 'errorId' in dictionary:
self.error_id = dictionary['errorId']
if 'errors' in dictionary:
if not isinstance(dictionary['errors'], list):
raise TypeError('value \'{}\' is not a list'.format(dictionary['errors']))
self.errors = []
for element in dictionary['errors']:
value = APIError()
self.errors.append(value.from_dictionary(element))
if 'paymentResult' in dictionary:
if not isinstance(dictionary['paymentResult'], dict):
raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['paymentResult']))
value = CreatePaymentResult()
self.payment_result = value.from_dictionary(dictionary['paymentResult'])
return self
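# A minimal round-trip sketch (assumes this SDK package is importable; the
# 'code' key for APIError below is illustrative, not taken from the API docs):
if __name__ == '__main__':
    sample = {'errorId': 'err-123', 'errors': [{'code': '1000'}]}
    response = PaymentErrorResponse().from_dictionary(sample)
    # to_dictionary() reproduces the keys that from_dictionary() consumed
    print(response.to_dictionary())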
|
bacher09/gentoo-packages
|
refs/heads/master
|
gpackages/apps/packages/management/commands/scanusedesc.py
|
1
|
from django.core.management.base import BaseCommand, CommandError
from packages.scan import Scanner
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-g','--no-globals',
action='store_false',
dest='scan_global_use',
default=True,
                    help='Don\'t scan global USE descriptions'),
make_option('-l', '--no-locals',
action='store_false',
dest='scan_local_use',
default=True,
                    help='Don\'t scan local USE descriptions'),
make_option('--not-show-time',
action='store_false',
dest='show_time',
default=True,
                    help='Don\'t show scanning time'),
)
args = ''
    help = 'Scan USE flag descriptions'
def handle(self, *args, **options):
Scanner(**options).scan()
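# Example invocations (assuming the 'packages' app is installed in a Django
# project; option names match option_list above):
#   python manage.py scanusedesc                  # global and local USE descriptions
#   python manage.py scanusedesc --no-globals     # local descriptions only
#   python manage.py scanusedesc --not-show-time  # suppress timing output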
|
andreif/django
|
refs/heads/master
|
tests/template_tests/templatetags/bad_tag.py
|
513
|
from django import template
register = template.Library()
@register.tag
def badtag(parser, token):
raise RuntimeError("I am a bad tag")
@register.simple_tag
def badsimpletag():
raise RuntimeError("I am a bad simpletag")
|
hisaharu/ryu
|
refs/heads/master
|
ryu/tests/integrated/test_of_config.py
|
19
|
# Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2013 Isaku Yamahata <yamahata at private email ne jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
How to run this test:
Edit the LINC config file, LINC-Switch/rel/linc/releases/1.0/sys.config.
You can find the sample config used for this test below.
For the following config to work, the network interfaces
linc-port and linc-port2 must be created beforehand
(or edit the port names depending on your environment).
An easy way is to create them as follows:
# ip link add linc-port type veth peer name linc-port-peer
# ip link set linc-port up
# ip link add linc-port2 type veth peer name linc-port-peer2
# ip link set linc-port2 up
Then run linc
# rel/linc/bin/linc console
Then run ryu
# PYTHONPATH=. ./bin/ryu-manager --verbose \
ryu/tests/integrated/test_of_config.py
Here is my sys.config used for this test.
-->8-->8-->8-->8-->8-->8-->8-->8-->8-->8-->8-->8-->8---
[
{linc,
[
{of_config, enabled},
{logical_switches,
[
{switch, 0,
[
{backend, linc_us4},
{controllers,
[
{"Switch0-DefaultController", "localhost", 6633, tcp}
]},
{ports,
[
{port, 1, [{interface, "linc-port"}]},
{port, 2, [{interface, "linc-port2"}]}
]},
{queues_status, disabled},
{queues,
[
]}
]}
]}
]},
{enetconf,
[
{capabilities, [{base, {1, 1}},
{startup, {1, 0}},
{'writable-running', {1, 0}}]},
{callback_module, linc_ofconfig},
{sshd_ip, any},
{sshd_port, 1830},
{sshd_user_passwords,
[
{"linc", "linc"}
]}
]},
{lager,
[
{handlers,
[
{lager_console_backend, info},
{lager_file_backend,
[
{"log/error.log", error, 10485760, "$D0", 5},
{"log/console.log", info, 10485760, "$D0", 5}
]}
]}
]},
{sasl,
[
{sasl_error_logger, {file, "log/sasl-error.log"}},
{errlog_type, error},
{error_logger_mf_dir, "log/sasl"}, % Log directory
{error_logger_mf_maxbytes, 10485760}, % 10 MB max file size
{error_logger_mf_maxfiles, 5} % 5 files max
]}
].
-->8-->8-->8-->8-->8-->8-->8-->8-->8-->8-->8-->8-->8--
"""
from __future__ import print_function
import traceback
import lxml.etree
import ncclient
from ryu.base import app_manager
from ryu.lib.netconf import constants as nc_consts
from ryu.lib import hub
from ryu.lib import of_config
from ryu.lib.of_config import capable_switch
from ryu.lib.of_config import constants as ofc_consts
# Change these depending on the switch configuration
HOST = '127.0.0.1'
PORT = 1830
USERNAME = 'linc'
PASSWORD = 'linc'
CAPABLE_SWITCH_ID = 'CapableSwitch0'
LOGICAL_SWITCH = 'LogicalSwitch0'
PORT_ID = 'LogicalSwitch0-Port2'
CONTROLLER_ID = 'Switch0-DefaultController'
PORT_DICT = {
'capable_switch': CAPABLE_SWITCH_ID,
'port_id': PORT_ID,
'logical_switch': LOGICAL_SWITCH,
'controller_id': CONTROLLER_ID,
'ip': HOST,
}
SWITCH_PORT_DOWN = '''
<nc:config xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<capable-switch xmlns="urn:onf:of111:config:yang">
<id>%(capable_switch)s</id>
<resources>
<port>
<resource-id>%(port_id)s</resource-id>
<configuration operation="merge">
<admin-state>down</admin-state>
<no-receive>false</no-receive>
<no-forward>false</no-forward>
<no-packet-in>false</no-packet-in>
</configuration>
</port>
</resources>
</capable-switch>
</nc:config>
''' % PORT_DICT
SWITCH_ADVERTISED = '''
<nc:config xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<capable-switch xmlns="urn:onf:of111:config:yang">
<id>%(capable_switch)s</id>
<resources>
<port>
<resource-id>%(port_id)s</resource-id>
<features>
<advertised operation="merge">
<rate>10Mb-FD</rate>
<auto-negotiate>true</auto-negotiate>
<medium>copper</medium>
<pause>unsupported</pause>
</advertised>
</features>
</port>
</resources>
</capable-switch>
</nc:config>
''' % PORT_DICT
SWITCH_CONTROLLER = '''
<nc:config xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
<capable-switch xmlns="urn:onf:of111:config:yang">
<id>%(capable_switch)s</id>
<logical-switches>
<switch>
<id>%(logical_switch)s</id>
<controllers>
<controller operation="merge">
<id>%(controller_id)s</id>
<role>master</role>
<ip-address>%(ip)s</ip-address>
<port>6633</port>
<protocol>tcp</protocol>
</controller>
</controllers>
</switch>
</logical-switches>
</capable-switch>
</nc:config>
''' % PORT_DICT
def _get_schema():
# file_name = of_config.OF_CONFIG_1_0_XSD
# file_name = of_config.OF_CONFIG_1_1_XSD
file_name = of_config.OF_CONFIG_1_1_1_XSD
return lxml.etree.XMLSchema(file=file_name)
class OFConfigClient(app_manager.RyuApp):
def __init__(self, *args, **kwargs):
super(OFConfigClient, self).__init__(*args, **kwargs)
self.switch = capable_switch.OFCapableSwitch(
host=HOST, port=PORT, username=USERNAME, password=PASSWORD,
            unknown_host_cb=lambda host, fingerprint: True)
hub.spawn(self._do_of_config)
def _validate(self, tree):
xmlschema = _get_schema()
try:
xmlschema.assertValid(tree)
        except Exception:
traceback.print_exc()
def _do_get(self):
data_xml = self.switch.raw_get()
tree = lxml.etree.fromstring(data_xml)
# print(lxml.etree.tostring(tree, pretty_print=True))
self._validate(tree)
name_spaces = set()
        for e in tree.iter():
name_spaces.add(capable_switch.get_ns_tag(e.tag)[0])
print(name_spaces)
return tree
def _do_get_config(self, source):
print('source = %s' % source)
config_xml = self.switch.raw_get_config(source)
tree = lxml.etree.fromstring(config_xml)
# print(lxml.etree.tostring(tree, pretty_print=True))
self._validate(tree)
def _do_edit_config(self, config):
tree = lxml.etree.fromstring(config)
self._validate(tree)
self.switch.raw_edit_config(target='running', config=config)
def _print_ports(self, tree, ns):
for port in tree.findall('{%s}%s/{%s}%s' % (ns, ofc_consts.RESOURCES,
ns, ofc_consts.PORT)):
print(lxml.etree.tostring(port, pretty_print=True))
def _set_ports_down(self):
"""try to set all ports down with etree operation"""
tree = self._do_get()
print(lxml.etree.tostring(tree, pretty_print=True))
qname = lxml.etree.QName(tree.tag)
ns = qname.namespace
self._print_ports(tree, ns)
switch_id = tree.find('{%s}%s' % (ns, ofc_consts.ID))
resources = tree.find('{%s}%s' % (ns, ofc_consts.RESOURCES))
configuration = tree.find(
'{%s}%s/{%s}%s/{%s}%s' % (ns, ofc_consts.RESOURCES,
ns, ofc_consts.PORT,
ns, ofc_consts.CONFIGURATION))
admin_state = tree.find(
'{%s}%s/{%s}%s/{%s}%s/{%s}%s' % (ns, ofc_consts.RESOURCES,
ns, ofc_consts.PORT,
ns, ofc_consts.CONFIGURATION,
ns, ofc_consts.ADMIN_STATE))
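        # Build a minimal NETCONF <config> envelope mirroring the tree just
        # read: capable-switch -> id + resources, with one <port> entry per
        # port whose <configuration operation="merge"> sets admin-state down.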
config_ = lxml.etree.Element(
'{%s}%s' % (ncclient.xml_.BASE_NS_1_0, nc_consts.CONFIG))
capable_switch_ = lxml.etree.SubElement(config_, tree.tag)
switch_id_ = lxml.etree.SubElement(capable_switch_, switch_id.tag)
switch_id_.text = switch_id.text
resources_ = lxml.etree.SubElement(capable_switch_,
resources.tag)
for port in tree.findall(
'{%s}%s/{%s}%s' % (ns, ofc_consts.RESOURCES,
ns, ofc_consts.PORT)):
resource_id = port.find('{%s}%s' % (ns, ofc_consts.RESOURCE_ID))
port_ = lxml.etree.SubElement(resources_, port.tag)
resource_id_ = lxml.etree.SubElement(port_, resource_id.tag)
resource_id_.text = resource_id.text
configuration_ = lxml.etree.SubElement(port_, configuration.tag)
configuration_.set(ofc_consts.OPERATION, nc_consts.MERGE)
admin_state_ = lxml.etree.SubElement(configuration_,
admin_state.tag)
admin_state_.text = ofc_consts.DOWN
self._do_edit_config(lxml.etree.tostring(config_, pretty_print=True))
tree = self._do_get()
self._print_ports(tree, ns)
def _do_of_config(self):
self._do_get()
self._do_get_config('running')
self._do_get_config('startup')
# LINC doesn't support 'candidate' datastore
try:
self._do_get_config('candidate')
except ncclient.NCClientError:
traceback.print_exc()
# use raw XML format
self._do_edit_config(SWITCH_PORT_DOWN)
self._do_edit_config(SWITCH_ADVERTISED)
self._do_edit_config(SWITCH_CONTROLLER)
self._set_ports_down()
self.switch.close_session()
|
inares/edx-platform
|
refs/heads/inares_sass
|
common/djangoapps/cors_csrf/helpers.py
|
162
|
"""Helper methods for CORS and CSRF checks. """
import logging
import urlparse
import contextlib
from django.conf import settings
log = logging.getLogger(__name__)
def is_cross_domain_request_allowed(request):
"""Check whether we should allow the cross-domain request.
We allow a cross-domain request only if:
1) The request is made securely and the referer has "https://" as the protocol.
2) The referer domain has been whitelisted.
Arguments:
request (HttpRequest)
Returns:
bool
"""
referer = request.META.get('HTTP_REFERER')
referer_parts = urlparse.urlparse(referer) if referer else None
referer_hostname = referer_parts.hostname if referer_parts is not None else None
# Use CORS_ALLOW_INSECURE *only* for development and testing environments;
# it should never be enabled in production.
if not getattr(settings, 'CORS_ALLOW_INSECURE', False):
if not request.is_secure():
log.debug(
u"Request is not secure, so we cannot send the CSRF token. "
u"For testing purposes, you can disable this check by setting "
u"`CORS_ALLOW_INSECURE` to True in the settings"
)
return False
if not referer:
log.debug(u"No referer provided over a secure connection, so we cannot check the protocol.")
return False
        if referer_parts.scheme != 'https':
            log.debug(u"Referer '%s' must have the scheme 'https'.", referer)
return False
domain_is_whitelisted = (
getattr(settings, 'CORS_ORIGIN_ALLOW_ALL', False) or
referer_hostname in getattr(settings, 'CORS_ORIGIN_WHITELIST', [])
)
if not domain_is_whitelisted:
if referer_hostname is None:
# If no referer is specified, we can't check if it's a cross-domain
# request or not.
log.debug(u"Referrer hostname is `None`, so it is not on the whitelist.")
elif referer_hostname != request.get_host():
log.info(
(
u"Domain '%s' is not on the cross domain whitelist. "
u"Add the domain to `CORS_ORIGIN_WHITELIST` or set "
u"`CORS_ORIGIN_ALLOW_ALL` to True in the settings."
), referer_hostname
)
else:
log.debug(
(
u"Domain '%s' is the same as the hostname in the request, "
u"so we are not going to treat it as a cross-domain request."
), referer_hostname
)
return False
return True
@contextlib.contextmanager
def skip_cross_domain_referer_check(request):
"""Skip the cross-domain CSRF referer check.
Django's CSRF middleware performs the referer check
only when the request is made over a secure connection.
To skip the check, we patch `request.is_secure()` to
False.
"""
is_secure_default = request.is_secure
request.is_secure = lambda: False
try:
yield
finally:
request.is_secure = is_secure_default
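# Hypothetical usage in a view that accepts whitelisted cross-domain POSTs
# (handle_cross_domain_post is an illustrative name):
#
#     with skip_cross_domain_referer_check(request):
#         return handle_cross_domain_post(request)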
|
AndreasMadsen/tensorflow
|
refs/heads/master
|
tensorflow/tools/dist_test/python/mnist_replica.py
|
57
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Distributed MNIST training and validation, with model replicas.
A simple softmax model with one hidden layer is defined. The parameters
(weights and biases) are located on two parameter servers (ps), while the
ops are defined on a worker node. The TF sessions also run on the worker
node.
Multiple invocations of this script can be done in parallel, with different
values for --task_index. There should be exactly one invocation with
--task_index=0, which will create a master session that carries out variable
initialization. The other, non-master, sessions will wait for the master
session to finish the initialization before proceeding to the training stage.
The coordination between the multiple worker invocations occurs due to
the definition of the parameters on the same ps devices. The parameter updates
from one worker are visible to all other workers. As such, the workers can
perform forward computation and gradient calculation in parallel, which
should lead to increased training speed for the simple model.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import sys
import tempfile
import time
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
flags = tf.app.flags
flags.DEFINE_string("data_dir", "/tmp/mnist-data",
"Directory for storing mnist data")
flags.DEFINE_boolean("download_only", False,
"Only perform downloading of data; Do not proceed to "
"session preparation, model definition or training")
flags.DEFINE_integer("task_index", None,
"Worker task index, should be >= 0. task_index=0 is "
"the master worker task the performs the variable "
"initialization ")
flags.DEFINE_integer("num_gpus", 1,
"Total number of gpus for each machine."
"If you don't use GPU, please set it to '0'")
flags.DEFINE_integer("replicas_to_aggregate", None,
"Number of replicas to aggregate before parameter update"
"is applied (For sync_replicas mode only; default: "
"num_workers)")
flags.DEFINE_integer("hidden_units", 100,
"Number of units in the hidden layer of the NN")
flags.DEFINE_integer("train_steps", 200,
"Number of (global) training steps to perform")
flags.DEFINE_integer("batch_size", 100, "Training batch size")
flags.DEFINE_float("learning_rate", 0.01, "Learning rate")
flags.DEFINE_boolean("sync_replicas", False,
"Use the sync_replicas (synchronized replicas) mode, "
"wherein the parameter updates from workers are aggregated "
"before applied to avoid stale gradients")
flags.DEFINE_boolean(
"existing_servers", False, "Whether servers already exists. If True, "
"will use the worker hosts via their GRPC URLs (one client process "
"per worker host). Otherwise, will create an in-process TensorFlow "
"server.")
flags.DEFINE_string("ps_hosts","localhost:2222",
"Comma-separated list of hostname:port pairs")
flags.DEFINE_string("worker_hosts", "localhost:2223,localhost:2224",
"Comma-separated list of hostname:port pairs")
flags.DEFINE_string("job_name", None,"job name: worker or ps")
FLAGS = flags.FLAGS
IMAGE_PIXELS = 28
def main(unused_argv):
mnist = input_data.read_data_sets(FLAGS.data_dir, one_hot=True)
if FLAGS.download_only:
sys.exit(0)
if FLAGS.job_name is None or FLAGS.job_name == "":
raise ValueError("Must specify an explicit `job_name`")
if FLAGS.task_index is None or FLAGS.task_index =="":
raise ValueError("Must specify an explicit `task_index`")
print("job name = %s" % FLAGS.job_name)
print("task index = %d" % FLAGS.task_index)
  # Construct the cluster and start the server.
ps_spec = FLAGS.ps_hosts.split(",")
worker_spec = FLAGS.worker_hosts.split(",")
# Get the number of workers.
num_workers = len(worker_spec)
cluster = tf.train.ClusterSpec({
"ps": ps_spec,
"worker": worker_spec})
if not FLAGS.existing_servers:
# Not using existing servers. Create an in-process server.
server = tf.train.Server(
cluster, job_name=FLAGS.job_name, task_index=FLAGS.task_index)
if FLAGS.job_name == "ps":
server.join()
is_chief = (FLAGS.task_index == 0)
if FLAGS.num_gpus > 0:
if FLAGS.num_gpus < num_workers:
raise ValueError("number of gpus is less than number of workers")
# Avoid gpu allocation conflict: now allocate task_num -> #gpu
# for each worker in the corresponding machine
gpu = (FLAGS.task_index % FLAGS.num_gpus)
worker_device = "/job:worker/task:%d/gpu:%d" % (FLAGS.task_index, gpu)
elif FLAGS.num_gpus == 0:
# Just allocate the CPU to worker server
cpu = 0
worker_device = "/job:worker/task:%d/cpu:%d" % (FLAGS.task_index, cpu)
# The device setter will automatically place Variables ops on separate
# parameter servers (ps). The non-Variable ops will be placed on the workers.
# The ps use CPU and workers use corresponding GPU
with tf.device(
tf.train.replica_device_setter(
worker_device=worker_device,
ps_device="/job:ps/cpu:0",
cluster=cluster)):
global_step = tf.Variable(0, name="global_step", trainable=False)
# Variables of the hidden layer
hid_w = tf.Variable(
tf.truncated_normal(
[IMAGE_PIXELS * IMAGE_PIXELS, FLAGS.hidden_units],
stddev=1.0 / IMAGE_PIXELS),
name="hid_w")
hid_b = tf.Variable(tf.zeros([FLAGS.hidden_units]), name="hid_b")
# Variables of the softmax layer
sm_w = tf.Variable(
tf.truncated_normal(
[FLAGS.hidden_units, 10],
stddev=1.0 / math.sqrt(FLAGS.hidden_units)),
name="sm_w")
sm_b = tf.Variable(tf.zeros([10]), name="sm_b")
# Ops: located on the worker specified with FLAGS.task_index
x = tf.placeholder(tf.float32, [None, IMAGE_PIXELS * IMAGE_PIXELS])
y_ = tf.placeholder(tf.float32, [None, 10])
hid_lin = tf.nn.xw_plus_b(x, hid_w, hid_b)
hid = tf.nn.relu(hid_lin)
y = tf.nn.softmax(tf.nn.xw_plus_b(hid, sm_w, sm_b))
cross_entropy = -tf.reduce_sum(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0)))
opt = tf.train.AdamOptimizer(FLAGS.learning_rate)
if FLAGS.sync_replicas:
if FLAGS.replicas_to_aggregate is None:
replicas_to_aggregate = num_workers
else:
replicas_to_aggregate = FLAGS.replicas_to_aggregate
opt = tf.train.SyncReplicasOptimizer(
opt,
replicas_to_aggregate=replicas_to_aggregate,
total_num_replicas=num_workers,
name="mnist_sync_replicas")
train_step = opt.minimize(cross_entropy, global_step=global_step)
if FLAGS.sync_replicas:
local_init_op = opt.local_step_init_op
if is_chief:
local_init_op = opt.chief_init_op
ready_for_local_init_op = opt.ready_for_local_init_op
# Initial token and chief queue runners required by the sync_replicas mode
chief_queue_runner = opt.get_chief_queue_runner()
sync_init_op = opt.get_init_tokens_op()
init_op = tf.global_variables_initializer()
train_dir = tempfile.mkdtemp()
if FLAGS.sync_replicas:
sv = tf.train.Supervisor(
is_chief=is_chief,
logdir=train_dir,
init_op=init_op,
local_init_op=local_init_op,
ready_for_local_init_op=ready_for_local_init_op,
recovery_wait_secs=1,
global_step=global_step)
else:
sv = tf.train.Supervisor(
is_chief=is_chief,
logdir=train_dir,
init_op=init_op,
recovery_wait_secs=1,
global_step=global_step)
sess_config = tf.ConfigProto(
allow_soft_placement=True,
log_device_placement=False,
device_filters=["/job:ps", "/job:worker/task:%d" % FLAGS.task_index])
# The chief worker (task_index==0) session will prepare the session,
# while the remaining workers will wait for the preparation to complete.
if is_chief:
print("Worker %d: Initializing session..." % FLAGS.task_index)
else:
print("Worker %d: Waiting for session to be initialized..." %
FLAGS.task_index)
if FLAGS.existing_servers:
server_grpc_url = "grpc://" + worker_spec[FLAGS.task_index]
print("Using existing server at: %s" % server_grpc_url)
sess = sv.prepare_or_wait_for_session(server_grpc_url,
config=sess_config)
else:
sess = sv.prepare_or_wait_for_session(server.target, config=sess_config)
print("Worker %d: Session initialization complete." % FLAGS.task_index)
if FLAGS.sync_replicas and is_chief:
# Chief worker will start the chief queue runner and call the init op.
sess.run(sync_init_op)
sv.start_queue_runners(sess, [chief_queue_runner])
# Perform training
time_begin = time.time()
print("Training begins @ %f" % time_begin)
local_step = 0
while True:
# Training feed
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batch_size)
train_feed = {x: batch_xs, y_: batch_ys}
_, step = sess.run([train_step, global_step], feed_dict=train_feed)
local_step += 1
now = time.time()
print("%f: Worker %d: training step %d done (global step: %d)" %
(now, FLAGS.task_index, local_step, step))
if step >= FLAGS.train_steps:
break
time_end = time.time()
print("Training ends @ %f" % time_end)
training_time = time_end - time_begin
print("Training elapsed time: %f s" % training_time)
# Validation feed
val_feed = {x: mnist.validation.images, y_: mnist.validation.labels}
val_xent = sess.run(cross_entropy, feed_dict=val_feed)
print("After %d training step(s), validation cross entropy = %g" %
(FLAGS.train_steps, val_xent))
if __name__ == "__main__":
tf.app.run()
|
anhstudios/swganh
|
refs/heads/develop
|
data/scripts/templates/object/draft_schematic/clothing/shared_clothing_robe_casual_05.py
|
2
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/clothing/shared_clothing_robe_casual_05.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
normtown/SickRage
|
refs/heads/master
|
lib/github/tests/Commit.py
|
39
|
# -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import Framework
class Commit(Framework.TestCase):
def setUp(self):
Framework.TestCase.setUp(self)
self.commit = self.g.get_user().get_repo("PyGithub").get_commit("1292bf0e22c796e91cc3d6e24b544aece8c21f2a")
self.commit.author.login # to force lazy completion
def testAttributes(self):
self.assertEqual(self.commit.author.login, "jacquev6")
self.assertEqual(self.commit.commit.url, "https://api.github.com/repos/jacquev6/PyGithub/git/commits/1292bf0e22c796e91cc3d6e24b544aece8c21f2a")
self.assertEqual(self.commit.committer.login, "jacquev6")
self.assertEqual(len(self.commit.files), 1)
self.assertEqual(self.commit.files[0].additions, 0)
self.assertEqual(self.commit.files[0].blob_url, "https://github.com/jacquev6/PyGithub/blob/1292bf0e22c796e91cc3d6e24b544aece8c21f2a/github/GithubObjects/GitAuthor.py")
self.assertEqual(self.commit.files[0].changes, 20)
self.assertEqual(self.commit.files[0].deletions, 20)
self.assertEqual(self.commit.files[0].filename, "github/GithubObjects/GitAuthor.py")
self.assertTrue(isinstance(self.commit.files[0].patch, (str, unicode)))
self.assertEqual(self.commit.files[0].raw_url, "https://github.com/jacquev6/PyGithub/raw/1292bf0e22c796e91cc3d6e24b544aece8c21f2a/github/GithubObjects/GitAuthor.py")
self.assertEqual(self.commit.files[0].sha, "1292bf0e22c796e91cc3d6e24b544aece8c21f2a")
self.assertEqual(self.commit.files[0].status, "modified")
self.assertEqual(len(self.commit.parents), 1)
self.assertEqual(self.commit.parents[0].sha, "b46ed0dfde5ad02d3b91eb54a41c5ed960710eae")
self.assertEqual(self.commit.sha, "1292bf0e22c796e91cc3d6e24b544aece8c21f2a")
self.assertEqual(self.commit.stats.deletions, 20)
self.assertEqual(self.commit.stats.additions, 0)
self.assertEqual(self.commit.stats.total, 20)
self.assertEqual(self.commit.url, "https://api.github.com/repos/jacquev6/PyGithub/commits/1292bf0e22c796e91cc3d6e24b544aece8c21f2a")
def testGetComments(self):
self.assertListKeyEqual(self.commit.get_comments(), lambda c: c.id, [1347033, 1347083, 1347397, 1349654])
def testCreateComment(self):
comment = self.commit.create_comment("Comment created by PyGithub")
self.assertEqual(comment.id, 1361949)
self.assertEqual(comment.line, None)
self.assertEqual(comment.path, None)
self.assertEqual(comment.position, None)
def testCreateCommentOnFileLine(self):
comment = self.commit.create_comment("Comment created by PyGithub", path="codegen/templates/GithubObject.MethodBody.UseResult.py", line=26)
self.assertEqual(comment.id, 1362000)
self.assertEqual(comment.line, 26)
self.assertEqual(comment.path, "codegen/templates/GithubObject.MethodBody.UseResult.py")
self.assertEqual(comment.position, None)
def testCreateCommentOnFilePosition(self):
comment = self.commit.create_comment("Comment also created by PyGithub", path="codegen/templates/GithubObject.MethodBody.UseResult.py", position=3)
self.assertEqual(comment.id, 1362001)
self.assertEqual(comment.line, None)
self.assertEqual(comment.path, "codegen/templates/GithubObject.MethodBody.UseResult.py")
self.assertEqual(comment.position, 3)
def testCreateStatusWithoutOptionalParameters(self):
status = self.commit.create_status("pending")
self.assertEqual(status.id, 277031)
self.assertEqual(status.state, "pending")
self.assertEqual(status.target_url, None)
self.assertEqual(status.description, None)
def testCreateStatusWithAllParameters(self):
status = self.commit.create_status("success", "https://github.com/jacquev6/PyGithub/issues/67", "Status successfuly created by PyGithub")
self.assertEqual(status.id, 277040)
self.assertEqual(status.state, "success")
self.assertEqual(status.target_url, "https://github.com/jacquev6/PyGithub/issues/67")
self.assertEqual(status.description, "Status successfuly created by PyGithub")
|
takeflight/wagtail
|
refs/heads/master
|
wagtail/users/views/groups.py
|
2
|
from django.contrib.auth.models import Group
from django.urls import re_path
from django.utils.translation import gettext as _
from wagtail.admin.views import generic, mixins
from wagtail.admin.viewsets.model import ModelViewSet
from wagtail.core import hooks
from wagtail.users.forms import GroupForm, GroupPagePermissionFormSet
from wagtail.users.views.users import index
_permission_panel_classes = None
def get_permission_panel_classes():
global _permission_panel_classes
if _permission_panel_classes is None:
_permission_panel_classes = [GroupPagePermissionFormSet]
for fn in hooks.get_hooks('register_group_permission_panel'):
_permission_panel_classes.append(fn())
return _permission_panel_classes
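# Hypothetical hook registration (in some app's wagtail_hooks.py) that adds an
# extra panel class to the list assembled above; MyPermissionFormSet is an
# illustrative name, not part of wagtail:
#
#     @hooks.register('register_group_permission_panel')
#     def register_my_permission_panel():
#         return MyPermissionFormSet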
class PermissionPanelFormsMixin:
def get_permission_panel_form_kwargs(self, cls):
kwargs = {}
if self.request.method in ('POST', 'PUT'):
kwargs.update({
'data': self.request.POST,
'files': self.request.FILES,
})
if hasattr(self, 'object'):
kwargs.update({'instance': self.object})
return kwargs
def get_permission_panel_forms(self):
return [
cls(**self.get_permission_panel_form_kwargs(cls))
for cls in get_permission_panel_classes()
]
def get_context_data(self, **kwargs):
if 'permission_panels' not in kwargs:
kwargs['permission_panels'] = self.get_permission_panel_forms()
return super().get_context_data(**kwargs)
class IndexView(mixins.SearchableListMixin, generic.IndexView):
page_title = _("Groups")
add_item_label = _("Add a group")
search_box_placeholder = _("Search groups")
search_fields = ['name']
context_object_name = 'groups'
paginate_by = 20
page_kwarg = 'p'
ordering = ['name']
def get_template_names(self):
if self.request.is_ajax():
return ['wagtailusers/groups/results.html']
else:
return ['wagtailusers/groups/index.html']
class CreateView(PermissionPanelFormsMixin, generic.CreateView):
page_title = _("Add group")
success_message = _("Group '{0}' created.")
template_name = 'wagtailusers/groups/create.html'
def post(self, request, *args, **kwargs):
"""
Handle POST requests: instantiate a form instance with the passed
POST variables and then check if it's valid.
"""
# Create an object now so that the permission panel forms have something to link them against
self.object = Group()
form = self.get_form()
permission_panels = self.get_permission_panel_forms()
if form.is_valid() and all(panel.is_valid() for panel in permission_panels):
response = self.form_valid(form)
for panel in permission_panels:
panel.save()
return response
else:
return self.form_invalid(form)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
# add a 'form_media' variable for the collected js/css media from the form and all formsets
form_media = context['form'].media
for panel in context['permission_panels']:
form_media += panel.media
context['form_media'] = form_media
return context
class EditView(PermissionPanelFormsMixin, generic.EditView):
success_message = _("Group '{0}' updated.")
error_message = _("The group could not be saved due to errors.")
delete_item_label = _("Delete group")
context_object_name = 'group'
template_name = 'wagtailusers/groups/edit.html'
def post(self, request, *args, **kwargs):
"""
Handle POST requests: instantiate a form instance with the passed
POST variables and then check if it's valid.
"""
self.object = self.get_object()
form = self.get_form()
permission_panels = self.get_permission_panel_forms()
if form.is_valid() and all(panel.is_valid() for panel in permission_panels):
response = self.form_valid(form)
for panel in permission_panels:
panel.save()
return response
else:
return self.form_invalid(form)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
# add a 'form_media' variable for the collected js/css media from the form and all formsets
form_media = context['form'].media
for panel in context['permission_panels']:
form_media += panel.media
context['form_media'] = form_media
return context
class DeleteView(generic.DeleteView):
success_message = _("Group '{0}' deleted.")
page_title = _("Delete group")
confirmation_message = _("Are you sure you want to delete this group?")
template_name = 'wagtailusers/groups/confirm_delete.html'
class GroupViewSet(ModelViewSet):
icon = 'group'
model = Group
index_view_class = IndexView
add_view_class = CreateView
edit_view_class = EditView
delete_view_class = DeleteView
@property
def users_view(self):
return index
def get_form_class(self, for_update=False):
return GroupForm
def get_urlpatterns(self):
return super().get_urlpatterns() + [
re_path(r'(\d+)/users/$', self.users_view, name='users'),
]
|
milinbhakta/flaskmaterialdesign
|
refs/heads/bug123
|
venv/Lib/encodings/iso8859_10.py
|
272
|
""" Python Character Mapping Codec iso8859_10 generated from 'MAPPINGS/ISO8859/8859-10.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-10',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\x80' # 0x80 -> <control>
'\x81' # 0x81 -> <control>
'\x82' # 0x82 -> <control>
'\x83' # 0x83 -> <control>
'\x84' # 0x84 -> <control>
'\x85' # 0x85 -> <control>
'\x86' # 0x86 -> <control>
'\x87' # 0x87 -> <control>
'\x88' # 0x88 -> <control>
'\x89' # 0x89 -> <control>
'\x8a' # 0x8A -> <control>
'\x8b' # 0x8B -> <control>
'\x8c' # 0x8C -> <control>
'\x8d' # 0x8D -> <control>
'\x8e' # 0x8E -> <control>
'\x8f' # 0x8F -> <control>
'\x90' # 0x90 -> <control>
'\x91' # 0x91 -> <control>
'\x92' # 0x92 -> <control>
'\x93' # 0x93 -> <control>
'\x94' # 0x94 -> <control>
'\x95' # 0x95 -> <control>
'\x96' # 0x96 -> <control>
'\x97' # 0x97 -> <control>
'\x98' # 0x98 -> <control>
'\x99' # 0x99 -> <control>
'\x9a' # 0x9A -> <control>
'\x9b' # 0x9B -> <control>
'\x9c' # 0x9C -> <control>
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
'\x9f' # 0x9F -> <control>
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\u0104' # 0xA1 -> LATIN CAPITAL LETTER A WITH OGONEK
'\u0112' # 0xA2 -> LATIN CAPITAL LETTER E WITH MACRON
'\u0122' # 0xA3 -> LATIN CAPITAL LETTER G WITH CEDILLA
'\u012a' # 0xA4 -> LATIN CAPITAL LETTER I WITH MACRON
'\u0128' # 0xA5 -> LATIN CAPITAL LETTER I WITH TILDE
'\u0136' # 0xA6 -> LATIN CAPITAL LETTER K WITH CEDILLA
'\xa7' # 0xA7 -> SECTION SIGN
'\u013b' # 0xA8 -> LATIN CAPITAL LETTER L WITH CEDILLA
'\u0110' # 0xA9 -> LATIN CAPITAL LETTER D WITH STROKE
'\u0160' # 0xAA -> LATIN CAPITAL LETTER S WITH CARON
'\u0166' # 0xAB -> LATIN CAPITAL LETTER T WITH STROKE
'\u017d' # 0xAC -> LATIN CAPITAL LETTER Z WITH CARON
'\xad' # 0xAD -> SOFT HYPHEN
'\u016a' # 0xAE -> LATIN CAPITAL LETTER U WITH MACRON
'\u014a' # 0xAF -> LATIN CAPITAL LETTER ENG
'\xb0' # 0xB0 -> DEGREE SIGN
'\u0105' # 0xB1 -> LATIN SMALL LETTER A WITH OGONEK
'\u0113' # 0xB2 -> LATIN SMALL LETTER E WITH MACRON
'\u0123' # 0xB3 -> LATIN SMALL LETTER G WITH CEDILLA
'\u012b' # 0xB4 -> LATIN SMALL LETTER I WITH MACRON
'\u0129' # 0xB5 -> LATIN SMALL LETTER I WITH TILDE
'\u0137' # 0xB6 -> LATIN SMALL LETTER K WITH CEDILLA
'\xb7' # 0xB7 -> MIDDLE DOT
'\u013c' # 0xB8 -> LATIN SMALL LETTER L WITH CEDILLA
'\u0111' # 0xB9 -> LATIN SMALL LETTER D WITH STROKE
'\u0161' # 0xBA -> LATIN SMALL LETTER S WITH CARON
'\u0167' # 0xBB -> LATIN SMALL LETTER T WITH STROKE
'\u017e' # 0xBC -> LATIN SMALL LETTER Z WITH CARON
'\u2015' # 0xBD -> HORIZONTAL BAR
'\u016b' # 0xBE -> LATIN SMALL LETTER U WITH MACRON
'\u014b' # 0xBF -> LATIN SMALL LETTER ENG
'\u0100' # 0xC0 -> LATIN CAPITAL LETTER A WITH MACRON
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\u012e' # 0xC7 -> LATIN CAPITAL LETTER I WITH OGONEK
'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\u0118' # 0xCA -> LATIN CAPITAL LETTER E WITH OGONEK
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\u0116' # 0xCC -> LATIN CAPITAL LETTER E WITH DOT ABOVE
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\xd0' # 0xD0 -> LATIN CAPITAL LETTER ETH (Icelandic)
'\u0145' # 0xD1 -> LATIN CAPITAL LETTER N WITH CEDILLA
'\u014c' # 0xD2 -> LATIN CAPITAL LETTER O WITH MACRON
'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\u0168' # 0xD7 -> LATIN CAPITAL LETTER U WITH TILDE
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\u0172' # 0xD9 -> LATIN CAPITAL LETTER U WITH OGONEK
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE
'\xde' # 0xDE -> LATIN CAPITAL LETTER THORN (Icelandic)
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German)
'\u0101' # 0xE0 -> LATIN SMALL LETTER A WITH MACRON
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\u012f' # 0xE7 -> LATIN SMALL LETTER I WITH OGONEK
'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\u0119' # 0xEA -> LATIN SMALL LETTER E WITH OGONEK
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\u0117' # 0xEC -> LATIN SMALL LETTER E WITH DOT ABOVE
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
'\xf0' # 0xF0 -> LATIN SMALL LETTER ETH (Icelandic)
'\u0146' # 0xF1 -> LATIN SMALL LETTER N WITH CEDILLA
'\u014d' # 0xF2 -> LATIN SMALL LETTER O WITH MACRON
'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\u0169' # 0xF7 -> LATIN SMALL LETTER U WITH TILDE
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\u0173' # 0xF9 -> LATIN SMALL LETTER U WITH OGONEK
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE
'\xfe' # 0xFE -> LATIN SMALL LETTER THORN (Icelandic)
'\u0138' # 0xFF -> LATIN SMALL LETTER KRA
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
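# Example round-trip through the registered codec (the stdlib normally loads
# this module via the codec registry rather than by direct import):
#   '\u0138'.encode('iso8859-10') == b'\xff'
#   b'\xff'.decode('iso8859-10') == '\u0138'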
|
SnowWalkerJ/quantlib
|
refs/heads/master
|
quant/data/wind/tables/aindexeodprices.py
|
1
|
from ....common.db.sql import VARCHAR, Numeric as NUMBER, DateTime as DATETIME, Column, BaseModel, CLOB, DATE
VARCHAR2 = VARCHAR
class AIndexEODPrices(BaseModel):
"""
    4.84 China A-share index daily EOD prices
    Attributes
    ----------
    object_id: VARCHAR2(100)
        Object ID
    s_info_windcode: VARCHAR2(40)
        Wind code
    trade_dt: VARCHAR2(8)
        Trade date
    crncy_code: VARCHAR2(10)
        Currency code
    s_dq_preclose: NUMBER(20,4)
        Previous close (points)
    s_dq_open: NUMBER(20,4)
        Open price (points)
    s_dq_high: NUMBER(20,4)
        High price (points)
    s_dq_low: NUMBER(20,4)
        Low price (points)
    s_dq_close: NUMBER(20,4)
        Close price (points)
    s_dq_change: NUMBER(20,4)
        Change (points)
    s_dq_pctchange: NUMBER(20,4)
        Percent change (%)
    s_dq_volume: NUMBER(20,4)
        Trading volume (lots)
    s_dq_amount: NUMBER(20,4)
        Trading amount (thousand CNY)
    opdate: DATETIME
        opdate
    opmode: VARCHAR(1)
        opmode
"""
__tablename__ = "AIndexEODPrices"
object_id = Column(VARCHAR2(100), primary_key=True)
s_info_windcode = Column(VARCHAR2(40))
trade_dt = Column(VARCHAR2(8))
crncy_code = Column(VARCHAR2(10))
s_dq_preclose = Column(NUMBER(20,4))
s_dq_open = Column(NUMBER(20,4))
s_dq_high = Column(NUMBER(20,4))
s_dq_low = Column(NUMBER(20,4))
s_dq_close = Column(NUMBER(20,4))
s_dq_change = Column(NUMBER(20,4))
s_dq_pctchange = Column(NUMBER(20,4))
s_dq_volume = Column(NUMBER(20,4))
s_dq_amount = Column(NUMBER(20,4))
opdate = Column(DATETIME)
opmode = Column(VARCHAR(1))
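# A minimal query sketch (hypothetical; assumes a configured SQLAlchemy
# session object named `session` from this project's db helpers):
#
#     session.query(AIndexEODPrices).filter(
#         AIndexEODPrices.s_info_windcode == '000300.SH',
#         AIndexEODPrices.trade_dt == '20200102',
#     ).all()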
|
crackerhead/nemio
|
refs/heads/master
|
lib/python2.7/site-packages/werkzeug/datastructures.py
|
148
|
# -*- coding: utf-8 -*-
"""
werkzeug.datastructures
~~~~~~~~~~~~~~~~~~~~~~~
This module provides mixins and classes with an immutable interface.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
import sys
import codecs
import mimetypes
from copy import deepcopy
from itertools import repeat
from werkzeug._internal import _missing, _empty_stream
from werkzeug._compat import iterkeys, itervalues, iteritems, iterlists, \
PY2, text_type, integer_types, string_types, make_literal_wrapper, \
to_native
_locale_delim_re = re.compile(r'[_-]')
def is_immutable(self):
raise TypeError('%r objects are immutable' % self.__class__.__name__)
def iter_multi_items(mapping):
"""Iterates over the items of a mapping yielding keys and values
without dropping any from more complex structures.
"""
if isinstance(mapping, MultiDict):
for item in iteritems(mapping, multi=True):
yield item
elif isinstance(mapping, dict):
for key, value in iteritems(mapping):
if isinstance(value, (tuple, list)):
                for v in value:
                    yield key, v
else:
yield key, value
else:
for item in mapping:
yield item
def native_itermethods(names):
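    # On Python 2, expose each named generator method both as iter<name>() and
    # as a list-returning <name>(); on Python 3 the class is returned unchanged.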
if not PY2:
return lambda x: x
def setmethod(cls, name):
itermethod = getattr(cls, name)
setattr(cls, 'iter%s' % name, itermethod)
listmethod = lambda self, *a, **kw: list(itermethod(self, *a, **kw))
listmethod.__doc__ = \
'Like :py:meth:`iter%s`, but returns a list.' % name
setattr(cls, name, listmethod)
def wrap(cls):
for name in names:
setmethod(cls, name)
return cls
return wrap
class ImmutableListMixin(object):
"""Makes a :class:`list` immutable.
.. versionadded:: 0.5
:private:
"""
_hash_cache = None
def __hash__(self):
if self._hash_cache is not None:
return self._hash_cache
rv = self._hash_cache = hash(tuple(self))
return rv
def __reduce_ex__(self, protocol):
return type(self), (list(self),)
def __delitem__(self, key):
is_immutable(self)
def __delslice__(self, i, j):
is_immutable(self)
def __iadd__(self, other):
is_immutable(self)
__imul__ = __iadd__
def __setitem__(self, key, value):
is_immutable(self)
def __setslice__(self, i, j, value):
is_immutable(self)
def append(self, item):
is_immutable(self)
remove = append
def extend(self, iterable):
is_immutable(self)
def insert(self, pos, value):
is_immutable(self)
def pop(self, index=-1):
is_immutable(self)
def reverse(self):
is_immutable(self)
def sort(self, cmp=None, key=None, reverse=None):
is_immutable(self)
class ImmutableList(ImmutableListMixin, list):
"""An immutable :class:`list`.
.. versionadded:: 0.5
:private:
"""
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
list.__repr__(self),
)
class ImmutableDictMixin(object):
"""Makes a :class:`dict` immutable.
.. versionadded:: 0.5
:private:
"""
_hash_cache = None
@classmethod
def fromkeys(cls, keys, value=None):
instance = super(cls, cls).__new__(cls)
instance.__init__(zip(keys, repeat(value)))
return instance
def __reduce_ex__(self, protocol):
return type(self), (dict(self),)
def _iter_hashitems(self):
return iteritems(self)
def __hash__(self):
if self._hash_cache is not None:
return self._hash_cache
rv = self._hash_cache = hash(frozenset(self._iter_hashitems()))
return rv
def setdefault(self, key, default=None):
is_immutable(self)
def update(self, *args, **kwargs):
is_immutable(self)
def pop(self, key, default=None):
is_immutable(self)
def popitem(self):
is_immutable(self)
def __setitem__(self, key, value):
is_immutable(self)
def __delitem__(self, key):
is_immutable(self)
def clear(self):
is_immutable(self)
class ImmutableMultiDictMixin(ImmutableDictMixin):
"""Makes a :class:`MultiDict` immutable.
.. versionadded:: 0.5
:private:
"""
def __reduce_ex__(self, protocol):
return type(self), (list(iteritems(self, multi=True)),)
def _iter_hashitems(self):
return iteritems(self, multi=True)
def add(self, key, value):
is_immutable(self)
def popitemlist(self):
is_immutable(self)
def poplist(self, key):
is_immutable(self)
def setlist(self, key, new_list):
is_immutable(self)
def setlistdefault(self, key, default_list=None):
is_immutable(self)
class UpdateDictMixin(object):
"""Makes dicts call `self.on_update` on modifications.
.. versionadded:: 0.5
:private:
"""
on_update = None
def calls_update(name):
def oncall(self, *args, **kw):
rv = getattr(super(UpdateDictMixin, self), name)(*args, **kw)
if self.on_update is not None:
self.on_update(self)
return rv
oncall.__name__ = name
return oncall
def setdefault(self, key, default=None):
modified = key not in self
rv = super(UpdateDictMixin, self).setdefault(key, default)
if modified and self.on_update is not None:
self.on_update(self)
return rv
def pop(self, key, default=_missing):
modified = key in self
if default is _missing:
rv = super(UpdateDictMixin, self).pop(key)
else:
rv = super(UpdateDictMixin, self).pop(key, default)
if modified and self.on_update is not None:
self.on_update(self)
return rv
__setitem__ = calls_update('__setitem__')
__delitem__ = calls_update('__delitem__')
clear = calls_update('clear')
popitem = calls_update('popitem')
update = calls_update('update')
del calls_update
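# Usage sketch: mixing UpdateDictMixin into a dict yields change notifications;
# werkzeug's own CallbackDict (later in the full module) is built this way:
#
#     class CallbackDict(UpdateDictMixin, dict):
#         def __init__(self, initial=None, on_update=None):
#             dict.__init__(self, initial or ())
#             self.on_update = on_update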
class TypeConversionDict(dict):
"""Works like a regular dict but the :meth:`get` method can perform
type conversions. :class:`MultiDict` and :class:`CombinedMultiDict`
are subclasses of this class and provide the same feature.
.. versionadded:: 0.5
"""
def get(self, key, default=None, type=None):
"""Return the default value if the requested data doesn't exist.
If `type` is provided and is a callable it should convert the value,
return it or raise a :exc:`ValueError` if that is not possible. In
this case the function will return the default as if the value was not
found:
>>> d = TypeConversionDict(foo='42', bar='blub')
>>> d.get('foo', type=int)
42
>>> d.get('bar', -1, type=int)
-1
:param key: The key to be looked up.
:param default: The default value to be returned if the key can't
be looked up. If not further specified `None` is
returned.
:param type: A callable that is used to cast the value in the
:class:`MultiDict`. If a :exc:`ValueError` is raised
by this callable the default value is returned.
"""
try:
rv = self[key]
if type is not None:
rv = type(rv)
except (KeyError, ValueError):
rv = default
return rv
class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict):
"""Works like a :class:`TypeConversionDict` but does not support
modifications.
.. versionadded:: 0.5
"""
def copy(self):
"""Return a shallow mutable copy of this object. Keep in mind that
        the standard library's :func:`copy` function is a no-op for this class,
        as it is for any other Python immutable type (e.g. :class:`tuple`).
"""
return TypeConversionDict(self)
def __copy__(self):
return self
@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues'])
class MultiDict(TypeConversionDict):
"""A :class:`MultiDict` is a dictionary subclass customized to deal with
multiple values for the same key which is for example used by the parsing
functions in the wrappers. This is necessary because some HTML form
elements pass multiple values for the same key.
:class:`MultiDict` implements all standard dictionary methods.
Internally, it saves all values for a key as a list, but the standard dict
access methods will only return the first value for a key. If you want to
gain access to the other values, too, you have to use the `list` methods as
explained below.
Basic Usage:
>>> d = MultiDict([('a', 'b'), ('a', 'c')])
>>> d
MultiDict([('a', 'b'), ('a', 'c')])
>>> d['a']
'b'
>>> d.getlist('a')
['b', 'c']
>>> 'a' in d
True
It behaves like a normal dict thus all dict functions will only return the
first value when multiple values for one key are found.
From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP
exceptions.
A :class:`MultiDict` can be constructed from an iterable of
``(key, value)`` tuples, a dict, a :class:`MultiDict` or from Werkzeug 0.2
onwards some keyword parameters.
:param mapping: the initial value for the :class:`MultiDict`. Either a
regular dict, an iterable of ``(key, value)`` tuples
or `None`.
"""
def __init__(self, mapping=None):
if isinstance(mapping, MultiDict):
dict.__init__(self, ((k, l[:]) for k, l in iterlists(mapping)))
elif isinstance(mapping, dict):
tmp = {}
for key, value in iteritems(mapping):
if isinstance(value, (tuple, list)):
value = list(value)
else:
value = [value]
tmp[key] = value
dict.__init__(self, tmp)
else:
tmp = {}
for key, value in mapping or ():
tmp.setdefault(key, []).append(value)
dict.__init__(self, tmp)
def __getstate__(self):
return dict(self.lists())
def __setstate__(self, value):
dict.clear(self)
dict.update(self, value)
def __getitem__(self, key):
"""Return the first data value for this key;
raises KeyError if not found.
:param key: The key to be looked up.
:raise KeyError: if the key does not exist.
"""
if key in self:
return dict.__getitem__(self, key)[0]
raise exceptions.BadRequestKeyError(key)
def __setitem__(self, key, value):
"""Like :meth:`add` but removes an existing key first.
:param key: the key for the value.
:param value: the value to set.
"""
dict.__setitem__(self, key, [value])
def add(self, key, value):
"""Adds a new value for the key.
.. versionadded:: 0.6
:param key: the key for the value.
:param value: the value to add.
"""
dict.setdefault(self, key, []).append(value)
def getlist(self, key, type=None):
"""Return the list of items for a given key. If that key is not in the
`MultiDict`, the return value will be an empty list. Just as `get`,
`getlist` accepts a `type` parameter. All items will be converted
with the callable defined there.
:param key: The key to be looked up.
:param type: A callable that is used to cast the value in the
:class:`MultiDict`. If a :exc:`ValueError` is raised
by this callable the value will be removed from the list.
:return: a :class:`list` of all the values for the key.
"""
try:
rv = dict.__getitem__(self, key)
except KeyError:
return []
if type is None:
return list(rv)
result = []
for item in rv:
try:
result.append(type(item))
except ValueError:
pass
return result
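# Illustrative sketch: values that fail the `type` conversion are silently
# dropped from the result instead of raising.
#   >>> d = MultiDict([('n', '1'), ('n', 'x'), ('n', '3')])
#   >>> d.getlist('n', type=int)
#   [1, 3]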
def setlist(self, key, new_list):
"""Remove the old values for a key and add new ones. Note that the list
you pass the values in will be shallow-copied before it is inserted in
the dictionary.
>>> d = MultiDict()
>>> d.setlist('foo', ['1', '2'])
>>> d['foo']
'1'
>>> d.getlist('foo')
['1', '2']
:param key: The key for which the values are set.
:param new_list: An iterable with the new values for the key. Old values
are removed first.
"""
dict.__setitem__(self, key, list(new_list))
def setdefault(self, key, default=None):
"""Returns the value for the key if it is in the dict, otherwise it
returns `default` and sets that value for `key`.
:param key: The key to be looked up.
:param default: The default value to be returned if the key is not
in the dict. If not further specified it's `None`.
"""
if key not in self:
self[key] = default
else:
default = self[key]
return default
def setlistdefault(self, key, default_list=None):
"""Like `setdefault` but sets multiple values. The list returned
is not a copy, but the list that is actually used internally. This
means that you can put new values into the dict by appending items
to the list:
>>> d = MultiDict({"foo": 1})
>>> d.setlistdefault("foo").extend([2, 3])
>>> d.getlist("foo")
[1, 2, 3]
:param key: The key to be looked up.
:param default_list: An iterable of default values. It is either copied
(in case it was a list) or converted into a list
before being returned.
:return: a :class:`list`
"""
if key not in self:
default_list = list(default_list or ())
dict.__setitem__(self, key, default_list)
else:
default_list = dict.__getitem__(self, key)
return default_list
def items(self, multi=False):
"""Return an iterator of ``(key, value)`` pairs.
:param multi: If set to `True` the iterator returned will have a pair
for each value of each key. Otherwise it will only
contain pairs for the first value of each key.
"""
for key, values in iteritems(dict, self):
if multi:
for value in values:
yield key, value
else:
yield key, values[0]
def lists(self):
"""Return a list of ``(key, values)`` pairs, where values is the list
of all values associated with the key."""
for key, values in iteritems(dict, self):
yield key, list(values)
def keys(self):
return iterkeys(dict, self)
__iter__ = keys
def values(self):
"""Returns an iterator of the first value on every key's value list."""
for values in itervalues(dict, self):
yield values[0]
def listvalues(self):
"""Return an iterator of all values associated with a key. Zipping
:meth:`keys` and this is the same as calling :meth:`lists`:
>>> d = MultiDict({"foo": [1, 2, 3]})
>>> zip(d.keys(), d.listvalues()) == d.lists()
True
"""
return itervalues(dict, self)
def copy(self):
"""Return a shallow copy of this object."""
return self.__class__(self)
def deepcopy(self, memo=None):
"""Return a deep copy of this object."""
return self.__class__(deepcopy(self.to_dict(flat=False), memo))
def to_dict(self, flat=True):
"""Return the contents as regular dict. If `flat` is `True` the
returned dict will only have the first item present, if `flat` is
`False` all values will be returned as lists.
:param flat: If set to `False` the dict returned will have lists
with all the values in it. Otherwise it will only
contain the first value for each key.
:return: a :class:`dict`
"""
if flat:
return dict(iteritems(self))
return dict(self.lists())
def update(self, other_dict):
"""update() extends rather than replaces existing key lists."""
for key, value in iter_multi_items(other_dict):
MultiDict.add(self, key, value)
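# Illustrative sketch (outputs as on Python 3): `update` extends existing
# value lists rather than replacing them, and `to_dict` controls whether
# those lists survive the conversion.
#   >>> d = MultiDict([('a', '1')])
#   >>> d.update({'a': '2'})
#   >>> d.getlist('a')
#   ['1', '2']
#   >>> d.to_dict()
#   {'a': '1'}
#   >>> d.to_dict(flat=False)
#   {'a': ['1', '2']}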
def pop(self, key, default=_missing):
"""Pop the first item for a list on the dict. Afterwards the
key is removed from the dict, so additional values are discarded:
>>> d = MultiDict({"foo": [1, 2, 3]})
>>> d.pop("foo")
1
>>> "foo" in d
False
:param key: the key to pop.
:param default: if provided the value to return if the key was
not in the dictionary.
"""
try:
return dict.pop(self, key)[0]
except KeyError as e:
if default is not _missing:
return default
raise exceptions.BadRequestKeyError(str(e))
def popitem(self):
"""Pop an item from the dict."""
try:
item = dict.popitem(self)
return (item[0], item[1][0])
except KeyError as e:
raise exceptions.BadRequestKeyError(str(e))
def poplist(self, key):
"""Pop the list for a key from the dict. If the key is not in the dict
an empty list is returned.
.. versionchanged:: 0.5
If the key no longer exists, an empty list is returned instead of
raising an error.
"""
return dict.pop(self, key, [])
def popitemlist(self):
"""Pop a ``(key, list)`` tuple from the dict."""
try:
return dict.popitem(self)
except KeyError as e:
raise exceptions.BadRequestKeyError(str(e))
def __copy__(self):
return self.copy()
def __deepcopy__(self, memo):
return self.deepcopy(memo=memo)
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, list(iteritems(self, multi=True)))
class _omd_bucket(object):
"""Wraps values in the :class:`OrderedMultiDict`. This makes it
possible to keep an order over multiple different keys. It requires
a lot of extra memory and slows down access a lot, but makes it
possible to access elements in O(1) and iterate in O(n).
"""
__slots__ = ('prev', 'key', 'value', 'next')
def __init__(self, omd, key, value):
self.prev = omd._last_bucket
self.key = key
self.value = value
self.next = None
if omd._first_bucket is None:
omd._first_bucket = self
if omd._last_bucket is not None:
omd._last_bucket.next = self
omd._last_bucket = self
def unlink(self, omd):
if self.prev:
self.prev.next = self.next
if self.next:
self.next.prev = self.prev
if omd._first_bucket is self:
omd._first_bucket = self.next
if omd._last_bucket is self:
omd._last_bucket = self.prev
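# Internal layout sketch (illustrative): after
#   omd = OrderedMultiDict(); omd.add('a', 1); omd.add('b', 2); omd.add('a', 3)
# the buckets form one doubly-linked list across all keys,
#   [a=1] <-> [b=2] <-> [a=3]
# while the underlying dict entry for 'a' holds the bucket list
# [<a=1>, <a=3>], which is what makes ordered multi-key iteration O(n).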
@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues'])
class OrderedMultiDict(MultiDict):
"""Works like a regular :class:`MultiDict` but preserves the
order of the fields. To convert the ordered multi dict into a
list you can use the :meth:`items` method and pass it ``multi=True``.
In general an :class:`OrderedMultiDict` is an order of magnitude
slower than a :class:`MultiDict`.
.. admonition:: note
Due to a limitation in Python you cannot convert an ordered
multi dict into a regular dict by using ``dict(multidict)``.
Instead you have to use the :meth:`to_dict` method, otherwise
the internal bucket objects are exposed.
"""
def __init__(self, mapping=None):
dict.__init__(self)
self._first_bucket = self._last_bucket = None
if mapping is not None:
OrderedMultiDict.update(self, mapping)
def __eq__(self, other):
if not isinstance(other, MultiDict):
return NotImplemented
if isinstance(other, OrderedMultiDict):
iter1 = iteritems(self, multi=True)
iter2 = iteritems(other, multi=True)
try:
for k1, v1 in iter1:
k2, v2 = next(iter2)
if k1 != k2 or v1 != v2:
return False
except StopIteration:
return False
try:
next(iter2)
except StopIteration:
return True
return False
if len(self) != len(other):
return False
for key, values in iterlists(self):
if other.getlist(key) != values:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def __reduce_ex__(self, protocol):
return type(self), (list(iteritems(self, multi=True)),)
def __getstate__(self):
return list(iteritems(self, multi=True))
def __setstate__(self, values):
dict.clear(self)
for key, value in values:
self.add(key, value)
def __getitem__(self, key):
if key in self:
return dict.__getitem__(self, key)[0].value
raise exceptions.BadRequestKeyError(key)
def __setitem__(self, key, value):
self.poplist(key)
self.add(key, value)
def __delitem__(self, key):
self.pop(key)
def keys(self):
return (key for key, value in iteritems(self))
__iter__ = keys
def values(self):
return (value for key, value in iteritems(self))
def items(self, multi=False):
ptr = self._first_bucket
if multi:
while ptr is not None:
yield ptr.key, ptr.value
ptr = ptr.next
else:
returned_keys = set()
while ptr is not None:
if ptr.key not in returned_keys:
returned_keys.add(ptr.key)
yield ptr.key, ptr.value
ptr = ptr.next
def lists(self):
returned_keys = set()
ptr = self._first_bucket
while ptr is not None:
if ptr.key not in returned_keys:
yield ptr.key, self.getlist(ptr.key)
returned_keys.add(ptr.key)
ptr = ptr.next
def listvalues(self):
for key, values in iterlists(self):
yield values
def add(self, key, value):
dict.setdefault(self, key, []).append(_omd_bucket(self, key, value))
def getlist(self, key, type=None):
try:
rv = dict.__getitem__(self, key)
except KeyError:
return []
if type is None:
return [x.value for x in rv]
result = []
for item in rv:
try:
result.append(type(item.value))
except ValueError:
pass
return result
def setlist(self, key, new_list):
self.poplist(key)
for value in new_list:
self.add(key, value)
def setlistdefault(self, key, default_list=None):
raise TypeError('setlistdefault is unsupported for '
'ordered multi dicts')
def update(self, mapping):
for key, value in iter_multi_items(mapping):
OrderedMultiDict.add(self, key, value)
def poplist(self, key):
buckets = dict.pop(self, key, ())
for bucket in buckets:
bucket.unlink(self)
return [x.value for x in buckets]
def pop(self, key, default=_missing):
try:
buckets = dict.pop(self, key)
except KeyError as e:
if default is not _missing:
return default
raise exceptions.BadRequestKeyError(str(e))
for bucket in buckets:
bucket.unlink(self)
return buckets[0].value
def popitem(self):
try:
key, buckets = dict.popitem(self)
except KeyError as e:
raise exceptions.BadRequestKeyError(str(e))
for bucket in buckets:
bucket.unlink(self)
return key, buckets[0].value
def popitemlist(self):
try:
key, buckets = dict.popitem(self)
except KeyError as e:
raise exceptions.BadRequestKeyError(str(e))
for bucket in buckets:
bucket.unlink(self)
return key, [x.value for x in buckets]
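# Illustrative sketch: insertion order is preserved across different keys,
# which a plain MultiDict does not guarantee.
#   >>> d = OrderedMultiDict()
#   >>> d.add('a', 1)
#   >>> d.add('b', 2)
#   >>> d.add('a', 3)
#   >>> list(d.items(multi=True))
#   [('a', 1), ('b', 2), ('a', 3)]
#   >>> sorted(d.to_dict(flat=False).items())
#   [('a', [1, 3]), ('b', [2])]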
def _options_header_vkw(value, kw):
return dump_options_header(value, dict((k.replace('_', '-'), v)
for k, v in kw.items()))
def _unicodify_header_value(value):
if isinstance(value, bytes):
value = value.decode('latin-1')
if not isinstance(value, text_type):
value = text_type(value)
return value
@native_itermethods(['keys', 'values', 'items'])
class Headers(object):
"""An object that stores some headers. It has a dict-like interface
but is ordered and can store the same keys multiple times.
This data structure is useful if you want a nicer way to handle WSGI
headers which are stored as tuples in a list.
From Werkzeug 0.3 onwards, the :exc:`KeyError` raised by this class is
also a subclass of the :class:`~exceptions.BadRequest` HTTP exception
and will render a page for a ``400 BAD REQUEST`` if caught in a
catch-all for HTTP exceptions.
Headers is mostly compatible with the Python :class:`wsgiref.headers.Headers`
class, with the exception of `__getitem__`. :mod:`wsgiref` will return
`None` for ``headers['missing']``, whereas :class:`Headers` will raise
a :class:`KeyError`.
To create a new :class:`Headers` object pass it a list or dict of headers
which are used as default values. This does not reuse the list passed
to the constructor for internal usage.
:param defaults: The list of default values for the :class:`Headers`.
.. versionchanged:: 0.9
This data structure now stores unicode values similar to how the
multi dicts do it. The main difference is that bytes can be set as
well which will automatically be latin1 decoded.
.. versionchanged:: 0.9
The :meth:`linked` function was removed without replacement as it
was an API that does not support the changes to the encoding model.
"""
def __init__(self, defaults=None):
self._list = []
if defaults is not None:
if isinstance(defaults, (list, Headers)):
self._list.extend(defaults)
else:
self.extend(defaults)
def __getitem__(self, key, _get_mode=False):
if not _get_mode:
if isinstance(key, integer_types):
return self._list[key]
elif isinstance(key, slice):
return self.__class__(self._list[key])
if not isinstance(key, string_types):
raise exceptions.BadRequestKeyError(key)
ikey = key.lower()
for k, v in self._list:
if k.lower() == ikey:
return v
# micro optimization: if we are in get mode we will catch that
# exception one stack level down so we can raise a standard
# key error instead of our special one.
if _get_mode:
raise KeyError()
raise exceptions.BadRequestKeyError(key)
def __eq__(self, other):
return other.__class__ is self.__class__ and \
set(other._list) == set(self._list)
def __ne__(self, other):
return not self.__eq__(other)
def get(self, key, default=None, type=None, as_bytes=False):
"""Return the default value if the requested data doesn't exist.
If `type` is provided and is a callable it should convert the value,
return it or raise a :exc:`ValueError` if that is not possible. In
this case the function will return the default as if the value was not
found:
>>> d = Headers([('Content-Length', '42')])
>>> d.get('Content-Length', type=int)
42
.. versionadded:: 0.9
Added support for `as_bytes`.
:param key: The key to be looked up.
:param default: The default value to be returned if the key can't
be looked up. If not further specified `None` is
returned.
:param type: A callable that is used to cast the value in the
:class:`Headers`. If a :exc:`ValueError` is raised
by this callable the default value is returned.
:param as_bytes: return bytes instead of unicode strings.
"""
try:
rv = self.__getitem__(key, _get_mode=True)
except KeyError:
return default
if as_bytes:
rv = rv.encode('latin1')
if type is None:
return rv
try:
return type(rv)
except ValueError:
return default
def getlist(self, key, type=None, as_bytes=False):
"""Return the list of items for a given key. If that key is not in the
:class:`Headers`, the return value will be an empty list. Just as
:meth:`get`, :meth:`getlist` accepts a `type` parameter. All items will
be converted with the callable defined there.
.. versionadded:: 0.9
Added support for `as_bytes`.
:param key: The key to be looked up.
:param type: A callable that is used to cast the value in the
:class:`Headers`. If a :exc:`ValueError` is raised
by this callable the value will be removed from the list.
:param as_bytes: return bytes instead of unicode strings.
:return: a :class:`list` of all the values for the key.
"""
ikey = key.lower()
result = []
for k, v in self:
if k.lower() == ikey:
if as_bytes:
v = v.encode('latin1')
if type is not None:
try:
v = type(v)
except ValueError:
continue
result.append(v)
return result
def get_all(self, name):
"""Return a list of all the values for the named field.
This method is compatible with the :mod:`wsgiref`
:meth:`~wsgiref.headers.Headers.get_all` method.
"""
return self.getlist(name)
def items(self, lower=False):
for key, value in self:
if lower:
key = key.lower()
yield key, value
def keys(self, lower=False):
for key, _ in iteritems(self, lower):
yield key
def values(self):
for _, value in iteritems(self):
yield value
def extend(self, iterable):
"""Extend the headers with a dict or an iterable yielding keys and
values.
"""
if isinstance(iterable, dict):
for key, value in iteritems(iterable):
if isinstance(value, (tuple, list)):
for v in value:
self.add(key, v)
else:
self.add(key, value)
else:
for key, value in iterable:
self.add(key, value)
def __delitem__(self, key, _index_operation=True):
if _index_operation and isinstance(key, (integer_types, slice)):
del self._list[key]
return
key = key.lower()
new = []
for k, v in self._list:
if k.lower() != key:
new.append((k, v))
self._list[:] = new
def remove(self, key):
"""Remove a key.
:param key: The key to be removed.
"""
return self.__delitem__(key, _index_operation=False)
def pop(self, key=None, default=_missing):
"""Removes and returns a key or index.
:param key: The key to be popped. If this is an integer the item at
that position is removed; if it's a string, the value for
that key is removed. If the key is omitted or `None`, the last
item is removed.
:return: an item.
"""
if key is None:
return self._list.pop()
if isinstance(key, integer_types):
return self._list.pop(key)
try:
rv = self[key]
self.remove(key)
except KeyError:
if default is not _missing:
return default
raise
return rv
def popitem(self):
"""Removes a key or index and returns a (key, value) item."""
return self.pop()
def __contains__(self, key):
"""Check if a key is present."""
try:
self.__getitem__(key, _get_mode=True)
except KeyError:
return False
return True
has_key = __contains__
def __iter__(self):
"""Yield ``(key, value)`` tuples."""
return iter(self._list)
def __len__(self):
return len(self._list)
def add(self, _key, _value, **kw):
"""Add a new header tuple to the list.
Keyword arguments can specify additional parameters for the header
value, with underscores converted to dashes::
>>> d = Headers()
>>> d.add('Content-Type', 'text/plain')
>>> d.add('Content-Disposition', 'attachment', filename='foo.png')
The keyword argument dumping uses :func:`dump_options_header`
behind the scenes.
.. versionadded:: 0.4.1
keyword arguments were added for :mod:`wsgiref` compatibility.
"""
if kw:
_value = _options_header_vkw(_value, kw)
_value = _unicodify_header_value(_value)
self._validate_value(_value)
self._list.append((_key, _value))
def _validate_value(self, value):
if not isinstance(value, text_type):
raise TypeError('Value should be unicode.')
if u'\n' in value or u'\r' in value:
raise ValueError('Detected newline in header value. This is '
'a potential security problem')
def add_header(self, _key, _value, **_kw):
"""Add a new header tuple to the list.
An alias for :meth:`add` for compatibility with the :mod:`wsgiref`
:meth:`~wsgiref.headers.Headers.add_header` method.
"""
self.add(_key, _value, **_kw)
def clear(self):
"""Clears all headers."""
del self._list[:]
def set(self, _key, _value, **kw):
"""Remove all header tuples for `key` and add a new one. The newly
added key either appears at the end of the list if there was no
entry or replaces the first one.
Keyword arguments can specify additional parameters for the header
value, with underscores converted to dashes. See :meth:`add` for
more information.
.. versionchanged:: 0.6.1
:meth:`set` now accepts the same arguments as :meth:`add`.
:param key: The key to be inserted.
:param value: The value to be inserted.
"""
if kw:
_value = _options_header_vkw(_value, kw)
_value = _unicodify_header_value(_value)
self._validate_value(_value)
if not self._list:
self._list.append((_key, _value))
return
listiter = iter(self._list)
ikey = _key.lower()
for idx, (old_key, old_value) in enumerate(listiter):
if old_key.lower() == ikey:
# replace first occurrence
self._list[idx] = (_key, _value)
break
else:
self._list.append((_key, _value))
return
self._list[idx + 1:] = [t for t in listiter if t[0].lower() != ikey]
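# Illustrative sketch (outputs as on Python 3): `set` collapses duplicate
# header entries while `add` keeps them.
#   >>> h = Headers()
#   >>> h.add('X-Tag', 'a')
#   >>> h.add('X-Tag', 'b')
#   >>> h.set('X-Tag', 'c')
#   >>> h.getlist('X-Tag')
#   ['c']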
def setdefault(self, key, value):
"""Returns the value for the key if it is in the dict, otherwise it
returns `default` and sets that value for `key`.
:param key: The key to be looked up.
:param default: The default value to be returned if the key is not
in the dict. If not further specified it's `None`.
"""
if key in self:
return self[key]
self.set(key, value)
return value
def __setitem__(self, key, value):
"""Like :meth:`set` but also supports index/slice based setting."""
if isinstance(key, (slice, integer_types)):
if isinstance(key, integer_types):
value = [value]
value = [(k, _unicodify_header_value(v)) for (k, v) in value]
[self._validate_value(v) for (k, v) in value]
if isinstance(key, integer_types):
self._list[key] = value[0]
else:
self._list[key] = value
else:
self.set(key, value)
def to_list(self, charset='iso-8859-1'):
"""Convert the headers into a list suitable for WSGI."""
from warnings import warn
warn(DeprecationWarning('Method removed, use to_wsgi_list instead'),
stacklevel=2)
return self.to_wsgi_list()
def to_wsgi_list(self):
"""Convert the headers into a list suitable for WSGI.
The values are byte strings in Python 2 converted to latin1 and unicode
strings in Python 3 for the WSGI server to encode.
:return: list
"""
if PY2:
return [(to_native(k), v.encode('latin1')) for k, v in self]
return list(self)
def copy(self):
return self.__class__(self._list)
def __copy__(self):
return self.copy()
def __str__(self):
"""Returns formatted headers suitable for HTTP transmission."""
strs = []
for key, value in self.to_wsgi_list():
strs.append('%s: %s' % (key, value))
strs.append('\r\n')
return '\r\n'.join(strs)
def __repr__(self):
return '%s(%r)' % (
self.__class__.__name__,
list(self)
)
class ImmutableHeadersMixin(object):
"""Makes a :class:`Headers` immutable. We do not mark them as
hashable though since the only use case for this datastructure
in Werkzeug is a view on a mutable structure.
.. versionadded:: 0.5
:private:
"""
def __delitem__(self, key):
is_immutable(self)
def __setitem__(self, key, value):
is_immutable(self)
set = __setitem__
def add(self, item):
is_immutable(self)
remove = add_header = add
def extend(self, iterable):
is_immutable(self)
def insert(self, pos, value):
is_immutable(self)
def pop(self, index=-1):
is_immutable(self)
def popitem(self):
is_immutable(self)
def setdefault(self, key, default):
is_immutable(self)
class EnvironHeaders(ImmutableHeadersMixin, Headers):
"""Read only version of the headers from a WSGI environment. This
provides the same interface as `Headers` and is constructed from
a WSGI environment.
From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
render a page for a ``400 BAD REQUEST`` if caught in a catch-all for
HTTP exceptions.
"""
def __init__(self, environ):
self.environ = environ
def __eq__(self, other):
return self.environ is other.environ
def __getitem__(self, key, _get_mode=False):
# _get_mode is a no-op for this class as there is no index but
# used because get() calls it.
key = key.upper().replace('-', '_')
if key in ('CONTENT_TYPE', 'CONTENT_LENGTH'):
return _unicodify_header_value(self.environ[key])
return _unicodify_header_value(self.environ['HTTP_' + key])
def __len__(self):
# the iter is necessary because otherwise list calls our
# len which would call list again and so forth.
return len(list(iter(self)))
def __iter__(self):
for key, value in iteritems(self.environ):
if key.startswith('HTTP_') and key not in \
('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
yield (key[5:].replace('_', '-').title(),
_unicodify_header_value(value))
elif key in ('CONTENT_TYPE', 'CONTENT_LENGTH'):
yield (key.replace('_', '-').title(),
_unicodify_header_value(value))
def copy(self):
raise TypeError('cannot create %r copies' % self.__class__.__name__)
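# Illustrative sketch (outputs as on Python 3): headers are derived lazily
# from the WSGI environ keys.
#   >>> env = {'CONTENT_TYPE': 'text/html', 'HTTP_X_FOO': 'bar'}
#   >>> headers = EnvironHeaders(env)
#   >>> headers['X-Foo']
#   'bar'
#   >>> sorted(headers.keys())
#   ['Content-Type', 'X-Foo']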
@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues'])
class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict):
"""A read only :class:`MultiDict` that you can pass multiple :class:`MultiDict`
instances as sequence and it will combine the return values of all wrapped
dicts:
>>> from werkzeug.datastructures import CombinedMultiDict, MultiDict
>>> post = MultiDict([('foo', 'bar')])
>>> get = MultiDict([('blub', 'blah')])
>>> combined = CombinedMultiDict([get, post])
>>> combined['foo']
'bar'
>>> combined['blub']
'blah'
This works for all read operations and will raise a `TypeError` for
methods that usually change data which isn't possible.
From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP
exceptions.
"""
def __reduce_ex__(self, protocol):
return type(self), (self.dicts,)
def __init__(self, dicts=None):
self.dicts = dicts or []
@classmethod
def fromkeys(cls):
raise TypeError('cannot create %r instances by fromkeys' %
cls.__name__)
def __getitem__(self, key):
for d in self.dicts:
if key in d:
return d[key]
raise exceptions.BadRequestKeyError(key)
def get(self, key, default=None, type=None):
for d in self.dicts:
if key in d:
if type is not None:
try:
return type(d[key])
except ValueError:
continue
return d[key]
return default
def getlist(self, key, type=None):
rv = []
for d in self.dicts:
rv.extend(d.getlist(key, type))
return rv
def _keys_impl(self):
"""This function exists so __len__ can be implemented more efficiently,
saving one list creation from an iterator.
Using this for Python 2's ``dict.keys`` behavior would be useless since
`dict.keys` in Python 2 returns a list, while we have a set here.
"""
rv = set()
for d in self.dicts:
rv.update(iterkeys(d))
return rv
def keys(self):
return iter(self._keys_impl())
__iter__ = keys
def items(self, multi=False):
found = set()
for d in self.dicts:
for key, value in iteritems(d, multi):
if multi:
yield key, value
elif key not in found:
found.add(key)
yield key, value
def values(self):
for key, value in iteritems(self):
yield value
def lists(self):
rv = {}
for d in self.dicts:
for key, values in iterlists(d):
rv.setdefault(key, []).extend(values)
return iteritems(rv)
def listvalues(self):
return (x[1] for x in self.lists())
def copy(self):
"""Return a shallow copy of this object."""
return self.__class__(self.dicts[:])
def to_dict(self, flat=True):
"""Return the contents as regular dict. If `flat` is `True` the
returned dict will only have the first item present, if `flat` is
`False` all values will be returned as lists.
:param flat: If set to `False` the dict returned will have lists
with all the values in it. Otherwise it will only
contain the first item for each key.
:return: a :class:`dict`
"""
rv = {}
for d in reversed(self.dicts):
rv.update(d.to_dict(flat))
return rv
def __len__(self):
return len(self._keys_impl())
def __contains__(self, key):
for d in self.dicts:
if key in d:
return True
return False
has_key = __contains__
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self.dicts)
class FileMultiDict(MultiDict):
"""A special :class:`MultiDict` that has convenience methods to add
files to it. This is used for :class:`EnvironBuilder` and generally
useful for unittesting.
.. versionadded:: 0.5
"""
def add_file(self, name, file, filename=None, content_type=None):
"""Adds a new file to the dict. `file` can be a file name or
a :class:`file`-like or a :class:`FileStorage` object.
:param name: the name of the field.
:param file: a filename or :class:`file`-like object
:param filename: an optional filename
:param content_type: an optional content type
"""
if isinstance(file, FileStorage):
value = file
else:
if isinstance(file, string_types):
if filename is None:
filename = file
file = open(file, 'rb')
if filename and content_type is None:
content_type = mimetypes.guess_type(filename)[0] or \
'application/octet-stream'
value = FileStorage(file, filename, name, content_type)
self.add(name, value)
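# Illustrative sketch (outputs as on Python 3): file-like objects are
# wrapped in a FileStorage and the content type is guessed from the
# filename when not given explicitly.
#   >>> from io import BytesIO
#   >>> fd = FileMultiDict()
#   >>> fd.add_file('upload', BytesIO(b'data'), filename='test.txt')
#   >>> fd['upload'].content_type
#   'text/plain'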
class ImmutableDict(ImmutableDictMixin, dict):
"""An immutable :class:`dict`.
.. versionadded:: 0.5
"""
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
dict.__repr__(self),
)
def copy(self):
"""Return a shallow mutable copy of this object. Keep in mind that
the standard library's :func:`copy` function is a no-op for this class
like for any other Python immutable type (e.g. :class:`tuple`).
"""
return dict(self)
def __copy__(self):
return self
class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict):
"""An immutable :class:`MultiDict`.
.. versionadded:: 0.5
"""
def copy(self):
"""Return a shallow mutable copy of this object. Keep in mind that
the standard library's :func:`copy` function is a no-op for this class
like for any other Python immutable type (e.g. :class:`tuple`).
"""
return MultiDict(self)
def __copy__(self):
return self
class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict):
"""An immutable :class:`OrderedMultiDict`.
.. versionadded:: 0.6
"""
def _iter_hashitems(self):
return enumerate(iteritems(self, multi=True))
def copy(self):
"""Return a shallow mutable copy of this object. Keep in mind that
the standard library's :func:`copy` function is a no-op for this class
like for any other Python immutable type (e.g. :class:`tuple`).
"""
return OrderedMultiDict(self)
def __copy__(self):
return self
@native_itermethods(['values'])
class Accept(ImmutableList):
"""An :class:`Accept` object is just a list subclass for lists of
``(value, quality)`` tuples. It is automatically sorted by quality.
All :class:`Accept` objects work similar to a list but provide extra
functionality for working with the data. Containment checks are
normalized to the rules of that header:
>>> a = CharsetAccept([('ISO-8859-1', 1), ('utf-8', 0.7)])
>>> a.best
'ISO-8859-1'
>>> 'iso-8859-1' in a
True
>>> 'UTF8' in a
True
>>> 'utf7' in a
False
To get the quality for an item you can use normal item lookup:
>>> a['utf-8']
0.7
>>> a['utf7']
0
.. versionchanged:: 0.5
:class:`Accept` objects are forced immutable now.
"""
def __init__(self, values=()):
if values is None:
list.__init__(self)
self.provided = False
elif isinstance(values, Accept):
self.provided = values.provided
list.__init__(self, values)
else:
self.provided = True
values = [(a, b) for b, a in values]
values.sort()
values.reverse()
list.__init__(self, [(a, b) for b, a in values])
def _value_matches(self, value, item):
"""Check if a value matches a given accept item."""
return item == '*' or item.lower() == value.lower()
def __getitem__(self, key):
"""Besides index lookup (getting item n) you can also pass it a string
to get the quality for the item. If the item is not in the list, the
returned quality is ``0``.
"""
if isinstance(key, string_types):
return self.quality(key)
return list.__getitem__(self, key)
def quality(self, key):
"""Returns the quality of the key.
.. versionadded:: 0.6
In previous versions you had to use the item-lookup syntax
(eg: ``obj[key]`` instead of ``obj.quality(key)``)
"""
for item, quality in self:
if self._value_matches(key, item):
return quality
return 0
def __contains__(self, value):
for item, quality in self:
if self._value_matches(value, item):
return True
return False
def __repr__(self):
return '%s([%s])' % (
self.__class__.__name__,
', '.join('(%r, %s)' % (x, y) for x, y in self)
)
def index(self, key):
"""Get the position of an entry or raise :exc:`ValueError`.
:param key: The key to be looked up.
.. versionchanged:: 0.5
This used to raise :exc:`IndexError`, which was inconsistent
with the list API.
"""
if isinstance(key, string_types):
for idx, (item, quality) in enumerate(self):
if self._value_matches(key, item):
return idx
raise ValueError(key)
return list.index(self, key)
def find(self, key):
"""Get the position of an entry or return -1.
:param key: The key to be looked up.
"""
try:
return self.index(key)
except ValueError:
return -1
def values(self):
"""Iterate over all values."""
for item in self:
yield item[0]
def to_header(self):
"""Convert the header set into an HTTP header string."""
result = []
for value, quality in self:
if quality != 1:
value = '%s;q=%s' % (value, quality)
result.append(value)
return ','.join(result)
def __str__(self):
return self.to_header()
def best_match(self, matches, default=None):
"""Returns the best match from a list of possible matches based
on the quality of the client. If two items have the same quality,
the one is returned that comes first.
:param matches: a list of matches to check for
:param default: the value that is returned if none match
"""
best_quality = -1
result = default
for server_item in matches:
for client_item, quality in self:
if quality <= best_quality:
break
if (self._value_matches(server_item, client_item)
and quality > 0):
best_quality = quality
result = server_item
return result
@property
def best(self):
"""The best match as value."""
if self:
return self[0][0]
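# Illustrative sketch (outputs as on Python 3): the match with the highest
# client quality wins and `default` is returned when nothing matches.
#   >>> a = Accept([('gzip', 1), ('identity', 0.5)])
#   >>> a.best_match(['identity', 'gzip'])
#   'gzip'
#   >>> a.best_match(['br'], default='identity')
#   'identity'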
class MIMEAccept(Accept):
"""Like :class:`Accept` but with special methods and behavior for
mimetypes.
"""
def _value_matches(self, value, item):
def _normalize(x):
x = x.lower()
return x == '*' and ('*', '*') or x.split('/', 1)
# this is from the application which is trusted. to avoid developer
# frustration we actually check these for valid values
if '/' not in value:
raise ValueError('invalid mimetype %r' % value)
value_type, value_subtype = _normalize(value)
if value_type == '*' and value_subtype != '*':
raise ValueError('invalid mimetype %r' % value)
if '/' not in item:
return False
item_type, item_subtype = _normalize(item)
if item_type == '*' and item_subtype != '*':
return False
return (
(item_type == item_subtype == '*' or
value_type == value_subtype == '*') or
(item_type == value_type and (item_subtype == '*' or
value_subtype == '*' or
item_subtype == value_subtype))
)
@property
def accept_html(self):
"""True if this object accepts HTML."""
return (
'text/html' in self or
'application/xhtml+xml' in self or
self.accept_xhtml
)
@property
def accept_xhtml(self):
"""True if this object accepts XHTML."""
return (
'application/xhtml+xml' in self or
'application/xml' in self
)
@property
def accept_json(self):
"""True if this object accepts JSON."""
return 'application/json' in self
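# Illustrative sketch (outputs as on Python 3): a wildcard subtype on the
# client side matches any concrete server mimetype of that type.
#   >>> m = MIMEAccept([('text/*', 1), ('application/json', 0.5)])
#   >>> m.best_match(['application/json', 'text/html'])
#   'text/html'
#   >>> 'text/plain' in m
#   True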
class LanguageAccept(Accept):
"""Like :class:`Accept` but with normalization for languages."""
def _value_matches(self, value, item):
def _normalize(language):
return _locale_delim_re.split(language.lower())
return item == '*' or _normalize(value) == _normalize(item)
class CharsetAccept(Accept):
"""Like :class:`Accept` but with normalization for charsets."""
def _value_matches(self, value, item):
def _normalize(name):
try:
return codecs.lookup(name).name
except LookupError:
return name.lower()
return item == '*' or _normalize(value) == _normalize(item)
def cache_property(key, empty, type):
"""Return a new property object for a cache header. Useful if you
want to add support for a cache extension in a subclass."""
return property(lambda x: x._get_cache_value(key, empty, type),
lambda x, v: x._set_cache_value(key, v, type),
lambda x: x._del_cache_value(key),
'accessor for %r' % key)
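# A minimal subclassing sketch (hypothetical subclass name, not part of
# this module): extra directives can be exposed through `cache_property`.
#   class MyResponseCacheControl(ResponseCacheControl):
#       stale_while_revalidate = cache_property(
#           'stale-while-revalidate', None, int)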
class _CacheControl(UpdateDictMixin, dict):
"""Subclass of a dict that stores values for a Cache-Control header. It
has accessors for all the cache-control directives specified in RFC 2616.
The class does not differentiate between request and response directives.
Because the cache-control directives in the HTTP header use dashes, the
Python descriptors use underscores instead.
To get a header of the :class:`CacheControl` object again you can convert
the object into a string or call the :meth:`to_header` method. If you plan
to subclass it and add your own items have a look at the sourcecode for
that class.
.. versionchanged:: 0.4
Setting `no_cache` or `private` to boolean `True` will set the implicit
none-value which is ``*``:
>>> cc = ResponseCacheControl()
>>> cc.no_cache = True
>>> cc
<ResponseCacheControl 'no-cache'>
>>> cc.no_cache
'*'
>>> cc.no_cache = None
>>> cc
<ResponseCacheControl ''>
In versions before 0.5 the behavior documented here affected the now
no longer existing `CacheControl` class.
"""
no_cache = cache_property('no-cache', '*', None)
no_store = cache_property('no-store', None, bool)
max_age = cache_property('max-age', -1, int)
no_transform = cache_property('no-transform', None, None)
def __init__(self, values=(), on_update=None):
dict.__init__(self, values or ())
self.on_update = on_update
self.provided = values is not None
def _get_cache_value(self, key, empty, type):
"""Used internally by the accessor properties."""
if type is bool:
return key in self
if key in self:
value = self[key]
if value is None:
return empty
elif type is not None:
try:
value = type(value)
except ValueError:
pass
return value
def _set_cache_value(self, key, value, type):
"""Used internally by the accessor properties."""
if type is bool:
if value:
self[key] = None
else:
self.pop(key, None)
else:
if value is None:
self.pop(key)
elif value is True:
self[key] = None
else:
self[key] = value
def _del_cache_value(self, key):
"""Used internally by the accessor properties."""
if key in self:
del self[key]
def to_header(self):
"""Convert the stored values into a cache control header."""
return dump_header(self)
def __str__(self):
return self.to_header()
def __repr__(self):
return '<%s %s>' % (
self.__class__.__name__,
" ".join(
"%s=%r" % (k, v) for k, v in sorted(self.items())
),
)
class RequestCacheControl(ImmutableDictMixin, _CacheControl):
"""A cache control for requests. This is immutable and gives access
to all the request-relevant cache control headers.
To get a header of the :class:`RequestCacheControl` object again you can
convert the object into a string or call the :meth:`to_header` method. If
you plan to subclass it and add your own items have a look at the sourcecode
for that class.
.. versionadded:: 0.5
In previous versions a `CacheControl` class existed that was used
both for request and response.
"""
max_stale = cache_property('max-stale', '*', int)
min_fresh = cache_property('min-fresh', '*', int)
no_transform = cache_property('no-transform', None, None)
only_if_cached = cache_property('only-if-cached', None, bool)
class ResponseCacheControl(_CacheControl):
"""A cache control for responses. Unlike :class:`RequestCacheControl`
this is mutable and gives access to response-relevant cache control
headers.
To get a header of the :class:`ResponseCacheControl` object again you can
convert the object into a string or call the :meth:`to_header` method. If
you plan to subclass it and add your own items have a look at the sourcecode
for that class.
.. versionadded:: 0.5
In previous versions a `CacheControl` class existed that was used
both for request and response.
"""
public = cache_property('public', None, bool)
private = cache_property('private', '*', None)
must_revalidate = cache_property('must-revalidate', None, bool)
proxy_revalidate = cache_property('proxy-revalidate', None, bool)
s_maxage = cache_property('s-maxage', None, None)
# attach cache_property to the _CacheControl as staticmethod
# so that others can reuse it.
_CacheControl.cache_property = staticmethod(cache_property)
class CallbackDict(UpdateDictMixin, dict):
"""A dict that calls a function passed every time something is changed.
The function is passed the dict instance.
"""
def __init__(self, initial=None, on_update=None):
dict.__init__(self, initial or ())
self.on_update = on_update
def __repr__(self):
return '<%s %s>' % (
self.__class__.__name__,
dict.__repr__(self)
)
class HeaderSet(object):
"""Similar to the :class:`ETags` class this implements a set-like structure.
Unlike :class:`ETags` this is case insensitive and used for vary, allow, and
content-language headers.
If not constructed using the :func:`parse_set_header` function the
instantiation works like this:
>>> hs = HeaderSet(['foo', 'bar', 'baz'])
>>> hs
HeaderSet(['foo', 'bar', 'baz'])
"""
def __init__(self, headers=None, on_update=None):
self._headers = list(headers or ())
self._set = set([x.lower() for x in self._headers])
self.on_update = on_update
def add(self, header):
"""Add a new header to the set."""
self.update((header,))
def remove(self, header):
"""Remove a header from the set. This raises an :exc:`KeyError` if the
header is not in the set.
.. versionchanged:: 0.5
In older versions an :exc:`IndexError` was raised instead of a
:exc:`KeyError` if the object was missing.
:param header: the header to be removed.
"""
key = header.lower()
if key not in self._set:
raise KeyError(header)
self._set.remove(key)
for idx, item in enumerate(self._headers):
if item.lower() == key:
del self._headers[idx]
break
if self.on_update is not None:
self.on_update(self)
def update(self, iterable):
"""Add all the headers from the iterable to the set.
:param iterable: updates the set with the items from the iterable.
"""
inserted_any = False
for header in iterable:
key = header.lower()
if key not in self._set:
self._headers.append(header)
self._set.add(key)
inserted_any = True
if inserted_any and self.on_update is not None:
self.on_update(self)
def discard(self, header):
"""Like :meth:`remove` but ignores errors.
:param header: the header to be discarded.
"""
try:
return self.remove(header)
except KeyError:
pass
def find(self, header):
"""Return the index of the header in the set or return -1 if not found.
:param header: the header to be looked up.
"""
header = header.lower()
for idx, item in enumerate(self._headers):
if item.lower() == header:
return idx
return -1
def index(self, header):
"""Return the index of the header in the set or raise an
:exc:`IndexError`.
:param header: the header to be looked up.
"""
rv = self.find(header)
if rv < 0:
raise IndexError(header)
return rv
def clear(self):
"""Clear the set."""
self._set.clear()
del self._headers[:]
if self.on_update is not None:
self.on_update(self)
def as_set(self, preserve_casing=False):
"""Return the set as real python set type. When calling this, all
the items are converted to lowercase and the ordering is lost.
:param preserve_casing: if set to `True` the items in the set returned
will have the original case like in the
:class:`HeaderSet`, otherwise they will
be lowercase.
"""
if preserve_casing:
return set(self._headers)
return set(self._set)
def to_header(self):
"""Convert the header set into an HTTP header string."""
return ', '.join(map(quote_header_value, self._headers))
def __getitem__(self, idx):
return self._headers[idx]
def __delitem__(self, idx):
rv = self._headers.pop(idx)
self._set.remove(rv.lower())
if self.on_update is not None:
self.on_update(self)
def __setitem__(self, idx, value):
old = self._headers[idx]
self._set.remove(old.lower())
self._headers[idx] = value
self._set.add(value.lower())
if self.on_update is not None:
self.on_update(self)
def __contains__(self, header):
return header.lower() in self._set
def __len__(self):
return len(self._set)
def __iter__(self):
return iter(self._headers)
def __nonzero__(self):
return bool(self._set)
def __str__(self):
return self.to_header()
def __repr__(self):
return '%s(%r)' % (
self.__class__.__name__,
self._headers
)
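# Illustrative sketch (outputs as on Python 3): membership is case
# insensitive while the original casing is kept for serialization.
#   >>> hs = HeaderSet(['Vary', 'Accept-Encoding'])
#   >>> 'vary' in hs
#   True
#   >>> hs.to_header()
#   'Vary, Accept-Encoding'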
class ETags(object):
"""A set that can be used to check if one etag is present in a collection
of etags.
"""
def __init__(self, strong_etags=None, weak_etags=None, star_tag=False):
self._strong = frozenset(not star_tag and strong_etags or ())
self._weak = frozenset(weak_etags or ())
self.star_tag = star_tag
def as_set(self, include_weak=False):
"""Convert the `ETags` object into a python set. Per default all the
weak etags are not part of this set."""
rv = set(self._strong)
if include_weak:
rv.update(self._weak)
return rv
def is_weak(self, etag):
"""Check if an etag is weak."""
return etag in self._weak
def contains_weak(self, etag):
"""Check if an etag is part of the set including weak and strong tags."""
return self.is_weak(etag) or self.contains(etag)
def contains(self, etag):
"""Check if an etag is part of the set ignoring weak tags.
It is also possible to use the ``in`` operator.
"""
if self.star_tag:
return True
return etag in self._strong
def contains_raw(self, etag):
"""When passed a quoted tag it will check if this tag is part of the
set. If the tag is weak it is checked against weak and strong tags,
otherwise strong only."""
etag, weak = unquote_etag(etag)
if weak:
return self.contains_weak(etag)
return self.contains(etag)
def to_header(self):
"""Convert the etags set into a HTTP header string."""
if self.star_tag:
return '*'
return ', '.join(
['"%s"' % x for x in self._strong] +
['w/"%s"' % x for x in self._weak]
)
def __call__(self, etag=None, data=None, include_weak=False):
if [etag, data].count(None) != 1:
raise TypeError('exactly one of etag or data must be provided')
if etag is None:
etag = generate_etag(data)
if include_weak:
if etag in self._weak:
return True
return etag in self._strong
def __nonzero__(self):
return bool(self.star_tag or self._strong or self._weak)
def __str__(self):
return self.to_header()
def __iter__(self):
return iter(self._strong)
def __contains__(self, etag):
return self.contains(etag)
def __repr__(self):
return '<%s %r>' % (self.__class__.__name__, str(self))
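# Illustrative sketch: containment checks ignore weak tags unless the
# weak-aware helpers are used.
#   >>> et = ETags(['abc'], ['xyz'])
#   >>> 'abc' in et
#   True
#   >>> 'xyz' in et
#   False
#   >>> et.contains_weak('xyz')
#   True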
class IfRange(object):
"""Very simple object that represents the `If-Range` header in parsed
form. It will have either an etag or a date, or neither, but
never both.
.. versionadded:: 0.7
"""
def __init__(self, etag=None, date=None):
#: The etag parsed and unquoted. Ranges always operate on strong
#: etags so the weakness information is not necessary.
self.etag = etag
#: The date in parsed format or `None`.
self.date = date
def to_header(self):
"""Converts the object back into an HTTP header."""
if self.date is not None:
return http_date(self.date)
if self.etag is not None:
return quote_etag(self.etag)
return ''
def __str__(self):
return self.to_header()
def __repr__(self):
return '<%s %r>' % (self.__class__.__name__, str(self))
class Range(object):
"""Represents a range header. All the methods are only supporting bytes
as unit. It does store multiple ranges but :meth:`range_for_length` will
only work if only one range is provided.
.. versionadded:: 0.7
"""
def __init__(self, units, ranges):
#: The units of this range. Usually "bytes".
self.units = units
#: A list of ``(begin, end)`` tuples for the range header provided.
#: The ranges are non-inclusive.
self.ranges = ranges
def range_for_length(self, length):
"""If the range is for bytes, the length is not None and there is
exactly one range and it is satisfiable it returns a ``(start, stop)``
tuple, otherwise `None`.
"""
if self.units != 'bytes' or length is None or len(self.ranges) != 1:
return None
start, end = self.ranges[0]
if end is None:
end = length
if start < 0:
start += length
if is_byte_range_valid(start, end, length):
return start, min(end, length)
def make_content_range(self, length):
"""Creates a :class:`~werkzeug.datastructures.ContentRange` object
from the current range and given content length.
"""
rng = self.range_for_length(length)
if rng is not None:
return ContentRange(self.units, rng[0], rng[1], length)
def to_header(self):
"""Converts the object back into an HTTP header."""
ranges = []
for begin, end in self.ranges:
if end is None:
ranges.append(begin >= 0 and '%s-' % begin or str(begin))
else:
ranges.append('%s-%s' % (begin, end - 1))
return '%s=%s' % (self.units, ','.join(ranges))
def __str__(self):
return self.to_header()
def __repr__(self):
return '<%s %r>' % (self.__class__.__name__, str(self))
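# Illustrative sketch: a single satisfiable byte range is resolved against
# the entity length; negative starts count from the end and the stop value
# is exclusive.
#   >>> Range('bytes', [(0, 499)]).range_for_length(1000)
#   (0, 499)
#   >>> Range('bytes', [(-500, None)]).range_for_length(1000)
#   (500, 1000)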
class ContentRange(object):
"""Represents the content range header.
.. versionadded:: 0.7
"""
def __init__(self, units, start, stop, length=None, on_update=None):
assert is_byte_range_valid(start, stop, length), \
'Bad range provided'
self.on_update = on_update
self.set(start, stop, length, units)
def _callback_property(name):
def fget(self):
return getattr(self, name)
def fset(self, value):
setattr(self, name, value)
if self.on_update is not None:
self.on_update(self)
return property(fget, fset)
#: The units to use, usually "bytes"
units = _callback_property('_units')
#: The start point of the range or `None`.
start = _callback_property('_start')
#: The stop point of the range (non-inclusive) or `None`. Can only be
#: `None` if also start is `None`.
stop = _callback_property('_stop')
#: The length of the range or `None`.
length = _callback_property('_length')
def set(self, start, stop, length=None, units='bytes'):
"""Simple method to update the ranges."""
assert is_byte_range_valid(start, stop, length), \
'Bad range provided'
self._units = units
self._start = start
self._stop = stop
self._length = length
if self.on_update is not None:
self.on_update(self)
def unset(self):
"""Sets the units to `None` which indicates that the header should
no longer be used.
"""
self.set(None, None, units=None)
def to_header(self):
if self.units is None:
return ''
if self.length is None:
length = '*'
else:
length = self.length
if self.start is None:
return '%s */%s' % (self.units, length)
return '%s %s-%s/%s' % (
self.units,
self.start,
self.stop - 1,
length
)
def __nonzero__(self):
return self.units is not None
__bool__ = __nonzero__
def __str__(self):
return self.to_header()
def __repr__(self):
return '<%s %r>' % (self.__class__.__name__, str(self))
class Authorization(ImmutableDictMixin, dict):
"""Represents an `Authorization` header sent by the client. You should
not create this kind of object yourself but use it when it's returned by
the `parse_authorization_header` function.
This object is a dict subclass and can be altered by setting dict items
but it should be considered immutable as it's returned by the client and
not meant for modifications.
.. versionchanged:: 0.5
This object became immutable.
"""
def __init__(self, auth_type, data=None):
dict.__init__(self, data or {})
self.type = auth_type
username = property(lambda x: x.get('username'), doc='''
The username transmitted. This is set for both basic and digest
auth all the time.''')
password = property(lambda x: x.get('password'), doc='''
When the authentication type is basic this is the password
transmitted by the client, else `None`.''')
realm = property(lambda x: x.get('realm'), doc='''
This is the server realm sent back for HTTP digest auth.''')
nonce = property(lambda x: x.get('nonce'), doc='''
The nonce the server sent for digest auth, sent back by the client.
A nonce should be unique for every 401 response for HTTP digest
auth.''')
uri = property(lambda x: x.get('uri'), doc='''
The URI from Request-URI of the Request-Line; duplicated because
proxies are allowed to change the Request-Line in transit. HTTP
digest auth only.''')
nc = property(lambda x: x.get('nc'), doc='''
The nonce count value transmitted by clients if a qop-header is
also transmitted. HTTP digest auth only.''')
cnonce = property(lambda x: x.get('cnonce'), doc='''
If the server sent a qop-header in the ``WWW-Authenticate``
header, the client has to provide this value for HTTP digest auth.
See the RFC for more details.''')
response = property(lambda x: x.get('response'), doc='''
A string of 32 hex digits computed as defined in RFC 2617, which
proves that the user knows a password. Digest auth only.''')
opaque = property(lambda x: x.get('opaque'), doc='''
The opaque header from the server returned unchanged by the client.
It is recommended that this string be base64 or hexadecimal data.
Digest auth only.''')
@property
def qop(self):
"""Indicates what "quality of protection" the client has applied to
the message for HTTP digest auth."""
def on_update(header_set):
if not header_set and 'qop' in self:
del self['qop']
elif header_set:
self['qop'] = header_set.to_header()
return parse_set_header(self.get('qop'), on_update)
class WWWAuthenticate(UpdateDictMixin, dict):
"""Provides simple access to `WWW-Authenticate` headers."""
#: list of keys that require quoting in the generated header
_require_quoting = frozenset(['domain', 'nonce', 'opaque', 'realm', 'qop'])
def __init__(self, auth_type=None, values=None, on_update=None):
dict.__init__(self, values or ())
if auth_type:
self['__auth_type__'] = auth_type
self.on_update = on_update
def set_basic(self, realm='authentication required'):
"""Clear the auth info and enable basic auth."""
dict.clear(self)
dict.update(self, {'__auth_type__': 'basic', 'realm': realm})
if self.on_update:
self.on_update(self)
def set_digest(self, realm, nonce, qop=('auth',), opaque=None,
algorithm=None, stale=False):
"""Clear the auth info and enable digest auth."""
d = {
'__auth_type__': 'digest',
'realm': realm,
'nonce': nonce,
'qop': dump_header(qop)
}
if stale:
d['stale'] = 'TRUE'
if opaque is not None:
d['opaque'] = opaque
if algorithm is not None:
d['algorithm'] = algorithm
dict.clear(self)
dict.update(self, d)
if self.on_update:
self.on_update(self)
def to_header(self):
"""Convert the stored values into a WWW-Authenticate header."""
d = dict(self)
auth_type = d.pop('__auth_type__', None) or 'basic'
return '%s %s' % (auth_type.title(), ', '.join([
'%s=%s' % (key, quote_header_value(value,
allow_token=key not in self._require_quoting))
for key, value in iteritems(d)
]))
def __str__(self):
return self.to_header()
def __repr__(self):
return '<%s %r>' % (
self.__class__.__name__,
self.to_header()
)
def auth_property(name, doc=None):
"""A static helper function for subclasses to add extra authentication
system properties onto a class::
class FooAuthenticate(WWWAuthenticate):
special_realm = auth_property('special_realm')
For more information have a look at the sourcecode to see how the
regular properties (:attr:`realm` etc.) are implemented.
"""
def _set_value(self, value):
if value is None:
self.pop(name, None)
else:
self[name] = str(value)
return property(lambda x: x.get(name), _set_value, doc=doc)
def _set_property(name, doc=None):
def fget(self):
def on_update(header_set):
if not header_set and name in self:
del self[name]
elif header_set:
self[name] = header_set.to_header()
return parse_set_header(self.get(name), on_update)
return property(fget, doc=doc)
type = auth_property('__auth_type__', doc='''
The type of the auth mechanism. HTTP currently specifies
`Basic` and `Digest`.''')
realm = auth_property('realm', doc='''
A string to be displayed to users so they know which username and
password to use. This string should contain at least the name of
the host performing the authentication and might additionally
indicate the collection of users who might have access.''')
domain = _set_property('domain', doc='''
A list of URIs that define the protection space. If a URI is an
absolute path, it is relative to the canonical root URL of the
server being accessed.''')
nonce = auth_property('nonce', doc='''
A server-specified data string which should be uniquely generated
each time a 401 response is made. It is recommended that this
string be base64 or hexadecimal data.''')
opaque = auth_property('opaque', doc='''
A string of data, specified by the server, which should be returned
by the client unchanged in the Authorization header of subsequent
requests with URIs in the same protection space. It is recommended
that this string be base64 or hexadecimal data.''')
algorithm = auth_property('algorithm', doc='''
A string indicating a pair of algorithms used to produce the digest
and a checksum. If this is not present it is assumed to be "MD5".
If the algorithm is not understood, the challenge should be ignored
(and a different one used, if there is more than one).''')
qop = _set_property('qop', doc='''
A set of quality-of-privacy directives such as auth and auth-int.''')
def _get_stale(self):
val = self.get('stale')
if val is not None:
return val.lower() == 'true'
def _set_stale(self, value):
if value is None:
self.pop('stale', None)
else:
self['stale'] = value and 'TRUE' or 'FALSE'
stale = property(_get_stale, _set_stale, doc='''
A flag, indicating that the previous request from the client was
rejected because the nonce value was stale.''')
del _get_stale, _set_stale
# make auth_property a staticmethod so that subclasses of
# `WWWAuthenticate` can use it for new properties.
auth_property = staticmethod(auth_property)
del _set_property
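# Illustrative sketch (not part of the original module): a subclass gaining an
# extra header parameter through the `auth_property` helper documented above.
def _example_auth_property():  # hypothetical helper, for illustration only
    class FooAuthenticate(WWWAuthenticate):
        special_realm = WWWAuthenticate.auth_property('special_realm')
    wa = FooAuthenticate('digest')
    wa.special_realm = 'private'
    return wa.to_header()  # e.g. 'Digest special_realm=private'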
class FileStorage(object):
"""The :class:`FileStorage` class is a thin wrapper over incoming files.
It is used by the request object to represent uploaded files. All the
attributes of the wrapped stream are proxied by the file storage so
it's possible to do ``storage.read()`` instead of the long form
``storage.stream.read()``.
"""
def __init__(self, stream=None, filename=None, name=None,
content_type=None, content_length=None,
headers=None):
self.name = name
self.stream = stream or _empty_stream
# if no filename is provided we can attempt to get the filename
# from the stream object passed. There we have to be careful to
# skip things like <fdopen>, <stderr> etc. Python marks these
# special filenames with angular brackets.
if filename is None:
filename = getattr(stream, 'name', None)
s = make_literal_wrapper(filename)
if filename and filename[0] == s('<') and filename[-1] == s('>'):
filename = None
# On Python 3 we want to make sure the filename is always unicode.
# This might not be if the name attribute is bytes due to the
# file being opened from the bytes API.
if not PY2 and isinstance(filename, bytes):
filename = filename.decode(sys.getfilesystemencoding(),
'replace')
self.filename = filename
if headers is None:
headers = Headers()
self.headers = headers
if content_type is not None:
headers['Content-Type'] = content_type
if content_length is not None:
headers['Content-Length'] = str(content_length)
def _parse_content_type(self):
if not hasattr(self, '_parsed_content_type'):
self._parsed_content_type = \
parse_options_header(self.content_type)
@property
def content_type(self):
"""The content-type sent in the header. Usually not available"""
return self.headers.get('content-type')
@property
def content_length(self):
"""The content-length sent in the header. Usually not available"""
return int(self.headers.get('content-length') or 0)
@property
def mimetype(self):
"""Like :attr:`content_type` but without parameters (eg, without
charset, type etc.). For example if the content
type is ``text/html; charset=utf-8`` the mimetype would be
``'text/html'``.
.. versionadded:: 0.7
"""
self._parse_content_type()
return self._parsed_content_type[0]
@property
def mimetype_params(self):
"""The mimetype parameters as dict. For example if the content
type is ``text/html; charset=utf-8`` the params would be
``{'charset': 'utf-8'}``.
.. versionadded:: 0.7
"""
self._parse_content_type()
return self._parsed_content_type[1]
def save(self, dst, buffer_size=16384):
"""Save the file to a destination path or file object. If the
destination is a file object you have to close it yourself after the
call. The buffer size is the number of bytes held in memory during
the copy process. It defaults to 16KB.
For secure file saving also have a look at :func:`secure_filename`.
:param dst: a filename or open file object the uploaded file
is saved to.
:param buffer_size: the size of the buffer. This works the same as
the `length` parameter of
:func:`shutil.copyfileobj`.
"""
from shutil import copyfileobj
close_dst = False
if isinstance(dst, string_types):
dst = open(dst, 'wb')
close_dst = True
try:
copyfileobj(self.stream, dst, buffer_size)
finally:
if close_dst:
dst.close()
def close(self):
"""Close the underlying file if possible."""
try:
self.stream.close()
except Exception:
pass
def __nonzero__(self):
return bool(self.filename)
__bool__ = __nonzero__
def __getattr__(self, name):
return getattr(self.stream, name)
def __iter__(self):
return iter(self.readline, '')
def __repr__(self):
return '<%s: %r (%r)>' % (
self.__class__.__name__,
self.filename,
self.content_type
)
# circular dependencies
from werkzeug.http import dump_options_header, dump_header, generate_etag, \
quote_header_value, parse_set_header, unquote_etag, quote_etag, \
parse_options_header, http_date, is_byte_range_valid
from werkzeug import exceptions
|
darmaa/odoo
|
refs/heads/master
|
openerp/tools/func.py
|
49
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010, 2014 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
__all__ = ['synchronized', 'lazy_property']
from functools import wraps
from inspect import getsourcefile
class lazy_property(object):
""" Decorator for a lazy property of an object, i.e., an object attribute
that is determined by the result of a method call evaluated once. To
reevaluate the property, simply delete the attribute on the object, and
get it again.
"""
def __init__(self, fget):
self.fget = fget
def __get__(self, obj, cls):
if obj is None:
return self
value = self.fget(obj)
setattr(obj, self.fget.__name__, value)
return value
@staticmethod
def reset_all(obj):
""" Reset all lazy properties on the instance `obj`. """
cls = type(obj)
obj_dict = vars(obj)
for name in obj_dict.keys():
if isinstance(getattr(cls, name, None), lazy_property):
obj_dict.pop(name)
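# Illustrative sketch (not part of the original module): the getter runs once,
# the result is cached on the instance, and deleting the attribute forces a
# recomputation on the next access.
def _example_lazy_property():  # hypothetical helper, for illustration only
    class Counter(object):
        runs = 0
        @lazy_property
        def value(self):
            Counter.runs += 1
            return 42
    c = Counter()
    c.value      # computed and cached on the instance
    c.value      # served from the instance dict, fget is not called again
    del c.value  # drop the cache ...
    c.value      # ... and the value is recomputed
    return Counter.runs  # 2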
def synchronized(lock_attr='_lock'):
def decorator(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
lock = getattr(self, lock_attr)
try:
lock.acquire()
return func(self, *args, **kwargs)
finally:
lock.release()
return wrapper
return decorator
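# Illustrative sketch (not part of the original module): guarding a method
# with the instance lock named by `lock_attr` (here the default '_lock').
def _example_synchronized():  # hypothetical helper, for illustration only
    import threading
    class Box(object):
        def __init__(self):
            self._lock = threading.Lock()
            self.items = []
        @synchronized()
        def put(self, item):
            self.items.append(item)
    box = Box()
    box.put(1)
    return box.items  # [1]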
def frame_codeinfo(fframe, back=0):
""" Return a (filename, line) pair for a previous frame .
@return (filename, lineno) where lineno is either int or string==''
"""
try:
if not fframe:
return "<unknown>", ''
for i in range(back):
fframe = fframe.f_back
try:
fname = getsourcefile(fframe)
except TypeError:
fname = '<builtin>'
lineno = fframe.f_lineno or ''
return fname, lineno
except Exception:
return "<unknown>", ''
def compose(a, b):
""" Composes the callables ``a`` and ``b``. ``compose(a, b)(*args)`` is
equivalent to ``a(b(*args))``.
Can be used as a decorator by partially applying ``a``::
@partial(compose, a)
def b():
...
"""
@wraps(b)
def wrapper(*args, **kwargs):
return a(b(*args, **kwargs))
return wrapper
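# Illustrative sketch (not part of the original module): compose(a, b)
# applies b first, then a.
def _example_compose():  # hypothetical helper, for illustration only
    inc = lambda n: n + 1
    double = lambda n: n * 2
    return compose(inc, double)(3)  # inc(double(3)) == 7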
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
sloshedpuppie/LetsGoRetro
|
refs/heads/master
|
lib/gtest/test/gtest_break_on_failure_unittest.py
|
2140
|
#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test's break-on-failure mode.
A user can ask Google Test to seg-fault when an assertion fails, using
either the GTEST_BREAK_ON_FAILURE environment variable or the
--gtest_break_on_failure flag. This script tests such functionality
by invoking gtest_break_on_failure_unittest_ (a program written with
Google Test) with different environments and command line flags.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import gtest_test_utils
import os
import sys
# Constants.
IS_WINDOWS = os.name == 'nt'
# The environment variable for enabling/disabling the break-on-failure mode.
BREAK_ON_FAILURE_ENV_VAR = 'GTEST_BREAK_ON_FAILURE'
# The command line flag for enabling/disabling the break-on-failure mode.
BREAK_ON_FAILURE_FLAG = 'gtest_break_on_failure'
# The environment variable for enabling/disabling the throw-on-failure mode.
THROW_ON_FAILURE_ENV_VAR = 'GTEST_THROW_ON_FAILURE'
# The environment variable for enabling/disabling the catch-exceptions mode.
CATCH_EXCEPTIONS_ENV_VAR = 'GTEST_CATCH_EXCEPTIONS'
# Path to the gtest_break_on_failure_unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_break_on_failure_unittest_')
environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar
# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely. Therefore they are incompatible with
# the premature-exit-file protocol by design. Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
def Run(command):
"""Runs a command; returns 1 if it was killed by a signal, or 0 otherwise."""
p = gtest_test_utils.Subprocess(command, env=environ)
if p.terminated_by_signal:
return 1
else:
return 0
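# Illustrative sketch (not part of the original script): invoking the helper
# binary with the break-on-failure flag; Run() maps "killed by a signal" to 1.
def ExampleRun():  # hypothetical name, for illustration only
  return Run([EXE_PATH, '--%s' % BREAK_ON_FAILURE_FLAG])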
# The tests.
class GTestBreakOnFailureUnitTest(gtest_test_utils.TestCase):
"""Tests using the GTEST_BREAK_ON_FAILURE environment variable or
the --gtest_break_on_failure flag to turn assertion failures into
segmentation faults.
"""
def RunAndVerify(self, env_var_value, flag_value, expect_seg_fault):
"""Runs gtest_break_on_failure_unittest_ and verifies that it does
(or does not) have a seg-fault.
Args:
env_var_value: value of the GTEST_BREAK_ON_FAILURE environment
variable; None if the variable should be unset.
flag_value: value of the --gtest_break_on_failure flag;
None if the flag should not be present.
expect_seg_fault: 1 if the program is expected to generate a seg-fault;
0 otherwise.
"""
SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, env_var_value)
if env_var_value is None:
env_var_value_msg = ' is not set'
else:
env_var_value_msg = '=' + env_var_value
if flag_value is None:
flag = ''
elif flag_value == '0':
flag = '--%s=0' % BREAK_ON_FAILURE_FLAG
else:
flag = '--%s' % BREAK_ON_FAILURE_FLAG
command = [EXE_PATH]
if flag:
command.append(flag)
if expect_seg_fault:
should_or_not = 'should'
else:
should_or_not = 'should not'
has_seg_fault = Run(command)
SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, None)
msg = ('when %s%s, an assertion failure in "%s" %s cause a seg-fault.' %
(BREAK_ON_FAILURE_ENV_VAR, env_var_value_msg, ' '.join(command),
should_or_not))
self.assert_(has_seg_fault == expect_seg_fault, msg)
def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
self.RunAndVerify(env_var_value=None,
flag_value=None,
expect_seg_fault=0)
def testEnvVar(self):
"""Tests using the GTEST_BREAK_ON_FAILURE environment variable."""
self.RunAndVerify(env_var_value='0',
flag_value=None,
expect_seg_fault=0)
self.RunAndVerify(env_var_value='1',
flag_value=None,
expect_seg_fault=1)
def testFlag(self):
"""Tests using the --gtest_break_on_failure flag."""
self.RunAndVerify(env_var_value=None,
flag_value='0',
expect_seg_fault=0)
self.RunAndVerify(env_var_value=None,
flag_value='1',
expect_seg_fault=1)
def testFlagOverridesEnvVar(self):
"""Tests that the flag overrides the environment variable."""
self.RunAndVerify(env_var_value='0',
flag_value='0',
expect_seg_fault=0)
self.RunAndVerify(env_var_value='0',
flag_value='1',
expect_seg_fault=1)
self.RunAndVerify(env_var_value='1',
flag_value='0',
expect_seg_fault=0)
self.RunAndVerify(env_var_value='1',
flag_value='1',
expect_seg_fault=1)
def testBreakOnFailureOverridesThrowOnFailure(self):
"""Tests that gtest_break_on_failure overrides gtest_throw_on_failure."""
SetEnvVar(THROW_ON_FAILURE_ENV_VAR, '1')
try:
self.RunAndVerify(env_var_value=None,
flag_value='1',
expect_seg_fault=1)
finally:
SetEnvVar(THROW_ON_FAILURE_ENV_VAR, None)
if IS_WINDOWS:
def testCatchExceptionsDoesNotInterfere(self):
"""Tests that gtest_catch_exceptions doesn't interfere."""
SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, '1')
try:
self.RunAndVerify(env_var_value='1',
flag_value='1',
expect_seg_fault=1)
finally:
SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, None)
if __name__ == '__main__':
gtest_test_utils.Main()
|
ryandougherty/mwa-capstone
|
refs/heads/heroku
|
MWA_Tools/build/matplotlib/doc/mpl_examples/pylab_examples/contour_demo.py
|
3
|
#!/usr/bin/env python
"""
Illustrate simple contour plotting, contours on an image with
a colorbar for the contours, and labelled contours.
See also contour_image.py.
"""
import matplotlib
import numpy as np
import matplotlib.cm as cm
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
matplotlib.rcParams['xtick.direction'] = 'out'
matplotlib.rcParams['ytick.direction'] = 'out'
delta = 0.025
x = np.arange(-3.0, 3.0, delta)
y = np.arange(-2.0, 2.0, delta)
X, Y = np.meshgrid(x, y)
Z1 = mlab.bivariate_normal(X, Y, 1.0, 1.0, 0.0, 0.0)
Z2 = mlab.bivariate_normal(X, Y, 1.5, 0.5, 1, 1)
# difference of Gaussians
Z = 10.0 * (Z2 - Z1)
# Create a simple contour plot with labels using default colors. The
# inline argument to clabel will control whether the labels are drawn
# over the line segments of the contour, removing the lines beneath
# the label
plt.figure()
CS = plt.contour(X, Y, Z)
plt.clabel(CS, inline=1, fontsize=10)
plt.title('Simplest default with labels')
# You can force all the contours to be the same color.
plt.figure()
CS = plt.contour(X, Y, Z, 6,
colors='k', # negative contours will be dashed by default
)
plt.clabel(CS, fontsize=9, inline=1)
plt.title('Single color - negative contours dashed')
# You can set negative contours to be solid instead of dashed:
matplotlib.rcParams['contour.negative_linestyle'] = 'solid'
plt.figure()
CS = plt.contour(X, Y, Z, 6,
colors='k', # negative contours will be dashed by default
)
plt.clabel(CS, fontsize=9, inline=1)
plt.title('Single color - negative contours solid')
# And you can manually specify the colors of the contour
plt.figure()
CS = plt.contour(X, Y, Z, 6,
linewidths=np.arange(.5, 4, .5),
colors=('r', 'green', 'blue', (1,1,0), '#afeeee', '0.5')
)
plt.clabel(CS, fontsize=9, inline=1)
plt.title('Crazy lines')
# Or you can use a colormap to specify the colors; the default
# colormap will be used for the contour lines
plt.figure()
im = plt.imshow(Z, interpolation='bilinear', origin='lower',
cmap=cm.gray, extent=(-3,3,-2,2))
levels = np.arange(-1.2, 1.6, 0.2)
CS = plt.contour(Z, levels,
origin='lower',
linewidths=2,
extent=(-3,3,-2,2))
#Thicken the zero contour.
zc = CS.collections[6]
plt.setp(zc, linewidth=4)
plt.clabel(CS, levels[1::2], # label every second level
inline=1,
fmt='%1.1f',
fontsize=14)
# make a colorbar for the contour lines
CB = plt.colorbar(CS, shrink=0.8, extend='both')
plt.title('Lines with colorbar')
#plt.hot() # Now change the colormap for the contour lines and colorbar
plt.flag()
# We can still add a colorbar for the image, too.
CBI = plt.colorbar(im, orientation='horizontal', shrink=0.8)
# This makes the original colorbar look a bit out of place,
# so let's improve its position.
l,b,w,h = plt.gca().get_position().bounds
ll,bb,ww,hh = CB.ax.get_position().bounds
CB.ax.set_position([ll, b+0.1*h, ww, h*0.8])
plt.show()
|
QianBIG/odoo
|
refs/heads/8.0
|
addons/stock_account/res_config.py
|
315
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class stock_config_settings(osv.osv_memory):
_inherit = 'stock.config.settings'
_columns = {
'group_stock_inventory_valuation': fields.boolean("Generate accounting entries per stock movement",
implied_group='stock_account.group_inventory_valuation',
help="""Allows to configure inventory valuations on products and product categories."""),
'module_stock_invoice_directly': fields.boolean("Create and open the invoice when the user finishes a delivery order",
help='This allows the invoicing wizard to be launched automatically if the delivery is '
'to be invoiced when you send or deliver goods.\n'
'-This installs the module stock_invoice_directly.'),
'module_stock_landed_costs': fields.boolean("Calculate landed costs on products",
help="""Install the module that allows to affect landed costs on pickings, and split them onto the different products."""),
}
def onchange_landed_costs(self, cr, uid, ids, module_landed_costs, context=None):
if module_landed_costs:
return {'value': {'group_stock_inventory_valuation': True}}
return {}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
honeynet/beeswarm
|
refs/heads/master
|
beeswarm/drones/honeypot/capabilities/vnc.py
|
1
|
# Copyright (C) 2013 Aniket Panse <contact@aniketpanse.in>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Aniket Panse <contact@aniketpanse.in> grants Johnny Vestergaard <jkv@unixcluster.dk>
# a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable
# copyright license to reproduce, prepare derivative works of, publicly
# display, publicly perform, sublicense, relicense, and distribute [the] Contributions
# and such derivative works.
import socket
import random
import logging
import SocketServer
from beeswarm.drones.honeypot.capabilities.handlerbase import HandlerBase
# Import the constants defined for the VNC protocol
from beeswarm.shared.vnc_constants import *
logger = logging.getLogger(__name__)
class BaitVncHandler(SocketServer.StreamRequestHandler):
"""
Handler of VNC Connections. This is a rather primitive state machine.
"""
def __init__(self, request, client_address, server, session):
self.session = session
SocketServer.StreamRequestHandler.__init__(self, request, client_address, server)
def handle(self):
self.request.send(RFB_VERSION)
client_version = self.request.recv(1024)
if client_version == RFB_VERSION:
self.security_handshake()
else:
self.finish()
def security_handshake(self):
self.request.send(SUPPORTED_AUTH_METHODS)
sec_method = self.request.recv(1024)
if sec_method == VNC_AUTH:
self.do_vnc_authentication()
else:
self.finish()
def do_vnc_authentication(self):
challenge = get_random_challenge()
self.request.send(challenge)
client_response_ = self.request.recv(1024)
# This could result in an ugly log file, since the des_challenge is just an array of 16 raw bytes
self.session.try_auth('des_challenge', challenge=challenge, response=client_response_)
if self.session.authenticated:
self.request.send(AUTH_SUCCESSFUL)
else:
self.request.send(AUTH_FAILED)
self.finish()
class Vnc(HandlerBase):
def __init__(self, options, work_dir):
super(Vnc, self).__init__(options, work_dir)
self._options = options
def handle_session(self, gsocket, address):
session = self.create_session(address)
try:
BaitVncHandler(gsocket, address, None, session)
except socket.error as err:
logger.debug('Unexpected end of VNC session: {0}, errno: {1}. ({2})'.format(err, err.errno, session.id))
finally:
self.close_session(session)
def get_random_challenge():
challenge = []
for i in range(0, 16):
temp = random.randint(0, 255)
challenge.append(chr(temp))
return "".join(challenge)
|
cxong/Slappa
|
refs/heads/master
|
point.py
|
1
|
import math
class Point(object):
def __init__(self, x=0, y=0):
self.x = float(x)
self.y = float(y)
def add(self, other):
self.x += other.x
self.y += other.y
return self
def subtract(self, other):
self.x -= other.x
self.y -= other.y
return self
def multiply(self, other):
self.x *= other.x
self.y *= other.y
return self
def divide(self, other):
self.x /= other.x
self.y /= other.y
return self
def set_magnitude(self, magnitude):
return self.normalize().multiply(Point(magnitude, magnitude))
def normalize(self):
if not self.is_zero():
m = self.get_magnitude()
self.x /= m
self.y /= m
return self
def is_zero(self):
return self.x == 0 and self.y == 0
def get_magnitude(self):
return math.sqrt((self.x * self.x) + (self.y * self.y))
def dot_product(self, other):
return self.x * other.x + self.y * other.y
def distance2(self, other):
dx = self.x - other.x
dy = self.y - other.y
return dx**2 + dy**2
def __eq__(self, other):
return self.x == other.x and self.y == other.y
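# Illustrative sketch (not part of the original module): normalizing a vector
# and rescaling it to a fixed length with set_magnitude().
def _example_point():  # hypothetical helper, for illustration only
    v = Point(3, 4)      # magnitude 5
    v.set_magnitude(10)  # now (6.0, 8.0)
    return v.get_magnitude()  # 10.0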
|
abelboldu/nagpy-pushover
|
refs/heads/master
|
nagpy/emailNotify.py
|
1
|
#
# Copyright (c) 2006 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.opensource.org/licenses/cpl.php.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
from nagpy.util import mail
from nagpy.util.exceptionHooks import email_hook
from nagpy.errors import MailError
from nagpy.notify import NagiosNotification
class EmailNotification(NagiosNotification):
template = ''
subject_template = ''
def setupParser(self):
p = NagiosNotification.setupParser(self)
p.add_option('-d', '--dest-email', dest='emailAddr',
help='email address to send to')
p.add_option('-f', '--from-email', dest='fromEmail',
help='email address to send from')
p.add_option('-n', '--from-name', dest='fromName',
help='name to send email from')
p.add_option('-e', '--error-email', dest='errorEmail',
default='root@localhost',
help='email address to send errors to')
return p
def parseArgs(self):
opts, args = NagiosNotification.parseArgs(self)
if not opts.fromEmail:
print "Source email address not defined."
self.usage()
if not opts.fromName:
print "Name to send email from not defined."
self.usage()
if opts.emailAddr:
self.emailAddr = opts.emailAddr
elif self.vars['contactemail'] != '':
self.emailAddr = self.vars['contactemail']
else:
print "Contact email not defined."
self.usage()
self.fromEmail = opts.fromEmail
self.fromName = opts.fromName
self.errorEmail = opts.errorEmail
return opts, args
def send(self, subject, body):
try:
mail.sendMailWithChecks(self.fromEmail, self.fromName,
self.emailAddr, subject, body)
except MailError, e:
body = 'To Address: %s\nError: %s\n\n%s' % (self.emailAddr,
e.error, body)
mail.sendMailWithChecks(self.fromEmail, self.fromName,
self.errorEmail, subject, body)
@email_hook
def notify(self):
NagiosNotification.notify(self)
subject = self.subject_template % self.vars
body = self.template % self.vars
self.send(subject, body)
class HostNotifyShort(EmailNotification):
template = """
Info: %(hostoutput)s
Time: %(date)s %(time)s
Type: %(notificationtype)s
"""
subject_template = """%(hoststate)s: %(hostname)s"""
class HostNotifyLong(HostNotifyShort):
template = "** Nagios Host Notification **" + HostNotifyShort.template + """
Host: %(hostname)s
Address: %(hostaddress)s
State: %(hoststate)s
Acknowledgment Author: %(hostackauthor)s
Acknowledgment: %(hostackcomment)s
Check Command: %(hostcheckcommand)s
Latency: %(hostlatency)s
Group: %(hostgroupname)s
Downtime: %(hostdowntime)s
Duration: %(hostduration)s
Perf Data: %(hostperfdata)s
"""
class ServiceNotifyShort(EmailNotification):
template = """
Info: %(serviceoutput)s
Time: %(date)s %(time)s
Type: %(notificationtype)s
"""
subject_template = """%(servicestate)s: %(hostname)s"""
class ServiceNotifyLong(ServiceNotifyShort):
template = "** Nagios Service Notification **" + ServiceNotifyShort.template + """
Host: %(hostname)s
Address: %(hostaddress)s
State: %(servicestate)s
Acknowledgment Author: %(serviceackauthor)s
Acknowledgment: %(serviceackcomment)s
Check Command: %(servicecheckcommand)s
Latency: %(servicelatency)s
Group: %(hostgroupname)s
Downtime: %(servicedowntime)s
Duration: %(serviceduration)s
Perf Data: %(serviceperfdata)s
"""
|
ex1usive-m4d/TemplateDocx
|
refs/heads/master
|
controllers/phpdocx/lib/openoffice/openoffice.org/basis3.4/program/python-core-2.6.1/lib/compiler/transformer.py
|
7
|
"""Parse tree transformation module.
Transforms Python source code into an abstract syntax tree (AST)
defined in the ast module.
The simplest ways to invoke this module are via parse and parseFile.
parse(buf) -> AST
parseFile(path) -> AST
"""
# Original version written by Greg Stein (gstein@lyra.org)
# and Bill Tutt (rassilon@lima.mudlib.org)
# February 1997.
#
# Modifications and improvements for Python 2.0 by Jeremy Hylton and
# Mark Hammond
#
# Some fixes to try to have correct line number on almost all nodes
# (except Module, Discard and Stmt) added by Sylvain Thenault
#
# Portions of this file are:
# Copyright (C) 1997-1998 Greg Stein. All Rights Reserved.
#
# This module is provided under a BSD-ish license. See
# http://www.opensource.org/licenses/bsd-license.html
# and replace OWNER, ORGANIZATION, and YEAR as appropriate.
from compiler.ast import *
import parser
import symbol
import token
class WalkerError(StandardError):
pass
from compiler.consts import CO_VARARGS, CO_VARKEYWORDS
from compiler.consts import OP_ASSIGN, OP_DELETE, OP_APPLY
def parseFile(path):
f = open(path, "U")
# XXX The parser API tolerates files without a trailing newline,
# but not strings without a trailing newline. Always add an extra
# newline to the file contents, since we're going through the string
# version of the API.
src = f.read() + "\n"
f.close()
return parse(src)
def parse(buf, mode="exec"):
if mode == "exec" or mode == "single":
return Transformer().parsesuite(buf)
elif mode == "eval":
return Transformer().parseexpr(buf)
else:
raise ValueError("compile() arg 3 must be"
" 'exec' or 'eval' or 'single'")
def asList(nodes):
l = []
for item in nodes:
if hasattr(item, "asList"):
l.append(item.asList())
else:
if type(item) is type( (None, None) ):
l.append(tuple(asList(item)))
elif type(item) is type( [] ):
l.append(asList(item))
else:
l.append(item)
return l
def extractLineNo(ast):
if not isinstance(ast[1], tuple):
# get a terminal node
return ast[2]
for child in ast[1:]:
if isinstance(child, tuple):
lineno = extractLineNo(child)
if lineno is not None:
return lineno
def Node(*args):
kind = args[0]
if nodes.has_key(kind):
try:
return nodes[kind](*args[1:])
except TypeError:
print nodes[kind], len(args), args
raise
else:
raise WalkerError, "Can't find appropriate Node type: %s" % str(args)
#return apply(ast.Node, args)
class Transformer:
"""Utility object for transforming Python parse trees.
Exposes the following methods:
tree = transform(ast_tree)
tree = parsesuite(text)
tree = parseexpr(text)
tree = parsefile(fileob | filename)
"""
def __init__(self):
self._dispatch = {}
for value, name in symbol.sym_name.items():
if hasattr(self, name):
self._dispatch[value] = getattr(self, name)
self._dispatch[token.NEWLINE] = self.com_NEWLINE
self._atom_dispatch = {token.LPAR: self.atom_lpar,
token.LSQB: self.atom_lsqb,
token.LBRACE: self.atom_lbrace,
token.BACKQUOTE: self.atom_backquote,
token.NUMBER: self.atom_number,
token.STRING: self.atom_string,
token.NAME: self.atom_name,
}
self.encoding = None
def transform(self, tree):
"""Transform an AST into a modified parse tree."""
if not (isinstance(tree, tuple) or isinstance(tree, list)):
tree = parser.ast2tuple(tree, line_info=1)
return self.compile_node(tree)
def parsesuite(self, text):
"""Return a modified parse tree for the given suite text."""
return self.transform(parser.suite(text))
def parseexpr(self, text):
"""Return a modified parse tree for the given expression text."""
return self.transform(parser.expr(text))
def parsefile(self, file):
"""Return a modified parse tree for the contents of the given file."""
if type(file) == type(''):
file = open(file)
return self.parsesuite(file.read())
# --------------------------------------------------------------
#
# PRIVATE METHODS
#
def compile_node(self, node):
### emit a line-number node?
n = node[0]
if n == symbol.encoding_decl:
self.encoding = node[2]
node = node[1]
n = node[0]
if n == symbol.single_input:
return self.single_input(node[1:])
if n == symbol.file_input:
return self.file_input(node[1:])
if n == symbol.eval_input:
return self.eval_input(node[1:])
if n == symbol.lambdef:
return self.lambdef(node[1:])
if n == symbol.funcdef:
return self.funcdef(node[1:])
if n == symbol.classdef:
return self.classdef(node[1:])
raise WalkerError, ('unexpected node type', n)
def single_input(self, node):
### do we want to do anything about being "interactive" ?
# NEWLINE | simple_stmt | compound_stmt NEWLINE
n = node[0][0]
if n != token.NEWLINE:
return self.com_stmt(node[0])
return Pass()
def file_input(self, nodelist):
doc = self.get_docstring(nodelist, symbol.file_input)
if doc is not None:
i = 1
else:
i = 0
stmts = []
for node in nodelist[i:]:
if node[0] != token.ENDMARKER and node[0] != token.NEWLINE:
self.com_append_stmt(stmts, node)
return Module(doc, Stmt(stmts))
def eval_input(self, nodelist):
# from the built-in function input()
### is this sufficient?
return Expression(self.com_node(nodelist[0]))
def decorator_name(self, nodelist):
listlen = len(nodelist)
assert listlen >= 1 and listlen % 2 == 1
item = self.atom_name(nodelist)
i = 1
while i < listlen:
assert nodelist[i][0] == token.DOT
assert nodelist[i + 1][0] == token.NAME
item = Getattr(item, nodelist[i + 1][1])
i += 2
return item
def decorator(self, nodelist):
# '@' dotted_name [ '(' [arglist] ')' ]
assert len(nodelist) in (3, 5, 6)
assert nodelist[0][0] == token.AT
assert nodelist[-1][0] == token.NEWLINE
assert nodelist[1][0] == symbol.dotted_name
funcname = self.decorator_name(nodelist[1][1:])
if len(nodelist) > 3:
assert nodelist[2][0] == token.LPAR
expr = self.com_call_function(funcname, nodelist[3])
else:
expr = funcname
return expr
def decorators(self, nodelist):
# decorators: decorator ([NEWLINE] decorator)* NEWLINE
items = []
for dec_nodelist in nodelist:
assert dec_nodelist[0] == symbol.decorator
items.append(self.decorator(dec_nodelist[1:]))
return Decorators(items)
def decorated(self, nodelist):
assert nodelist[0][0] == symbol.decorators
if nodelist[1][0] == symbol.funcdef:
n = [nodelist[0]] + list(nodelist[1][1:])
return self.funcdef(n)
elif nodelist[1][0] == symbol.classdef:
decorators = self.decorators(nodelist[0][1:])
cls = self.classdef(nodelist[1][1:])
cls.decorators = decorators
return cls
raise WalkerError()
def funcdef(self, nodelist):
# -6 -5 -4 -3 -2 -1
# funcdef: [decorators] 'def' NAME parameters ':' suite
# parameters: '(' [varargslist] ')'
if len(nodelist) == 6:
assert nodelist[0][0] == symbol.decorators
decorators = self.decorators(nodelist[0][1:])
else:
assert len(nodelist) == 5
decorators = None
lineno = nodelist[-4][2]
name = nodelist[-4][1]
args = nodelist[-3][2]
if args[0] == symbol.varargslist:
names, defaults, flags = self.com_arglist(args[1:])
else:
names = defaults = ()
flags = 0
doc = self.get_docstring(nodelist[-1])
# code for function
code = self.com_node(nodelist[-1])
if doc is not None:
assert isinstance(code, Stmt)
assert isinstance(code.nodes[0], Discard)
del code.nodes[0]
return Function(decorators, name, names, defaults, flags, doc, code,
lineno=lineno)
def lambdef(self, nodelist):
# lambdef: 'lambda' [varargslist] ':' test
if nodelist[2][0] == symbol.varargslist:
names, defaults, flags = self.com_arglist(nodelist[2][1:])
else:
names = defaults = ()
flags = 0
# code for lambda
code = self.com_node(nodelist[-1])
return Lambda(names, defaults, flags, code, lineno=nodelist[1][2])
old_lambdef = lambdef
def classdef(self, nodelist):
# classdef: 'class' NAME ['(' [testlist] ')'] ':' suite
name = nodelist[1][1]
doc = self.get_docstring(nodelist[-1])
if nodelist[2][0] == token.COLON:
bases = []
elif nodelist[3][0] == token.RPAR:
bases = []
else:
bases = self.com_bases(nodelist[3])
# code for class
code = self.com_node(nodelist[-1])
if doc is not None:
assert isinstance(code, Stmt)
assert isinstance(code.nodes[0], Discard)
del code.nodes[0]
return Class(name, bases, doc, code, lineno=nodelist[1][2])
def stmt(self, nodelist):
return self.com_stmt(nodelist[0])
small_stmt = stmt
flow_stmt = stmt
compound_stmt = stmt
def simple_stmt(self, nodelist):
# small_stmt (';' small_stmt)* [';'] NEWLINE
stmts = []
for i in range(0, len(nodelist), 2):
self.com_append_stmt(stmts, nodelist[i])
return Stmt(stmts)
def parameters(self, nodelist):
raise WalkerError
def varargslist(self, nodelist):
raise WalkerError
def fpdef(self, nodelist):
raise WalkerError
def fplist(self, nodelist):
raise WalkerError
def dotted_name(self, nodelist):
raise WalkerError
def comp_op(self, nodelist):
raise WalkerError
def trailer(self, nodelist):
raise WalkerError
def sliceop(self, nodelist):
raise WalkerError
def argument(self, nodelist):
raise WalkerError
# --------------------------------------------------------------
#
# STATEMENT NODES (invoked by com_node())
#
def expr_stmt(self, nodelist):
# augassign testlist | testlist ('=' testlist)*
en = nodelist[-1]
exprNode = self.lookup_node(en)(en[1:])
if len(nodelist) == 1:
return Discard(exprNode, lineno=exprNode.lineno)
if nodelist[1][0] == token.EQUAL:
nodesl = []
for i in range(0, len(nodelist) - 2, 2):
nodesl.append(self.com_assign(nodelist[i], OP_ASSIGN))
return Assign(nodesl, exprNode, lineno=nodelist[1][2])
else:
lval = self.com_augassign(nodelist[0])
op = self.com_augassign_op(nodelist[1])
return AugAssign(lval, op[1], exprNode, lineno=op[2])
raise WalkerError, "can't get here"
def print_stmt(self, nodelist):
# print ([ test (',' test)* [','] ] | '>>' test [ (',' test)+ [','] ])
items = []
if len(nodelist) == 1:
start = 1
dest = None
elif nodelist[1][0] == token.RIGHTSHIFT:
assert len(nodelist) == 3 \
or nodelist[3][0] == token.COMMA
dest = self.com_node(nodelist[2])
start = 4
else:
dest = None
start = 1
for i in range(start, len(nodelist), 2):
items.append(self.com_node(nodelist[i]))
if nodelist[-1][0] == token.COMMA:
return Print(items, dest, lineno=nodelist[0][2])
return Printnl(items, dest, lineno=nodelist[0][2])
def del_stmt(self, nodelist):
return self.com_assign(nodelist[1], OP_DELETE)
def pass_stmt(self, nodelist):
return Pass(lineno=nodelist[0][2])
def break_stmt(self, nodelist):
return Break(lineno=nodelist[0][2])
def continue_stmt(self, nodelist):
return Continue(lineno=nodelist[0][2])
def return_stmt(self, nodelist):
# return: [testlist]
if len(nodelist) < 2:
return Return(Const(None), lineno=nodelist[0][2])
return Return(self.com_node(nodelist[1]), lineno=nodelist[0][2])
def yield_stmt(self, nodelist):
expr = self.com_node(nodelist[0])
return Discard(expr, lineno=expr.lineno)
def yield_expr(self, nodelist):
if len(nodelist) > 1:
value = self.com_node(nodelist[1])
else:
value = Const(None)
return Yield(value, lineno=nodelist[0][2])
def raise_stmt(self, nodelist):
# raise: [test [',' test [',' test]]]
if len(nodelist) > 5:
expr3 = self.com_node(nodelist[5])
else:
expr3 = None
if len(nodelist) > 3:
expr2 = self.com_node(nodelist[3])
else:
expr2 = None
if len(nodelist) > 1:
expr1 = self.com_node(nodelist[1])
else:
expr1 = None
return Raise(expr1, expr2, expr3, lineno=nodelist[0][2])
def import_stmt(self, nodelist):
# import_stmt: import_name | import_from
assert len(nodelist) == 1
return self.com_node(nodelist[0])
def import_name(self, nodelist):
# import_name: 'import' dotted_as_names
return Import(self.com_dotted_as_names(nodelist[1]),
lineno=nodelist[0][2])
def import_from(self, nodelist):
# import_from: 'from' ('.'* dotted_name | '.') 'import' ('*' |
# '(' import_as_names ')' | import_as_names)
assert nodelist[0][1] == 'from'
idx = 1
while nodelist[idx][1] == '.':
idx += 1
level = idx - 1
if nodelist[idx][0] == symbol.dotted_name:
fromname = self.com_dotted_name(nodelist[idx])
idx += 1
else:
fromname = ""
assert nodelist[idx][1] == 'import'
if nodelist[idx + 1][0] == token.STAR:
return From(fromname, [('*', None)], level,
lineno=nodelist[0][2])
else:
node = nodelist[idx + 1 + (nodelist[idx + 1][0] == token.LPAR)]
return From(fromname, self.com_import_as_names(node), level,
lineno=nodelist[0][2])
def global_stmt(self, nodelist):
# global: NAME (',' NAME)*
names = []
for i in range(1, len(nodelist), 2):
names.append(nodelist[i][1])
return Global(names, lineno=nodelist[0][2])
def exec_stmt(self, nodelist):
# exec_stmt: 'exec' expr ['in' expr [',' expr]]
expr1 = self.com_node(nodelist[1])
if len(nodelist) >= 4:
expr2 = self.com_node(nodelist[3])
if len(nodelist) >= 6:
expr3 = self.com_node(nodelist[5])
else:
expr3 = None
else:
expr2 = expr3 = None
return Exec(expr1, expr2, expr3, lineno=nodelist[0][2])
def assert_stmt(self, nodelist):
# 'assert': test, [',' test]
expr1 = self.com_node(nodelist[1])
if (len(nodelist) == 4):
expr2 = self.com_node(nodelist[3])
else:
expr2 = None
return Assert(expr1, expr2, lineno=nodelist[0][2])
def if_stmt(self, nodelist):
# if: test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
tests = []
for i in range(0, len(nodelist) - 3, 4):
testNode = self.com_node(nodelist[i + 1])
suiteNode = self.com_node(nodelist[i + 3])
tests.append((testNode, suiteNode))
if len(nodelist) % 4 == 3:
elseNode = self.com_node(nodelist[-1])
## elseNode.lineno = nodelist[-1][1][2]
else:
elseNode = None
return If(tests, elseNode, lineno=nodelist[0][2])
def while_stmt(self, nodelist):
# 'while' test ':' suite ['else' ':' suite]
testNode = self.com_node(nodelist[1])
bodyNode = self.com_node(nodelist[3])
if len(nodelist) > 4:
elseNode = self.com_node(nodelist[6])
else:
elseNode = None
return While(testNode, bodyNode, elseNode, lineno=nodelist[0][2])
def for_stmt(self, nodelist):
# 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
assignNode = self.com_assign(nodelist[1], OP_ASSIGN)
listNode = self.com_node(nodelist[3])
bodyNode = self.com_node(nodelist[5])
if len(nodelist) > 8:
elseNode = self.com_node(nodelist[8])
else:
elseNode = None
return For(assignNode, listNode, bodyNode, elseNode,
lineno=nodelist[0][2])
def try_stmt(self, nodelist):
return self.com_try_except_finally(nodelist)
def with_stmt(self, nodelist):
return self.com_with(nodelist)
def with_var(self, nodelist):
return self.com_with_var(nodelist)
def suite(self, nodelist):
# simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
if len(nodelist) == 1:
return self.com_stmt(nodelist[0])
stmts = []
for node in nodelist:
if node[0] == symbol.stmt:
self.com_append_stmt(stmts, node)
return Stmt(stmts)
# --------------------------------------------------------------
#
# EXPRESSION NODES (invoked by com_node())
#
def testlist(self, nodelist):
# testlist: expr (',' expr)* [',']
# testlist_safe: test [(',' test)+ [',']]
# exprlist: expr (',' expr)* [',']
return self.com_binary(Tuple, nodelist)
testlist_safe = testlist # XXX
testlist1 = testlist
exprlist = testlist
def testlist_gexp(self, nodelist):
if len(nodelist) == 2 and nodelist[1][0] == symbol.gen_for:
test = self.com_node(nodelist[0])
return self.com_generator_expression(test, nodelist[1])
return self.testlist(nodelist)
def test(self, nodelist):
# or_test ['if' or_test 'else' test] | lambdef
if len(nodelist) == 1 and nodelist[0][0] == symbol.lambdef:
return self.lambdef(nodelist[0])
then = self.com_node(nodelist[0])
if len(nodelist) > 1:
assert len(nodelist) == 5
assert nodelist[1][1] == 'if'
assert nodelist[3][1] == 'else'
test = self.com_node(nodelist[2])
else_ = self.com_node(nodelist[4])
return IfExp(test, then, else_, lineno=nodelist[1][2])
return then
def or_test(self, nodelist):
# and_test ('or' and_test)* | lambdef
if len(nodelist) == 1 and nodelist[0][0] == symbol.lambdef:
return self.lambdef(nodelist[0])
return self.com_binary(Or, nodelist)
old_test = or_test
def and_test(self, nodelist):
# not_test ('and' not_test)*
return self.com_binary(And, nodelist)
def not_test(self, nodelist):
# 'not' not_test | comparison
result = self.com_node(nodelist[-1])
if len(nodelist) == 2:
return Not(result, lineno=nodelist[0][2])
return result
def comparison(self, nodelist):
# comparison: expr (comp_op expr)*
node = self.com_node(nodelist[0])
if len(nodelist) == 1:
return node
results = []
for i in range(2, len(nodelist), 2):
nl = nodelist[i-1]
# comp_op: '<' | '>' | '==' | '>=' | '<=' | '<>' | '!='
# | 'in' | 'not' 'in' | 'is' | 'is' 'not'
n = nl[1]
if n[0] == token.NAME:
type = n[1]
if len(nl) == 3:
if type == 'not':
type = 'not in'
else:
type = 'is not'
else:
type = _cmp_types[n[0]]
lineno = nl[1][2]
results.append((type, self.com_node(nodelist[i])))
# we need a special "compare" node so that we can distinguish
# 3 < x < 5 from (3 < x) < 5
# the two have very different semantics and results (note that the
# latter form is always true)
return Compare(node, results, lineno=lineno)
def expr(self, nodelist):
# xor_expr ('|' xor_expr)*
return self.com_binary(Bitor, nodelist)
def xor_expr(self, nodelist):
# and_expr ('^' and_expr)*
return self.com_binary(Bitxor, nodelist)
def and_expr(self, nodelist):
# shift_expr ('&' shift_expr)*
return self.com_binary(Bitand, nodelist)
def shift_expr(self, nodelist):
# arith_expr (('<<'|'>>') arith_expr)*
node = self.com_node(nodelist[0])
for i in range(2, len(nodelist), 2):
right = self.com_node(nodelist[i])
if nodelist[i-1][0] == token.LEFTSHIFT:
node = LeftShift([node, right], lineno=nodelist[1][2])
elif nodelist[i-1][0] == token.RIGHTSHIFT:
node = RightShift([node, right], lineno=nodelist[1][2])
else:
raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
return node
def arith_expr(self, nodelist):
node = self.com_node(nodelist[0])
for i in range(2, len(nodelist), 2):
right = self.com_node(nodelist[i])
if nodelist[i-1][0] == token.PLUS:
node = Add([node, right], lineno=nodelist[1][2])
elif nodelist[i-1][0] == token.MINUS:
node = Sub([node, right], lineno=nodelist[1][2])
else:
raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
return node
def term(self, nodelist):
node = self.com_node(nodelist[0])
for i in range(2, len(nodelist), 2):
right = self.com_node(nodelist[i])
t = nodelist[i-1][0]
if t == token.STAR:
node = Mul([node, right])
elif t == token.SLASH:
node = Div([node, right])
elif t == token.PERCENT:
node = Mod([node, right])
elif t == token.DOUBLESLASH:
node = FloorDiv([node, right])
else:
raise ValueError, "unexpected token: %s" % t
node.lineno = nodelist[1][2]
return node
def factor(self, nodelist):
elt = nodelist[0]
t = elt[0]
node = self.lookup_node(nodelist[-1])(nodelist[-1][1:])
# need to handle (unary op)constant here...
if t == token.PLUS:
return UnaryAdd(node, lineno=elt[2])
elif t == token.MINUS:
return UnarySub(node, lineno=elt[2])
elif t == token.TILDE:
node = Invert(node, lineno=elt[2])
return node
def power(self, nodelist):
# power: atom trailer* ('**' factor)*
node = self.com_node(nodelist[0])
for i in range(1, len(nodelist)):
elt = nodelist[i]
if elt[0] == token.DOUBLESTAR:
return Power([node, self.com_node(nodelist[i+1])],
lineno=elt[2])
node = self.com_apply_trailer(node, elt)
return node
def atom(self, nodelist):
return self._atom_dispatch[nodelist[0][0]](nodelist)
def atom_lpar(self, nodelist):
if nodelist[1][0] == token.RPAR:
return Tuple((), lineno=nodelist[0][2])
return self.com_node(nodelist[1])
def atom_lsqb(self, nodelist):
if nodelist[1][0] == token.RSQB:
return List((), lineno=nodelist[0][2])
return self.com_list_constructor(nodelist[1])
def atom_lbrace(self, nodelist):
if nodelist[1][0] == token.RBRACE:
return Dict((), lineno=nodelist[0][2])
return self.com_dictmaker(nodelist[1])
def atom_backquote(self, nodelist):
return Backquote(self.com_node(nodelist[1]))
def atom_number(self, nodelist):
### need to verify this matches compile.c
k = eval(nodelist[0][1])
return Const(k, lineno=nodelist[0][2])
def decode_literal(self, lit):
if self.encoding:
# this is particularly fragile & a bit of a
# hack... changes in compile.c:parsestr and
# tokenizer.c must be reflected here.
if self.encoding not in ['utf-8', 'iso-8859-1']:
lit = unicode(lit, 'utf-8').encode(self.encoding)
return eval("# coding: %s\n%s" % (self.encoding, lit))
else:
return eval(lit)
def atom_string(self, nodelist):
k = ''
for node in nodelist:
k += self.decode_literal(node[1])
return Const(k, lineno=nodelist[0][2])
def atom_name(self, nodelist):
return Name(nodelist[0][1], lineno=nodelist[0][2])
# --------------------------------------------------------------
#
# INTERNAL PARSING UTILITIES
#
# The use of com_node() introduces a lot of extra stack frames,
# enough to cause a stack overflow compiling test.test_parser with
# the standard interpreter recursionlimit. The com_node() is a
# convenience function that hides the dispatch details, but comes
# at a very high cost. It is more efficient to dispatch directly
# in the callers. In these cases, use lookup_node() and call the
# dispatched node directly.
def lookup_node(self, node):
return self._dispatch[node[0]]
def com_node(self, node):
# Note: compile.c has handling in com_node for del_stmt, pass_stmt,
# break_stmt, stmt, small_stmt, flow_stmt, simple_stmt,
# and compound_stmt.
# We'll just dispatch them.
return self._dispatch[node[0]](node[1:])
def com_NEWLINE(self, *args):
# A ';' at the end of a line can make a NEWLINE token appear
# here; render it harmless. (genc discards ('discard',
# ('const', xxxx)) Nodes)
return Discard(Const(None))
def com_arglist(self, nodelist):
# varargslist:
# (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] | '**' NAME)
# | fpdef ['=' test] (',' fpdef ['=' test])* [',']
# fpdef: NAME | '(' fplist ')'
# fplist: fpdef (',' fpdef)* [',']
names = []
defaults = []
flags = 0
i = 0
while i < len(nodelist):
node = nodelist[i]
if node[0] == token.STAR or node[0] == token.DOUBLESTAR:
if node[0] == token.STAR:
node = nodelist[i+1]
if node[0] == token.NAME:
names.append(node[1])
flags = flags | CO_VARARGS
i = i + 3
if i < len(nodelist):
# should be DOUBLESTAR
t = nodelist[i][0]
if t == token.DOUBLESTAR:
node = nodelist[i+1]
else:
raise ValueError, "unexpected token: %s" % t
names.append(node[1])
flags = flags | CO_VARKEYWORDS
break
# fpdef: NAME | '(' fplist ')'
names.append(self.com_fpdef(node))
i = i + 1
if i < len(nodelist) and nodelist[i][0] == token.EQUAL:
defaults.append(self.com_node(nodelist[i + 1]))
i = i + 2
elif len(defaults):
# we have already seen an argument with default, but here
# came one without
raise SyntaxError, "non-default argument follows default argument"
# skip the comma
i = i + 1
return names, defaults, flags
def com_fpdef(self, node):
# fpdef: NAME | '(' fplist ')'
if node[1][0] == token.LPAR:
return self.com_fplist(node[2])
return node[1][1]
def com_fplist(self, node):
# fplist: fpdef (',' fpdef)* [',']
if len(node) == 2:
return self.com_fpdef(node[1])
list = []
for i in range(1, len(node), 2):
list.append(self.com_fpdef(node[i]))
return tuple(list)
def com_dotted_name(self, node):
# String together the dotted names and return the string
name = ""
for n in node:
if type(n) == type(()) and n[0] == 1:
name = name + n[1] + '.'
return name[:-1]
def com_dotted_as_name(self, node):
assert node[0] == symbol.dotted_as_name
node = node[1:]
dot = self.com_dotted_name(node[0][1:])
if len(node) == 1:
return dot, None
assert node[1][1] == 'as'
assert node[2][0] == token.NAME
return dot, node[2][1]
def com_dotted_as_names(self, node):
assert node[0] == symbol.dotted_as_names
node = node[1:]
names = [self.com_dotted_as_name(node[0])]
for i in range(2, len(node), 2):
names.append(self.com_dotted_as_name(node[i]))
return names
def com_import_as_name(self, node):
assert node[0] == symbol.import_as_name
node = node[1:]
assert node[0][0] == token.NAME
if len(node) == 1:
return node[0][1], None
assert node[1][1] == 'as', node
assert node[2][0] == token.NAME
return node[0][1], node[2][1]
def com_import_as_names(self, node):
assert node[0] == symbol.import_as_names
node = node[1:]
names = [self.com_import_as_name(node[0])]
for i in range(2, len(node), 2):
names.append(self.com_import_as_name(node[i]))
return names
def com_bases(self, node):
bases = []
for i in range(1, len(node), 2):
bases.append(self.com_node(node[i]))
return bases
def com_try_except_finally(self, nodelist):
# ('try' ':' suite
# ((except_clause ':' suite)+ ['else' ':' suite] ['finally' ':' suite]
# | 'finally' ':' suite))
if nodelist[3][0] == token.NAME:
# first clause is a finally clause: only try-finally
return TryFinally(self.com_node(nodelist[2]),
self.com_node(nodelist[5]),
lineno=nodelist[0][2])
#tryexcept: [TryNode, [except_clauses], elseNode)]
clauses = []
elseNode = None
finallyNode = None
for i in range(3, len(nodelist), 3):
node = nodelist[i]
if node[0] == symbol.except_clause:
# except_clause: 'except' [expr [(',' | 'as') expr]] */
if len(node) > 2:
expr1 = self.com_node(node[2])
if len(node) > 4:
expr2 = self.com_assign(node[4], OP_ASSIGN)
else:
expr2 = None
else:
expr1 = expr2 = None
clauses.append((expr1, expr2, self.com_node(nodelist[i+2])))
if node[0] == token.NAME:
if node[1] == 'else':
elseNode = self.com_node(nodelist[i+2])
elif node[1] == 'finally':
finallyNode = self.com_node(nodelist[i+2])
try_except = TryExcept(self.com_node(nodelist[2]), clauses, elseNode,
lineno=nodelist[0][2])
if finallyNode:
return TryFinally(try_except, finallyNode, lineno=nodelist[0][2])
else:
return try_except
def com_with(self, nodelist):
# with_stmt: 'with' expr [with_var] ':' suite
expr = self.com_node(nodelist[1])
body = self.com_node(nodelist[-1])
if nodelist[2][0] == token.COLON:
var = None
else:
var = self.com_assign(nodelist[2][2], OP_ASSIGN)
return With(expr, var, body, lineno=nodelist[0][2])
def com_with_var(self, nodelist):
# with_var: 'as' expr
return self.com_node(nodelist[1])
def com_augassign_op(self, node):
assert node[0] == symbol.augassign
return node[1]
def com_augassign(self, node):
"""Return node suitable for lvalue of augmented assignment
Names, slices, and attributes are the only allowable nodes.
"""
l = self.com_node(node)
if l.__class__ in (Name, Slice, Subscript, Getattr):
return l
raise SyntaxError, "can't assign to %s" % l.__class__.__name__
def com_assign(self, node, assigning):
# return a node suitable for use as an "lvalue"
# loop to avoid trivial recursion
while 1:
t = node[0]
if t in (symbol.exprlist, symbol.testlist, symbol.testlist_safe, symbol.testlist_gexp):
if len(node) > 2:
return self.com_assign_tuple(node, assigning)
node = node[1]
elif t in _assign_types:
if len(node) > 2:
raise SyntaxError, "can't assign to operator"
node = node[1]
elif t == symbol.power:
if node[1][0] != symbol.atom:
raise SyntaxError, "can't assign to operator"
if len(node) > 2:
primary = self.com_node(node[1])
for i in range(2, len(node)-1):
ch = node[i]
if ch[0] == token.DOUBLESTAR:
raise SyntaxError, "can't assign to operator"
primary = self.com_apply_trailer(primary, ch)
return self.com_assign_trailer(primary, node[-1],
assigning)
node = node[1]
elif t == symbol.atom:
t = node[1][0]
if t == token.LPAR:
node = node[2]
if node[0] == token.RPAR:
raise SyntaxError, "can't assign to ()"
elif t == token.LSQB:
node = node[2]
if node[0] == token.RSQB:
raise SyntaxError, "can't assign to []"
return self.com_assign_list(node, assigning)
elif t == token.NAME:
return self.com_assign_name(node[1], assigning)
else:
raise SyntaxError, "can't assign to literal"
else:
raise SyntaxError, "bad assignment (%s)" % t
def com_assign_tuple(self, node, assigning):
assigns = []
for i in range(1, len(node), 2):
assigns.append(self.com_assign(node[i], assigning))
return AssTuple(assigns, lineno=extractLineNo(node))
def com_assign_list(self, node, assigning):
assigns = []
for i in range(1, len(node), 2):
if i + 1 < len(node):
if node[i + 1][0] == symbol.list_for:
raise SyntaxError, "can't assign to list comprehension"
assert node[i + 1][0] == token.COMMA, node[i + 1]
assigns.append(self.com_assign(node[i], assigning))
return AssList(assigns, lineno=extractLineNo(node))
def com_assign_name(self, node, assigning):
return AssName(node[1], assigning, lineno=node[2])
def com_assign_trailer(self, primary, node, assigning):
t = node[1][0]
if t == token.DOT:
return self.com_assign_attr(primary, node[2], assigning)
if t == token.LSQB:
return self.com_subscriptlist(primary, node[2], assigning)
if t == token.LPAR:
raise SyntaxError, "can't assign to function call"
raise SyntaxError, "unknown trailer type: %s" % t
def com_assign_attr(self, primary, node, assigning):
return AssAttr(primary, node[1], assigning, lineno=node[-1])
def com_binary(self, constructor, nodelist):
"Compile 'NODE (OP NODE)*' into (type, [ node1, ..., nodeN ])."
l = len(nodelist)
if l == 1:
n = nodelist[0]
return self.lookup_node(n)(n[1:])
items = []
for i in range(0, l, 2):
n = nodelist[i]
items.append(self.lookup_node(n)(n[1:]))
return constructor(items, lineno=extractLineNo(nodelist))
def com_stmt(self, node):
result = self.lookup_node(node)(node[1:])
assert result is not None
if isinstance(result, Stmt):
return result
return Stmt([result])
def com_append_stmt(self, stmts, node):
result = self.lookup_node(node)(node[1:])
assert result is not None
if isinstance(result, Stmt):
stmts.extend(result.nodes)
else:
stmts.append(result)
if hasattr(symbol, 'list_for'):
def com_list_constructor(self, nodelist):
# listmaker: test ( list_for | (',' test)* [','] )
values = []
for i in range(1, len(nodelist)):
if nodelist[i][0] == symbol.list_for:
assert len(nodelist[i:]) == 1
return self.com_list_comprehension(values[0],
nodelist[i])
elif nodelist[i][0] == token.COMMA:
continue
values.append(self.com_node(nodelist[i]))
return List(values, lineno=values[0].lineno)
def com_list_comprehension(self, expr, node):
# list_iter: list_for | list_if
# list_for: 'for' exprlist 'in' testlist [list_iter]
# list_if: 'if' test [list_iter]
# XXX should raise SyntaxError for assignment
lineno = node[1][2]
fors = []
while node:
t = node[1][1]
if t == 'for':
assignNode = self.com_assign(node[2], OP_ASSIGN)
listNode = self.com_node(node[4])
newfor = ListCompFor(assignNode, listNode, [])
newfor.lineno = node[1][2]
fors.append(newfor)
if len(node) == 5:
node = None
else:
node = self.com_list_iter(node[5])
elif t == 'if':
test = self.com_node(node[2])
newif = ListCompIf(test, lineno=node[1][2])
newfor.ifs.append(newif)
if len(node) == 3:
node = None
else:
node = self.com_list_iter(node[3])
else:
raise SyntaxError, \
("unexpected list comprehension element: %s %d"
% (node, lineno))
return ListComp(expr, fors, lineno=lineno)
def com_list_iter(self, node):
assert node[0] == symbol.list_iter
return node[1]
else:
def com_list_constructor(self, nodelist):
values = []
for i in range(1, len(nodelist), 2):
values.append(self.com_node(nodelist[i]))
return List(values, lineno=values[0].lineno)
if hasattr(symbol, 'gen_for'):
def com_generator_expression(self, expr, node):
# gen_iter: gen_for | gen_if
# gen_for: 'for' exprlist 'in' test [gen_iter]
# gen_if: 'if' test [gen_iter]
lineno = node[1][2]
fors = []
while node:
t = node[1][1]
if t == 'for':
assignNode = self.com_assign(node[2], OP_ASSIGN)
genNode = self.com_node(node[4])
newfor = GenExprFor(assignNode, genNode, [],
lineno=node[1][2])
fors.append(newfor)
                    if len(node) == 5:
node = None
else:
node = self.com_gen_iter(node[5])
elif t == 'if':
test = self.com_node(node[2])
newif = GenExprIf(test, lineno=node[1][2])
newfor.ifs.append(newif)
if len(node) == 3:
node = None
else:
node = self.com_gen_iter(node[3])
else:
raise SyntaxError, \
("unexpected generator expression element: %s %d"
% (node, lineno))
fors[0].is_outmost = True
return GenExpr(GenExprInner(expr, fors), lineno=lineno)
def com_gen_iter(self, node):
assert node[0] == symbol.gen_iter
return node[1]
def com_dictmaker(self, nodelist):
        # dictmaker: test ':' test (',' test ':' test)* [',']
items = []
for i in range(1, len(nodelist), 4):
items.append((self.com_node(nodelist[i]),
self.com_node(nodelist[i+2])))
return Dict(items, lineno=items[0][0].lineno)
def com_apply_trailer(self, primaryNode, nodelist):
t = nodelist[1][0]
if t == token.LPAR:
return self.com_call_function(primaryNode, nodelist[2])
if t == token.DOT:
return self.com_select_member(primaryNode, nodelist[2])
if t == token.LSQB:
return self.com_subscriptlist(primaryNode, nodelist[2], OP_APPLY)
raise SyntaxError, 'unknown node type: %s' % t
def com_select_member(self, primaryNode, nodelist):
if nodelist[0] != token.NAME:
raise SyntaxError, "member must be a name"
return Getattr(primaryNode, nodelist[1], lineno=nodelist[2])
def com_call_function(self, primaryNode, nodelist):
if nodelist[0] == token.RPAR:
return CallFunc(primaryNode, [], lineno=extractLineNo(nodelist))
args = []
kw = 0
star_node = dstar_node = None
len_nodelist = len(nodelist)
i = 1
while i < len_nodelist:
node = nodelist[i]
if node[0]==token.STAR:
if star_node is not None:
                    raise SyntaxError, 'already have the varargs identifier'
star_node = self.com_node(nodelist[i+1])
i = i + 3
continue
elif node[0]==token.DOUBLESTAR:
if dstar_node is not None:
                    raise SyntaxError, 'already have the kwargs identifier'
dstar_node = self.com_node(nodelist[i+1])
i = i + 3
continue
# positional or named parameters
kw, result = self.com_argument(node, kw, star_node)
if len_nodelist != 2 and isinstance(result, GenExpr) \
and len(node) == 3 and node[2][0] == symbol.gen_for:
# allow f(x for x in y), but reject f(x for x in y, 1)
# should use f((x for x in y), 1) instead of f(x for x in y, 1)
raise SyntaxError, 'generator expression needs parenthesis'
args.append(result)
i = i + 2
return CallFunc(primaryNode, args, star_node, dstar_node,
lineno=extractLineNo(nodelist))
def com_argument(self, nodelist, kw, star_node):
if len(nodelist) == 3 and nodelist[2][0] == symbol.gen_for:
test = self.com_node(nodelist[1])
return 0, self.com_generator_expression(test, nodelist[2])
if len(nodelist) == 2:
if kw:
raise SyntaxError, "non-keyword arg after keyword arg"
if star_node:
raise SyntaxError, "only named arguments may follow *expression"
return 0, self.com_node(nodelist[1])
result = self.com_node(nodelist[3])
n = nodelist[1]
while len(n) == 2 and n[0] != token.NAME:
n = n[1]
if n[0] != token.NAME:
raise SyntaxError, "keyword can't be an expression (%s)"%n[0]
node = Keyword(n[1], result, lineno=n[2])
return 1, node
def com_subscriptlist(self, primary, nodelist, assigning):
# slicing: simple_slicing | extended_slicing
# simple_slicing: primary "[" short_slice "]"
# extended_slicing: primary "[" slice_list "]"
# slice_list: slice_item ("," slice_item)* [","]
# backwards compat slice for '[i:j]'
if len(nodelist) == 2:
sub = nodelist[1]
if (sub[1][0] == token.COLON or \
(len(sub) > 2 and sub[2][0] == token.COLON)) and \
sub[-1][0] != symbol.sliceop:
return self.com_slice(primary, sub, assigning)
subscripts = []
for i in range(1, len(nodelist), 2):
subscripts.append(self.com_subscript(nodelist[i]))
return Subscript(primary, assigning, subscripts,
lineno=extractLineNo(nodelist))
def com_subscript(self, node):
# slice_item: expression | proper_slice | ellipsis
ch = node[1]
t = ch[0]
if t == token.DOT and node[2][0] == token.DOT:
return Ellipsis()
if t == token.COLON or len(node) > 2:
return self.com_sliceobj(node)
return self.com_node(ch)
def com_sliceobj(self, node):
# proper_slice: short_slice | long_slice
# short_slice: [lower_bound] ":" [upper_bound]
# long_slice: short_slice ":" [stride]
# lower_bound: expression
# upper_bound: expression
# stride: expression
#
# Note: a stride may be further slicing...
items = []
if node[1][0] == token.COLON:
items.append(Const(None))
i = 2
else:
items.append(self.com_node(node[1]))
# i == 2 is a COLON
i = 3
if i < len(node) and node[i][0] == symbol.test:
items.append(self.com_node(node[i]))
i = i + 1
else:
items.append(Const(None))
# a short_slice has been built. look for long_slice now by looking
# for strides...
for j in range(i, len(node)):
ch = node[j]
if len(ch) == 2:
items.append(Const(None))
else:
items.append(self.com_node(ch[2]))
return Sliceobj(items, lineno=extractLineNo(node))
def com_slice(self, primary, node, assigning):
# short_slice: [lower_bound] ":" [upper_bound]
lower = upper = None
if len(node) == 3:
if node[1][0] == token.COLON:
upper = self.com_node(node[2])
else:
lower = self.com_node(node[1])
elif len(node) == 4:
lower = self.com_node(node[1])
upper = self.com_node(node[3])
return Slice(primary, assigning, lower, upper,
lineno=extractLineNo(node))
def get_docstring(self, node, n=None):
if n is None:
n = node[0]
node = node[1:]
if n == symbol.suite:
if len(node) == 1:
return self.get_docstring(node[0])
for sub in node:
if sub[0] == symbol.stmt:
return self.get_docstring(sub)
return None
if n == symbol.file_input:
for sub in node:
if sub[0] == symbol.stmt:
return self.get_docstring(sub)
return None
if n == symbol.atom:
if node[0][0] == token.STRING:
s = ''
for t in node:
s = s + eval(t[1])
return s
return None
if n == symbol.stmt or n == symbol.simple_stmt \
or n == symbol.small_stmt:
return self.get_docstring(node[0])
if n in _doc_nodes and len(node) == 1:
return self.get_docstring(node[0])
return None
_doc_nodes = [
symbol.expr_stmt,
symbol.testlist,
symbol.testlist_safe,
symbol.test,
symbol.or_test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
symbol.expr,
symbol.xor_expr,
symbol.and_expr,
symbol.shift_expr,
symbol.arith_expr,
symbol.term,
symbol.factor,
symbol.power,
]
# comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '=='
# | 'in' | 'not' 'in' | 'is' | 'is' 'not'
_cmp_types = {
token.LESS : '<',
token.GREATER : '>',
token.EQEQUAL : '==',
token.EQUAL : '==',
token.LESSEQUAL : '<=',
token.GREATEREQUAL : '>=',
token.NOTEQUAL : '!=',
}
_legal_node_types = [
symbol.funcdef,
symbol.classdef,
symbol.stmt,
symbol.small_stmt,
symbol.flow_stmt,
symbol.simple_stmt,
symbol.compound_stmt,
symbol.expr_stmt,
symbol.print_stmt,
symbol.del_stmt,
symbol.pass_stmt,
symbol.break_stmt,
symbol.continue_stmt,
symbol.return_stmt,
symbol.raise_stmt,
symbol.import_stmt,
symbol.global_stmt,
symbol.exec_stmt,
symbol.assert_stmt,
symbol.if_stmt,
symbol.while_stmt,
symbol.for_stmt,
symbol.try_stmt,
symbol.with_stmt,
symbol.suite,
symbol.testlist,
symbol.testlist_safe,
symbol.test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
symbol.exprlist,
symbol.expr,
symbol.xor_expr,
symbol.and_expr,
symbol.shift_expr,
symbol.arith_expr,
symbol.term,
symbol.factor,
symbol.power,
symbol.atom,
]
if hasattr(symbol, 'yield_stmt'):
_legal_node_types.append(symbol.yield_stmt)
if hasattr(symbol, 'yield_expr'):
_legal_node_types.append(symbol.yield_expr)
_assign_types = [
symbol.test,
symbol.or_test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
symbol.expr,
symbol.xor_expr,
symbol.and_expr,
symbol.shift_expr,
symbol.arith_expr,
symbol.term,
symbol.factor,
]
_names = {}
for k, v in symbol.sym_name.items():
_names[k] = v
for k, v in token.tok_name.items():
_names[k] = v
def debug_tree(tree):
l = []
for elt in tree:
if isinstance(elt, int):
l.append(_names.get(elt, elt))
elif isinstance(elt, str):
l.append(elt)
else:
l.append(debug_tree(elt))
return l
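# Illustrative sketch (not part of the original module): debug_tree() maps the
# integer symbol/token codes in a raw parse tuple to readable names, which is
# handy when tracing how the com_* methods above walk the tree.
if __name__ == '__main__':
    import parser
    st = parser.suite("x = 1")
    # Prints nested lists such as ['file_input', ['stmt', ...], ...];
    # the exact shape depends on the interpreter's grammar version.
    print debug_tree(parser.st2tuple(st))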
|
vicalloy/django-lbattachment
|
refs/heads/master
|
lbattachment/admin.py
|
1
|
from django.contrib import admin
from .models import LBAttachment
class LBAttachmentAdmin(admin.ModelAdmin):
search_fields = ('created_by__username', 'filename', 'act_members_param', 'notice_members_param', 'share_members_param')
list_display = ('created_by', 'filename', 'is_img', 'num_downloads', 'is_active', )
list_filter = ('is_img', 'suffix')
raw_id_fields = ('created_by',)
admin.site.register(LBAttachment, LBAttachmentAdmin)
|
xuxiao19910803/edx-platform
|
refs/heads/master
|
pavelib/utils/envs.py
|
39
|
"""
Helper functions for loading environment settings.
"""
from __future__ import print_function
import os
import sys
import json
from lazy import lazy
from path import path
import memcache
class Env(object):
"""
Load information about the execution environment.
"""
# Root of the git repository (edx-platform)
REPO_ROOT = path(__file__).abspath().parent.parent.parent
# Reports Directory
REPORT_DIR = REPO_ROOT / 'reports'
METRICS_DIR = REPORT_DIR / 'metrics'
# Python unittest dirs
PYTHON_COVERAGERC = REPO_ROOT / ".coveragerc"
# Bok_choy dirs
BOK_CHOY_DIR = REPO_ROOT / "common" / "test" / "acceptance"
BOK_CHOY_LOG_DIR = REPO_ROOT / "test_root" / "log"
BOK_CHOY_REPORT_DIR = REPORT_DIR / "bok_choy"
BOK_CHOY_COVERAGERC = BOK_CHOY_DIR / ".coveragerc"
# If set, put reports for run in "unique" directories.
# The main purpose of this is to ensure that the reports can be 'slurped'
# in the main jenkins flow job without overwriting the reports from other
# build steps. For local development/testing, this shouldn't be needed.
if os.environ.get("SHARD", None):
shard_str = "shard_{}".format(os.environ.get("SHARD"))
BOK_CHOY_REPORT_DIR = BOK_CHOY_REPORT_DIR / shard_str
BOK_CHOY_LOG_DIR = BOK_CHOY_LOG_DIR / shard_str
# For the time being, stubs are used by both the bok-choy and lettuce acceptance tests
# For this reason, the stubs package is currently located in the Django app called "terrain"
# where other lettuce configuration is stored.
BOK_CHOY_STUB_DIR = REPO_ROOT / "common" / "djangoapps" / "terrain"
# Directory that videos are served from
VIDEO_SOURCE_DIR = REPO_ROOT / "test_root" / "data" / "video"
BOK_CHOY_SERVERS = {
'lms': {
'port': 8003,
'log': BOK_CHOY_LOG_DIR / "bok_choy_lms.log"
},
'cms': {
'port': 8031,
'log': BOK_CHOY_LOG_DIR / "bok_choy_studio.log"
}
}
BOK_CHOY_STUBS = {
'xqueue': {
'port': 8040,
'log': BOK_CHOY_LOG_DIR / "bok_choy_xqueue.log",
'config': 'register_submission_url=http://0.0.0.0:8041/test/register_submission',
},
'ora': {
'port': 8041,
'log': BOK_CHOY_LOG_DIR / "bok_choy_ora.log",
'config': '',
},
'comments': {
'port': 4567,
'log': BOK_CHOY_LOG_DIR / "bok_choy_comments.log",
},
'video': {
'port': 8777,
'log': BOK_CHOY_LOG_DIR / "bok_choy_video_sources.log",
'config': "root_dir={}".format(VIDEO_SOURCE_DIR),
},
'youtube': {
'port': 9080,
'log': BOK_CHOY_LOG_DIR / "bok_choy_youtube.log",
},
'edxnotes': {
'port': 8042,
'log': BOK_CHOY_LOG_DIR / "bok_choy_edxnotes.log",
}
}
# Mongo databases that will be dropped before/after the tests run
BOK_CHOY_MONGO_DATABASE = "test"
BOK_CHOY_CACHE = memcache.Client(['0.0.0.0:11211'], debug=0)
# Test Ids Directory
TEST_DIR = REPO_ROOT / ".testids"
# Files used to run each of the js test suites
# TODO: Store this as a dict. Order seems to matter for some
# reason. See issue TE-415.
JS_TEST_ID_FILES = [
REPO_ROOT / 'lms/static/js_test.yml',
REPO_ROOT / 'lms/static/js_test_coffee.yml',
REPO_ROOT / 'cms/static/js_test.yml',
REPO_ROOT / 'cms/static/js_test_squire.yml',
REPO_ROOT / 'common/lib/xmodule/xmodule/js/js_test.yml',
REPO_ROOT / 'common/static/js_test.yml',
REPO_ROOT / 'common/static/js_test_requirejs.yml',
]
JS_TEST_ID_KEYS = [
'lms',
'lms-coffee',
'cms',
'cms-squire',
'xmodule',
'common',
'common-requirejs'
]
JS_REPORT_DIR = REPORT_DIR / 'javascript'
# Directories used for common/lib/ tests
LIB_TEST_DIRS = []
for item in (REPO_ROOT / "common/lib").listdir():
if (REPO_ROOT / 'common/lib' / item).isdir():
LIB_TEST_DIRS.append(path("common/lib") / item.basename())
LIB_TEST_DIRS.append(path("pavelib/paver_tests"))
# Directory for i18n test reports
I18N_REPORT_DIR = REPORT_DIR / 'i18n'
# Service variant (lms, cms, etc.) configured with an environment variable
# We use this to determine which envs.json file to load.
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)
# If service variant not configured in env, then pass the correct
# environment for lms / cms
if not SERVICE_VARIANT: # this will intentionally catch "";
if any(i in sys.argv[1:] for i in ('cms', 'studio')):
SERVICE_VARIANT = 'cms'
else:
SERVICE_VARIANT = 'lms'
@lazy
def env_tokens(self):
"""
Return a dict of environment settings.
If we couldn't find the JSON file, issue a warning and return an empty dict.
"""
# Find the env JSON file
if self.SERVICE_VARIANT:
env_path = self.REPO_ROOT.parent / "{service}.env.json".format(service=self.SERVICE_VARIANT)
else:
env_path = path("env.json").abspath()
# If the file does not exist, here or one level up,
# issue a warning and return an empty dict
if not env_path.isfile():
env_path = env_path.parent.parent / env_path.basename()
if not env_path.isfile():
print(
"Warning: could not find environment JSON file "
"at '{path}'".format(path=env_path),
file=sys.stderr,
)
return dict()
# Otherwise, load the file as JSON and return the resulting dict
try:
with open(env_path) as env_file:
return json.load(env_file)
except ValueError:
print(
"Error: Could not parse JSON "
"in {path}".format(path=env_path),
file=sys.stderr,
)
sys.exit(1)
@lazy
def feature_flags(self):
"""
Return a dictionary of feature flags configured by the environment.
"""
return self.env_tokens.get('FEATURES', dict())
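# Illustrative usage sketch (an assumption, not part of the original module):
# Env exposes repo paths as class attributes and lazily-loaded settings as
# properties, so callers typically just instantiate it and read attributes.
if __name__ == '__main__':
    env = Env()
    print(env.REPO_ROOT)           # root of the edx-platform checkout
    print(env.SERVICE_VARIANT)     # 'lms' unless overridden via the environment
    # feature_flags comes from the FEATURES dict in <variant>.env.json, if found;
    # 'SOME_FLAG' below is a hypothetical flag name.
    print(env.feature_flags.get('SOME_FLAG', False))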
|
midma101/AndIWasJustGoingToBed
|
refs/heads/master
|
.venv/lib/python2.7/site-packages/wtforms/ext/sqlalchemy/fields.py
|
54
|
"""
Useful form fields for use with SQLAlchemy ORM.
"""
from __future__ import unicode_literals
import operator
from wtforms import widgets
from wtforms.compat import text_type, string_types
from wtforms.fields import SelectFieldBase
from wtforms.validators import ValidationError
try:
from sqlalchemy.orm.util import identity_key
has_identity_key = True
except ImportError:
has_identity_key = False
__all__ = (
'QuerySelectField', 'QuerySelectMultipleField',
)
class QuerySelectField(SelectFieldBase):
"""
Will display a select drop-down field to choose between ORM results in a
sqlalchemy `Query`. The `data` property actually will store/keep an ORM
model instance, not the ID. Submitting a choice which is not in the query
will result in a validation error.
This field only works for queries on models whose primary key column(s)
have a consistent string representation. This means it mostly only works
for those composed of string, unicode, and integer types. For the most
part, the primary keys will be auto-detected from the model, alternately
pass a one-argument callable to `get_pk` which can return a unique
comparable key.
The `query` property on the field can be set from within a view to assign
a query per-instance to the field. If the property is not set, the
`query_factory` callable passed to the field constructor will be called to
obtain a query.
Specify `get_label` to customize the label associated with each option. If
a string, this is the name of an attribute on the model object to use as
the label text. If a one-argument callable, this callable will be passed
model instance and expected to return the label text. Otherwise, the model
object's `__str__` or `__unicode__` will be used.
If `allow_blank` is set to `True`, then a blank choice will be added to the
top of the list. Selecting this choice will result in the `data` property
being `None`. The label for this blank choice can be set by specifying the
`blank_text` parameter.
"""
widget = widgets.Select()
def __init__(self, label=None, validators=None, query_factory=None,
get_pk=None, get_label=None, allow_blank=False,
blank_text='', **kwargs):
super(QuerySelectField, self).__init__(label, validators, **kwargs)
self.query_factory = query_factory
if get_pk is None:
if not has_identity_key:
raise Exception('The sqlalchemy identity_key function could not be imported.')
self.get_pk = get_pk_from_identity
else:
self.get_pk = get_pk
if get_label is None:
self.get_label = lambda x: x
elif isinstance(get_label, string_types):
self.get_label = operator.attrgetter(get_label)
else:
self.get_label = get_label
self.allow_blank = allow_blank
self.blank_text = blank_text
self.query = None
self._object_list = None
def _get_data(self):
if self._formdata is not None:
for pk, obj in self._get_object_list():
if pk == self._formdata:
self._set_data(obj)
break
return self._data
def _set_data(self, data):
self._data = data
self._formdata = None
data = property(_get_data, _set_data)
def _get_object_list(self):
if self._object_list is None:
query = self.query or self.query_factory()
get_pk = self.get_pk
self._object_list = list((text_type(get_pk(obj)), obj) for obj in query)
return self._object_list
def iter_choices(self):
if self.allow_blank:
yield ('__None', self.blank_text, self.data is None)
for pk, obj in self._get_object_list():
yield (pk, self.get_label(obj), obj == self.data)
def process_formdata(self, valuelist):
if valuelist:
if self.allow_blank and valuelist[0] == '__None':
self.data = None
else:
self._data = None
self._formdata = valuelist[0]
def pre_validate(self, form):
data = self.data
if data is not None:
for pk, obj in self._get_object_list():
if data == obj:
break
else:
raise ValidationError(self.gettext('Not a valid choice'))
elif self._formdata or not self.allow_blank:
raise ValidationError(self.gettext('Not a valid choice'))
class QuerySelectMultipleField(QuerySelectField):
"""
Very similar to QuerySelectField with the difference that this will
display a multiple select. The data property will hold a list with ORM
model instances and will be an empty list when no value is selected.
If any of the items in the data list or submitted form data cannot be
found in the query, this will result in a validation error.
"""
widget = widgets.Select(multiple=True)
def __init__(self, label=None, validators=None, default=None, **kwargs):
if default is None:
default = []
super(QuerySelectMultipleField, self).__init__(label, validators, default=default, **kwargs)
self._invalid_formdata = False
def _get_data(self):
formdata = self._formdata
if formdata is not None:
data = []
for pk, obj in self._get_object_list():
if not formdata:
break
elif pk in formdata:
formdata.remove(pk)
data.append(obj)
if formdata:
self._invalid_formdata = True
self._set_data(data)
return self._data
def _set_data(self, data):
self._data = data
self._formdata = None
data = property(_get_data, _set_data)
def iter_choices(self):
for pk, obj in self._get_object_list():
yield (pk, self.get_label(obj), obj in self.data)
def process_formdata(self, valuelist):
self._formdata = set(valuelist)
def pre_validate(self, form):
if self._invalid_formdata:
raise ValidationError(self.gettext('Not a valid choice'))
elif self.data:
obj_list = list(x[1] for x in self._get_object_list())
for v in self.data:
if v not in obj_list:
raise ValidationError(self.gettext('Not a valid choice'))
def get_pk_from_identity(obj):
cls, key = identity_key(instance=obj)
return ':'.join(text_type(x) for x in key)
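# Illustrative usage sketch (the `session` and `User` model below are
# hypothetical and not defined in this module):
#
#     from wtforms import Form
#
#     class PostForm(Form):
#         author = QuerySelectField(
#             query_factory=lambda: session.query(User).order_by(User.name),
#             get_label='name',   # resolved via operator.attrgetter('name')
#             allow_blank=True)   # prepends a '__None' blank choice
#
#     # In a view, a query can also be assigned per instance:
#     #     form.author.query = session.query(User).filter_by(active=True)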
|
jalonsob/Informes
|
refs/heads/master
|
build/lib.linux-x86_64-2.7/vizgrimoire/metrics/irc_metrics.py
|
4
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# This file is a part of GrimoireLib
# (a Python library for the MetricsGrimoire and vizGrimoire systems)
#
#
# Authors:
# Alvaro del Castillo <acs@bitergia.com>
# Daniel Izquierdo <dizquierdo@bitergia.com>
# Santiago Dueñas <sduenas@bitergia.com>
#
from vizgrimoire.metrics.metrics import Metrics
from vizgrimoire.metrics.metrics_filter import MetricFilters
from vizgrimoire.metrics.query_builder import IRCQuery
from vizgrimoire.IRC import IRC
from sets import Set
class Sent(Metrics):
"""Messages sent metric class for IRC channels"""
id = "sent"
name = "Sent messages"
desc = "Number of messages sent to IRC channels"
data_source = IRC
def _get_sql(self, evolutionary):
fields = Set([])
tables = Set([])
filters = Set([])
fields.add("COUNT(i.message) AS sent")
tables.add("irclog i")
tables.union_update(self.db.GetSQLReportFrom(self.filters))
filters.add("i.type = 'COMMENT'")
filters.union_update(self.db.GetSQLReportWhere(self.filters))
query = self.db.BuildQuery(self.filters.period, self.filters.startdate,
self.filters.enddate, " i.date ", fields,
tables, filters, evolutionary, self.filters.type_analysis)
return query
class Senders(Metrics):
"""Messages senders class for IRC channels"""
id = "senders"
name = "Message senders"
desc = "Number of message senders to IRC channels"
data_source = IRC
def _get_top_global (self, days = 0, metric_filters = None):
        if metric_filters is None:
metric_filters = self.filters
startdate = metric_filters.startdate
enddate = metric_filters.enddate
limit = metric_filters.npeople
filter_bots = self.db.get_bots_filter_sql(self.data_source, metric_filters)
if filter_bots != "": filter_bots += " AND "
date_limit = ""
        if days != 0:
sql = "SELECT @maxdate:=max(date) from irclog limit 1"
res = self.db.ExecuteQuery(sql)
date_limit = " AND DATEDIFF(@maxdate, date)<"+str(days)
q = "SELECT up.uuid as id, up.identifier as senders,"+\
" COUNT(irclog.id) as sent "+\
" FROM irclog, people_uidentities pup, "+self.db.identities_db+".uidentities up "+\
" WHERE "+ filter_bots +\
" irclog.type = 'COMMENT' and "+\
" irclog.nick = pup.people_id and "+\
" pup.uuid = up.uuid and "+\
" date >= "+ startdate+ " and "+\
" date < "+ enddate+ " "+ date_limit +\
" GROUP BY senders "+\
" ORDER BY sent desc, senders "+\
" LIMIT " + str(limit)
return(self.db.ExecuteQuery(q))
def _get_sql(self, evolutionary):
fields = Set([])
tables = Set([])
filters = Set([])
fields.add("COUNT(DISTINCT(i.nick)) AS senders")
tables.add("irclog i")
tables.union_update(self.db.GetSQLReportFrom(self.filters))
filters.add("type = 'COMMENT'")
filters.union_update(self.db.GetSQLReportWhere(self.filters))
query = self.db.BuildQuery(self.filters.period, self.filters.startdate,
self.filters.enddate, " i.date ", fields,
tables, filters, evolutionary, self.filters.type_analysis)
return query
class Repositories(Metrics):
"""Repositories metric class for IRC channels"""
id = "repositories"
name = "Repositories"
desc = "Number of active repositories"
data_source = IRC
def _get_sql(self, evolutionary):
fields = Set([])
tables = Set([])
filters = Set([])
fields.add("COUNT(DISTINCT(i.channel_id)) AS repositories")
tables.add("irclog i")
tables.union_update(self.db.GetSQLReportFrom(self.filters))
filters.union_update(self.db.GetSQLReportWhere(self.filters))
query = self.db.BuildQuery(self.filters.period, self.filters.startdate,
self.filters.enddate, " i.date ", fields,
tables, filters, evolutionary, self.filters.type_analysis)
return query
def get_list (self):
tables_set = Set([])
tables_set.add("irclog i")
tables_set.add("channels chan")
filters_set = Set([])
filters_set.add("i.channel_id = chan.id")
tables_set.union_update(self.db.GetSQLReportFrom(self.filters))
filters_set.union_update(self.db.GetSQLReportWhere(self.filters))
tables = self.db._get_fields_query(tables_set)
filters = self.db._get_filters_query(filters_set)
q = "SELECT name, count(i.id) AS total "+\
" FROM " + tables +\
" WHERE " + filters +\
" GROUP BY name ORDER BY total DESC"
return(self.db.ExecuteQuery(q)['name'])
class RegisteredUsers(Metrics):
"""Total number of registered users in the service (Slack supported)"""
id = "registered_users"
name = "Registered Users"
desc = "Total number of registered users in the service (Slack supported)"
data_source = IRC
def get_agg(self):
q = "SELECT COUNT(*) as registered_users FROM people"
return(self.db.ExecuteQuery(q))
def get_ts(self):
return {}
# Examples of use
if __name__ == '__main__':
filters = MetricFilters("week", "'2010-01-01'", "'2014-01-01'", ["company", "'Red Hat'"])
dbcon = IRCQuery("root", "", "cp_irc_SingleProject", "cp_irc_SingleProject",)
redhat = Sent(dbcon, filters)
all = Sent(dbcon)
# print redhat.get_ts()
print redhat.get_agg()
print all.get_agg()
|
www220/esp-idf
|
refs/heads/master
|
docs/gen-toolchain-links.py
|
2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This script generates toolchain download links and toolchain unpacking
# code snippets based on information found in $IDF_PATH/tools/toolchain_versions.mk
#
from __future__ import print_function
import sys
import os
def main():
if len(sys.argv) != 4:
print("Usage: gen-toolchain-links.py <versions file> <base download URL> <output directory>")
sys.exit(1)
out_dir = sys.argv[3]
if not os.path.exists(out_dir):
print("Creating directory %s" % out_dir)
os.mkdir(out_dir)
base_url = sys.argv[2]
versions_file = sys.argv[1]
version_vars = {}
with open(versions_file) as f:
for line in f:
name, var = line.partition("=")[::2]
version_vars[name.strip()] = var.strip()
gcc_version = version_vars["CURRENT_TOOLCHAIN_GCC_VERSION"]
toolchain_desc = version_vars["CURRENT_TOOLCHAIN_COMMIT_DESC_SHORT"]
unpack_code_linux_macos = """
::
mkdir -p ~/esp
cd ~/esp
tar -x{}f ~/Downloads/{}
"""
scratch_build_code_linux_macos = """
::
cd ~/esp
git clone -b xtensa-1.22.x https://github.com/espressif/crosstool-NG.git
cd crosstool-NG
./bootstrap && ./configure --enable-local && make install
"""
platform_info = [ ["linux64", "tar.gz", "z", unpack_code_linux_macos],
["linux32", "tar.gz", "z", unpack_code_linux_macos],
["osx", "tar.gz", "z", unpack_code_linux_macos],
["win32", "zip", None, None]]
with open(os.path.join(out_dir, 'download-links.inc'), "w") as links_file:
for p in platform_info:
platform_name = p[0]
extension = p[1]
unpack_cmd = p[2]
unpack_code = p[3]
archive_name = 'xtensa-esp32-elf-{}-{}-{}.{}'.format(
platform_name, toolchain_desc, gcc_version, extension)
print('.. |download_link_{}| replace:: {}{}'.format(
platform_name, base_url, archive_name), file=links_file)
if unpack_code is not None:
with open(os.path.join(out_dir, 'unpack-code-%s.inc' % platform_name), "w") as f:
print(unpack_code.format(unpack_cmd, archive_name), file=f)
with open(os.path.join(out_dir, 'scratch-build-code.inc'), "w") as code_file:
print(scratch_build_code_linux_macos, file=code_file)
if __name__ == "__main__":
main()
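# Example invocation (paths and URL are hypothetical; argument order matches
# the usage string printed above):
#
#     ./gen-toolchain-links.py tools/toolchain_versions.mk \
#         https://dl.espressif.com/dl/ docs/_build/inc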
|
zimmermegan/smarda
|
refs/heads/master
|
nltk-3.0.3/nltk/tag/brill.py
|
7
|
# -*- coding: utf-8 -*-
# Natural Language Toolkit: Transformation-based learning
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Marcus Uneson <marcus.uneson@gmail.com>
# based on previous (nltk2) version by
# Christopher Maloof, Edward Loper, Steven Bird
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import print_function, division
from collections import defaultdict
from nltk.compat import Counter
from nltk.tag import TaggerI
from nltk.tbl import Feature, Template
from nltk import jsontags
######################################################################
# Brill Templates
######################################################################
@jsontags.register_tag
class Word(Feature):
"""
Feature which examines the text (word) of nearby tokens.
"""
json_tag = 'nltk.tag.brill.Word'
@staticmethod
def extract_property(tokens, index):
"""@return: The given token's text."""
return tokens[index][0]
@jsontags.register_tag
class Pos(Feature):
"""
Feature which examines the tags of nearby tokens.
"""
json_tag = 'nltk.tag.brill.Pos'
@staticmethod
def extract_property(tokens, index):
"""@return: The given token's tag."""
return tokens[index][1]
def nltkdemo18():
"""
Return 18 templates, from the original nltk demo, in multi-feature syntax
"""
return [
Template(Pos([-1])),
Template(Pos([1])),
Template(Pos([-2])),
Template(Pos([2])),
Template(Pos([-2, -1])),
Template(Pos([1, 2])),
Template(Pos([-3, -2, -1])),
Template(Pos([1, 2, 3])),
Template(Pos([-1]), Pos([1])),
Template(Word([-1])),
Template(Word([1])),
Template(Word([-2])),
Template(Word([2])),
Template(Word([-2, -1])),
Template(Word([1, 2])),
Template(Word([-3, -2, -1])),
Template(Word([1, 2, 3])),
Template(Word([-1]), Word([1])),
]
def nltkdemo18plus():
"""
Return 18 templates, from the original nltk demo, and additionally a few
multi-feature ones (the motivation is easy comparison with nltkdemo18)
"""
return nltkdemo18() + [
Template(Word([-1]), Pos([1])),
Template(Pos([-1]), Word([1])),
Template(Word([-1]), Word([0]), Pos([1])),
Template(Pos([-1]), Word([0]), Word([1])),
Template(Pos([-1]), Word([0]), Pos([1])),
]
def fntbl37():
"""
Return 37 templates taken from the postagging task of the
fntbl distribution http://www.cs.jhu.edu/~rflorian/fntbl/
(37 is after excluding a handful which do not condition on Pos[0];
fntbl can do that but the current nltk implementation cannot.)
"""
return [
Template(Word([0]), Word([1]), Word([2])),
Template(Word([-1]), Word([0]), Word([1])),
Template(Word([0]), Word([-1])),
Template(Word([0]), Word([1])),
Template(Word([0]), Word([2])),
Template(Word([0]), Word([-2])),
Template(Word([1, 2])),
Template(Word([-2, -1])),
Template(Word([1, 2, 3])),
Template(Word([-3, -2, -1])),
Template(Word([0]), Pos([2])),
Template(Word([0]), Pos([-2])),
Template(Word([0]), Pos([1])),
Template(Word([0]), Pos([-1])),
Template(Word([0])),
Template(Word([-2])),
Template(Word([2])),
Template(Word([1])),
Template(Word([-1])),
Template(Pos([-1]), Pos([1])),
Template(Pos([1]), Pos([2])),
Template(Pos([-1]), Pos([-2])),
Template(Pos([1])),
Template(Pos([-1])),
Template(Pos([-2])),
Template(Pos([2])),
Template(Pos([1, 2, 3])),
Template(Pos([1, 2])),
Template(Pos([-3, -2, -1])),
Template(Pos([-2, -1])),
Template(Pos([1]), Word([0]), Word([1])),
Template(Pos([1]), Word([0]), Word([-1])),
Template(Pos([-1]), Word([-1]), Word([0])),
Template(Pos([-1]), Word([0]), Word([1])),
Template(Pos([-2]), Pos([-1])),
Template(Pos([1]), Pos([2])),
Template(Pos([1]), Pos([2]), Word([1]))
]
def brill24():
"""
Return 24 templates of the seminal TBL paper, Brill (1995)
"""
return [
Template(Pos([-1])),
Template(Pos([1])),
Template(Pos([-2])),
Template(Pos([2])),
Template(Pos([-2, -1])),
Template(Pos([1, 2])),
Template(Pos([-3, -2, -1])),
Template(Pos([1, 2, 3])),
Template(Pos([-1]), Pos([1])),
Template(Pos([-2]), Pos([-1])),
Template(Pos([1]), Pos([2])),
Template(Word([-1])),
Template(Word([1])),
Template(Word([-2])),
Template(Word([2])),
Template(Word([-2, -1])),
Template(Word([1, 2])),
Template(Word([-1, 0])),
Template(Word([0, 1])),
Template(Word([0])),
Template(Word([-1]), Pos([-1])),
Template(Word([1]), Pos([1])),
Template(Word([0]), Word([-1]), Pos([-1])),
Template(Word([0]), Word([1]), Pos([1])),
]
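# Illustrative training sketch (assumes the nltk.corpus.treebank sample is
# installed and uses nltk.tag.brill_trainer; not part of this module):
#
#     from nltk.corpus import treebank
#     from nltk.tag import UnigramTagger
#     from nltk.tag.brill_trainer import BrillTaggerTrainer
#
#     train_sents = treebank.tagged_sents()[:3000]
#     trainer = BrillTaggerTrainer(UnigramTagger(train_sents), brill24(), trace=1)
#     tagger = trainer.train(train_sents, max_rules=50)
#     print(tagger.rules()[:5])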
def describe_template_sets():
"""
    Print the available template sets in this demo, with a short description.
"""
import inspect
import sys
# a bit of magic to get all functions in this module
templatesets = inspect.getmembers(sys.modules[__name__], inspect.isfunction)
for (name, obj) in templatesets:
if name == "describe_template_sets":
continue
print(name, obj.__doc__, "\n")
######################################################################
# The Brill Tagger
######################################################################
@jsontags.register_tag
class BrillTagger(TaggerI):
"""
Brill's transformational rule-based tagger. Brill taggers use an
initial tagger (such as ``tag.DefaultTagger``) to assign an initial
tag sequence to a text; and then apply an ordered list of
transformational rules to correct the tags of individual tokens.
These transformation rules are specified by the ``TagRule``
interface.
Brill taggers can be created directly, from an initial tagger and
a list of transformational rules; but more often, Brill taggers
are created by learning rules from a training corpus, using one
of the TaggerTrainers available.
"""
json_tag = 'nltk.tag.BrillTagger'
def __init__(self, initial_tagger, rules, training_stats=None):
"""
:param initial_tagger: The initial tagger
:type initial_tagger: TaggerI
:param rules: An ordered list of transformation rules that
should be used to correct the initial tagging.
:type rules: list(TagRule)
:param training_stats: A dictionary of statistics collected
during training, for possible later use
:type training_stats: dict
"""
self._initial_tagger = initial_tagger
self._rules = tuple(rules)
self._training_stats = training_stats
def encode_json_obj(self):
return self._initial_tagger, self._rules, self._training_stats
@classmethod
def decode_json_obj(cls, obj):
_initial_tagger, _rules, _training_stats = obj
return cls(_initial_tagger, _rules, _training_stats)
def rules(self):
"""
Return the ordered list of transformation rules that this tagger has learnt
:return: the ordered list of transformation rules that correct the initial tagging
:rtype: list of Rules
"""
return self._rules
def train_stats(self, statistic=None):
"""
Return a named statistic collected during training, or a dictionary of all
available statistics if no name given
:param statistic: name of statistic
:type statistic: str
:return: some statistic collected during training of this tagger
:rtype: any (but usually a number)
"""
if statistic is None:
return self._training_stats
else:
return self._training_stats.get(statistic)
def tag(self, tokens):
# Inherit documentation from TaggerI
# Run the initial tagger.
tagged_tokens = self._initial_tagger.tag(tokens)
# Create a dictionary that maps each tag to a list of the
# indices of tokens that have that tag.
tag_to_positions = defaultdict(set)
for i, (token, tag) in enumerate(tagged_tokens):
tag_to_positions[tag].add(i)
# Apply each rule, in order. Only try to apply rules at
# positions that have the desired original tag.
for rule in self._rules:
# Find the positions where it might apply
positions = tag_to_positions.get(rule.original_tag, [])
# Apply the rule at those positions.
changed = rule.apply(tagged_tokens, positions)
# Update tag_to_positions with the positions of tags that
# were modified.
for i in changed:
tag_to_positions[rule.original_tag].remove(i)
tag_to_positions[rule.replacement_tag].add(i)
return tagged_tokens
def print_template_statistics(self, test_stats=None, printunused=True):
"""
Print a list of all templates, ranked according to efficiency.
If test_stats is available, the templates are ranked according to their
relative contribution (summed for all rules created from a given template,
weighted by score) to the performance on the test set. If no test_stats, then
statistics collected during training are used instead. There is also
an unweighted measure (just counting the rules). This is less informative,
        though, as many low-score rules will appear towards the end of training.
:param test_stats: dictionary of statistics collected during testing
:type test_stats: dict of str -> any (but usually numbers)
:param printunused: if True, print a list of all unused templates
:type printunused: bool
:return: None
:rtype: None
"""
tids = [r.templateid for r in self._rules]
train_stats = self.train_stats()
trainscores = train_stats['rulescores']
assert len(trainscores) == len(tids), "corrupt statistics: " \
"{0} train scores for {1} rules".format(trainscores, tids)
template_counts = Counter(tids)
weighted_traincounts = Counter()
for (tid, score) in zip(tids, trainscores):
weighted_traincounts[tid] += score
tottrainscores = sum(trainscores)
# det_tplsort() is for deterministic sorting;
# the otherwise convenient Counter.most_common() unfortunately
# does not break ties deterministically
# between python versions and will break cross-version tests
def det_tplsort(tpl_value):
return (tpl_value[1], repr(tpl_value[0]))
def print_train_stats():
print("TEMPLATE STATISTICS (TRAIN) {0} templates, {1} rules)".format(
len(template_counts),
len(tids))
)
print("TRAIN ({tokencount:7d} tokens) initial {initialerrors:5d} {initialacc:.4f} "
"final: {finalerrors:5d} {finalacc:.4f} ".format(**train_stats))
head = "#ID | Score (train) | #Rules | Template"
print(head, "\n", "-" * len(head), sep="")
train_tplscores = sorted(weighted_traincounts.items(), key=det_tplsort, reverse=True)
for (tid, trainscore) in train_tplscores:
s = "{0} | {1:5d} {2:5.3f} |{3:4d} {4:.3f} | {5}".format(
tid,
trainscore,
trainscore/tottrainscores,
template_counts[tid],
template_counts[tid]/len(tids),
Template.ALLTEMPLATES[int(tid)],
)
print(s)
def print_testtrain_stats():
testscores = test_stats['rulescores']
print("TEMPLATE STATISTICS (TEST AND TRAIN) ({0} templates, {1} rules)".format(
len(template_counts),
len(tids)),
)
print("TEST ({tokencount:7d} tokens) initial {initialerrors:5d} {initialacc:.4f} "
"final: {finalerrors:5d} {finalacc:.4f} ".format(**test_stats))
print("TRAIN ({tokencount:7d} tokens) initial {initialerrors:5d} {initialacc:.4f} "
"final: {finalerrors:5d} {finalacc:.4f} ".format(**train_stats))
weighted_testcounts = Counter()
for (tid, score) in zip(tids, testscores):
weighted_testcounts[tid] += score
tottestscores = sum(testscores)
head = "#ID | Score (test) | Score (train) | #Rules | Template"
print(head, "\n", "-" * len(head), sep="")
test_tplscores = sorted(weighted_testcounts.items(), key=det_tplsort, reverse=True)
for (tid, testscore) in test_tplscores:
s = "{0:s} |{1:5d} {2:6.3f} | {3:4d} {4:.3f} |{5:4d} {6:.3f} | {7:s}".format(
tid,
testscore,
testscore/tottestscores,
weighted_traincounts[tid],
weighted_traincounts[tid]/tottrainscores,
template_counts[tid],
template_counts[tid]/len(tids),
Template.ALLTEMPLATES[int(tid)],
)
print(s)
def print_unused_templates():
usedtpls = set([int(tid) for tid in tids])
unused = [(tid, tpl) for (tid, tpl) in enumerate(Template.ALLTEMPLATES) if tid not in usedtpls]
print("UNUSED TEMPLATES ({0})".format(len(unused)))
for (tid, tpl) in unused:
print("{0:03d} {1:s}".format(tid, tpl))
if test_stats is None:
print_train_stats()
else:
print_testtrain_stats()
print()
if printunused:
print_unused_templates()
print()
def batch_tag_incremental(self, sequences, gold):
"""
Tags by applying each rule to the entire corpus (rather than all rules to a
single sequence). The point is to collect statistics on the test set for
individual rules.
NOTE: This is inefficient (does not build any index, so will traverse the entire
corpus N times for N rules) -- usually you would not care about statistics for
individual rules and thus use batch_tag() instead
:param sequences: lists of token sequences (sentences, in some applications) to be tagged
:type sequences: list of list of strings
:param gold: the gold standard
:type gold: list of list of strings
:returns: tuple of (tagged_sequences, ordered list of rule scores (one for each rule))
"""
def counterrors(xs):
return sum(t[1] != g[1] for pair in zip(xs, gold) for (t, g) in zip(*pair))
testing_stats = {}
testing_stats['tokencount'] = sum(len(t) for t in sequences)
testing_stats['sequencecount'] = len(sequences)
tagged_tokenses = [self._initial_tagger.tag(tokens) for tokens in sequences]
testing_stats['initialerrors'] = counterrors(tagged_tokenses)
testing_stats['initialacc'] = 1 - testing_stats['initialerrors']/testing_stats['tokencount']
# Apply each rule to the entire corpus, in order
errors = [testing_stats['initialerrors']]
for rule in self._rules:
for tagged_tokens in tagged_tokenses:
rule.apply(tagged_tokens)
errors.append(counterrors(tagged_tokenses))
testing_stats['rulescores'] = [err0 - err1 for (err0, err1) in zip(errors, errors[1:])]
testing_stats['finalerrors'] = errors[-1]
testing_stats['finalacc'] = 1 - testing_stats['finalerrors']/testing_stats['tokencount']
return (tagged_tokenses, testing_stats)
if __name__ == "__main__":
import doctest
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)
|
chen0510566/MissionPlanner
|
refs/heads/master
|
Lib/site-packages/scipy/fftpack/_fftpack.py
|
53
|
import sys
if sys.platform == 'cli':
import clr
clr.AddReference("_fftpack")
from scipy__fftpack___fftpack import *
|
2013Commons/hue
|
refs/heads/master
|
desktop/core/src/desktop/lib/paths.py
|
31
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Path-related utilities
#
# These are useful for testing, but shouldn't be heavily depended on,
# since paths have a tendency to change during packaging.
import os
"""
The project directory structure:
root/ <-- Project root (build root, run root)
apps/ <-- Apps root
beeswax/
desktop/ <-- Desktop root
core/
src/desktop/lib/paths.py <-- You're reading this file
...
ext/
thirdparty/...
"""
def __get_root(*append):
"""
Returns the root directory of the project.
"""
if append is None:
append = [ ]
path = os.path.join(
os.path.dirname(__file__),
"..", "..", "..", "..", "..",
*append)
return os.path.abspath(path)
def get_build_dir(*append):
"""
Returns 'build' directory for Desktop.
This is used for temporary (and testing) artifacts.
This is not the root source path.
"""
return __get_root('build', *append)
def get_desktop_root(*append):
"""
Returns the directory for Desktop.
"""
return __get_root("desktop", *append)
def get_apps_root(*append):
"""
Returns the directory for apps.
"""
return __get_root("apps", *append)
def get_thirdparty_root(*append):
"""
Returns the ext/thirdparty directory.
"""
return __get_root("ext", "thirdparty", *append)
def get_run_root(*append):
"""
Returns the run time root directory
"""
return __get_root(*append)
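# Illustrative usage (the resulting absolute paths depend on where the
# repository is checked out):
#
#     get_desktop_root('core', 'src')   # <root>/desktop/core/src
#     get_build_dir('env')              # <root>/build/env
#     get_apps_root('beeswax')          # <root>/apps/beeswax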
|
Bachaco-ve/odoo
|
refs/heads/8.0
|
addons/point_of_sale/report/__init__.py
|
381
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pos_users_product
import account_statement
import pos_receipt
import pos_invoice
import pos_lines
import pos_details
import pos_payment_report
import pos_report
import pos_order_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
saurabh6790/omnit-app
|
refs/heads/master
|
setup/doctype/backup_manager/backup_googledrive.py
|
30
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
# SETUP:
# install pip install --upgrade google-api-python-client
#
# In Google API
# - create new API project
# - create new oauth2 client (create installed app type as google \
# does not support subdomains)
#
# in conf.py, set oauth2 settings
# gdrive_client_id
# gdrive_client_secret
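#
# Illustrative conf.py snippet (values are placeholders, not real credentials):
#     gdrive_client_id = "1234567890.apps.googleusercontent.com"
#     gdrive_client_secret = "your-client-secret"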
from __future__ import unicode_literals
import httplib2
import os
import mimetypes
import webnotes
import oauth2client.client
from webnotes.utils import get_base_path, cstr
from webnotes import _, msgprint
from apiclient.discovery import build
from apiclient.http import MediaFileUpload
# define log config for google drive api's log messages
# basicConfig redirects log to stderr
import logging
logging.basicConfig()
@webnotes.whitelist()
def get_gdrive_authorize_url():
flow = get_gdrive_flow()
authorize_url = flow.step1_get_authorize_url()
return {
"authorize_url": authorize_url,
}
def upload_files(name, mimetype, service, folder_id):
if not webnotes.conn:
webnotes.connect()
file_name = os.path.basename(name)
media_body = MediaFileUpload(name, mimetype=mimetype, resumable=True)
body = {
'title': file_name,
'description': 'Backup File',
        'mimeType': mimetype,
        'parents': [{
            'kind': 'drive#fileLink',
'id': folder_id
}]
}
request = service.files().insert(body=body, media_body=media_body)
response = None
while response is None:
status, response = request.next_chunk()
def backup_to_gdrive():
from webnotes.utils.backups import new_backup
if not webnotes.conn:
webnotes.connect()
get_gdrive_flow()
credentials_json = webnotes.conn.get_value("Backup Manager", None, "gdrive_credentials")
credentials = oauth2client.client.Credentials.new_from_json(credentials_json)
http = httplib2.Http()
http = credentials.authorize(http)
drive_service = build('drive', 'v2', http=http)
# upload database
backup = new_backup()
path = os.path.join(get_base_path(), "public", "backups")
filename = os.path.join(path, os.path.basename(backup.backup_path_db))
# upload files to database folder
upload_files(filename, 'application/x-gzip', drive_service,
webnotes.conn.get_value("Backup Manager", None, "database_folder_id"))
# upload files to files folder
did_not_upload = []
error_log = []
files_folder_id = webnotes.conn.get_value("Backup Manager", None, "files_folder_id")
webnotes.conn.close()
path = os.path.join(get_base_path(), "public", "files")
for filename in os.listdir(path):
filename = cstr(filename)
found = False
filepath = os.path.join(path, filename)
ext = filename.split('.')[-1]
size = os.path.getsize(filepath)
if ext == 'gz' or ext == 'gzip':
mimetype = 'application/x-gzip'
else:
mimetype = mimetypes.types_map.get("." + ext) or "application/octet-stream"
        # Compare local file with server file
children = drive_service.children().list(folderId=files_folder_id).execute()
for child in children.get('items', []):
file = drive_service.files().get(fileId=child['id']).execute()
if filename == file['title'] and size == int(file['fileSize']):
found = True
break
if not found:
try:
upload_files(filepath, mimetype, drive_service, files_folder_id)
except Exception, e:
did_not_upload.append(filename)
error_log.append(cstr(e))
webnotes.connect()
return did_not_upload, list(set(error_log))
def get_gdrive_flow():
from oauth2client.client import OAuth2WebServerFlow
from webnotes import conf
if not "gdrive_client_id" in conf:
webnotes.msgprint(_("Please set Google Drive access keys in") + " conf.py",
raise_exception=True)
flow = OAuth2WebServerFlow(conf.gdrive_client_id, conf.gdrive_client_secret,
"https://www.googleapis.com/auth/drive", 'urn:ietf:wg:oauth:2.0:oob')
return flow
@webnotes.whitelist()
def gdrive_callback(verification_code = None):
flow = get_gdrive_flow()
if verification_code:
credentials = flow.step2_exchange(verification_code)
allowed = 1
# make folders to save id
http = httplib2.Http()
http = credentials.authorize(http)
drive_service = build('drive', 'v2', http=http)
erpnext_folder_id = create_erpnext_folder(drive_service)
database_folder_id = create_folder('database', drive_service, erpnext_folder_id)
files_folder_id = create_folder('files', drive_service, erpnext_folder_id)
webnotes.conn.set_value("Backup Manager", "Backup Manager", "gdrive_access_allowed", allowed)
webnotes.conn.set_value("Backup Manager", "Backup Manager", "database_folder_id", database_folder_id)
webnotes.conn.set_value("Backup Manager", "Backup Manager", "files_folder_id", files_folder_id)
final_credentials = credentials.to_json()
webnotes.conn.set_value("Backup Manager", "Backup Manager", "gdrive_credentials", final_credentials)
webnotes.msgprint("Updated")
def create_erpnext_folder(service):
if not webnotes.conn:
webnotes.connect()
erpnext = {
'title': 'erpnext',
'mimeType': 'application/vnd.google-apps.folder'
}
erpnext = service.files().insert(body=erpnext).execute()
return erpnext['id']
def create_folder(name, service, folder_id):
database = {
'title': name,
'mimeType': 'application/vnd.google-apps.folder',
'parents': [{
'kind': 'drive#fileLink',
'id': folder_id
}]
}
database = service.files().insert(body=database).execute()
return database['id']
if __name__=="__main__":
backup_to_gdrive()
|
sserrot/champion_relationships
|
refs/heads/master
|
venv/Lib/site-packages/setuptools/dep_util.py
|
20
|
from distutils.dep_util import newer_group
# yes, this was almost entirely copy-pasted from
# 'newer_pairwise()', this is just another convenience
# function.
def newer_pairwise_group(sources_groups, targets):
"""Walk both arguments in parallel, testing if each source group is newer
than its corresponding target. Returns a pair of lists (sources_groups,
targets) where sources is newer than target, according to the semantics
of 'newer_group()'.
"""
if len(sources_groups) != len(targets):
raise ValueError(
"'sources_group' and 'targets' must be the same length")
# build a pair of lists (sources_groups, targets) where source is newer
n_sources = []
n_targets = []
for i in range(len(sources_groups)):
if newer_group(sources_groups[i], targets[i]):
n_sources.append(sources_groups[i])
n_targets.append(targets[i])
return n_sources, n_targets
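# Illustrative usage sketch (file names are hypothetical):
#
#     groups = [['a.c', 'a.h'], ['b.c']]
#     targets = ['a.o', 'b.o']
#     # Returns only the (group, target) pairs where some source in the group
#     # is newer than its target, preserving the pairwise order.
#     stale_groups, stale_targets = newer_pairwise_group(groups, targets)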
|
devurandom/portage
|
refs/heads/master
|
pym/portage/util/_eventloop/EventLoop.py
|
1
|
# Copyright 1999-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import errno
import fcntl
import logging
import os
import select
import signal
import time
from portage.util import writemsg_level
from ..SlotObject import SlotObject
from .PollConstants import PollConstants
from .PollSelectAdapter import PollSelectAdapter
class EventLoop(object):
supports_multiprocessing = True
# TODO: Find out why SIGCHLD signals aren't delivered during poll
# calls, forcing us to wakeup in order to receive them.
_sigchld_interval = 250
class _child_callback_class(SlotObject):
__slots__ = ("callback", "data", "pid", "source_id")
class _idle_callback_class(SlotObject):
__slots__ = ("args", "callback", "calling", "source_id")
class _io_handler_class(SlotObject):
__slots__ = ("args", "callback", "f", "source_id")
class _timeout_handler_class(SlotObject):
__slots__ = ("args", "function", "calling", "interval", "source_id",
"timestamp")
def __init__(self, main=True):
"""
@param main: If True then this is a singleton instance for use
in the main thread, otherwise it is a local instance which
            can safely be used in a non-main thread (default is True, so
that global_event_loop does not need constructor arguments)
@type main: bool
"""
self._use_signal = main
self._poll_event_queue = []
self._poll_event_handlers = {}
self._poll_event_handler_ids = {}
# Increment id for each new handler.
self._event_handler_id = 0
self._idle_callbacks = {}
self._timeout_handlers = {}
self._timeout_interval = None
try:
select.epoll
except AttributeError:
self._poll_obj = create_poll_instance()
self.IO_ERR = PollConstants.POLLERR
self.IO_HUP = PollConstants.POLLHUP
self.IO_IN = PollConstants.POLLIN
self.IO_NVAL = PollConstants.POLLNVAL
self.IO_OUT = PollConstants.POLLOUT
self.IO_PRI = PollConstants.POLLPRI
else:
self._poll_obj = _epoll_adapter(select.epoll())
self.IO_ERR = select.EPOLLERR
self.IO_HUP = select.EPOLLHUP
self.IO_IN = select.EPOLLIN
self.IO_NVAL = 0
self.IO_OUT = select.EPOLLOUT
self.IO_PRI = select.EPOLLPRI
self._child_handlers = {}
self._sigchld_read = None
self._sigchld_write = None
self._sigchld_src_id = None
self._pid = os.getpid()
def _poll(self, timeout=None):
"""
All poll() calls pass through here. The poll events
are added directly to self._poll_event_queue.
In order to avoid endless blocking, this raises
StopIteration if timeout is None and there are
no file descriptors to poll.
"""
if timeout is None and \
not self._poll_event_handlers:
raise StopIteration(
"timeout is None and there are no poll() event handlers")
while True:
try:
self._poll_event_queue.extend(self._poll_obj.poll(timeout))
break
except (IOError, select.error) as e:
# Silently handle EINTR, which is normal when we have
# received a signal such as SIGINT (epoll objects may
# raise IOError rather than select.error, at least in
# Python 3.2).
if not (e.args and e.args[0] == errno.EINTR):
writemsg_level("\n!!! select error: %s\n" % (e,),
level=logging.ERROR, noiselevel=-1)
del e
# This typically means that we've received a SIGINT, so
# raise StopIteration in order to break out of our current
# iteration and respond appropriately to the signal as soon
# as possible.
raise StopIteration("interrupted")
def iteration(self, *args):
"""
Like glib.MainContext.iteration(), runs a single iteration.
@type may_block: bool
@param may_block: if True the call may block waiting for an event
(default is True).
@rtype: bool
@return: True if events were dispatched.
"""
may_block = True
if args:
if len(args) > 1:
raise TypeError(
"expected at most 1 argument (%s given)" % len(args))
may_block = args[0]
event_queue = self._poll_event_queue
event_handlers = self._poll_event_handlers
events_handled = 0
if not event_handlers:
if self._run_timeouts():
events_handled += 1
if not event_handlers:
if not events_handled and may_block and \
self._timeout_interval is not None:
# Block so that we don't waste cpu time by looping too
# quickly. This makes EventLoop useful for code that needs
# to wait for timeout callbacks regardless of whether or
# not any IO handlers are currently registered.
try:
self._poll(timeout=self._timeout_interval)
except StopIteration:
pass
if self._run_timeouts():
events_handled += 1
# If any timeouts have executed, then return immediately,
# in order to minimize latency in termination of iteration
# loops that they may control.
if events_handled or not event_handlers:
return bool(events_handled)
if not event_queue:
if may_block:
if self._child_handlers:
if self._timeout_interval is None:
timeout = self._sigchld_interval
else:
timeout = min(self._sigchld_interval,
self._timeout_interval)
else:
timeout = self._timeout_interval
else:
timeout = 0
try:
self._poll(timeout=timeout)
except StopIteration:
# This can be triggered by EINTR which is caused by signals.
pass
# NOTE: IO event handlers may be re-entrant, in case something
# like AbstractPollTask._wait_loop() needs to be called inside
# a handler for some reason.
while event_queue:
events_handled += 1
f, event = event_queue.pop()
x = event_handlers[f]
if not x.callback(f, event, *x.args):
self.source_remove(x.source_id)
# Run timeouts last, in order to minimize latency in
# termination of iteration loops that they may control.
if self._run_timeouts():
events_handled += 1
return bool(events_handled)
def child_watch_add(self, pid, callback, data=None):
"""
Like glib.child_watch_add(), sets callback to be called with the
user data specified by data when the child indicated by pid exits.
The signature for the callback is:
def callback(pid, condition, user_data)
		where pid is the child process id, condition is the status
		information about the child process and user_data is data.
		@type pid: int
@param pid: process id of a child process to watch
@type callback: callable
@param callback: a function to call
@type data: object
@param data: the optional data to pass to function
@rtype: int
@return: an integer ID
"""
self._event_handler_id += 1
source_id = self._event_handler_id
self._child_handlers[source_id] = self._child_callback_class(
callback=callback, data=data, pid=pid, source_id=source_id)
if self._use_signal:
if self._sigchld_read is None:
self._sigchld_read, self._sigchld_write = os.pipe()
fcntl.fcntl(self._sigchld_read, fcntl.F_SETFL,
fcntl.fcntl(self._sigchld_read,
fcntl.F_GETFL) | os.O_NONBLOCK)
# The IO watch is dynamically registered and unregistered as
# needed, since we don't want to consider it as a valid source
# of events when there are no child listeners. It's important
# to distinguish when there are no valid sources of IO events,
# in order to avoid an endless poll call if there's no timeout.
if self._sigchld_src_id is None:
self._sigchld_src_id = self.io_add_watch(
self._sigchld_read, self.IO_IN, self._sigchld_io_cb)
signal.signal(signal.SIGCHLD, self._sigchld_sig_cb)
# poll now, in case the SIGCHLD has already arrived
self._poll_child_processes()
return source_id
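	# Illustrative usage sketch (not part of portage; assumes an EventLoop
	# instance ``loop`` and a child pid spawned elsewhere):
	#
	#   def on_exit(pid, status, data):
	#       print("child %s exited with status %s (%s)" % (pid, status, data))
	#   loop.child_watch_add(pid, on_exit, data="build-job")
	#   while loop.iteration(True):
	#       pass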
def _sigchld_sig_cb(self, signum, frame):
# If this signal handler was not installed by the
# current process then the signal doesn't belong to
# this EventLoop instance.
if os.getpid() == self._pid:
os.write(self._sigchld_write, b'\0')
def _sigchld_io_cb(self, fd, events):
try:
while True:
os.read(self._sigchld_read, 4096)
except OSError:
# read until EAGAIN
pass
self._poll_child_processes()
return True
def _poll_child_processes(self):
if not self._child_handlers:
return False
calls = 0
for x in list(self._child_handlers.values()):
if x.source_id not in self._child_handlers:
				# it was already handled by a re-entrant call
continue
try:
wait_retval = os.waitpid(x.pid, os.WNOHANG)
except OSError as e:
if e.errno != errno.ECHILD:
raise
del e
self.source_remove(x.source_id)
else:
# With waitpid and WNOHANG, only check the
# first element of the tuple since the second
# element may vary (bug #337465).
if wait_retval[0] != 0:
calls += 1
self.source_remove(x.source_id)
x.callback(x.pid, wait_retval[1], x.data)
return bool(calls)
def idle_add(self, callback, *args):
"""
Like glib.idle_add(), if callback returns False it is
automatically removed from the list of event sources and will
not be called again.
@type callback: callable
@param callback: a function to call
@rtype: int
@return: an integer ID
"""
self._event_handler_id += 1
source_id = self._event_handler_id
self._idle_callbacks[source_id] = self._idle_callback_class(
args=args, callback=callback, source_id=source_id)
return source_id
def _run_idle_callbacks(self):
if not self._idle_callbacks:
return
		# Iterate over a local copy, since self._idle_callbacks can be
		# modified during the execution of these callbacks.
for x in list(self._idle_callbacks.values()):
if x.source_id not in self._idle_callbacks:
# it got cancelled while executing another callback
continue
if x.calling:
# don't call it recursively
continue
x.calling = True
try:
if not x.callback(*x.args):
self.source_remove(x.source_id)
finally:
x.calling = False
def timeout_add(self, interval, function, *args):
"""
Like glib.timeout_add(), interval argument is the number of
milliseconds between calls to your function, and your function
should return False to stop being called, or True to continue
being called. Any additional positional arguments given here
are passed to your function when it's called.
"""
self._event_handler_id += 1
source_id = self._event_handler_id
self._timeout_handlers[source_id] = \
self._timeout_handler_class(
interval=interval, function=function, args=args,
source_id=source_id, timestamp=time.time())
if self._timeout_interval is None or self._timeout_interval > interval:
self._timeout_interval = interval
return source_id
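	# Illustrative usage sketch (not part of portage): a 500 ms timeout that
	# stops itself after three calls by returning False, mirroring the
	# glib.timeout_add() contract described above:
	#
	#   counter = []
	#   def tick():
	#       counter.append(None)
	#       return len(counter) < 3  # False on the third call -> removed
	#   loop.timeout_add(500, tick)
	#   while loop.iteration(True):
	#       pass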
def _run_timeouts(self):
calls = 0
if not self._use_signal:
if self._poll_child_processes():
calls += 1
self._run_idle_callbacks()
if not self._timeout_handlers:
return bool(calls)
ready_timeouts = []
current_time = time.time()
for x in self._timeout_handlers.values():
elapsed_seconds = current_time - x.timestamp
# elapsed_seconds < 0 means the system clock has been adjusted
if elapsed_seconds < 0 or \
(x.interval - 1000 * elapsed_seconds) <= 0:
ready_timeouts.append(x)
		# Iterate over a local copy, since self._timeout_handlers can be
		# modified during the execution of these callbacks.
for x in ready_timeouts:
if x.source_id not in self._timeout_handlers:
# it got cancelled while executing another timeout
continue
if x.calling:
# don't call it recursively
continue
calls += 1
x.calling = True
try:
x.timestamp = time.time()
if not x.function(*x.args):
self.source_remove(x.source_id)
finally:
x.calling = False
return bool(calls)
def io_add_watch(self, f, condition, callback, *args):
"""
Like glib.io_add_watch(), your function should return False to
stop being called, or True to continue being called. Any
additional positional arguments given here are passed to your
function when it's called.
@type f: int or object with fileno() method
@param f: a file descriptor to monitor
@type condition: int
@param condition: a condition mask
@type callback: callable
@param callback: a function to call
@rtype: int
@return: an integer ID of the event source
"""
if f in self._poll_event_handlers:
raise AssertionError("fd %d is already registered" % f)
self._event_handler_id += 1
source_id = self._event_handler_id
self._poll_event_handler_ids[source_id] = f
self._poll_event_handlers[f] = self._io_handler_class(
args=args, callback=callback, f=f, source_id=source_id)
self._poll_obj.register(f, condition)
return source_id
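	# Illustrative usage sketch (not part of portage): watch the read end of
	# a pipe for input and unregister the watch by returning False once the
	# payload has been read:
	#
	#   r, w = os.pipe()
	#   def on_ready(fd, event):
	#       os.read(fd, 4096)
	#       return False  # one-shot: the source is removed automatically
	#   loop.io_add_watch(r, loop.IO_IN, on_ready)
	#   os.write(w, b'x')
	#   loop.iteration(True)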
def source_remove(self, reg_id):
"""
Like glib.source_remove(), this returns True if the given reg_id
is found and removed, and False if the reg_id is invalid or has
already been removed.
"""
x = self._child_handlers.pop(reg_id, None)
if x is not None:
if not self._child_handlers and self._use_signal:
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
self.source_remove(self._sigchld_src_id)
self._sigchld_src_id = None
return True
idle_callback = self._idle_callbacks.pop(reg_id, None)
if idle_callback is not None:
return True
timeout_handler = self._timeout_handlers.pop(reg_id, None)
if timeout_handler is not None:
if timeout_handler.interval == self._timeout_interval:
if self._timeout_handlers:
self._timeout_interval = \
min(x.interval for x in self._timeout_handlers.values())
else:
self._timeout_interval = None
return True
f = self._poll_event_handler_ids.pop(reg_id, None)
if f is None:
return False
self._poll_obj.unregister(f)
if self._poll_event_queue:
# Discard any unhandled events that belong to this file,
# in order to prevent these events from being erroneously
# delivered to a future handler that is using a reallocated
# file descriptor of the same numeric value (causing
# extremely confusing bugs).
remaining_events = []
discarded_events = False
for event in self._poll_event_queue:
if event[0] == f:
discarded_events = True
else:
remaining_events.append(event)
if discarded_events:
self._poll_event_queue[:] = remaining_events
del self._poll_event_handlers[f]
return True
_can_poll_device = None
def can_poll_device():
"""
Test if it's possible to use poll() on a device such as a pty. This
is known to fail on Darwin.
@rtype: bool
@return: True if poll() on a device succeeds, False otherwise.
"""
global _can_poll_device
if _can_poll_device is not None:
return _can_poll_device
if not hasattr(select, "poll"):
_can_poll_device = False
return _can_poll_device
try:
dev_null = open('/dev/null', 'rb')
except IOError:
_can_poll_device = False
return _can_poll_device
p = select.poll()
p.register(dev_null.fileno(), PollConstants.POLLIN)
invalid_request = False
for f, event in p.poll():
if event & PollConstants.POLLNVAL:
invalid_request = True
break
dev_null.close()
_can_poll_device = not invalid_request
return _can_poll_device
def create_poll_instance():
"""
Create an instance of select.poll, or an instance of
	PollSelectAdapter if there is no poll() implementation or
it is broken somehow.
"""
if can_poll_device():
return select.poll()
return PollSelectAdapter()
class _epoll_adapter(object):
"""
Wraps a select.epoll instance in order to make it compatible
with select.poll instances. This is necessary since epoll instances
interpret timeout arguments differently. Note that the file descriptor
that is associated with an epoll instance will close automatically when
it is garbage collected, so it's not necessary to close it explicitly.
"""
__slots__ = ('_epoll_obj',)
def __init__(self, epoll_obj):
self._epoll_obj = epoll_obj
def register(self, fd, *args):
self._epoll_obj.register(fd, *args)
def unregister(self, fd):
self._epoll_obj.unregister(fd)
def poll(self, *args):
if len(args) > 1:
raise TypeError(
"poll expected at most 2 arguments, got " + \
repr(1 + len(args)))
timeout = -1
if args:
timeout = args[0]
if timeout is None or timeout < 0:
timeout = -1
elif timeout != 0:
timeout = timeout / 1000
return self._epoll_obj.poll(timeout)
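# Illustrative note (not part of portage): select.poll() timeouts are given
# in milliseconds while select.epoll() expects seconds, hence the division
# by 1000 above. For example:
#
#   adapter = _epoll_adapter(select.epoll())
#   adapter.register(fd, select.EPOLLIN)
#   adapter.poll(1500)  # forwarded to the epoll object as 1500 / 1000 seconds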
|
emmuchira/kps_erp
|
refs/heads/develop
|
erpnext/patches/v7_2/stock_uom_in_selling.py
|
34
|
import frappe
def execute():
frappe.reload_doctype('Sales Order')
frappe.reload_doctype('Sales Invoice')
frappe.reload_doctype('Quotation')
frappe.reload_doctype('Delivery Note')
doctype_list = ['Sales Order Item', 'Delivery Note Item', 'Quotation Item', 'Sales Invoice Item']
for doctype in doctype_list:
frappe.reload_doctype(doctype)
frappe.db.sql("""update `tab{doctype}`
set uom = stock_uom, conversion_factor = 1, stock_qty = qty""".format(doctype=doctype))
|
dkentw/robotframework
|
refs/heads/master
|
src/robot/model/keyword.py
|
12
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
from operator import attrgetter
from robot.utils import setter, unic
from .itemlist import ItemList
from .message import Message, Messages
from .modelobject import ModelObject
from .tags import Tags
class Keyword(ModelObject):
"""Base model for single keyword."""
__slots__ = ['parent', '_name', 'doc', 'args', 'assign', 'tags', 'timeout',
'type', '_sort_key', '_next_child_sort_key']
KEYWORD_TYPE = 'kw'
SETUP_TYPE = 'setup'
TEARDOWN_TYPE = 'teardown'
FOR_LOOP_TYPE = 'for'
FOR_ITEM_TYPE = 'foritem'
keyword_class = None
message_class = Message
def __init__(self, name='', doc='', args=(), assign=(), tags=(),
timeout=None, type='kw'):
#: :class:`~.model.testsuite.TestSuite` or
#: :class:`~.model.testcase.TestCase` or
#: :class:`~.model.keyword.Keyword` that contains this keyword.
self.parent = None
self._name = name
#: Keyword documentation.
self.doc = doc
#: Keyword arguments as a list of strings.
self.args = args
#: Assigned variables as a list of strings.
self.assign = assign
#: Keyword tags as a list like :class:`~.model.tags.Tags` object.
self.tags = tags
#: Keyword timeout.
self.timeout = timeout
#: Keyword type as a string. See class level ``XXX_TYPE`` constants.
self.type = type
#: Keyword messages as :class:`~.model.message.Message` instances.
self.messages = None
#: Child keywords as :class:`~.model.keyword.Keyword` instances.
self.keywords = None
self._sort_key = -1
self._next_child_sort_key = 0
@property
def name(self):
return self._name
@name.setter
def name(self, name):
self._name = name
@setter
def parent(self, parent):
if parent and parent is not self.parent:
self._sort_key = getattr(parent, '_child_sort_key', -1)
return parent
@property
def _child_sort_key(self):
self._next_child_sort_key += 1
return self._next_child_sort_key
@setter
def tags(self, tags):
return Tags(tags)
@setter
def keywords(self, keywords):
return Keywords(self.keyword_class or self.__class__, self, keywords)
@setter
def messages(self, messages):
return Messages(self.message_class, self, messages)
@property
def children(self):
"""Child keywords and messages in creation order."""
# It would be cleaner to store keywords/messages in same `children`
# list and turn `keywords` and `messages` to properties that pick items
# from it. That would require bigger changes to the model, though.
return sorted(chain(self.keywords, self.messages),
key=attrgetter('_sort_key'))
@property
def id(self):
if not self.parent:
return 'k1'
return '%s-k%d' % (self.parent.id, self.parent.keywords.index(self)+1)
def visit(self, visitor):
visitor.visit_keyword(self)
class Keywords(ItemList):
__slots__ = []
def __init__(self, keyword_class=Keyword, parent=None, keywords=None):
ItemList.__init__(self, keyword_class, {'parent': parent}, keywords)
@property
def setup(self):
return self[0] if (self and self[0].type == 'setup') else None
@property
def teardown(self):
return self[-1] if (self and self[-1].type == 'teardown') else None
@property
def all(self):
return self
@property
def normal(self):
kws = [kw for kw in self if kw.type not in ('setup', 'teardown')]
return Keywords(self._item_class, self._common_attrs['parent'], kws)
def __setitem__(self, index, item):
old = self[index]
ItemList.__setitem__(self, index, item)
self[index]._sort_key = old._sort_key
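# Illustrative sketch (not part of Robot Framework): building a small keyword
# tree and reading it back through the model properties defined above:
#
#   kw = Keyword(name='Login', args=('user', 'secret'))
#   kw.keywords = [Keyword(name='Open Browser', type=Keyword.SETUP_TYPE),
#                  Keyword(name='Close Browser', type=Keyword.TEARDOWN_TYPE)]
#   kw.keywords.setup.name     # -> 'Open Browser'
#   kw.keywords.teardown.name  # -> 'Close Browser'
#   len(kw.keywords.normal)    # -> 0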
|
blesscoin/blesscoin
|
refs/heads/master
|
contrib/wallettools/walletunlock.py
|
782
|
from jsonrpc import ServiceProxy
access = ServiceProxy("http://127.0.0.1:9332")
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
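# Illustrative follow-up (not in the original script): the wallet re-locks
# automatically once the 60 second timeout above expires, or it can be
# re-locked immediately with:
#
#   access.walletlock()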
|
mesosphere/storm
|
refs/heads/master
|
storm-core/src/py/storm/ttypes.py
|
28
|
#
# Autogenerated by Thrift Compiler (0.7.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from thrift.Thrift import *
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except ImportError:
fastbinary = None
class TopologyInitialStatus:
ACTIVE = 1
INACTIVE = 2
_VALUES_TO_NAMES = {
1: "ACTIVE",
2: "INACTIVE",
}
_NAMES_TO_VALUES = {
"ACTIVE": 1,
"INACTIVE": 2,
}
class JavaObjectArg:
"""
Attributes:
- int_arg
- long_arg
- string_arg
- bool_arg
- binary_arg
- double_arg
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'int_arg', None, None, ), # 1
(2, TType.I64, 'long_arg', None, None, ), # 2
(3, TType.STRING, 'string_arg', None, None, ), # 3
(4, TType.BOOL, 'bool_arg', None, None, ), # 4
(5, TType.STRING, 'binary_arg', None, None, ), # 5
(6, TType.DOUBLE, 'double_arg', None, None, ), # 6
)
def __hash__(self):
return 0 + hash(self.int_arg) + hash(self.long_arg) + hash(self.string_arg) + hash(self.bool_arg) + hash(self.binary_arg) + hash(self.double_arg)
def __init__(self, int_arg=None, long_arg=None, string_arg=None, bool_arg=None, binary_arg=None, double_arg=None,):
self.int_arg = int_arg
self.long_arg = long_arg
self.string_arg = string_arg
self.bool_arg = bool_arg
self.binary_arg = binary_arg
self.double_arg = double_arg
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.int_arg = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.long_arg = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.string_arg = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.BOOL:
self.bool_arg = iprot.readBool();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.binary_arg = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.DOUBLE:
self.double_arg = iprot.readDouble();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('JavaObjectArg')
if self.int_arg is not None:
oprot.writeFieldBegin('int_arg', TType.I32, 1)
oprot.writeI32(self.int_arg)
oprot.writeFieldEnd()
if self.long_arg is not None:
oprot.writeFieldBegin('long_arg', TType.I64, 2)
oprot.writeI64(self.long_arg)
oprot.writeFieldEnd()
if self.string_arg is not None:
oprot.writeFieldBegin('string_arg', TType.STRING, 3)
oprot.writeString(self.string_arg.encode('utf-8'))
oprot.writeFieldEnd()
if self.bool_arg is not None:
oprot.writeFieldBegin('bool_arg', TType.BOOL, 4)
oprot.writeBool(self.bool_arg)
oprot.writeFieldEnd()
if self.binary_arg is not None:
oprot.writeFieldBegin('binary_arg', TType.STRING, 5)
oprot.writeString(self.binary_arg)
oprot.writeFieldEnd()
if self.double_arg is not None:
oprot.writeFieldBegin('double_arg', TType.DOUBLE, 6)
oprot.writeDouble(self.double_arg)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class JavaObject:
"""
Attributes:
- full_class_name
- args_list
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'full_class_name', None, None, ), # 1
(2, TType.LIST, 'args_list', (TType.STRUCT,(JavaObjectArg, JavaObjectArg.thrift_spec)), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.full_class_name) + hash(self.args_list)
def __init__(self, full_class_name=None, args_list=None,):
self.full_class_name = full_class_name
self.args_list = args_list
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.full_class_name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.args_list = []
(_etype3, _size0) = iprot.readListBegin()
for _i4 in xrange(_size0):
_elem5 = JavaObjectArg()
_elem5.read(iprot)
self.args_list.append(_elem5)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('JavaObject')
if self.full_class_name is not None:
oprot.writeFieldBegin('full_class_name', TType.STRING, 1)
oprot.writeString(self.full_class_name.encode('utf-8'))
oprot.writeFieldEnd()
if self.args_list is not None:
oprot.writeFieldBegin('args_list', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.args_list))
for iter6 in self.args_list:
iter6.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.full_class_name is None:
raise TProtocol.TProtocolException(message='Required field full_class_name is unset!')
if self.args_list is None:
raise TProtocol.TProtocolException(message='Required field args_list is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class NullStruct:
thrift_spec = (
)
def __hash__(self):
return 0
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('NullStruct')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GlobalStreamId:
"""
Attributes:
- componentId
- streamId
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'componentId', None, None, ), # 1
(2, TType.STRING, 'streamId', None, None, ), # 2
)
def __hash__(self):
return 0 + hash(self.componentId) + hash(self.streamId)
def __init__(self, componentId=None, streamId=None,):
self.componentId = componentId
self.streamId = streamId
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.componentId = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.streamId = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GlobalStreamId')
if self.componentId is not None:
oprot.writeFieldBegin('componentId', TType.STRING, 1)
oprot.writeString(self.componentId.encode('utf-8'))
oprot.writeFieldEnd()
if self.streamId is not None:
oprot.writeFieldBegin('streamId', TType.STRING, 2)
oprot.writeString(self.streamId.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.componentId is None:
raise TProtocol.TProtocolException(message='Required field componentId is unset!')
if self.streamId is None:
raise TProtocol.TProtocolException(message='Required field streamId is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
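# Illustrative round-trip sketch (not part of the generated code): serialize
# a GlobalStreamId with the plain binary protocol and read it back:
#
#   from thrift.transport import TTransport
#   from thrift.protocol import TBinaryProtocol
#   buf = TTransport.TMemoryBuffer()
#   GlobalStreamId(componentId=u'spout1', streamId=u'default').write(
#       TBinaryProtocol.TBinaryProtocol(buf))
#   restored = GlobalStreamId()
#   restored.read(TBinaryProtocol.TBinaryProtocol(
#       TTransport.TMemoryBuffer(buf.getvalue())))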
class Grouping:
"""
Attributes:
- fields
- shuffle
- all
- none
- direct
- custom_object
- custom_serialized
- local_or_shuffle
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'fields', (TType.STRING,None), None, ), # 1
(2, TType.STRUCT, 'shuffle', (NullStruct, NullStruct.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'all', (NullStruct, NullStruct.thrift_spec), None, ), # 3
(4, TType.STRUCT, 'none', (NullStruct, NullStruct.thrift_spec), None, ), # 4
(5, TType.STRUCT, 'direct', (NullStruct, NullStruct.thrift_spec), None, ), # 5
(6, TType.STRUCT, 'custom_object', (JavaObject, JavaObject.thrift_spec), None, ), # 6
(7, TType.STRING, 'custom_serialized', None, None, ), # 7
(8, TType.STRUCT, 'local_or_shuffle', (NullStruct, NullStruct.thrift_spec), None, ), # 8
)
def __hash__(self):
return 0 + hash(self.fields) + hash(self.shuffle) + hash(self.all) + hash(self.none) + hash(self.direct) + hash(self.custom_object) + hash(self.custom_serialized) + hash(self.local_or_shuffle)
def __init__(self, fields=None, shuffle=None, all=None, none=None, direct=None, custom_object=None, custom_serialized=None, local_or_shuffle=None,):
self.fields = fields
self.shuffle = shuffle
self.all = all
self.none = none
self.direct = direct
self.custom_object = custom_object
self.custom_serialized = custom_serialized
self.local_or_shuffle = local_or_shuffle
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.fields = []
(_etype10, _size7) = iprot.readListBegin()
for _i11 in xrange(_size7):
_elem12 = iprot.readString().decode('utf-8')
self.fields.append(_elem12)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.shuffle = NullStruct()
self.shuffle.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.all = NullStruct()
self.all.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.none = NullStruct()
self.none.read(iprot)
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRUCT:
self.direct = NullStruct()
self.direct.read(iprot)
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRUCT:
self.custom_object = JavaObject()
self.custom_object.read(iprot)
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.custom_serialized = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.STRUCT:
self.local_or_shuffle = NullStruct()
self.local_or_shuffle.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Grouping')
if self.fields is not None:
oprot.writeFieldBegin('fields', TType.LIST, 1)
oprot.writeListBegin(TType.STRING, len(self.fields))
for iter13 in self.fields:
oprot.writeString(iter13.encode('utf-8'))
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.shuffle is not None:
oprot.writeFieldBegin('shuffle', TType.STRUCT, 2)
self.shuffle.write(oprot)
oprot.writeFieldEnd()
if self.all is not None:
oprot.writeFieldBegin('all', TType.STRUCT, 3)
self.all.write(oprot)
oprot.writeFieldEnd()
if self.none is not None:
oprot.writeFieldBegin('none', TType.STRUCT, 4)
self.none.write(oprot)
oprot.writeFieldEnd()
if self.direct is not None:
oprot.writeFieldBegin('direct', TType.STRUCT, 5)
self.direct.write(oprot)
oprot.writeFieldEnd()
if self.custom_object is not None:
oprot.writeFieldBegin('custom_object', TType.STRUCT, 6)
self.custom_object.write(oprot)
oprot.writeFieldEnd()
if self.custom_serialized is not None:
oprot.writeFieldBegin('custom_serialized', TType.STRING, 7)
oprot.writeString(self.custom_serialized)
oprot.writeFieldEnd()
if self.local_or_shuffle is not None:
oprot.writeFieldBegin('local_or_shuffle', TType.STRUCT, 8)
self.local_or_shuffle.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class StreamInfo:
"""
Attributes:
- output_fields
- direct
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'output_fields', (TType.STRING,None), None, ), # 1
(2, TType.BOOL, 'direct', None, None, ), # 2
)
def __hash__(self):
return 0 + hash(self.output_fields) + hash(self.direct)
def __init__(self, output_fields=None, direct=None,):
self.output_fields = output_fields
self.direct = direct
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.output_fields = []
(_etype17, _size14) = iprot.readListBegin()
for _i18 in xrange(_size14):
_elem19 = iprot.readString().decode('utf-8')
self.output_fields.append(_elem19)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BOOL:
self.direct = iprot.readBool();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('StreamInfo')
if self.output_fields is not None:
oprot.writeFieldBegin('output_fields', TType.LIST, 1)
oprot.writeListBegin(TType.STRING, len(self.output_fields))
for iter20 in self.output_fields:
oprot.writeString(iter20.encode('utf-8'))
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.direct is not None:
oprot.writeFieldBegin('direct', TType.BOOL, 2)
oprot.writeBool(self.direct)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.output_fields is None:
raise TProtocol.TProtocolException(message='Required field output_fields is unset!')
if self.direct is None:
raise TProtocol.TProtocolException(message='Required field direct is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ShellComponent:
"""
Attributes:
- execution_command
- script
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'execution_command', None, None, ), # 1
(2, TType.STRING, 'script', None, None, ), # 2
)
def __hash__(self):
return 0 + hash(self.execution_command) + hash(self.script)
def __init__(self, execution_command=None, script=None,):
self.execution_command = execution_command
self.script = script
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.execution_command = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.script = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ShellComponent')
if self.execution_command is not None:
oprot.writeFieldBegin('execution_command', TType.STRING, 1)
oprot.writeString(self.execution_command.encode('utf-8'))
oprot.writeFieldEnd()
if self.script is not None:
oprot.writeFieldBegin('script', TType.STRING, 2)
oprot.writeString(self.script.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ComponentObject:
"""
Attributes:
- serialized_java
- shell
- java_object
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'serialized_java', None, None, ), # 1
(2, TType.STRUCT, 'shell', (ShellComponent, ShellComponent.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'java_object', (JavaObject, JavaObject.thrift_spec), None, ), # 3
)
def __hash__(self):
return 0 + hash(self.serialized_java) + hash(self.shell) + hash(self.java_object)
def __init__(self, serialized_java=None, shell=None, java_object=None,):
self.serialized_java = serialized_java
self.shell = shell
self.java_object = java_object
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.serialized_java = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.shell = ShellComponent()
self.shell.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.java_object = JavaObject()
self.java_object.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ComponentObject')
if self.serialized_java is not None:
oprot.writeFieldBegin('serialized_java', TType.STRING, 1)
oprot.writeString(self.serialized_java)
oprot.writeFieldEnd()
if self.shell is not None:
oprot.writeFieldBegin('shell', TType.STRUCT, 2)
self.shell.write(oprot)
oprot.writeFieldEnd()
if self.java_object is not None:
oprot.writeFieldBegin('java_object', TType.STRUCT, 3)
self.java_object.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ComponentCommon:
"""
Attributes:
- inputs
- streams
- parallelism_hint
- json_conf
"""
thrift_spec = (
None, # 0
(1, TType.MAP, 'inputs', (TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.STRUCT,(Grouping, Grouping.thrift_spec)), None, ), # 1
(2, TType.MAP, 'streams', (TType.STRING,None,TType.STRUCT,(StreamInfo, StreamInfo.thrift_spec)), None, ), # 2
(3, TType.I32, 'parallelism_hint', None, None, ), # 3
(4, TType.STRING, 'json_conf', None, None, ), # 4
)
def __hash__(self):
return 0 + hash(self.inputs) + hash(self.streams) + hash(self.parallelism_hint) + hash(self.json_conf)
def __init__(self, inputs=None, streams=None, parallelism_hint=None, json_conf=None,):
self.inputs = inputs
self.streams = streams
self.parallelism_hint = parallelism_hint
self.json_conf = json_conf
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.inputs = {}
(_ktype22, _vtype23, _size21 ) = iprot.readMapBegin()
for _i25 in xrange(_size21):
_key26 = GlobalStreamId()
_key26.read(iprot)
_val27 = Grouping()
_val27.read(iprot)
self.inputs[_key26] = _val27
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.MAP:
self.streams = {}
(_ktype29, _vtype30, _size28 ) = iprot.readMapBegin()
for _i32 in xrange(_size28):
_key33 = iprot.readString().decode('utf-8')
_val34 = StreamInfo()
_val34.read(iprot)
self.streams[_key33] = _val34
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.parallelism_hint = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.json_conf = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ComponentCommon')
if self.inputs is not None:
oprot.writeFieldBegin('inputs', TType.MAP, 1)
oprot.writeMapBegin(TType.STRUCT, TType.STRUCT, len(self.inputs))
for kiter35,viter36 in self.inputs.items():
kiter35.write(oprot)
viter36.write(oprot)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.streams is not None:
oprot.writeFieldBegin('streams', TType.MAP, 2)
oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.streams))
for kiter37,viter38 in self.streams.items():
oprot.writeString(kiter37.encode('utf-8'))
viter38.write(oprot)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.parallelism_hint is not None:
oprot.writeFieldBegin('parallelism_hint', TType.I32, 3)
oprot.writeI32(self.parallelism_hint)
oprot.writeFieldEnd()
if self.json_conf is not None:
oprot.writeFieldBegin('json_conf', TType.STRING, 4)
oprot.writeString(self.json_conf.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.inputs is None:
raise TProtocol.TProtocolException(message='Required field inputs is unset!')
if self.streams is None:
raise TProtocol.TProtocolException(message='Required field streams is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class SpoutSpec:
"""
Attributes:
- spout_object
- common
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'spout_object', (ComponentObject, ComponentObject.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'common', (ComponentCommon, ComponentCommon.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.spout_object) + hash(self.common)
def __init__(self, spout_object=None, common=None,):
self.spout_object = spout_object
self.common = common
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.spout_object = ComponentObject()
self.spout_object.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.common = ComponentCommon()
self.common.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('SpoutSpec')
if self.spout_object is not None:
oprot.writeFieldBegin('spout_object', TType.STRUCT, 1)
self.spout_object.write(oprot)
oprot.writeFieldEnd()
if self.common is not None:
oprot.writeFieldBegin('common', TType.STRUCT, 2)
self.common.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.spout_object is None:
raise TProtocol.TProtocolException(message='Required field spout_object is unset!')
if self.common is None:
raise TProtocol.TProtocolException(message='Required field common is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Bolt:
"""
Attributes:
- bolt_object
- common
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'bolt_object', (ComponentObject, ComponentObject.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'common', (ComponentCommon, ComponentCommon.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.bolt_object) + hash(self.common)
def __init__(self, bolt_object=None, common=None,):
self.bolt_object = bolt_object
self.common = common
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.bolt_object = ComponentObject()
self.bolt_object.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.common = ComponentCommon()
self.common.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Bolt')
if self.bolt_object is not None:
oprot.writeFieldBegin('bolt_object', TType.STRUCT, 1)
self.bolt_object.write(oprot)
oprot.writeFieldEnd()
if self.common is not None:
oprot.writeFieldBegin('common', TType.STRUCT, 2)
self.common.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.bolt_object is None:
raise TProtocol.TProtocolException(message='Required field bolt_object is unset!')
if self.common is None:
raise TProtocol.TProtocolException(message='Required field common is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class StateSpoutSpec:
"""
Attributes:
- state_spout_object
- common
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'state_spout_object', (ComponentObject, ComponentObject.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'common', (ComponentCommon, ComponentCommon.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.state_spout_object) + hash(self.common)
def __init__(self, state_spout_object=None, common=None,):
self.state_spout_object = state_spout_object
self.common = common
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.state_spout_object = ComponentObject()
self.state_spout_object.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.common = ComponentCommon()
self.common.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('StateSpoutSpec')
if self.state_spout_object is not None:
oprot.writeFieldBegin('state_spout_object', TType.STRUCT, 1)
self.state_spout_object.write(oprot)
oprot.writeFieldEnd()
if self.common is not None:
oprot.writeFieldBegin('common', TType.STRUCT, 2)
self.common.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.state_spout_object is None:
raise TProtocol.TProtocolException(message='Required field state_spout_object is unset!')
if self.common is None:
raise TProtocol.TProtocolException(message='Required field common is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class StormTopology:
"""
Attributes:
- spouts
- bolts
- state_spouts
"""
thrift_spec = (
None, # 0
(1, TType.MAP, 'spouts', (TType.STRING,None,TType.STRUCT,(SpoutSpec, SpoutSpec.thrift_spec)), None, ), # 1
(2, TType.MAP, 'bolts', (TType.STRING,None,TType.STRUCT,(Bolt, Bolt.thrift_spec)), None, ), # 2
(3, TType.MAP, 'state_spouts', (TType.STRING,None,TType.STRUCT,(StateSpoutSpec, StateSpoutSpec.thrift_spec)), None, ), # 3
)
def __hash__(self):
return 0 + hash(self.spouts) + hash(self.bolts) + hash(self.state_spouts)
def __init__(self, spouts=None, bolts=None, state_spouts=None,):
self.spouts = spouts
self.bolts = bolts
self.state_spouts = state_spouts
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.spouts = {}
(_ktype40, _vtype41, _size39 ) = iprot.readMapBegin()
for _i43 in xrange(_size39):
_key44 = iprot.readString().decode('utf-8')
_val45 = SpoutSpec()
_val45.read(iprot)
self.spouts[_key44] = _val45
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.MAP:
self.bolts = {}
(_ktype47, _vtype48, _size46 ) = iprot.readMapBegin()
for _i50 in xrange(_size46):
_key51 = iprot.readString().decode('utf-8')
_val52 = Bolt()
_val52.read(iprot)
self.bolts[_key51] = _val52
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.MAP:
self.state_spouts = {}
(_ktype54, _vtype55, _size53 ) = iprot.readMapBegin()
for _i57 in xrange(_size53):
_key58 = iprot.readString().decode('utf-8')
_val59 = StateSpoutSpec()
_val59.read(iprot)
self.state_spouts[_key58] = _val59
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('StormTopology')
if self.spouts is not None:
oprot.writeFieldBegin('spouts', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.spouts))
for kiter60,viter61 in self.spouts.items():
oprot.writeString(kiter60.encode('utf-8'))
viter61.write(oprot)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.bolts is not None:
oprot.writeFieldBegin('bolts', TType.MAP, 2)
oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.bolts))
for kiter62,viter63 in self.bolts.items():
oprot.writeString(kiter62.encode('utf-8'))
viter63.write(oprot)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.state_spouts is not None:
oprot.writeFieldBegin('state_spouts', TType.MAP, 3)
oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.state_spouts))
for kiter64,viter65 in self.state_spouts.items():
oprot.writeString(kiter64.encode('utf-8'))
viter65.write(oprot)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.spouts is None:
raise TProtocol.TProtocolException(message='Required field spouts is unset!')
if self.bolts is None:
raise TProtocol.TProtocolException(message='Required field bolts is unset!')
if self.state_spouts is None:
raise TProtocol.TProtocolException(message='Required field state_spouts is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class AlreadyAliveException(Exception):
"""
Attributes:
- msg
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.msg)
def __init__(self, msg=None,):
self.msg = msg
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.msg = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('AlreadyAliveException')
if self.msg is not None:
oprot.writeFieldBegin('msg', TType.STRING, 1)
oprot.writeString(self.msg.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.msg is None:
raise TProtocol.TProtocolException(message='Required field msg is unset!')
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class NotAliveException(Exception):
"""
Attributes:
- msg
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.msg)
def __init__(self, msg=None,):
self.msg = msg
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.msg = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('NotAliveException')
if self.msg is not None:
oprot.writeFieldBegin('msg', TType.STRING, 1)
oprot.writeString(self.msg.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.msg is None:
raise TProtocol.TProtocolException(message='Required field msg is unset!')
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class InvalidTopologyException(Exception):
"""
Attributes:
- msg
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.msg)
def __init__(self, msg=None,):
self.msg = msg
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.msg = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('InvalidTopologyException')
if self.msg is not None:
oprot.writeFieldBegin('msg', TType.STRING, 1)
oprot.writeString(self.msg.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.msg is None:
raise TProtocol.TProtocolException(message='Required field msg is unset!')
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TopologySummary:
"""
Attributes:
- id
- name
- num_tasks
- num_executors
- num_workers
- uptime_secs
- status
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
(2, TType.STRING, 'name', None, None, ), # 2
(3, TType.I32, 'num_tasks', None, None, ), # 3
(4, TType.I32, 'num_executors', None, None, ), # 4
(5, TType.I32, 'num_workers', None, None, ), # 5
(6, TType.I32, 'uptime_secs', None, None, ), # 6
(7, TType.STRING, 'status', None, None, ), # 7
)
def __hash__(self):
return 0 + hash(self.id) + hash(self.name) + hash(self.num_tasks) + hash(self.num_executors) + hash(self.num_workers) + hash(self.uptime_secs) + hash(self.status)
def __init__(self, id=None, name=None, num_tasks=None, num_executors=None, num_workers=None, uptime_secs=None, status=None,):
self.id = id
self.name = name
self.num_tasks = num_tasks
self.num_executors = num_executors
self.num_workers = num_workers
self.uptime_secs = uptime_secs
self.status = status
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.num_tasks = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.num_executors = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.num_workers = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I32:
self.uptime_secs = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.status = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TopologySummary')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8'))
oprot.writeFieldEnd()
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 2)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
if self.num_tasks is not None:
oprot.writeFieldBegin('num_tasks', TType.I32, 3)
oprot.writeI32(self.num_tasks)
oprot.writeFieldEnd()
if self.num_executors is not None:
oprot.writeFieldBegin('num_executors', TType.I32, 4)
oprot.writeI32(self.num_executors)
oprot.writeFieldEnd()
if self.num_workers is not None:
oprot.writeFieldBegin('num_workers', TType.I32, 5)
oprot.writeI32(self.num_workers)
oprot.writeFieldEnd()
if self.uptime_secs is not None:
oprot.writeFieldBegin('uptime_secs', TType.I32, 6)
oprot.writeI32(self.uptime_secs)
oprot.writeFieldEnd()
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRING, 7)
oprot.writeString(self.status.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.id is None:
raise TProtocol.TProtocolException(message='Required field id is unset!')
if self.name is None:
raise TProtocol.TProtocolException(message='Required field name is unset!')
if self.num_tasks is None:
raise TProtocol.TProtocolException(message='Required field num_tasks is unset!')
if self.num_executors is None:
raise TProtocol.TProtocolException(message='Required field num_executors is unset!')
if self.num_workers is None:
raise TProtocol.TProtocolException(message='Required field num_workers is unset!')
if self.uptime_secs is None:
raise TProtocol.TProtocolException(message='Required field uptime_secs is unset!')
if self.status is None:
raise TProtocol.TProtocolException(message='Required field status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
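# --- Illustrative sketch (not part of the generated bindings) ---
# Assuming the standard Thrift Python runtime is on the path, a TopologySummary
# can be round-tripped through an in-memory transport (all values hypothetical):
#
#   from thrift.transport import TTransport
#   from thrift.protocol import TBinaryProtocol
#   summary = TopologySummary(id='topo-1', name='wordcount', num_tasks=8,
#                             num_executors=8, num_workers=2,
#                             uptime_secs=120, status='ACTIVE')
#   summary.validate()  # raises TProtocolException if a required field is unset
#   buf = TTransport.TMemoryBuffer()
#   summary.write(TBinaryProtocol.TBinaryProtocol(buf))
#   clone = TopologySummary()
#   clone.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue())))
#   assert clone == summary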
class SupervisorSummary:
"""
Attributes:
- host
- uptime_secs
- num_workers
- num_used_workers
- supervisor_id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'host', None, None, ), # 1
(2, TType.I32, 'uptime_secs', None, None, ), # 2
(3, TType.I32, 'num_workers', None, None, ), # 3
(4, TType.I32, 'num_used_workers', None, None, ), # 4
(5, TType.STRING, 'supervisor_id', None, None, ), # 5
)
def __hash__(self):
return 0 + hash(self.host) + hash(self.uptime_secs) + hash(self.num_workers) + hash(self.num_used_workers) + hash(self.supervisor_id)
def __init__(self, host=None, uptime_secs=None, num_workers=None, num_used_workers=None, supervisor_id=None,):
self.host = host
self.uptime_secs = uptime_secs
self.num_workers = num_workers
self.num_used_workers = num_used_workers
self.supervisor_id = supervisor_id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.host = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.uptime_secs = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.num_workers = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.num_used_workers = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.supervisor_id = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('SupervisorSummary')
if self.host is not None:
oprot.writeFieldBegin('host', TType.STRING, 1)
oprot.writeString(self.host.encode('utf-8'))
oprot.writeFieldEnd()
if self.uptime_secs is not None:
oprot.writeFieldBegin('uptime_secs', TType.I32, 2)
oprot.writeI32(self.uptime_secs)
oprot.writeFieldEnd()
if self.num_workers is not None:
oprot.writeFieldBegin('num_workers', TType.I32, 3)
oprot.writeI32(self.num_workers)
oprot.writeFieldEnd()
if self.num_used_workers is not None:
oprot.writeFieldBegin('num_used_workers', TType.I32, 4)
oprot.writeI32(self.num_used_workers)
oprot.writeFieldEnd()
if self.supervisor_id is not None:
oprot.writeFieldBegin('supervisor_id', TType.STRING, 5)
oprot.writeString(self.supervisor_id.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.host is None:
raise TProtocol.TProtocolException(message='Required field host is unset!')
if self.uptime_secs is None:
raise TProtocol.TProtocolException(message='Required field uptime_secs is unset!')
if self.num_workers is None:
raise TProtocol.TProtocolException(message='Required field num_workers is unset!')
if self.num_used_workers is None:
raise TProtocol.TProtocolException(message='Required field num_used_workers is unset!')
if self.supervisor_id is None:
raise TProtocol.TProtocolException(message='Required field supervisor_id is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ClusterSummary:
"""
Attributes:
- supervisors
- nimbus_uptime_secs
- topologies
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'supervisors', (TType.STRUCT,(SupervisorSummary, SupervisorSummary.thrift_spec)), None, ), # 1
(2, TType.I32, 'nimbus_uptime_secs', None, None, ), # 2
(3, TType.LIST, 'topologies', (TType.STRUCT,(TopologySummary, TopologySummary.thrift_spec)), None, ), # 3
)
  def __hash__(self):
    # note: this generated __hash__ raises TypeError if actually called, since
    # list/dict fields are unhashable; the same caveat applies to the other
    # container-bearing structs in this module
    return 0 + hash(self.supervisors) + hash(self.nimbus_uptime_secs) + hash(self.topologies)
def __init__(self, supervisors=None, nimbus_uptime_secs=None, topologies=None,):
self.supervisors = supervisors
self.nimbus_uptime_secs = nimbus_uptime_secs
self.topologies = topologies
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.supervisors = []
(_etype69, _size66) = iprot.readListBegin()
for _i70 in xrange(_size66):
_elem71 = SupervisorSummary()
_elem71.read(iprot)
self.supervisors.append(_elem71)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.nimbus_uptime_secs = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.topologies = []
(_etype75, _size72) = iprot.readListBegin()
for _i76 in xrange(_size72):
_elem77 = TopologySummary()
_elem77.read(iprot)
self.topologies.append(_elem77)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ClusterSummary')
if self.supervisors is not None:
oprot.writeFieldBegin('supervisors', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.supervisors))
for iter78 in self.supervisors:
iter78.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.nimbus_uptime_secs is not None:
oprot.writeFieldBegin('nimbus_uptime_secs', TType.I32, 2)
oprot.writeI32(self.nimbus_uptime_secs)
oprot.writeFieldEnd()
if self.topologies is not None:
oprot.writeFieldBegin('topologies', TType.LIST, 3)
oprot.writeListBegin(TType.STRUCT, len(self.topologies))
for iter79 in self.topologies:
iter79.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.supervisors is None:
raise TProtocol.TProtocolException(message='Required field supervisors is unset!')
if self.nimbus_uptime_secs is None:
raise TProtocol.TProtocolException(message='Required field nimbus_uptime_secs is unset!')
if self.topologies is None:
raise TProtocol.TProtocolException(message='Required field topologies is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ErrorInfo:
"""
Attributes:
- error
- error_time_secs
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'error', None, None, ), # 1
(2, TType.I32, 'error_time_secs', None, None, ), # 2
)
def __hash__(self):
return 0 + hash(self.error) + hash(self.error_time_secs)
def __init__(self, error=None, error_time_secs=None,):
self.error = error
self.error_time_secs = error_time_secs
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.error = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.error_time_secs = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ErrorInfo')
if self.error is not None:
oprot.writeFieldBegin('error', TType.STRING, 1)
oprot.writeString(self.error.encode('utf-8'))
oprot.writeFieldEnd()
if self.error_time_secs is not None:
oprot.writeFieldBegin('error_time_secs', TType.I32, 2)
oprot.writeI32(self.error_time_secs)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.error is None:
raise TProtocol.TProtocolException(message='Required field error is unset!')
if self.error_time_secs is None:
raise TProtocol.TProtocolException(message='Required field error_time_secs is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class BoltStats:
"""
Attributes:
- acked
- failed
- process_ms_avg
- executed
- execute_ms_avg
"""
thrift_spec = (
None, # 0
(1, TType.MAP, 'acked', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.I64,None)), None, ), # 1
(2, TType.MAP, 'failed', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.I64,None)), None, ), # 2
(3, TType.MAP, 'process_ms_avg', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.DOUBLE,None)), None, ), # 3
(4, TType.MAP, 'executed', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.I64,None)), None, ), # 4
(5, TType.MAP, 'execute_ms_avg', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.DOUBLE,None)), None, ), # 5
)
def __hash__(self):
return 0 + hash(self.acked) + hash(self.failed) + hash(self.process_ms_avg) + hash(self.executed) + hash(self.execute_ms_avg)
def __init__(self, acked=None, failed=None, process_ms_avg=None, executed=None, execute_ms_avg=None,):
self.acked = acked
self.failed = failed
self.process_ms_avg = process_ms_avg
self.executed = executed
self.execute_ms_avg = execute_ms_avg
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.acked = {}
(_ktype81, _vtype82, _size80 ) = iprot.readMapBegin()
for _i84 in xrange(_size80):
_key85 = iprot.readString().decode('utf-8')
_val86 = {}
(_ktype88, _vtype89, _size87 ) = iprot.readMapBegin()
for _i91 in xrange(_size87):
_key92 = GlobalStreamId()
_key92.read(iprot)
_val93 = iprot.readI64();
_val86[_key92] = _val93
iprot.readMapEnd()
self.acked[_key85] = _val86
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.MAP:
self.failed = {}
(_ktype95, _vtype96, _size94 ) = iprot.readMapBegin()
for _i98 in xrange(_size94):
_key99 = iprot.readString().decode('utf-8')
_val100 = {}
(_ktype102, _vtype103, _size101 ) = iprot.readMapBegin()
for _i105 in xrange(_size101):
_key106 = GlobalStreamId()
_key106.read(iprot)
_val107 = iprot.readI64();
_val100[_key106] = _val107
iprot.readMapEnd()
self.failed[_key99] = _val100
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.MAP:
self.process_ms_avg = {}
(_ktype109, _vtype110, _size108 ) = iprot.readMapBegin()
for _i112 in xrange(_size108):
_key113 = iprot.readString().decode('utf-8')
_val114 = {}
(_ktype116, _vtype117, _size115 ) = iprot.readMapBegin()
for _i119 in xrange(_size115):
_key120 = GlobalStreamId()
_key120.read(iprot)
_val121 = iprot.readDouble();
_val114[_key120] = _val121
iprot.readMapEnd()
self.process_ms_avg[_key113] = _val114
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.MAP:
self.executed = {}
(_ktype123, _vtype124, _size122 ) = iprot.readMapBegin()
for _i126 in xrange(_size122):
_key127 = iprot.readString().decode('utf-8')
_val128 = {}
(_ktype130, _vtype131, _size129 ) = iprot.readMapBegin()
for _i133 in xrange(_size129):
_key134 = GlobalStreamId()
_key134.read(iprot)
_val135 = iprot.readI64();
_val128[_key134] = _val135
iprot.readMapEnd()
self.executed[_key127] = _val128
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.MAP:
self.execute_ms_avg = {}
(_ktype137, _vtype138, _size136 ) = iprot.readMapBegin()
for _i140 in xrange(_size136):
_key141 = iprot.readString().decode('utf-8')
_val142 = {}
(_ktype144, _vtype145, _size143 ) = iprot.readMapBegin()
for _i147 in xrange(_size143):
_key148 = GlobalStreamId()
_key148.read(iprot)
_val149 = iprot.readDouble();
_val142[_key148] = _val149
iprot.readMapEnd()
self.execute_ms_avg[_key141] = _val142
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('BoltStats')
if self.acked is not None:
oprot.writeFieldBegin('acked', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.acked))
for kiter150,viter151 in self.acked.items():
oprot.writeString(kiter150.encode('utf-8'))
oprot.writeMapBegin(TType.STRUCT, TType.I64, len(viter151))
for kiter152,viter153 in viter151.items():
kiter152.write(oprot)
oprot.writeI64(viter153)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.failed is not None:
oprot.writeFieldBegin('failed', TType.MAP, 2)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.failed))
for kiter154,viter155 in self.failed.items():
oprot.writeString(kiter154.encode('utf-8'))
oprot.writeMapBegin(TType.STRUCT, TType.I64, len(viter155))
for kiter156,viter157 in viter155.items():
kiter156.write(oprot)
oprot.writeI64(viter157)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.process_ms_avg is not None:
oprot.writeFieldBegin('process_ms_avg', TType.MAP, 3)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.process_ms_avg))
for kiter158,viter159 in self.process_ms_avg.items():
oprot.writeString(kiter158.encode('utf-8'))
oprot.writeMapBegin(TType.STRUCT, TType.DOUBLE, len(viter159))
for kiter160,viter161 in viter159.items():
kiter160.write(oprot)
oprot.writeDouble(viter161)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.executed is not None:
oprot.writeFieldBegin('executed', TType.MAP, 4)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.executed))
for kiter162,viter163 in self.executed.items():
oprot.writeString(kiter162.encode('utf-8'))
oprot.writeMapBegin(TType.STRUCT, TType.I64, len(viter163))
for kiter164,viter165 in viter163.items():
kiter164.write(oprot)
oprot.writeI64(viter165)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.execute_ms_avg is not None:
oprot.writeFieldBegin('execute_ms_avg', TType.MAP, 5)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.execute_ms_avg))
for kiter166,viter167 in self.execute_ms_avg.items():
oprot.writeString(kiter166.encode('utf-8'))
oprot.writeMapBegin(TType.STRUCT, TType.DOUBLE, len(viter167))
for kiter168,viter169 in viter167.items():
kiter168.write(oprot)
oprot.writeDouble(viter169)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.acked is None:
raise TProtocol.TProtocolException(message='Required field acked is unset!')
if self.failed is None:
raise TProtocol.TProtocolException(message='Required field failed is unset!')
if self.process_ms_avg is None:
raise TProtocol.TProtocolException(message='Required field process_ms_avg is unset!')
if self.executed is None:
raise TProtocol.TProtocolException(message='Required field executed is unset!')
if self.execute_ms_avg is None:
raise TProtocol.TProtocolException(message='Required field execute_ms_avg is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
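# Illustrative sketch: the five BoltStats maps are keyed first by time window
# and then by stream (window/stream keys hypothetical; GlobalStreamId's
# componentId/streamId field names are assumed from its definition earlier in
# this module):
#
#   gsid = GlobalStreamId(componentId='spout', streamId='default')
#   stats = BoltStats(acked={':all-time': {gsid: 42}},
#                     failed={':all-time': {gsid: 0}},
#                     process_ms_avg={':all-time': {gsid: 1.5}},
#                     executed={':all-time': {gsid: 42}},
#                     execute_ms_avg={':all-time': {gsid: 1.2}})
#   stats.validate()  # all five fields are required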
class SpoutStats:
"""
Attributes:
- acked
- failed
- complete_ms_avg
"""
thrift_spec = (
None, # 0
(1, TType.MAP, 'acked', (TType.STRING,None,TType.MAP,(TType.STRING,None,TType.I64,None)), None, ), # 1
(2, TType.MAP, 'failed', (TType.STRING,None,TType.MAP,(TType.STRING,None,TType.I64,None)), None, ), # 2
(3, TType.MAP, 'complete_ms_avg', (TType.STRING,None,TType.MAP,(TType.STRING,None,TType.DOUBLE,None)), None, ), # 3
)
def __hash__(self):
return 0 + hash(self.acked) + hash(self.failed) + hash(self.complete_ms_avg)
def __init__(self, acked=None, failed=None, complete_ms_avg=None,):
self.acked = acked
self.failed = failed
self.complete_ms_avg = complete_ms_avg
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.acked = {}
(_ktype171, _vtype172, _size170 ) = iprot.readMapBegin()
for _i174 in xrange(_size170):
_key175 = iprot.readString().decode('utf-8')
_val176 = {}
(_ktype178, _vtype179, _size177 ) = iprot.readMapBegin()
for _i181 in xrange(_size177):
_key182 = iprot.readString().decode('utf-8')
_val183 = iprot.readI64();
_val176[_key182] = _val183
iprot.readMapEnd()
self.acked[_key175] = _val176
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.MAP:
self.failed = {}
(_ktype185, _vtype186, _size184 ) = iprot.readMapBegin()
for _i188 in xrange(_size184):
_key189 = iprot.readString().decode('utf-8')
_val190 = {}
(_ktype192, _vtype193, _size191 ) = iprot.readMapBegin()
for _i195 in xrange(_size191):
_key196 = iprot.readString().decode('utf-8')
_val197 = iprot.readI64();
_val190[_key196] = _val197
iprot.readMapEnd()
self.failed[_key189] = _val190
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.MAP:
self.complete_ms_avg = {}
(_ktype199, _vtype200, _size198 ) = iprot.readMapBegin()
for _i202 in xrange(_size198):
_key203 = iprot.readString().decode('utf-8')
_val204 = {}
(_ktype206, _vtype207, _size205 ) = iprot.readMapBegin()
for _i209 in xrange(_size205):
_key210 = iprot.readString().decode('utf-8')
_val211 = iprot.readDouble();
_val204[_key210] = _val211
iprot.readMapEnd()
self.complete_ms_avg[_key203] = _val204
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('SpoutStats')
if self.acked is not None:
oprot.writeFieldBegin('acked', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.acked))
for kiter212,viter213 in self.acked.items():
oprot.writeString(kiter212.encode('utf-8'))
oprot.writeMapBegin(TType.STRING, TType.I64, len(viter213))
for kiter214,viter215 in viter213.items():
oprot.writeString(kiter214.encode('utf-8'))
oprot.writeI64(viter215)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.failed is not None:
oprot.writeFieldBegin('failed', TType.MAP, 2)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.failed))
for kiter216,viter217 in self.failed.items():
oprot.writeString(kiter216.encode('utf-8'))
oprot.writeMapBegin(TType.STRING, TType.I64, len(viter217))
for kiter218,viter219 in viter217.items():
oprot.writeString(kiter218.encode('utf-8'))
oprot.writeI64(viter219)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.complete_ms_avg is not None:
oprot.writeFieldBegin('complete_ms_avg', TType.MAP, 3)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.complete_ms_avg))
for kiter220,viter221 in self.complete_ms_avg.items():
oprot.writeString(kiter220.encode('utf-8'))
oprot.writeMapBegin(TType.STRING, TType.DOUBLE, len(viter221))
for kiter222,viter223 in viter221.items():
oprot.writeString(kiter222.encode('utf-8'))
oprot.writeDouble(viter223)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.acked is None:
raise TProtocol.TProtocolException(message='Required field acked is unset!')
if self.failed is None:
raise TProtocol.TProtocolException(message='Required field failed is unset!')
if self.complete_ms_avg is None:
raise TProtocol.TProtocolException(message='Required field complete_ms_avg is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ExecutorSpecificStats:
"""
Attributes:
- bolt
- spout
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'bolt', (BoltStats, BoltStats.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'spout', (SpoutStats, SpoutStats.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.bolt) + hash(self.spout)
def __init__(self, bolt=None, spout=None,):
self.bolt = bolt
self.spout = spout
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.bolt = BoltStats()
self.bolt.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.spout = SpoutStats()
self.spout.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ExecutorSpecificStats')
if self.bolt is not None:
oprot.writeFieldBegin('bolt', TType.STRUCT, 1)
self.bolt.write(oprot)
oprot.writeFieldEnd()
if self.spout is not None:
oprot.writeFieldBegin('spout', TType.STRUCT, 2)
self.spout.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ExecutorStats:
"""
Attributes:
- emitted
- transferred
- specific
"""
thrift_spec = (
None, # 0
(1, TType.MAP, 'emitted', (TType.STRING,None,TType.MAP,(TType.STRING,None,TType.I64,None)), None, ), # 1
(2, TType.MAP, 'transferred', (TType.STRING,None,TType.MAP,(TType.STRING,None,TType.I64,None)), None, ), # 2
(3, TType.STRUCT, 'specific', (ExecutorSpecificStats, ExecutorSpecificStats.thrift_spec), None, ), # 3
)
def __hash__(self):
return 0 + hash(self.emitted) + hash(self.transferred) + hash(self.specific)
def __init__(self, emitted=None, transferred=None, specific=None,):
self.emitted = emitted
self.transferred = transferred
self.specific = specific
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.emitted = {}
(_ktype225, _vtype226, _size224 ) = iprot.readMapBegin()
for _i228 in xrange(_size224):
_key229 = iprot.readString().decode('utf-8')
_val230 = {}
(_ktype232, _vtype233, _size231 ) = iprot.readMapBegin()
for _i235 in xrange(_size231):
_key236 = iprot.readString().decode('utf-8')
_val237 = iprot.readI64();
_val230[_key236] = _val237
iprot.readMapEnd()
self.emitted[_key229] = _val230
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.MAP:
self.transferred = {}
(_ktype239, _vtype240, _size238 ) = iprot.readMapBegin()
for _i242 in xrange(_size238):
_key243 = iprot.readString().decode('utf-8')
_val244 = {}
(_ktype246, _vtype247, _size245 ) = iprot.readMapBegin()
for _i249 in xrange(_size245):
_key250 = iprot.readString().decode('utf-8')
_val251 = iprot.readI64();
_val244[_key250] = _val251
iprot.readMapEnd()
self.transferred[_key243] = _val244
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.specific = ExecutorSpecificStats()
self.specific.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ExecutorStats')
if self.emitted is not None:
oprot.writeFieldBegin('emitted', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.emitted))
for kiter252,viter253 in self.emitted.items():
oprot.writeString(kiter252.encode('utf-8'))
oprot.writeMapBegin(TType.STRING, TType.I64, len(viter253))
for kiter254,viter255 in viter253.items():
oprot.writeString(kiter254.encode('utf-8'))
oprot.writeI64(viter255)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.transferred is not None:
oprot.writeFieldBegin('transferred', TType.MAP, 2)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.transferred))
for kiter256,viter257 in self.transferred.items():
oprot.writeString(kiter256.encode('utf-8'))
oprot.writeMapBegin(TType.STRING, TType.I64, len(viter257))
for kiter258,viter259 in viter257.items():
oprot.writeString(kiter258.encode('utf-8'))
oprot.writeI64(viter259)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.specific is not None:
oprot.writeFieldBegin('specific', TType.STRUCT, 3)
self.specific.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.emitted is None:
raise TProtocol.TProtocolException(message='Required field emitted is unset!')
if self.transferred is None:
raise TProtocol.TProtocolException(message='Required field transferred is unset!')
if self.specific is None:
raise TProtocol.TProtocolException(message='Required field specific is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ExecutorInfo:
"""
Attributes:
- task_start
- task_end
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'task_start', None, None, ), # 1
(2, TType.I32, 'task_end', None, None, ), # 2
)
def __hash__(self):
return 0 + hash(self.task_start) + hash(self.task_end)
def __init__(self, task_start=None, task_end=None,):
self.task_start = task_start
self.task_end = task_end
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.task_start = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.task_end = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ExecutorInfo')
if self.task_start is not None:
oprot.writeFieldBegin('task_start', TType.I32, 1)
oprot.writeI32(self.task_start)
oprot.writeFieldEnd()
if self.task_end is not None:
oprot.writeFieldBegin('task_end', TType.I32, 2)
oprot.writeI32(self.task_end)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.task_start is None:
raise TProtocol.TProtocolException(message='Required field task_start is unset!')
if self.task_end is None:
raise TProtocol.TProtocolException(message='Required field task_end is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
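# Illustrative note: an ExecutorInfo names a span of task ids; an executor
# covering tasks 3 through 5 would be (inclusiveness of the endpoints is an
# assumption based on the field names):
#
#   info = ExecutorInfo(task_start=3, task_end=5)
#   info.validate()  # both endpoints are required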
class ExecutorSummary:
"""
Attributes:
- executor_info
- component_id
- host
- port
- uptime_secs
- stats
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'executor_info', (ExecutorInfo, ExecutorInfo.thrift_spec), None, ), # 1
(2, TType.STRING, 'component_id', None, None, ), # 2
(3, TType.STRING, 'host', None, None, ), # 3
(4, TType.I32, 'port', None, None, ), # 4
(5, TType.I32, 'uptime_secs', None, None, ), # 5
None, # 6
(7, TType.STRUCT, 'stats', (ExecutorStats, ExecutorStats.thrift_spec), None, ), # 7
)
def __hash__(self):
return 0 + hash(self.executor_info) + hash(self.component_id) + hash(self.host) + hash(self.port) + hash(self.uptime_secs) + hash(self.stats)
def __init__(self, executor_info=None, component_id=None, host=None, port=None, uptime_secs=None, stats=None,):
self.executor_info = executor_info
self.component_id = component_id
self.host = host
self.port = port
self.uptime_secs = uptime_secs
self.stats = stats
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.executor_info = ExecutorInfo()
self.executor_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.component_id = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.host = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.port = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.uptime_secs = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRUCT:
self.stats = ExecutorStats()
self.stats.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ExecutorSummary')
if self.executor_info is not None:
oprot.writeFieldBegin('executor_info', TType.STRUCT, 1)
self.executor_info.write(oprot)
oprot.writeFieldEnd()
if self.component_id is not None:
oprot.writeFieldBegin('component_id', TType.STRING, 2)
oprot.writeString(self.component_id.encode('utf-8'))
oprot.writeFieldEnd()
if self.host is not None:
oprot.writeFieldBegin('host', TType.STRING, 3)
oprot.writeString(self.host.encode('utf-8'))
oprot.writeFieldEnd()
if self.port is not None:
oprot.writeFieldBegin('port', TType.I32, 4)
oprot.writeI32(self.port)
oprot.writeFieldEnd()
if self.uptime_secs is not None:
oprot.writeFieldBegin('uptime_secs', TType.I32, 5)
oprot.writeI32(self.uptime_secs)
oprot.writeFieldEnd()
if self.stats is not None:
oprot.writeFieldBegin('stats', TType.STRUCT, 7)
self.stats.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.executor_info is None:
raise TProtocol.TProtocolException(message='Required field executor_info is unset!')
if self.component_id is None:
raise TProtocol.TProtocolException(message='Required field component_id is unset!')
if self.host is None:
raise TProtocol.TProtocolException(message='Required field host is unset!')
if self.port is None:
raise TProtocol.TProtocolException(message='Required field port is unset!')
if self.uptime_secs is None:
raise TProtocol.TProtocolException(message='Required field uptime_secs is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TopologyInfo:
"""
Attributes:
- id
- name
- uptime_secs
- executors
- status
- errors
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
(2, TType.STRING, 'name', None, None, ), # 2
(3, TType.I32, 'uptime_secs', None, None, ), # 3
(4, TType.LIST, 'executors', (TType.STRUCT,(ExecutorSummary, ExecutorSummary.thrift_spec)), None, ), # 4
(5, TType.STRING, 'status', None, None, ), # 5
(6, TType.MAP, 'errors', (TType.STRING,None,TType.LIST,(TType.STRUCT,(ErrorInfo, ErrorInfo.thrift_spec))), None, ), # 6
)
def __hash__(self):
return 0 + hash(self.id) + hash(self.name) + hash(self.uptime_secs) + hash(self.executors) + hash(self.status) + hash(self.errors)
def __init__(self, id=None, name=None, uptime_secs=None, executors=None, status=None, errors=None,):
self.id = id
self.name = name
self.uptime_secs = uptime_secs
self.executors = executors
self.status = status
self.errors = errors
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.uptime_secs = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.executors = []
(_etype263, _size260) = iprot.readListBegin()
for _i264 in xrange(_size260):
_elem265 = ExecutorSummary()
_elem265.read(iprot)
self.executors.append(_elem265)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.status = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.MAP:
self.errors = {}
(_ktype267, _vtype268, _size266 ) = iprot.readMapBegin()
for _i270 in xrange(_size266):
_key271 = iprot.readString().decode('utf-8')
_val272 = []
(_etype276, _size273) = iprot.readListBegin()
for _i277 in xrange(_size273):
_elem278 = ErrorInfo()
_elem278.read(iprot)
_val272.append(_elem278)
iprot.readListEnd()
self.errors[_key271] = _val272
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TopologyInfo')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8'))
oprot.writeFieldEnd()
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 2)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
if self.uptime_secs is not None:
oprot.writeFieldBegin('uptime_secs', TType.I32, 3)
oprot.writeI32(self.uptime_secs)
oprot.writeFieldEnd()
if self.executors is not None:
oprot.writeFieldBegin('executors', TType.LIST, 4)
oprot.writeListBegin(TType.STRUCT, len(self.executors))
for iter279 in self.executors:
iter279.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.status is not None:
oprot.writeFieldBegin('status', TType.STRING, 5)
oprot.writeString(self.status.encode('utf-8'))
oprot.writeFieldEnd()
if self.errors is not None:
oprot.writeFieldBegin('errors', TType.MAP, 6)
oprot.writeMapBegin(TType.STRING, TType.LIST, len(self.errors))
for kiter280,viter281 in self.errors.items():
oprot.writeString(kiter280.encode('utf-8'))
oprot.writeListBegin(TType.STRUCT, len(viter281))
for iter282 in viter281:
iter282.write(oprot)
oprot.writeListEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.id is None:
raise TProtocol.TProtocolException(message='Required field id is unset!')
if self.name is None:
raise TProtocol.TProtocolException(message='Required field name is unset!')
if self.uptime_secs is None:
raise TProtocol.TProtocolException(message='Required field uptime_secs is unset!')
if self.executors is None:
raise TProtocol.TProtocolException(message='Required field executors is unset!')
if self.status is None:
raise TProtocol.TProtocolException(message='Required field status is unset!')
if self.errors is None:
raise TProtocol.TProtocolException(message='Required field errors is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class KillOptions:
"""
Attributes:
- wait_secs
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'wait_secs', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.wait_secs)
def __init__(self, wait_secs=None,):
self.wait_secs = wait_secs
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.wait_secs = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('KillOptions')
if self.wait_secs is not None:
oprot.writeFieldBegin('wait_secs', TType.I32, 1)
oprot.writeI32(self.wait_secs)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class RebalanceOptions:
"""
Attributes:
- wait_secs
- num_workers
- num_executors
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'wait_secs', None, None, ), # 1
(2, TType.I32, 'num_workers', None, None, ), # 2
(3, TType.MAP, 'num_executors', (TType.STRING,None,TType.I32,None), None, ), # 3
)
def __hash__(self):
return 0 + hash(self.wait_secs) + hash(self.num_workers) + hash(self.num_executors)
def __init__(self, wait_secs=None, num_workers=None, num_executors=None,):
self.wait_secs = wait_secs
self.num_workers = num_workers
self.num_executors = num_executors
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.wait_secs = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.num_workers = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.MAP:
self.num_executors = {}
(_ktype284, _vtype285, _size283 ) = iprot.readMapBegin()
for _i287 in xrange(_size283):
_key288 = iprot.readString().decode('utf-8')
_val289 = iprot.readI32();
self.num_executors[_key288] = _val289
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('RebalanceOptions')
if self.wait_secs is not None:
oprot.writeFieldBegin('wait_secs', TType.I32, 1)
oprot.writeI32(self.wait_secs)
oprot.writeFieldEnd()
if self.num_workers is not None:
oprot.writeFieldBegin('num_workers', TType.I32, 2)
oprot.writeI32(self.num_workers)
oprot.writeFieldEnd()
if self.num_executors is not None:
oprot.writeFieldBegin('num_executors', TType.MAP, 3)
oprot.writeMapBegin(TType.STRING, TType.I32, len(self.num_executors))
for kiter290,viter291 in self.num_executors.items():
oprot.writeString(kiter290.encode('utf-8'))
oprot.writeI32(viter291)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
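# Illustrative sketch (component names hypothetical): every RebalanceOptions
# field is optional, so a partial request is valid:
#
#   opts = RebalanceOptions(num_workers=4,
#                           num_executors={'split-bolt': 8, 'count-bolt': 4})
#   opts.validate()  # no-op: RebalanceOptions has no required fields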
class SubmitOptions:
"""
Attributes:
- initial_status
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'initial_status', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.initial_status)
def __init__(self, initial_status=None,):
self.initial_status = initial_status
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.initial_status = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('SubmitOptions')
if self.initial_status is not None:
oprot.writeFieldBegin('initial_status', TType.I32, 1)
oprot.writeI32(self.initial_status)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.initial_status is None:
raise TProtocol.TProtocolException(message='Required field initial_status is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class DRPCRequest:
"""
Attributes:
- func_args
- request_id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'func_args', None, None, ), # 1
(2, TType.STRING, 'request_id', None, None, ), # 2
)
def __hash__(self):
return 0 + hash(self.func_args) + hash(self.request_id)
def __init__(self, func_args=None, request_id=None,):
self.func_args = func_args
self.request_id = request_id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.func_args = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.request_id = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('DRPCRequest')
if self.func_args is not None:
oprot.writeFieldBegin('func_args', TType.STRING, 1)
oprot.writeString(self.func_args.encode('utf-8'))
oprot.writeFieldEnd()
if self.request_id is not None:
oprot.writeFieldBegin('request_id', TType.STRING, 2)
oprot.writeString(self.request_id.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.func_args is None:
raise TProtocol.TProtocolException(message='Required field func_args is unset!')
if self.request_id is None:
raise TProtocol.TProtocolException(message='Required field request_id is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
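# Illustrative sketch (values hypothetical): a DRPCRequest pairs the function
# arguments with the id the DRPC server uses to route the result back:
#
#   req = DRPCRequest(func_args='some-args', request_id='req-1')
#   req.validate()  # both fields are required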
class DRPCExecutionException(Exception):
"""
Attributes:
- msg
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.msg)
def __init__(self, msg=None,):
self.msg = msg
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.msg = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('DRPCExecutionException')
if self.msg is not None:
oprot.writeFieldBegin('msg', TType.STRING, 1)
oprot.writeString(self.msg.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.msg is None:
raise TProtocol.TProtocolException(message='Required field msg is unset!')
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
|
thorkd1t/lurkbot
|
refs/heads/master
|
lurkbotclass.py
|
1
|
# -*- coding: utf_8 -*-
import socket
import ssl
import time
import re
import random
from threading import *
import sqlite3
import string
import joinlist
import os.path
from subprocess import Popen, CREATE_NEW_CONSOLE
import shutil
from itertools import count
class LurkBot(object):
_ids = count(0)
    CREATE_NEW_CONSOLE = 0x00000010  # duplicates subprocess.CREATE_NEW_CONSOLE imported above; the Popen call below resolves the module-level name, so this class attribute is unused
def __init__(self, name):
print "hi"
self.name = name
def tablecheck(self, name):
tempy = name + ".db"
connx = sqlite3.connect(tempy)
cu = connx.cursor()
try:
blah = "select * from chat"
cu.execute(blah)
connx.close()
print "table exists already, skipping"
return True
        except Exception as e:
print e
            print name
            # per-channel chat log schema
            firststart = """create table if not exists chat (
                usr text,
                mesg text,
                id integer primary key,
                flags text,
                channel text,
                date_time text
            );"""
print "firststart ran"
time.sleep(2)
cu.execute(firststart)
date = time.strftime("%Y-%m-%dT%H:%M:%S")
print date
strings = "insert into chat"
cu.execute(strings + " values (?,?,?,?,?,?)",
("username", "message", 1, "flags", "channel", date))
connx.commit()
connx.close()
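    # Illustrative sketch (channel name hypothetical): rows written by ircJoin()
    # below can be read back from the per-channel database like so:
    #
    #   conn = sqlite3.connect("somechannel.db")
    #   for usr, mesg, date_time in conn.execute(
    #           "select usr, mesg, date_time from chat order by id"):
    #       print usr, mesg, date_time
    #   conn.close()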
def ircJoin(self, channohash = joinlist.joinlist[0]):
os.system("title " + channohash)
print channohash
self.name = channohash
if not self.tablecheck(channohash):
print "creating table for channel : " + channohash
re.purge() # housekeeping?
server = "irc.chat.twitch.tv"
port = 443
tempy = channohash + ".db"
conn = sqlite3.connect(tempy)
c = conn.cursor()
# init socket
startplz = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
# connect to said socket
startplz.connect((server, port))
# wrap in ssl
irc = ssl.wrap_socket(startplz)
##################################
# login #
# irc.send("PASS " + botcfg.oa + '\r\n')
irc.send("NICK " + "justinfan420" + '\r\n')
# capabilities request
irc.send("CAP REQ :twitch.tv/membership" + "\r\n")
# and join channel
if not os.path.exists(channohash):
os.mkdir(channohash)
irc.send("JOIN " + "#" + channohash + '\r\n')
print "joining " + channohash
else:
print "folder exists, is another bot active in this channel?"
time.sleep(20)
quit()
##############################################################
irc.send("CAP REQ :twitch.tv/tags" + "\r\n")
########"@"@"@"@"@"@"@"@"@"@""@"@
CHAT_MSG=re.compile(r"@.+?PRIVMSG.+?(:){1}") # New (for irc flags mode)
messagetemp = ""
messagcount = 0
# print str(self.id) + " class instances running.."
while True:
#gets output from IRC server
#data = irc.recv(1204)
            try:
                data = irc.recv(1204)
            except Exception as e:
                print e
                continue  # no fresh data this pass; don't reuse a stale (or undefined) buffer
# ping/pong
if data == "PING :tmi.twitch.tv\r\n":
irc.send("PONG :tmi.twitch.tv\r\n")
user = data.split('!', 1)[-1]
user = user.split('@')[0]
message = CHAT_MSG.sub("", data)
flags = data.split(':', 1)[0]
            if messagetemp == message and (message == "" or message is None):  # repeated empty reads mean the connection dropped
messagcount += 1
if messagcount > 10:
print "dropped connection, to " + channohash + " reconnecting..."
thepath = os.path.abspath("gogogo.py")
cmmd = 'python ' + 'gogogo.py ' + channohash
if os.path.exists(channohash):
shutil.rmtree(channohash)
messagcount = 0
Popen(cmmd, creationflags=CREATE_NEW_CONSOLE)
quit()
messagetemp = message
print (user + ": " + message) # new (for flags mode)
try:
                unicode(message[0:5], "utf-8")  # probe: raises UnicodeDecodeError on non-UTF-8 bytes; the except branch below handles that case
if "tmi.twitch.tv" not in (user) and "tmi.twitch.tv" not in (message) and (user) != "":
if "jtv MODE" not in (user) and "justinfan" not in (user) and (user) != "twitchnotify":
date = time.strftime("%Y-%m-%dT%H:%M:%S")
blah = "select count (*) from chat"
c.execute(blah)
temp = c.fetchone()
#print temp[0]
temp = temp[0] + 1
blah = "insert into chat"
c.execute(blah + " values (?,?,?,?,?,?)",
(user, message, temp, flags, channohash, date))
conn.commit()
except Exception as e:
if (user) != "":
date = time.strftime("%Y-%m-%dT%H:%M:%S")
blah = "select count (*) from chat"
c.execute(blah)
temp = c.fetchone()
#print temp[0]
temp = temp[0] + 1
conn.text_factory = str  # sqlite3 expects a callable here; str lets raw 8-bit bytestrings through
blah = "insert into chat"
c.execute(blah + " values (?,?,?,?,?,?)",
(user, message, temp, flags, channohash, date))
conn.commit()
time.sleep(0.1)
############################
if __name__ == "__main__":
print "__main__ is running"
if os.path.exists(joinlist.joinlist[0]):
shutil.rmtree(joinlist.joinlist[0])
x = LurkBot(joinlist.joinlist[0])
x.ircJoin(joinlist.joinlist[0])
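# Illustrative sketch (not part of the original bot): reading back the rows
# that ircJoin logs into <channel>.db; the helper name is hypothetical.
# def dump_chat(channel):
#     conn = sqlite3.connect(channel + ".db")
#     for usr, mesg, dt in conn.execute("select usr, mesg, date_time from chat"):
#         print usr, mesg, dt
#     conn.close()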
|
ankanaan/chimera
|
refs/heads/master
|
src/chimera/instruments/faketelescope.py
|
1
|
#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# chimera - observatory automation system
# Copyright (C) 2006-2007 P. Henrique Silva <henrique@astro.ufsc.br>
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
import time
import threading
from chimera.interfaces.telescope import SlewRate, TelescopeStatus
from chimera.instruments.telescope import TelescopeBase
from chimera.core.lock import lock
from chimera.core.site import Site
from chimera.util.coord import Coord
from chimera.util.position import Position, Epoch
class FakeTelescope (TelescopeBase):
def __init__(self):
TelescopeBase.__init__(self)
self.__slewing = False
self._az = Coord.fromDMS(0)
self._alt = Coord.fromDMS(70)
self._slewing = False
self._tracking = True
self._parked = False
self._abort = threading.Event()
try:
self._site = self.getManager().getProxy("/Site/0")
self._gotSite = True
except Exception:
self._site = Site()
self._gotSite = False
self._setRaDecFromAltAz()
def _getSite(self):
if self._gotSite:
self._site._transferThread()
return self._site
else:
try:
self._site = self.getManager().getProxy("/Site/0")
self._gotSite = True
except Exception:
pass
return self._site
def _setRaDecFromAltAz(self):
raDec = self._getSite().altAzToRaDec(
Position.fromAltAz(self._alt, self._az))
self._ra = raDec.ra
self._dec = raDec.dec
def _setAltAzFromRaDec(self):
altAz = self._getSite().raDecToAltAz(
Position.fromRaDec(self._ra, self._dec))
self._alt = altAz.alt
self._az = altAz.az
def __start__(self):
self.setHz(1)
@lock
def control(self):
self._getSite()
if not self._slewing:
if self._tracking:
self._setAltAzFromRaDec()
else:
self._setRaDecFromAltAz()
return True
def slewToRaDec(self, position):
if not isinstance(position, Position):
position = Position.fromRaDec(
position[0], position[1], epoch=Epoch.J2000)
self._validateRaDec(position)
self.slewBegin(position)
ra_steps = position.ra - self.getRa()
ra_steps = float(ra_steps / 10.0)
dec_steps = position.dec - self.getDec()
dec_steps = float(dec_steps / 10.0)
self._slewing = True
self._abort.clear()
status = TelescopeStatus.OK
t = 0
while t < 5:
if self._abort.isSet():
self._slewing = False
status = TelescopeStatus.ABORTED
break
self._ra += ra_steps
self._dec += dec_steps
self._setAltAzFromRaDec()
time.sleep(0.5)
t += 0.5
self._slewing = False
self.slewComplete(self.getPositionRaDec(), status)
@lock
def slewToAltAz(self, position):
if not isinstance(position, Position):
position = Position.fromAltAz(*position)
self._validateAltAz(position)
self.slewBegin(self._getSite().altAzToRaDec(position))
alt_steps = position.alt - self.getAlt()
alt_steps = float(alt_steps / 10.0)
az_steps = position.az - self.getAz()
az_steps = float(az_steps / 10.0)
self._slewing = True
self._abort.clear()
status = TelescopeStatus.OK
t = 0
while t < 5:
if self._abort.isSet():
self._slewing = False
status = TelescopeStatus.ABORTED
break
self._alt += alt_steps
self._az += az_steps
self._setRaDecFromAltAz()
time.sleep(0.5)
t += 0.5
self._slewing = False
self.slewComplete(self.getPositionRaDec(), status)
def abortSlew(self):
self._abort.set()
while self.isSlewing():
time.sleep(0.1)
self._slewing = False
def isSlewing(self):
return self._slewing
@lock
def moveEast(self, offset, rate=SlewRate.MAX):
self._slewing = True
pos = self.getPositionRaDec()
pos = Position.fromRaDec(pos.ra + Coord.fromAS(offset), pos.dec)
self.slewBegin(pos)
self._ra += Coord.fromAS(offset)
self._setAltAzFromRaDec()
self._slewing = False
self.slewComplete(self.getPositionRaDec(), TelescopeStatus.OK)
@lock
def moveWest(self, offset, rate=SlewRate.MAX):
self._slewing = True
pos = self.getPositionRaDec()
pos = Position.fromRaDec(pos.ra + Coord.fromAS(-offset), pos.dec)
self.slewBegin(pos)
self._ra += Coord.fromAS(-offset)
self._setAltAzFromRaDec()
self._slewing = False
self.slewComplete(self.getPositionRaDec(), TelescopeStatus.OK)
@lock
def moveNorth(self, offset, rate=SlewRate.MAX):
self._slewing = True
pos = self.getPositionRaDec()
pos = Position.fromRaDec(pos.ra, pos.dec + Coord.fromAS(offset))
self.slewBegin(pos)
self._dec += Coord.fromAS(offset)
self._setAltAzFromRaDec()
self._slewing = False
self.slewComplete(self.getPositionRaDec(), TelescopeStatus.OK)
@lock
def moveSouth(self, offset, rate=SlewRate.MAX):
self._slewing = True
pos = self.getPositionRaDec()
pos = Position.fromRaDec(pos.ra, pos.dec + Coord.fromAS(-offset))
self.slewBegin(pos)
self._dec += Coord.fromAS(-offset)
self._setAltAzFromRaDec()
self._slewing = False
self.slewComplete(self.getPositionRaDec(), TelescopeStatus.OK)
@lock
def getRa(self):
return self._ra
@lock
def getDec(self):
return self._dec
@lock
def getAz(self):
return self._az
@lock
def getAlt(self):
return self._alt
@lock
def getPositionRaDec(self):
return Position.fromRaDec(self.getRa(), self.getDec())
@lock
def getPositionAltAz(self):
return Position.fromAltAz(self.getAlt(), self.getAz())
@lock
def getTargetRaDec(self):
return Position.fromRaDec(self.getRa(), self.getDec())
@lock
def getTargetAltAz(self):
return Position.fromAltAz(self.getAlt(), self.getAz())
@lock
def syncRaDec(self, position):
if not isinstance(position, Position):
position = Position.fromRaDec(*position)
self._ra = position.ra
self._dec = position.dec
@lock
def park(self):
self.log.info("Parking...")
self._parked = True
self.parkComplete()
@lock
def unpark(self):
self.log.info("Unparking...")
self._parked = False
self.unparkComplete()
def isParked(self):
return self._parked
@lock
def startTracking(self):
self._tracking = True
@lock
def stopTracking(self):
self._tracking = False
def isTracking(self):
return self._tracking
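# Minimal usage sketch (illustrative only; in a real chimera deployment the
# instrument is created and proxied by the manager rather than used directly):
#
#   tel = FakeTelescope()
#   tel.slewToRaDec(Position.fromRaDec('10:00:00', '20:00:00'))
#   tel.getPositionRaDec()   # steps toward the requested coordinates
#   tel.park()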
|
freudFlintstone/pjbank-python-sdk
|
refs/heads/master
|
tests/test_recebimentos.py
|
1
|
# the inclusion of the tests module is not meant to offer best practices for
# testing in general, but rather to support the `find_packages` example in
# setup.py that excludes installing the "tests" package
import unittest
import pickle
import datetime
import random
from pjbank import Boleto
from dados import dados
class DadosTeste(object):
def __init__(self):
super(DadosTeste, self).__init__()
self._dados = dados
@property
def dados(self):
return self._dados
class BoletoTestCase(unittest.TestCase):
def setUp(self):
self.dados = DadosTeste().dados
creds = self.dados['recebimentos']['boleto']['credenciamento']
self.boleto = Boleto(creds['credencial'], creds['chave'])
def test_dados(self):
self.assertGreaterEqual(len(self.dados), 0)
self.assertTrue(self.boleto.credencial)
self.assertTrue(self.boleto.chave)
def emitir_boleto(self, dados, random_pedido=False):
if random_pedido:
dados['pedido_numero'] = random.randint(1000,99999)
return self.boleto.emitir(dados)
def test_emitir_boleto(self):
dados_emis = self.dados['recebimentos']['boleto']['emitir']
data = (datetime.date.today()+datetime.timedelta(days=1))
dados_emis['vencimento'] = data.strftime('%m/%d/%Y')
res = self.emitir_boleto(dados_emis, True)
response = res.json()
self.assertIn("id_unico", response)
self.assertIn("nossonumero", response)
self.assertIn("banco_numero", response)
self.assertIn("linkBoleto", response)
self.assertIn("linkGrupo", response)
self.assertIn("linhaDigitavel", response)
def test_editar_boleto(self):
dados_emis = self.dados['recebimentos']['boleto']['emitir']
bol = self.emitir_boleto(dados_emis, True)
bol.r = bol.json()
self.assertEqual(bol.r['status'], '201')
dados_emis['valor'] = 50.50
dados_emis['pedido_numero'] = bol.r['pedido_numero']
bol2 = self.emitir_boleto(dados_emis, False)
bol2.r = bol2.json()
self.assertEqual(bol.r["linkBoleto"], bol2.r["linkBoleto"])
self.assertEqual(bol.r["linkGrupo"], bol2.r["linkGrupo"])
self.assertEqual(bol.r["linhaDigitavel"], bol2.r["linhaDigitavel"])
|
nylas/sync-engine
|
refs/heads/master
|
tests/general/test_concurrency.py
|
3
|
import time
import pytest
from gevent import GreenletExit
from gevent import socket
from inbox.util.concurrency import retry_with_logging
class MockLogger(object):
def __init__(self):
self.call_count = 0
def error(self, *args, **kwargs):
self.call_count += 1
class FailingFunction(object):
__name__ = 'FailingFunction'
def __init__(self, exc_type, max_executions=3, delay=0):
self.exc_type = exc_type
self.max_executions = max_executions
self.delay = delay
self.call_count = 0
def __call__(self):
self.call_count += 1
time.sleep(self.delay)
if self.call_count < self.max_executions:
raise self.exc_type
return
@pytest.mark.usefixtures('mock_gevent_sleep')
def test_retry_with_logging():
logger = MockLogger()
failing_function = FailingFunction(ValueError)
retry_with_logging(failing_function, logger=logger, backoff_delay=0)
assert logger.call_count == failing_function.max_executions - 1
assert failing_function.call_count == failing_function.max_executions
def test_no_logging_on_greenlet_exit():
logger = MockLogger()
failing_function = FailingFunction(GreenletExit)
with pytest.raises(GreenletExit):
retry_with_logging(failing_function, logger=logger)
assert logger.call_count == 0
assert failing_function.call_count == 1
def test_selective_retry():
logger = MockLogger()
failing_function = FailingFunction(ValueError)
with pytest.raises(ValueError):
retry_with_logging(failing_function, logger=logger,
fail_classes=[ValueError])
assert logger.call_count == 0
assert failing_function.call_count == 1
@pytest.mark.usefixtures('mock_gevent_sleep')
def test_no_logging_until_many_transient_error():
logger = MockLogger()
failing_function = FailingFunction(socket.error, max_executions=2)
retry_with_logging(failing_function, logger=logger)
assert logger.call_count == 0
assert failing_function.call_count == 2
failing_function = FailingFunction(socket.error, max_executions=21)
retry_with_logging(failing_function, logger=logger)
assert logger.call_count == 1
assert failing_function.call_count == 21
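# Quick illustration of the contract exercised above, using only parameters
# already seen in these tests (logger, backoff_delay): a flaky callable that
# succeeds on its third call is retried to completion rather than raising.
if __name__ == '__main__':
    flaky = FailingFunction(ValueError, max_executions=3)
    retry_with_logging(flaky, logger=MockLogger(), backoff_delay=0)
    assert flaky.call_count == 3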
|
sharph/lifx-python
|
refs/heads/master
|
lifx/lifx.py
|
1
|
from . import network
from . import packetcodec
import socket
from binascii import hexlify, unhexlify
from datetime import datetime
import struct
lights = {}
def inttohex(n):
return str(hexlify(struct.pack('>H', n)), encoding='utf-8')
class LIFXBulb:
def __init__(self, lightstatus):
self.recv_lightstatus(lightstatus)
def __str__(self):
return "<LIFXBulb %s hue:%s sat:%s bright:%s kelvin:%s on:%s>" % \
(self.get_addr(),
inttohex(self.hue),
inttohex(self.saturation),
inttohex(self.brightness),
self.kelvin,
self.power)
def get_addr(self):
return str(hexlify(self.addr), encoding='utf-8')
def deliverpacket(self, packet):
if isinstance(packet.payload, packetcodec.LightStatusPayload):
self.recv_lightstatus(packet)
elif isinstance(packet.payload, packetcodec.PowerStatePayload):
self.recv_powerstate(packet)
elif isinstance(packet.payload, packetcodec.BulbLabelPayload):
self.recv_bulblabelstate(packet)
elif isinstance(packet.payload, packetcodec.TimeStatePayload):
self.recv_timestate(packet)
elif isinstance(packet.payload, packetcodec.VersionStatePayload):
self.recv_versionstate(packet)
elif isinstance(packet.payload, packetcodec.InfoStatePayload):
self.recv_infostate(packet)
def recv_lightstatus(self, lightstatus):
self.addr = lightstatus.target
self.hue = lightstatus.payload.data['hue']
self.saturation = lightstatus.payload.data['saturation']
self.brightness = lightstatus.payload.data['brightness']
self.kelvin = lightstatus.payload.data['kelvin']
self.dim = lightstatus.payload.data['dim']
if lightstatus.payload.data['power'] > 0:
self.power = True
else:
self.power = False
self.bulb_label = str(lightstatus.payload.data['bulb_label'],
encoding='utf-8').strip('\00')
self.tags = lightstatus.payload.data['tags']
def recv_powerstate(self, powerstate):
if powerstate.payload.data['onoff'] > 0:
self.power = True
else:
self.power = False
def recv_bulblabelstate(self, labelstate):
self.bulb_label = str(labelstate.payload.data['bulb_label'],
encoding='utf-8').strip('\00')
def recv_timestate(self, timestate):
self.time = timestate.payload.data['time']
self.update_datetime()
def recv_versionstate(self, versionstate):
self.vendor = versionstate.payload.data['vendor']
self.product = versionstate.payload.data['product']
self.version = versionstate.payload.data['version']
def recv_infostate(self, infostate):
self.time = infostate.payload.data['time']
self.uptime = infostate.payload.data['uptime']
self.downtime = infostate.payload.data['downtime']
self.update_datetime()
def get_state(self):
clear_buffer()
p = packetcodec.Packet(packetcodec.GetLightStatePayload())
p.target = self.addr
network.sendpacket(p)
listen_and_interpret(5, packetcodec.LightStatusPayload, self.addr)
def set_power(self, power):
set_power(self.addr, power)
#listen_and_interpret(5, packetcodec.PowerStatePayload, self.addr)
def set_color(self, hue, saturation, brightness, kelvin, fade_time):
set_color(self.addr, hue, saturation, brightness, kelvin, fade_time)
def get_label(self):
clear_buffer()
p = packetcodec.Packet(packetcodec.GetBulbLabelPayload())
p.target = self.addr
network.sendpacket(p)
listen_and_interpret(5, packetcodec.BulbLabelPayload, self.addr)
def set_label(self, label):
label = bytearray(label, encoding="utf-8")[0:32]
if len(label) == 0:
return
clear_buffer()
p = packetcodec.Packet(packetcodec.SetBulbLabelPayload())
p.payload.data['bulb_label'] = label
p.target = self.addr
network.sendpacket(p)
clear_buffer()
def update_datetime(self):
self.datetime = datetime.fromtimestamp(self.time / 1e+9)
def get_time(self):
p = packetcodec.Packet(packetcodec.GetTimeStatePayload())
p.target = self.addr
clear_buffer()
network.sendpacket(p)
listen_and_interpret(5, packetcodec.TimeStatePayload, self.addr)
def get_version(self):
p = packetcodec.Packet(packetcodec.GetVersionPayload())
p.target = self.addr
clear_buffer()
network.sendpacket(p)
listen_and_interpret(5, packetcodec.VersionStatePayload, self.addr)
def get_info(self):
p = packetcodec.Packet(packetcodec.GetInfoPayload())
p.target = self.addr
clear_buffer()
network.sendpacket(p)
listen_and_interpret(5, packetcodec.InfoStatePayload, self.addr)
def sanitize_addr(addr):
if len(addr) > 6:
return unhexlify(bytes(addr, encoding='utf-8'))
return addr
def set_color(addr, hue, saturation, brightness, kelvin, fade_time):
addr = sanitize_addr(addr)
clear_buffer()
p = packetcodec.Packet(packetcodec.SetLightColorPayload())
p.payload.data['hue'] = hue
p.payload.data['saturation'] = saturation
p.payload.data['brightness'] = brightness
p.payload.data['kelvin'] = kelvin
p.payload.data['fade_time'] = fade_time
p.target = addr
network.sendpacket(p)
clear_buffer()
def set_power(addr, power):
addr = sanitize_addr(addr)
clear_buffer()
p = packetcodec.Packet(packetcodec.SetPowerStatePayload())
p.target = addr
if power:
p.payload.data['onoff'] = 0x0001
else:
p.payload.data['onoff'] = 0x0000
network.sendpacket(p)
def pause(sec):
listen_and_interpret(sec)
def listen_and_interpret(sec, desired = None, target = None):
global lights
packets = network.listenforpackets(sec, desired, target)
for p in packets:
if p.target not in lights:
if isinstance(p.payload, packetcodec.LightStatusPayload):
lights[p.target] = LIFXBulb(p)
else:
lights[p.target].deliverpacket(p)
def get_lights():
global lights
p = packetcodec.Packet(packetcodec.GetLightStatePayload())
network.sendpacket(p)
listen_and_interpret(2)
return list(lights.values())
def clear_buffer():
listen_and_interpret(0.05)
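# Minimal usage sketch (assumes at least one bulb is reachable on the LAN;
# discovery listens for about two seconds):
if __name__ == '__main__':
    for bulb in get_lights():
        print(bulb)          # repr shows hue/sat/brightness/kelvin/power
        bulb.set_power(True)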
|
mhotwagner/abackend
|
refs/heads/master
|
abackend-env/lib/python3.5/site-packages/django/contrib/gis/geos/prototypes/geom.py
|
103
|
from ctypes import POINTER, c_char_p, c_int, c_size_t, c_ubyte
from django.contrib.gis.geos.libgeos import CS_PTR, GEOM_PTR
from django.contrib.gis.geos.prototypes.errcheck import (
check_geom, check_minus_one, check_sized_string, check_string, check_zero,
)
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
# This is the return type used by binary output (WKB, HEX) routines.
c_uchar_p = POINTER(c_ubyte)
# We create a simple subclass of c_char_p here because when the response
# type is set to c_char_p, you get a _Python_ string and there's no way
# to access the string's address inside the error checking function.
# In other words, you can't free the memory allocated inside GEOS. Previously,
# the return type would just be omitted and the integer address would be
# used -- but this allows us to be specific in the function definition and
# keeps the reference so it may be free'd.
class geos_char_p(c_char_p):
pass
# ### ctypes generation functions ###
def bin_constructor(func):
"Generates a prototype for binary construction (HEX, WKB) GEOS routines."
func.argtypes = [c_char_p, c_size_t]
func.restype = GEOM_PTR
func.errcheck = check_geom
return func
# HEX & WKB output
def bin_output(func):
"Generates a prototype for the routines that return a sized string."
func.argtypes = [GEOM_PTR, POINTER(c_size_t)]
func.errcheck = check_sized_string
func.restype = c_uchar_p
return func
def geom_output(func, argtypes):
"For GEOS routines that return a geometry."
if argtypes:
func.argtypes = argtypes
func.restype = GEOM_PTR
func.errcheck = check_geom
return func
def geom_index(func):
"For GEOS routines that return geometries from an index."
return geom_output(func, [GEOM_PTR, c_int])
def int_from_geom(func, zero=False):
"Argument is a geometry, return type is an integer."
func.argtypes = [GEOM_PTR]
func.restype = c_int
if zero:
func.errcheck = check_zero
else:
func.errcheck = check_minus_one
return func
def string_from_geom(func):
"Argument is a Geometry, return type is a string."
func.argtypes = [GEOM_PTR]
func.restype = geos_char_p
func.errcheck = check_string
return func
# ### ctypes prototypes ###
# Deprecated creation routines from WKB, HEX, WKT
from_hex = bin_constructor(GEOSFunc('GEOSGeomFromHEX_buf'))
from_wkb = bin_constructor(GEOSFunc('GEOSGeomFromWKB_buf'))
from_wkt = geom_output(GEOSFunc('GEOSGeomFromWKT'), [c_char_p])
# Deprecated output routines
to_hex = bin_output(GEOSFunc('GEOSGeomToHEX_buf'))
to_wkb = bin_output(GEOSFunc('GEOSGeomToWKB_buf'))
to_wkt = string_from_geom(GEOSFunc('GEOSGeomToWKT'))
# The GEOS geometry type, typeid, num_coordinates and number of geometries
geos_normalize = int_from_geom(GEOSFunc('GEOSNormalize'))
geos_type = string_from_geom(GEOSFunc('GEOSGeomType'))
geos_typeid = int_from_geom(GEOSFunc('GEOSGeomTypeId'))
get_dims = int_from_geom(GEOSFunc('GEOSGeom_getDimensions'), zero=True)
get_num_coords = int_from_geom(GEOSFunc('GEOSGetNumCoordinates'))
get_num_geoms = int_from_geom(GEOSFunc('GEOSGetNumGeometries'))
# Geometry creation factories
create_point = geom_output(GEOSFunc('GEOSGeom_createPoint'), [CS_PTR])
create_linestring = geom_output(GEOSFunc('GEOSGeom_createLineString'), [CS_PTR])
create_linearring = geom_output(GEOSFunc('GEOSGeom_createLinearRing'), [CS_PTR])
# Polygon and collection creation routines are special and will not
# have their argument types defined.
create_polygon = geom_output(GEOSFunc('GEOSGeom_createPolygon'), None)
create_collection = geom_output(GEOSFunc('GEOSGeom_createCollection'), None)
# Ring routines
get_extring = geom_output(GEOSFunc('GEOSGetExteriorRing'), [GEOM_PTR])
get_intring = geom_index(GEOSFunc('GEOSGetInteriorRingN'))
get_nrings = int_from_geom(GEOSFunc('GEOSGetNumInteriorRings'))
# Collection Routines
get_geomn = geom_index(GEOSFunc('GEOSGetGeometryN'))
# Cloning
geom_clone = GEOSFunc('GEOSGeom_clone')
geom_clone.argtypes = [GEOM_PTR]
geom_clone.restype = GEOM_PTR
# Destruction routine.
destroy_geom = GEOSFunc('GEOSGeom_destroy')
destroy_geom.argtypes = [GEOM_PTR]
destroy_geom.restype = None
# SRID routines
geos_get_srid = GEOSFunc('GEOSGetSRID')
geos_get_srid.argtypes = [GEOM_PTR]
geos_get_srid.restype = c_int
geos_set_srid = GEOSFunc('GEOSSetSRID')
geos_set_srid.argtypes = [GEOM_PTR, c_int]
geos_set_srid.restype = None
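# Illustrative round trip through the prototypes above (requires a working
# GEOS install; kept behind a main guard so importing stays side-effect free):
if __name__ == '__main__':
    g = from_wkt(b'POINT (1 2)')
    print(geos_type(g))       # -> b'Point' on Python 3
    print(get_num_coords(g))  # -> 1
    destroy_geom(g)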
|
beni55/sympy
|
refs/heads/master
|
sympy/utilities/misc.py
|
24
|
"""Miscellaneous stuff that doesn't really fit anywhere else."""
from __future__ import print_function, division
import sys
import os
from textwrap import fill, dedent
from sympy.core.compatibility import get_function_name
# if you use
# filldedent('''
# the text''')
# a space will be put before the first line because dedent will
# put a \n as the first line and fill replaces \n with spaces
# so we strip off any leading and trailing \n since printed wrapped
# text should not have leading or trailing spaces.
filldedent = lambda s, w=70: '\n' + fill(dedent(str(s)).strip('\n'), width=w)
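# For orientation, a quick example of the behaviour described above:
#   >>> filldedent('''
#   ...     some text''')
#   '\nsome text'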
def rawlines(s):
"""Return a cut-and-pastable string that, when printed, is equivalent
to the input. The string returned is formatted so it can be indented
nicely within tests; in some cases it is wrapped in the dedent
function which has to be imported from textwrap.
Examples
========
Note: because there are characters in the examples below that need
to be escaped because they are themselves within a triple quoted
docstring, expressions below look more complicated than they would
be if they were printed in an interpreter window.
>>> from sympy.utilities.misc import rawlines
>>> from sympy import TableForm
>>> s = str(TableForm([[1, 10]], headings=(None, ['a', 'bee'])))
>>> print(rawlines(s)) # the \\ appears as \ when printed
(
'a bee\\n'
'-----\\n'
'1 10 '
)
>>> print(rawlines('''this
... that'''))
dedent('''\\
this
that''')
>>> print(rawlines('''this
... that
... '''))
dedent('''\\
this
that
''')
>>> s = \"\"\"this
... is a triple '''
... \"\"\"
>>> print(rawlines(s))
dedent(\"\"\"\\
this
is a triple '''
\"\"\")
>>> print(rawlines('''this
... that
... '''))
(
'this\\n'
'that\\n'
' '
)
"""
lines = s.split('\n')
if len(lines) == 1:
return repr(lines[0])
triple = ["'''" in s, '"""' in s]
if any(li.endswith(' ') for li in lines) or '\\' in s or all(triple):
rv = ["("]
# add on the newlines
trailing = s.endswith('\n')
last = len(lines) - 1
for i, li in enumerate(lines):
if i != last or trailing:
rv.append(repr(li)[:-1] + '\\n\'')
else:
rv.append(repr(li))
return '\n '.join(rv) + '\n)'
else:
rv = '\n '.join(lines)
if triple[0]:
return 'dedent("""\\\n %s""")' % rv
else:
return "dedent('''\\\n %s''')" % rv
size = getattr(sys, "maxint", None)
if size is None: # Python 3 doesn't have maxint
size = sys.maxsize
if size > 2**32:
ARCH = "64-bit"
else:
ARCH = "32-bit"
# XXX: PyPy doesn't support hash randomization
HASH_RANDOMIZATION = getattr(sys.flags, 'hash_randomization', False)
_debug_tmp = []
_debug_iter = 0
def debug_decorator(func):
"""If SYMPY_DEBUG is True, it will print a nice execution tree with
arguments and results of all decorated functions, else do nothing.
"""
from sympy import SYMPY_DEBUG
if not SYMPY_DEBUG:
return func
def maketree(f, *args, **kw):
global _debug_tmp
global _debug_iter
oldtmp = _debug_tmp
_debug_tmp = []
_debug_iter += 1
def tree(subtrees):
def indent(s, type=1):
x = s.split("\n")
r = "+-%s\n" % x[0]
for a in x[1:]:
if a == "":
continue
if type == 1:
r += "| %s\n" % a
else:
r += " %s\n" % a
return r
if len(subtrees) == 0:
return ""
f = []
for a in subtrees[:-1]:
f.append(indent(a))
f.append(indent(subtrees[-1], 2))
return ''.join(f)
# If there is a bug and the algorithm enters an infinite loop, enable the
# following lines. It will print the names and parameters of all major functions
# that are called, *before* they are called
#from sympy.core.compatibility import reduce
#print("%s%s %s%s" % (_debug_iter, reduce(lambda x, y: x + y, \
# map(lambda x: '-', range(1, 2 + _debug_iter))), get_function_name(f), args))
r = f(*args, **kw)
_debug_iter -= 1
s = "%s%s = %s\n" % (get_function_name(f), args, r)
if _debug_tmp != []:
s += tree(_debug_tmp)
_debug_tmp = oldtmp
_debug_tmp.append(s)
if _debug_iter == 0:
print((_debug_tmp[0]))
_debug_tmp = []
return r
def decorated(*args, **kwargs):
return maketree(func, *args, **kwargs)
return decorated
def debug(*args):
"""
Print ``*args`` if SYMPY_DEBUG is True, else do nothing.
"""
from sympy import SYMPY_DEBUG
if SYMPY_DEBUG:
print(*args, file=sys.stderr)
def find_executable(executable, path=None):
"""Try to find 'executable' in the directories listed in 'path' (a
string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']). Returns the complete filename or None if not
found
"""
if path is None:
path = os.environ['PATH']
paths = path.split(os.pathsep)
extlist = ['']
if os.name == 'os2':
(base, ext) = os.path.splitext(executable)
# executable files on OS/2 can have an arbitrary extension, but
# .exe is automatically appended if no dot is present in the name
if not ext:
executable = executable + ".exe"
elif sys.platform == 'win32':
pathext = os.environ['PATHEXT'].lower().split(os.pathsep)
(base, ext) = os.path.splitext(executable)
if ext.lower() not in pathext:
extlist = pathext
for ext in extlist:
execname = executable + ext
if os.path.isfile(execname):
return execname
else:
for p in paths:
f = os.path.join(p, execname)
if os.path.isfile(f):
return f
else:
return None
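# Illustrative sketch (result is system dependent):
#   >>> find_executable('python')   # doctest: +SKIP
#   '/usr/bin/python'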
|
alirizakeles/tendenci
|
refs/heads/master
|
tendenci/apps/profiles/tests.py
|
1940
|
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
|
invisiblek/python-for-android
|
refs/heads/master
|
python-modules/twisted/twisted/conch/ssh/filetransfer.py
|
60
|
# -*- test-case-name: twisted.conch.test.test_filetransfer -*-
#
# Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.
import struct, errno
from twisted.internet import defer, protocol
from twisted.python import failure, log
from common import NS, getNS
from twisted.conch.interfaces import ISFTPServer, ISFTPFile
from zope import interface
class FileTransferBase(protocol.Protocol):
versions = (3, )
packetTypes = {}
def __init__(self):
self.buf = ''
self.otherVersion = None # this gets set
def sendPacket(self, kind, data):
self.transport.write(struct.pack('!LB', len(data)+1, kind) + data)
def dataReceived(self, data):
self.buf += data
while len(self.buf) > 5:
length, kind = struct.unpack('!LB', self.buf[:5])
if len(self.buf) < 4 + length:
return
data, self.buf = self.buf[5:4+length], self.buf[4+length:]
packetType = self.packetTypes.get(kind, None)
if not packetType:
log.msg('no packet type for', kind)
continue
f = getattr(self, 'packet_%s' % packetType, None)
if not f:
log.msg('not implemented: %s' % packetType)
log.msg(repr(data[4:]))
reqId, = struct.unpack('!L', data[:4])
self._sendStatus(reqId, FX_OP_UNSUPPORTED,
"don't understand %s" % packetType)
#XXX not implemented
continue
try:
    f(data)
except Exception as e:
    log.err()
    # report the failure back to the peer when a status channel exists
    # (server side), then keep processing the rest of the buffer
    if hasattr(self, '_ebStatus'):
        reqId, = struct.unpack('!L', data[:4])
        self._ebStatus(failure.Failure(e), reqId)
    continue
def _parseAttributes(self, data):
flags ,= struct.unpack('!L', data[:4])
attrs = {}
data = data[4:]
if flags & FILEXFER_ATTR_SIZE == FILEXFER_ATTR_SIZE:
size ,= struct.unpack('!Q', data[:8])
attrs['size'] = size
data = data[8:]
if flags & FILEXFER_ATTR_OWNERGROUP == FILEXFER_ATTR_OWNERGROUP:
uid, gid = struct.unpack('!2L', data[:8])
attrs['uid'] = uid
attrs['gid'] = gid
data = data[8:]
if flags & FILEXFER_ATTR_PERMISSIONS == FILEXFER_ATTR_PERMISSIONS:
perms ,= struct.unpack('!L', data[:4])
attrs['permissions'] = perms
data = data[4:]
if flags & FILEXFER_ATTR_ACMODTIME == FILEXFER_ATTR_ACMODTIME:
atime, mtime = struct.unpack('!2L', data[:8])
attrs['atime'] = atime
attrs['mtime'] = mtime
data = data[8:]
if flags & FILEXFER_ATTR_EXTENDED == FILEXFER_ATTR_EXTENDED:
extended_count ,= struct.unpack('!L', data[:4])
data = data[4:]
for i in xrange(extended_count):
extended_type, data = getNS(data)
extended_data, data = getNS(data)
attrs['ext_%s' % extended_type] = extended_data
return attrs, data
def _packAttributes(self, attrs):
flags = 0
data = ''
if 'size' in attrs:
data += struct.pack('!Q', attrs['size'])
flags |= FILEXFER_ATTR_SIZE
if 'uid' in attrs and 'gid' in attrs:
data += struct.pack('!2L', attrs['uid'], attrs['gid'])
flags |= FILEXFER_ATTR_OWNERGROUP
if 'permissions' in attrs:
data += struct.pack('!L', attrs['permissions'])
flags |= FILEXFER_ATTR_PERMISSIONS
if 'atime' in attrs and 'mtime' in attrs:
data += struct.pack('!2L', attrs['atime'], attrs['mtime'])
flags |= FILEXFER_ATTR_ACMODTIME
extended = []
for k in attrs:
if k.startswith('ext_'):
ext_type = NS(k[4:])
ext_data = NS(attrs[k])
extended.append(ext_type+ext_data)
if extended:
data += struct.pack('!L', len(extended))
data += ''.join(extended)
flags |= FILEXFER_ATTR_EXTENDED
return struct.pack('!L', flags) + data
class FileTransferServer(FileTransferBase):
def __init__(self, data=None, avatar=None):
FileTransferBase.__init__(self)
self.client = ISFTPServer(avatar) # yay interfaces
self.openFiles = {}
self.openDirs = {}
def packet_INIT(self, data):
version ,= struct.unpack('!L', data[:4])
self.version = min(list(self.versions) + [version])
data = data[4:]
ext = {}
while data:
ext_name, data = getNS(data)
ext_data, data = getNS(data)
ext[ext_name] = ext_data
our_ext = self.client.gotVersion(version, ext)
our_ext_data = ""
for (k,v) in our_ext.items():
our_ext_data += NS(k) + NS(v)
self.sendPacket(FXP_VERSION, struct.pack('!L', self.version) + \
our_ext_data)
def packet_OPEN(self, data):
requestId = data[:4]
data = data[4:]
filename, data = getNS(data)
flags ,= struct.unpack('!L', data[:4])
data = data[4:]
attrs, data = self._parseAttributes(data)
assert data == '', 'still have data in OPEN: %s' % repr(data)
d = defer.maybeDeferred(self.client.openFile, filename, flags, attrs)
d.addCallback(self._cbOpenFile, requestId)
d.addErrback(self._ebStatus, requestId, "open failed")
def _cbOpenFile(self, fileObj, requestId):
fileId = str(hash(fileObj))
if fileId in self.openFiles:
raise KeyError, 'id already open'
self.openFiles[fileId] = fileObj
self.sendPacket(FXP_HANDLE, requestId + NS(fileId))
def packet_CLOSE(self, data):
requestId = data[:4]
data = data[4:]
handle, data = getNS(data)
assert data == '', 'still have data in CLOSE: %s' % repr(data)
if handle in self.openFiles:
fileObj = self.openFiles[handle]
d = defer.maybeDeferred(fileObj.close)
d.addCallback(self._cbClose, handle, requestId)
d.addErrback(self._ebStatus, requestId, "close failed")
elif handle in self.openDirs:
dirObj = self.openDirs[handle][0]
d = defer.maybeDeferred(dirObj.close)
d.addCallback(self._cbClose, handle, requestId, 1)
d.addErrback(self._ebStatus, requestId, "close failed")
else:
self._ebClose(failure.Failure(KeyError()), requestId)
def _cbClose(self, result, handle, requestId, isDir = 0):
if isDir:
del self.openDirs[handle]
else:
del self.openFiles[handle]
self._sendStatus(requestId, FX_OK, 'file closed')
def packet_READ(self, data):
requestId = data[:4]
data = data[4:]
handle, data = getNS(data)
(offset, length), data = struct.unpack('!QL', data[:12]), data[12:]
assert data == '', 'still have data in READ: %s' % repr(data)
if handle not in self.openFiles:
self._ebRead(failure.Failure(KeyError()), requestId)
else:
fileObj = self.openFiles[handle]
d = defer.maybeDeferred(fileObj.readChunk, offset, length)
d.addCallback(self._cbRead, requestId)
d.addErrback(self._ebStatus, requestId, "read failed")
def _cbRead(self, result, requestId):
if result == '': # python's read will return this for EOF
raise EOFError()
self.sendPacket(FXP_DATA, requestId + NS(result))
def packet_WRITE(self, data):
requestId = data[:4]
data = data[4:]
handle, data = getNS(data)
offset, = struct.unpack('!Q', data[:8])
data = data[8:]
writeData, data = getNS(data)
assert data == '', 'still have data in WRITE: %s' % repr(data)
if handle not in self.openFiles:
self._ebWrite(failure.Failure(KeyError()), requestId)
else:
fileObj = self.openFiles[handle]
d = defer.maybeDeferred(fileObj.writeChunk, offset, writeData)
d.addCallback(self._cbStatus, requestId, "write succeeded")
d.addErrback(self._ebStatus, requestId, "write failed")
def packet_REMOVE(self, data):
requestId = data[:4]
data = data[4:]
filename, data = getNS(data)
assert data == '', 'still have data in REMOVE: %s' % repr(data)
d = defer.maybeDeferred(self.client.removeFile, filename)
d.addCallback(self._cbStatus, requestId, "remove succeeded")
d.addErrback(self._ebStatus, requestId, "remove failed")
def packet_RENAME(self, data):
requestId = data[:4]
data = data[4:]
oldPath, data = getNS(data)
newPath, data = getNS(data)
assert data == '', 'still have data in RENAME: %s' % repr(data)
d = defer.maybeDeferred(self.client.renameFile, oldPath, newPath)
d.addCallback(self._cbStatus, requestId, "rename succeeded")
d.addErrback(self._ebStatus, requestId, "rename failed")
def packet_MKDIR(self, data):
requestId = data[:4]
data = data[4:]
path, data = getNS(data)
attrs, data = self._parseAttributes(data)
assert data == '', 'still have data in MKDIR: %s' % repr(data)
d = defer.maybeDeferred(self.client.makeDirectory, path, attrs)
d.addCallback(self._cbStatus, requestId, "mkdir succeeded")
d.addErrback(self._ebStatus, requestId, "mkdir failed")
def packet_RMDIR(self, data):
requestId = data[:4]
data = data[4:]
path, data = getNS(data)
assert data == '', 'still have data in RMDIR: %s' % repr(data)
d = defer.maybeDeferred(self.client.removeDirectory, path)
d.addCallback(self._cbStatus, requestId, "rmdir succeeded")
d.addErrback(self._ebStatus, requestId, "rmdir failed")
def packet_OPENDIR(self, data):
requestId = data[:4]
data = data[4:]
path, data = getNS(data)
assert data == '', 'still have data in OPENDIR: %s' % repr(data)
d = defer.maybeDeferred(self.client.openDirectory, path)
d.addCallback(self._cbOpenDirectory, requestId)
d.addErrback(self._ebStatus, requestId, "opendir failed")
def _cbOpenDirectory(self, dirObj, requestId):
handle = str(hash(dirObj))
if handle in self.openDirs:
raise KeyError, "already opened this directory"
self.openDirs[handle] = [dirObj, iter(dirObj)]
self.sendPacket(FXP_HANDLE, requestId + NS(handle))
def packet_READDIR(self, data):
requestId = data[:4]
data = data[4:]
handle, data = getNS(data)
assert data == '', 'still have data in READDIR: %s' % repr(data)
if handle not in self.openDirs:
self._ebStatus(failure.Failure(KeyError()), requestId)
else:
dirObj, dirIter = self.openDirs[handle]
d = defer.maybeDeferred(self._scanDirectory, dirIter, [])
d.addCallback(self._cbSendDirectory, requestId)
d.addErrback(self._ebStatus, requestId, "scan directory failed")
def _scanDirectory(self, dirIter, f):
while len(f) < 250:
try:
info = dirIter.next()
except StopIteration:
if not f:
raise EOFError
return f
if isinstance(info, defer.Deferred):
info.addCallback(self._cbScanDirectory, dirIter, f)
return
else:
f.append(info)
return f
def _cbScanDirectory(self, result, dirIter, f):
f.append(result)
return self._scanDirectory(dirIter, f)
def _cbSendDirectory(self, result, requestId):
data = ''
for (filename, longname, attrs) in result:
data += NS(filename)
data += NS(longname)
data += self._packAttributes(attrs)
self.sendPacket(FXP_NAME, requestId +
struct.pack('!L', len(result))+data)
def packet_STAT(self, data, followLinks = 1):
requestId = data[:4]
data = data[4:]
path, data = getNS(data)
assert data == '', 'still have data in STAT/LSTAT: %s' % repr(data)
d = defer.maybeDeferred(self.client.getAttrs, path, followLinks)
d.addCallback(self._cbStat, requestId)
d.addErrback(self._ebStatus, requestId, 'stat/lstat failed')
def packet_LSTAT(self, data):
self.packet_STAT(data, 0)
def packet_FSTAT(self, data):
requestId = data[:4]
data = data[4:]
handle, data = getNS(data)
assert data == '', 'still have data in FSTAT: %s' % repr(data)
if handle not in self.openFiles:
self._ebStatus(failure.Failure(KeyError('%s not in self.openFiles'
% handle)), requestId)
else:
fileObj = self.openFiles[handle]
d = defer.maybeDeferred(fileObj.getAttrs)
d.addCallback(self._cbStat, requestId)
d.addErrback(self._ebStatus, requestId, 'fstat failed')
def _cbStat(self, result, requestId):
data = requestId + self._packAttributes(result)
self.sendPacket(FXP_ATTRS, data)
def packet_SETSTAT(self, data):
requestId = data[:4]
data = data[4:]
path, data = getNS(data)
attrs, data = self._parseAttributes(data)
if data != '':
log.msg('WARN: still have data in SETSTAT: %s' % repr(data))
d = defer.maybeDeferred(self.client.setAttrs, path, attrs)
d.addCallback(self._cbStatus, requestId, 'setstat succeeded')
d.addErrback(self._ebStatus, requestId, 'setstat failed')
def packet_FSETSTAT(self, data):
requestId = data[:4]
data = data[4:]
handle, data = getNS(data)
attrs, data = self._parseAttributes(data)
assert data == '', 'still have data in FSETSTAT: %s' % repr(data)
if handle not in self.openFiles:
self._ebStatus(failure.Failure(KeyError()), requestId)
else:
fileObj = self.openFiles[handle]
d = defer.maybeDeferred(fileObj.setAttrs, attrs)
d.addCallback(self._cbStatus, requestId, 'fsetstat succeeded')
d.addErrback(self._ebStatus, requestId, 'fsetstat failed')
def packet_READLINK(self, data):
requestId = data[:4]
data = data[4:]
path, data = getNS(data)
assert data == '', 'still have data in READLINK: %s' % repr(data)
d = defer.maybeDeferred(self.client.readLink, path)
d.addCallback(self._cbReadLink, requestId)
d.addErrback(self._ebStatus, requestId, 'readlink failed')
def _cbReadLink(self, result, requestId):
self._cbSendDirectory([(result, '', {})], requestId)
def packet_SYMLINK(self, data):
requestId = data[:4]
data = data[4:]
linkPath, data = getNS(data)
targetPath, data = getNS(data)
d = defer.maybeDeferred(self.client.makeLink, linkPath, targetPath)
d.addCallback(self._cbStatus, requestId, 'symlink succeeded')
d.addErrback(self._ebStatus, requestId, 'symlink failed')
def packet_REALPATH(self, data):
requestId = data[:4]
data = data[4:]
path, data = getNS(data)
assert data == '', 'still have data in REALPATH: %s' % repr(data)
d = defer.maybeDeferred(self.client.realPath, path)
d.addCallback(self._cbReadLink, requestId) # same return format
d.addErrback(self._ebStatus, requestId, 'realpath failed')
def packet_EXTENDED(self, data):
requestId = data[:4]
data = data[4:]
extName, extData = getNS(data)
d = defer.maybeDeferred(self.client.extendedRequest, extName, extData)
d.addCallback(self._cbExtended, requestId)
d.addErrback(self._ebStatus, requestId, 'extended %s failed' % extName)
def _cbExtended(self, data, requestId):
self.sendPacket(FXP_EXTENDED_REPLY, requestId + data)
def _cbStatus(self, result, requestId, msg = "request succeeded"):
self._sendStatus(requestId, FX_OK, msg)
def _ebStatus(self, reason, requestId, msg = "request failed"):
code = FX_FAILURE
message = msg
if reason.type in (IOError, OSError):
if reason.value.errno == errno.ENOENT: # no such file
code = FX_NO_SUCH_FILE
message = reason.value.strerror
elif reason.value.errno == errno.EACCES: # permission denied
code = FX_PERMISSION_DENIED
message = reason.value.strerror
elif reason.value.errno == errno.EEXIST:
code = FX_FILE_ALREADY_EXISTS
else:
log.err(reason)
elif reason.type == EOFError: # EOF
code = FX_EOF
if reason.value.args:
message = reason.value.args[0]
elif reason.type == NotImplementedError:
code = FX_OP_UNSUPPORTED
if reason.value.args:
message = reason.value.args[0]
elif reason.type == SFTPError:
code = reason.value.code
message = reason.value.message
else:
log.err(reason)
self._sendStatus(requestId, code, message)
def _sendStatus(self, requestId, code, message, lang = ''):
"""
Helper method to send a FXP_STATUS message.
"""
data = requestId + struct.pack('!L', code)
data += NS(message)
data += NS(lang)
self.sendPacket(FXP_STATUS, data)
def connectionLost(self, reason):
"""
Clean all opened files and directories.
"""
for fileObj in self.openFiles.values():
fileObj.close()
self.openFiles = {}
for (dirObj, dirIter) in self.openDirs.values():
dirObj.close()
self.openDirs = {}
class FileTransferClient(FileTransferBase):
def __init__(self, extData = {}):
"""
@param extData: a dict of extended_name : extended_data items
to be sent to the server.
"""
FileTransferBase.__init__(self)
self.extData = extData
self.counter = 0
self.openRequests = {} # id -> Deferred
self.wasAFile = {} # Deferred -> 1 TERRIBLE HACK
def connectionMade(self):
data = struct.pack('!L', max(self.versions))
for k,v in self.extData.iteritems():
data += NS(k) + NS(v)
self.sendPacket(FXP_INIT, data)
def _sendRequest(self, msg, data):
data = struct.pack('!L', self.counter) + data
d = defer.Deferred()
self.openRequests[self.counter] = d
self.counter += 1
self.sendPacket(msg, data)
return d
def _parseRequest(self, data):
(id,) = struct.unpack('!L', data[:4])
d = self.openRequests[id]
del self.openRequests[id]
return d, data[4:]
def openFile(self, filename, flags, attrs):
"""
Open a file.
This method returns a L{Deferred} that is called back with an object
that provides the L{ISFTPFile} interface.
@param filename: a string representing the file to open.
@param flags: an integer of the flags to open the file with, ORed together.
The flags and their values are listed at the bottom of this file.
@param attrs: a list of attributes to open the file with. It is a
dictionary, consisting of 0 or more keys. The possible keys are::
size: the size of the file in bytes
uid: the user ID of the file as an integer
gid: the group ID of the file as an integer
permissions: the permissions of the file with as an integer.
the bit representation of this field is defined by POSIX.
atime: the access time of the file as seconds since the epoch.
mtime: the modification time of the file as seconds since the epoch.
ext_*: extended attributes. The server is not required to
understand this, but it may.
NOTE: there is no way to indicate text or binary files. it is up
to the SFTP client to deal with this.
"""
data = NS(filename) + struct.pack('!L', flags) + self._packAttributes(attrs)
d = self._sendRequest(FXP_OPEN, data)
self.wasAFile[d] = (1, filename) # HACK
return d
def removeFile(self, filename):
"""
Remove the given file.
This method returns a Deferred that is called back when it succeeds.
@param filename: the name of the file as a string.
"""
return self._sendRequest(FXP_REMOVE, NS(filename))
def renameFile(self, oldpath, newpath):
"""
Rename the given file.
This method returns a Deferred that is called back when it succeeds.
@param oldpath: the current location of the file.
@param newpath: the new file name.
"""
return self._sendRequest(FXP_RENAME, NS(oldpath)+NS(newpath))
def makeDirectory(self, path, attrs):
"""
Make a directory.
This method returns a Deferred that is called back when it is
created.
@param path: the name of the directory to create as a string.
@param attrs: a dictionary of attributes to create the directory
with. Its meaning is the same as the attrs in the openFile method.
"""
return self._sendRequest(FXP_MKDIR, NS(path)+self._packAttributes(attrs))
def removeDirectory(self, path):
"""
Remove a directory (non-recursively)
It is an error to remove a directory that has files or directories in
it.
This method returns a Deferred that is called back when it is removed.
@param path: the directory to remove.
"""
return self._sendRequest(FXP_RMDIR, NS(path))
def openDirectory(self, path):
"""
Open a directory for scanning.
This method returns a Deferred that is called back with an iterable
object that has a close() method.
The close() method is called when the client is finished reading
from the directory. At this point, the iterable will no longer
be used.
The iterable returns triples of the form (filename, longname, attrs)
or a Deferred that returns the same. The sequence must support
__getitem__, but otherwise may be any 'sequence-like' object.
filename is the name of the file relative to the directory.
longname is an expanded format of the filename. The recommended format
is:
-rwxr-xr-x 1 mjos staff 348911 Mar 25 14:29 t-filexfer
1234567890 123 12345678 12345678 12345678 123456789012
The first line is sample output, the second is the length of the field.
The fields are: permissions, link count, user owner, group owner,
size in bytes, modification time.
attrs is a dictionary in the format of the attrs argument to openFile.
@param path: the directory to open.
"""
d = self._sendRequest(FXP_OPENDIR, NS(path))
self.wasAFile[d] = (0, path)
return d
def getAttrs(self, path, followLinks=0):
"""
Return the attributes for the given path.
This method returns a dictionary in the same format as the attrs
argument to openFile or a Deferred that is called back with same.
@param path: the path to return attributes for as a string.
@param followLinks: a boolean. if it is True, follow symbolic links
and return attributes for the real path at the base. if it is False,
return attributes for the specified path.
"""
if followLinks: m = FXP_STAT
else: m = FXP_LSTAT
return self._sendRequest(m, NS(path))
def setAttrs(self, path, attrs):
"""
Set the attributes for the path.
This method returns when the attributes are set or a Deferred that is
called back when they are.
@param path: the path to set attributes for as a string.
@param attrs: a dictionary in the same format as the attrs argument to
openFile.
"""
data = NS(path) + self._packAttributes(attrs)
return self._sendRequest(FXP_SETSTAT, data)
def readLink(self, path):
"""
Find the root of a set of symbolic links.
This method returns the target of the link, or a Deferred that
returns the same.
@param path: the path of the symlink to read.
"""
d = self._sendRequest(FXP_READLINK, NS(path))
return d.addCallback(self._cbRealPath)
def makeLink(self, linkPath, targetPath):
"""
Create a symbolic link.
This method returns when the link is made, or a Deferred that
returns the same.
@param linkPath: the pathname of the symlink as a string
@param targetPath: the path of the target of the link as a string.
"""
return self._sendRequest(FXP_SYMLINK, NS(linkPath)+NS(targetPath))
def realPath(self, path):
"""
Convert any path to an absolute path.
This method returns the absolute path as a string, or a Deferred
that returns the same.
@param path: the path to convert as a string.
"""
d = self._sendRequest(FXP_REALPATH, NS(path))
return d.addCallback(self._cbRealPath)
def _cbRealPath(self, result):
name, longname, attrs = result[0]
return name
def extendedRequest(self, request, data):
"""
Make an extended request of the server.
The method returns a Deferred that is called back with
the result of the extended request.
@param request: the name of the extended request to make.
@param data: any other data that goes along with the request.
"""
return self._sendRequest(FXP_EXTENDED, NS(request) + data)
def packet_VERSION(self, data):
version, = struct.unpack('!L', data[:4])
data = data[4:]
d = {}
while data:
k, data = getNS(data)
v, data = getNS(data)
d[k]=v
self.version = version
self.gotServerVersion(version, d)
def packet_STATUS(self, data):
d, data = self._parseRequest(data)
code, = struct.unpack('!L', data[:4])
data = data[4:]
msg, data = getNS(data)
lang, data = getNS(data)
if code == FX_OK:
d.callback((msg, lang))
elif code == FX_EOF:
d.errback(EOFError(msg))
elif code == FX_OP_UNSUPPORTED:
d.errback(NotImplementedError(msg))
else:
d.errback(SFTPError(code, msg, lang))
def packet_HANDLE(self, data):
d, data = self._parseRequest(data)
isFile, name = self.wasAFile.pop(d)
if isFile:
cb = ClientFile(self, getNS(data)[0])
else:
cb = ClientDirectory(self, getNS(data)[0])
cb.name = name
d.callback(cb)
def packet_DATA(self, data):
d, data = self._parseRequest(data)
d.callback(getNS(data)[0])
def packet_NAME(self, data):
d, data = self._parseRequest(data)
count, = struct.unpack('!L', data[:4])
data = data[4:]
files = []
for i in range(count):
filename, data = getNS(data)
longname, data = getNS(data)
attrs, data = self._parseAttributes(data)
files.append((filename, longname, attrs))
d.callback(files)
def packet_ATTRS(self, data):
d, data = self._parseRequest(data)
d.callback(self._parseAttributes(data)[0])
def packet_EXTENDED_REPLY(self, data):
d, data = self._parseRequest(data)
d.callback(data)
def gotServerVersion(self, serverVersion, extData):
"""
Called when the server sends its version info.
@param serverVersion: an integer representing the version of the SFTP
protocol the server is claiming.
@param extData: a dictionary of extended_name : extended_data items.
These items are sent by the server to indicate additional features.
"""
class ClientFile:
interface.implements(ISFTPFile)
def __init__(self, parent, handle):
self.parent = parent
self.handle = NS(handle)
def close(self):
return self.parent._sendRequest(FXP_CLOSE, self.handle)
def readChunk(self, offset, length):
data = self.handle + struct.pack("!QL", offset, length)
return self.parent._sendRequest(FXP_READ, data)
def writeChunk(self, offset, chunk):
data = self.handle + struct.pack("!Q", offset) + NS(chunk)
return self.parent._sendRequest(FXP_WRITE, data)
def getAttrs(self):
return self.parent._sendRequest(FXP_FSTAT, self.handle)
def setAttrs(self, attrs):
data = self.handle + self.parent._packAttributes(attrs)
return self.parent._sendRequest(FXP_FSETSTAT, data)
class ClientDirectory:
def __init__(self, parent, handle):
self.parent = parent
self.handle = NS(handle)
self.filesCache = []
def read(self):
d = self.parent._sendRequest(FXP_READDIR, self.handle)
return d
def close(self):
return self.parent._sendRequest(FXP_CLOSE, self.handle)
def __iter__(self):
return self
def next(self):
if self.filesCache:
return self.filesCache.pop(0)
d = self.read()
d.addCallback(self._cbReadDir)
d.addErrback(self._ebReadDir)
return d
def _cbReadDir(self, names):
self.filesCache = names[1:]
return names[0]
def _ebReadDir(self, reason):
reason.trap(EOFError)
def _():
raise StopIteration
self.next = _
return reason
class SFTPError(Exception):
def __init__(self, errorCode, errorMessage, lang = ''):
Exception.__init__(self)
self.code = errorCode
self._message = errorMessage
self.lang = lang
def message(self):
"""
A string received over the network that explains the error to a human.
"""
# Python 2.6 deprecates assigning to the 'message' attribute of an
# exception. We define this read-only property here in order to
# prevent the warning about deprecation while maintaining backwards
# compatibility with object clients that rely on the 'message'
# attribute being set correctly. See bug #3897.
return self._message
message = property(message)
def __str__(self):
return 'SFTPError %s: %s' % (self.code, self.message)
FXP_INIT = 1
FXP_VERSION = 2
FXP_OPEN = 3
FXP_CLOSE = 4
FXP_READ = 5
FXP_WRITE = 6
FXP_LSTAT = 7
FXP_FSTAT = 8
FXP_SETSTAT = 9
FXP_FSETSTAT = 10
FXP_OPENDIR = 11
FXP_READDIR = 12
FXP_REMOVE = 13
FXP_MKDIR = 14
FXP_RMDIR = 15
FXP_REALPATH = 16
FXP_STAT = 17
FXP_RENAME = 18
FXP_READLINK = 19
FXP_SYMLINK = 20
FXP_STATUS = 101
FXP_HANDLE = 102
FXP_DATA = 103
FXP_NAME = 104
FXP_ATTRS = 105
FXP_EXTENDED = 200
FXP_EXTENDED_REPLY = 201
FILEXFER_ATTR_SIZE = 0x00000001
FILEXFER_ATTR_UIDGID = 0x00000002
FILEXFER_ATTR_OWNERGROUP = FILEXFER_ATTR_UIDGID
FILEXFER_ATTR_PERMISSIONS = 0x00000004
FILEXFER_ATTR_ACMODTIME = 0x00000008
FILEXFER_ATTR_EXTENDED = 0x80000000L
FILEXFER_TYPE_REGULAR = 1
FILEXFER_TYPE_DIRECTORY = 2
FILEXFER_TYPE_SYMLINK = 3
FILEXFER_TYPE_SPECIAL = 4
FILEXFER_TYPE_UNKNOWN = 5
FXF_READ = 0x00000001
FXF_WRITE = 0x00000002
FXF_APPEND = 0x00000004
FXF_CREAT = 0x00000008
FXF_TRUNC = 0x00000010
FXF_EXCL = 0x00000020
FXF_TEXT = 0x00000040
FX_OK = 0
FX_EOF = 1
FX_NO_SUCH_FILE = 2
FX_PERMISSION_DENIED = 3
FX_FAILURE = 4
FX_BAD_MESSAGE = 5
FX_NO_CONNECTION = 6
FX_CONNECTION_LOST = 7
FX_OP_UNSUPPORTED = 8
FX_FILE_ALREADY_EXISTS = 11
# http://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/ defines more
# useful error codes, but so far OpenSSH doesn't implement them. We use them
# internally for clarity, but for now define them all as FX_FAILURE to be
# compatible with existing software.
FX_NOT_A_DIRECTORY = FX_FAILURE
FX_FILE_IS_A_DIRECTORY = FX_FAILURE
# initialize FileTransferBase.packetTypes:
g = globals()
for name in g.keys():
if name.startswith('FXP_'):
value = g[name]
FileTransferBase.packetTypes[value] = name[4:]
del g, name, value
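# A minimal round-trip sketch (not part of the original module): the
# attribute codec above is symmetric, so packing a dict and parsing the
# result should return the same dict with no leftover bytes.
if __name__ == '__main__':
    _base = FileTransferBase()
    _packed = _base._packAttributes({'size': 1024, 'permissions': 0755})
    _attrs, _rest = _base._parseAttributes(_packed)
    assert _attrs == {'size': 1024, 'permissions': 0755}
    assert _rest == ''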
|
r4vi/open-ihm
|
refs/heads/master
|
src/openihm/includes/mysql/connector/errors.py
|
4
|
# MySQL Connector/Python - MySQL driver written in Python.
# Copyright 2009 Sun Microsystems, Inc. All rights reserved
# Use is subject to license terms. (See COPYING)
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# There are special exceptions to the terms and conditions of the GNU
# General Public License as it is applied to this software. View the
# full text of the exception in file EXCEPTIONS-CLIENT in the directory
# of this software distribution or see the FOSS License Exception at
# www.mysql.com.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Python exceptions
"""
# see dispatch_mysql_error method for errno ranges and smaller lists
__programming_errors = (
1083,1084,1090,1091,1093,1096,1097,1101,1102,1103,1107,1108,1110,1111,
1113,1120,1124,1125,1128,1136,1366,1139,1140,1146,1149,)
__operational_errors = (
1028,1029,1030,1053,1077,1078,1079,1080,1081,1095,1104,1106,1114,1116,
1117,1119,1122,1123,1126,1133,1135,1137,1145,1147,)
def get_mysql_exception(errno,msg):
exception = OperationalError
if (errno >= 1046 and errno <= 1052) or \
(errno >= 1054 and errno <= 1061) or \
(errno >= 1063 and errno <= 1075) or \
errno in __programming_errors:
exception = ProgrammingError
elif errno in (1097,1109,1118,1121,1138,1292):
exception = DataError
elif errno in (1031,1089,1112,1115,1127,1148,1149):
exception = NotSupportedError
elif errno in (1062,1082,1099,1100):
exception = IntegrityError
elif errno in (1085,1086,1094,1098):
exception = InternalError
elif (errno >= 1004 and errno <= 1030) or \
(errno >= 1132 and errno <= 1045) or \
(errno >= 1141 and errno <= 1145) or \
(errno >= 1129 and errno <= 1133) or \
errno in __operational_errors:
exception = OperationalError
return exception(msg,errno=errno)
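# Doctest-style illustration (the exception classes are defined further below):
#   >>> exc = get_mysql_exception(1146, "Table 'test.t1' doesn't exist")
#   >>> isinstance(exc, ProgrammingError)
#   True
#   >>> exc.errno
#   1146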
class ClientError(object):
client_errors = {
2000: "Unknown MySQL error",
2001: "Can't create UNIX socket (%d)",
2002: "Can't connect to local MySQL server through socket '%s' (%s)",
2003: "Can't connect to MySQL server on '%s' (%s)",
2004: "Can't create TCP/IP socket (%s)",
2005: "Unknown MySQL server host '%s' (%s)",
2006: "MySQL server has gone away",
2007: "Protocol mismatch; server version = %d, client version = %d",
2008: "MySQL client ran out of memory",
2009: "Wrong host info",
2010: "Localhost via UNIX socket",
2011: "%s via TCP/IP",
2012: "Error in server handshake",
2013: "Lost connection to MySQL server during query",
2014: "Commands out of sync; you can't run this command now",
2015: "Named pipe: %s",
2016: "Can't wait for named pipe to host: %s pipe: %s (%d)",
2017: "Can't open named pipe to host: %s pipe: %s (%d)",
2018: "Can't set state of named pipe to host: %s pipe: %s (%d)",
2019: "Can't initialize character set %s (path: %s)",
2020: "Got packet bigger than 'max_allowed_packet' bytes",
2021: "Embedded server",
2022: "Error on SHOW SLAVE STATUS:",
2023: "Error on SHOW SLAVE HOSTS:",
2024: "Error connecting to slave:",
2025: "Error connecting to master:",
2026: "SSL connection error",
2027: "Malformed packet",
2028: "This client library is licensed only for use with MySQL servers having '%s' license",
2029: "Invalid use of null pointer",
2030: "Statement not prepared",
2031: "No data supplied for parameters in prepared statement",
2032: "Data truncated",
2033: "No parameters exist in the statement",
2034: "Invalid parameter number",
2035: "Can't send long data for non-string/non-binary data types (parameter: %d)",
2036: "Using unsupported buffer type: %d (parameter: %d)",
2037: "Shared memory: %s",
2038: "Can't open shared memory; client could not create request event (%d)",
2039: "Can't open shared memory; no answer event received from server (%d)",
2040: "Can't open shared memory; server could not allocate file mapping (%d)",
2041: "Can't open shared memory; server could not get pointer to file mapping (%d)",
2042: "Can't open shared memory; client could not allocate file mapping (%d)",
2043: "Can't open shared memory; client could not get pointer to file mapping (%d)",
2044: "Can't open shared memory; client could not create %s event (%d)",
2045: "Can't open shared memory; no answer from server (%d)",
2046: "Can't open shared memory; cannot send request event to server (%d)",
2047: "Wrong or unknown protocol",
2048: "Invalid connection handle",
2049: "Connection using old (pre-4.1.1) authentication protocol refused (client option 'secure_auth' enabled)",
2050: "Row retrieval was canceled by mysql_stmt_close() call",
2051: "Attempt to read column without prior row fetch",
2052: "Prepared statement contains no metadata",
2053: "Attempt to read a row while there is no result set associated with the statement",
2054: "This feature is not implemented yet",
2055: "Lost connection to MySQL server at '%s', system error: %d",
2056: "Statement closed indirectly because of a preceeding %s() call",
2057: "The number of columns in the result set differs from the number of bound buffers. You must reset the statement, rebind the result set columns, and execute the statement again",
}
def __new__(cls):
        raise TypeError, "Cannot instantiate from %s" % cls.__name__
    @classmethod
    def get_error_msg(cls, errno, values=None):
        try:
            m = cls.client_errors[errno]
        except KeyError:
            return "Unknown client error (wrong errno?)"
        if values is not None:
            try:
                m = m % values
            except (TypeError, ValueError):
                # wrong number or type of substitution values; keep template
                pass
        return m
class Error(StandardError):
    def __init__(self, m, errno=None, values=None):
        try:
            # m may be a MySQL error packet; take errno/sqlstate from it
            self._process_packet(m)
        except AttributeError:
            # m is a plain message (or None), not an error packet
            self.errno = errno or -1
            self.sqlstate = -1
            if m is None and errno is not None and 2000 <= errno < 3000:
                m = ClientError.get_error_msg(errno, values)
            elif m is None:
                m = 'Unknown error'
            self.msg = "%s: %s" % (self.errno, m)
def _process_packet(self, packet):
self.errno = packet.errno
self.sqlstate = packet.sqlstate
if self.sqlstate:
self.msg = '%d (%s): %s' % (self.errno,self.sqlstate,packet.errmsg)
else:
self.msg = '%d: %s' % (self.errno, packet.errmsg)
def __str__(self):
return self.msg
def __unicode__(self):
return self.msg
class Warning(StandardError):
pass
class InterfaceError(Error):
def __init__(self, m=None, errno=None, values=None):
Error.__init__(self, m, errno, values)
class DatabaseError(Error):
def __init__(self, m=None, errno=None, values=None):
Error.__init__(self, m, errno, values)
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
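# --- hedged usage sketch (not part of the original module) ---
# Demonstrates how server errno values map onto the DB-API exception
# hierarchy defined above (the errno choices are illustrative).
if __name__ == '__main__':
    exc = get_mysql_exception(1146, "Table 'test.t1' doesn't exist")
    assert isinstance(exc, ProgrammingError)   # 1146 is a programming error
    exc = get_mysql_exception(1062, "Duplicate entry '1' for key 'PRIMARY'")
    assert isinstance(exc, IntegrityError)
    exc = get_mysql_exception(9999, "Some unmapped error")
    assert isinstance(exc, OperationalError)   # fallback class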
|
VanirAOSP/external_chromium_org
|
refs/heads/kk44
|
tools/telemetry/telemetry/core/extension_unittest.py
|
23
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import shutil
import tempfile
import unittest
from telemetry.core import browser_finder
from telemetry.core import extension_to_load
from telemetry.core import util
from telemetry.core.backends.chrome import extension_dict_backend
from telemetry.unittest import options_for_unittests
class ExtensionTest(unittest.TestCase):
def setUp(self):
extension_path = os.path.join(util.GetUnittestDataDir(), 'simple_extension')
options = options_for_unittests.GetCopy()
load_extension = extension_to_load.ExtensionToLoad(
extension_path, options.browser_type)
options.extensions_to_load = [load_extension]
browser_to_create = browser_finder.FindBrowser(options)
self._browser = None
self._extension = None
if not browser_to_create:
# May not find a browser that supports extensions.
return
self._browser = browser_to_create.Create()
self._browser.Start()
self._extension = self._browser.extensions[load_extension]
self.assertTrue(self._extension)
def tearDown(self):
if self._browser:
self._browser.Close()
def testExtensionBasic(self):
"""Test ExtensionPage's ExecuteJavaScript and EvaluateJavaScript."""
if not self._extension:
logging.warning('Did not find a browser that supports extensions, '
'skipping test.')
return
self.assertTrue(
self._extension.EvaluateJavaScript('chrome.runtime != null'))
self._extension.ExecuteJavaScript('setTestVar("abcdef")')
self.assertEquals('abcdef',
self._extension.EvaluateJavaScript('_testVar'))
def testDisconnect(self):
"""Test that ExtensionPage.Disconnect exists by calling it.
EvaluateJavaScript should reconnect."""
if not self._extension:
logging.warning('Did not find a browser that supports extensions, '
'skipping test.')
return
self._extension.Disconnect()
self.assertEquals(2, self._extension.EvaluateJavaScript('1+1'))
class NonExistentExtensionTest(unittest.TestCase):
def testNonExistentExtensionPath(self):
"""Test that a non-existent extension path will raise an exception."""
extension_path = os.path.join(util.GetUnittestDataDir(), 'foo')
options = options_for_unittests.GetCopy()
self.assertRaises(extension_to_load.ExtensionPathNonExistentException,
lambda: extension_to_load.ExtensionToLoad(
extension_path, options.browser_type))
def testExtensionNotLoaded(self):
"""Querying an extension that was not loaded will return None"""
extension_path = os.path.join(util.GetUnittestDataDir(), 'simple_extension')
options = options_for_unittests.GetCopy()
load_extension = extension_to_load.ExtensionToLoad(
extension_path, options.browser_type)
browser_to_create = browser_finder.FindBrowser(options)
with browser_to_create.Create() as b:
b.Start()
if b.supports_extensions:
self.assertRaises(extension_dict_backend.ExtensionNotFoundException,
lambda: b.extensions[load_extension])
class MultipleExtensionTest(unittest.TestCase):
def setUp(self):
""" Copy the manifest and background.js files of simple_extension to a
number of temporary directories to load as extensions"""
self._extension_dirs = [tempfile.mkdtemp()
for i in range(3)] # pylint: disable=W0612
src_extension_dir = os.path.join(
util.GetUnittestDataDir(), 'simple_extension')
manifest_path = os.path.join(src_extension_dir, 'manifest.json')
script_path = os.path.join(src_extension_dir, 'background.js')
for d in self._extension_dirs:
shutil.copy(manifest_path, d)
shutil.copy(script_path, d)
options = options_for_unittests.GetCopy()
self._extensions_to_load = [extension_to_load.ExtensionToLoad(
d, options.browser_type)
for d in self._extension_dirs]
options.extensions_to_load = self._extensions_to_load
browser_to_create = browser_finder.FindBrowser(options)
self._browser = None
# May not find a browser that supports extensions.
if browser_to_create:
self._browser = browser_to_create.Create()
self._browser.Start()
def tearDown(self):
if self._browser:
self._browser.Close()
for d in self._extension_dirs:
shutil.rmtree(d)
def testMultipleExtensions(self):
if not self._browser:
logging.warning('Did not find a browser that supports extensions, '
'skipping test.')
return
# Test contains.
loaded_extensions = filter(lambda e: e in self._browser.extensions,
self._extensions_to_load)
self.assertEqual(len(loaded_extensions), len(self._extensions_to_load))
for load_extension in self._extensions_to_load:
extension = self._browser.extensions[load_extension]
assert extension
self.assertTrue(
extension.EvaluateJavaScript('chrome.runtime != null'))
extension.ExecuteJavaScript('setTestVar("abcdef")')
self.assertEquals('abcdef', extension.EvaluateJavaScript('_testVar'))
class ComponentExtensionTest(unittest.TestCase):
def testComponentExtensionBasic(self):
extension_path = os.path.join(
util.GetUnittestDataDir(), 'component_extension')
options = options_for_unittests.GetCopy()
load_extension = extension_to_load.ExtensionToLoad(
extension_path, options.browser_type, is_component=True)
options.extensions_to_load = [load_extension]
browser_to_create = browser_finder.FindBrowser(options)
if not browser_to_create:
logging.warning('Did not find a browser that supports extensions, '
'skipping test.')
return
with browser_to_create.Create() as b:
b.Start()
extension = b.extensions[load_extension]
self.assertTrue(
extension.EvaluateJavaScript('chrome.runtime != null'))
extension.ExecuteJavaScript('setTestVar("abcdef")')
self.assertEquals('abcdef', extension.EvaluateJavaScript('_testVar'))
def testComponentExtensionNoPublicKey(self):
# simple_extension does not have a public key.
extension_path = os.path.join(util.GetUnittestDataDir(), 'simple_extension')
options = options_for_unittests.GetCopy()
self.assertRaises(extension_to_load.MissingPublicKeyException,
lambda: extension_to_load.ExtensionToLoad(
extension_path,
browser_type=options.browser_type,
is_component=True))
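# --- hedged usage sketch (not part of the original tests) ---
# The pattern exercised above, in miniature (names as used in this file):
#
#   ext = extension_to_load.ExtensionToLoad(path, options.browser_type)
#   options.extensions_to_load = [ext]
#   browser = browser_finder.FindBrowser(options).Create()
#   browser.Start()
#   page = browser.extensions[ext]
#   page.ExecuteJavaScript('setTestVar("abcdef")')
#   assert page.EvaluateJavaScript('_testVar') == 'abcdef'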
|
npuichigo/ttsflow
|
refs/heads/master
|
third_party/tensorflow/tensorflow/contrib/learn/python/learn/estimators/run_config.py
|
9
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Run Config."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import json
import os
import six
from tensorflow.contrib.framework.python.framework import experimental
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.estimator import run_config as core_run_config
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import server_lib
# A list of the property names in RunConfig that users are allowed to
# change. They do not affect the execution framework, so when the execution
# framework checks the `uid` of the RunConfig, they should be ignored.
_DEFAULT_UID_WHITE_LIST = [
'tf_random_seed',
'save_summary_steps',
'save_checkpoints_steps',
'save_checkpoints_secs',
'session_config',
'keep_checkpoint_max',
'keep_checkpoint_every_n_hours',
'log_step_count_steps',
]
class Environment(object):
# For running general distributed training.
CLOUD = 'cloud'
# For running Google-internal distributed training.
GOOGLE = 'google'
# For running on local desktop.
LOCAL = 'local'
class TaskType(object):
MASTER = 'master'
PS = 'ps'
WORKER = 'worker'
class ClusterConfig(object):
"""This class specifies the configurations for a distributed run.
If you're using `tf.learn` `Estimators`, you should probably use the subclass
RunConfig instead.
"""
def __init__(self, master=None, evaluation_master=None):
"""Constructor.
Sets the properties `cluster_spec`, `is_chief`, `master` (if `None` in the
args), `num_ps_replicas`, `task_id`, and `task_type` based on the
`TF_CONFIG` environment variable, if the pertinent information is
present. The `TF_CONFIG` environment variable is a JSON object with
attributes: `cluster`, `environment`, and `task`.
`cluster` is a JSON serialized version of `ClusterSpec`'s Python dict from
`server_lib.py`, mapping task types (usually one of the TaskType enums) to a
list of task addresses.
`environment` specifies the runtime environment for the job (usually one of
the `Environment` enums). Defaults to `LOCAL`.
`task` has two attributes: `type` and `index`, where `type` can be any of
the task types in `cluster`. When `TF_CONFIG` contains said information, the
following properties are set on this class:
* `task_type` is set to `TF_CONFIG['task']['type']`. Defaults to `None`.
* `task_id` is set to `TF_CONFIG['task']['index']`. Defaults to 0.
* `cluster_spec` is parsed from `TF_CONFIG['cluster']`. Defaults to {}.
* `master` is determined by looking up `task_type` and `task_id` in the
`cluster_spec`. Defaults to ''.
* `num_ps_replicas` is set by counting the number of nodes listed
in the `ps` attribute of `cluster_spec`. Defaults to 0.
* `num_worker_replicas` is set by counting the number of nodes listed
in the `worker` attribute of `cluster_spec`. Defaults to 0.
    * `is_chief` is determined based on `task_type`, `task_id`, and
`environment`.
Example:
```
cluster = {'ps': ['host1:2222', 'host2:2222'],
'worker': ['host3:2222', 'host4:2222', 'host5:2222']}
os.environ['TF_CONFIG'] = json.dumps(
{'cluster': cluster,
'task': {'type': 'worker', 'index': 1}})
config = ClusterConfig()
assert config.master == 'host4:2222'
assert config.task_id == 1
assert config.num_ps_replicas == 2
assert config.num_worker_replicas == 3
assert config.cluster_spec == server_lib.ClusterSpec(cluster)
assert config.task_type == 'worker'
assert not config.is_chief
```
Args:
master: TensorFlow master. Defaults to empty string for local.
evaluation_master: The master on which to perform evaluation.
"""
# If not explicitly specified in the constructor and the TF_CONFIG
# environment variable is present, load cluster_spec from TF_CONFIG.
config = json.loads(os.environ.get('TF_CONFIG') or '{}')
# Set task_type and task_id if the TF_CONFIG environment variable is
# present. Otherwise, use the respective default (None / 0).
task_env = config.get('task', {})
self._task_type = task_env.get('type', None)
self._task_id = self.get_task_id()
self._cluster_spec = server_lib.ClusterSpec(config.get('cluster', {}))
self._master = (master if master is not None else
_get_master(self._cluster_spec, self._task_type,
self._task_id) or '')
self._num_ps_replicas = _count_ps(self._cluster_spec) or 0
self._num_worker_replicas = _count_worker(self._cluster_spec) or 0
# Set is_chief.
self._environment = config.get('environment', Environment.LOCAL)
self._is_chief = None
if self._task_type is None:
self._is_chief = (self._task_id == 0)
elif self._environment == Environment.CLOUD:
      # When the TF_CONFIG environment variable is set, the chief is the
      # node whose task_type is "master" and whose task_id is 0.
self._is_chief = (self._task_type == TaskType.MASTER and
self._task_id == 0)
else:
      # Legacy behavior: a worker with task_id == 0 is the chief.
self._is_chief = (self._task_type == TaskType.WORKER and
self._task_id == 0)
self._evaluation_master = evaluation_master or ''
@property
def cluster_spec(self):
return self._cluster_spec
@property
def environment(self):
return self._environment
@property
def evaluation_master(self):
return self._evaluation_master
@property
def is_chief(self):
return self._is_chief
@property
def master(self):
return self._master
@property
def num_ps_replicas(self):
return self._num_ps_replicas
@property
def num_worker_replicas(self):
return self._num_worker_replicas
@property
def task_id(self):
return self._task_id
@property
def task_type(self):
return self._task_type
@staticmethod
def get_task_id():
"""Returns task index from `TF_CONFIG` environmental variable.
If you have a ClusterConfig instance, you can just access its task_id
property instead of calling this function and re-parsing the environmental
variable.
Returns:
`TF_CONFIG['task']['index']`. Defaults to 0.
"""
config = json.loads(os.environ.get('TF_CONFIG') or '{}')
task_env = config.get('task', {})
task_index = task_env.get('index')
return int(task_index) if task_index else 0
class RunConfig(ClusterConfig, core_run_config.RunConfig):
"""This class specifies the configurations for an `Estimator` run.
This class is the implementation of ${tf.estimator.RunConfig} interface.
If you're a Google-internal user using command line flags with
`learn_runner.py` (for instance, to do distributed training or to use
parameter servers), you probably want to use `learn_runner.EstimatorConfig`
instead.
"""
_USE_DEFAULT = 0
def __init__(self,
master=None,
num_cores=0,
log_device_placement=False,
gpu_memory_fraction=1,
tf_random_seed=None,
save_summary_steps=100,
save_checkpoints_secs=_USE_DEFAULT,
save_checkpoints_steps=None,
keep_checkpoint_max=5,
keep_checkpoint_every_n_hours=10000,
log_step_count_steps=100,
evaluation_master='',
model_dir=None,
session_config=None):
"""Constructor.
Note that the superclass `ClusterConfig` may set properties like
`cluster_spec`, `is_chief`, `master` (if `None` in the args),
`num_ps_replicas`, `task_id`, and `task_type` based on the `TF_CONFIG`
environment variable. See `ClusterConfig` for more details.
Args:
master: TensorFlow master. Defaults to empty string for local.
num_cores: Number of cores to be used. If 0, the system picks an
appropriate number (default: 0).
log_device_placement: Log the op placement to devices (default: False).
gpu_memory_fraction: Fraction of GPU memory used by the process on
each GPU uniformly on the same machine.
tf_random_seed: Random seed for TensorFlow initializers.
Setting this value allows consistency between reruns.
save_summary_steps: Save summaries every this many steps.
save_checkpoints_secs: Save checkpoints every this many seconds. Can not
be specified with `save_checkpoints_steps`.
save_checkpoints_steps: Save checkpoints every this many steps. Can not be
specified with `save_checkpoints_secs`.
keep_checkpoint_max: The maximum number of recent checkpoint files to
keep. As new files are created, older files are deleted. If None or 0,
all checkpoint files are kept. Defaults to 5 (that is, the 5 most recent
checkpoint files are kept.)
keep_checkpoint_every_n_hours: Number of hours between each checkpoint
to be saved. The default value of 10,000 hours effectively disables
the feature.
log_step_count_steps: The frequency, in number of global steps, that the
global step/sec will be logged during training.
evaluation_master: the master on which to perform evaluation.
model_dir: directory where model parameters, graph etc are saved. If
`None`, will use `model_dir` property in `TF_CONFIG` environment
variable. If both are set, must have same value. If both are `None`, see
`Estimator` about where the model will be saved.
session_config: a ConfigProto used to set session parameters, or None.
Note - using this argument, it is easy to provide settings which break
otherwise perfectly good models. Use with care.
"""
super(RunConfig, self).__init__(
master=master, evaluation_master=evaluation_master)
gpu_options = config_pb2.GPUOptions(
per_process_gpu_memory_fraction=gpu_memory_fraction)
self._tf_config = config_pb2.ConfigProto(
log_device_placement=log_device_placement,
inter_op_parallelism_threads=num_cores,
intra_op_parallelism_threads=num_cores,
gpu_options=gpu_options)
self._tf_random_seed = tf_random_seed
self._save_summary_steps = save_summary_steps
self._save_checkpoints_secs = save_checkpoints_secs
self._log_step_count_steps = log_step_count_steps
self._session_config = session_config
if save_checkpoints_secs == RunConfig._USE_DEFAULT:
if save_checkpoints_steps is None:
self._save_checkpoints_secs = 600
else:
self._save_checkpoints_secs = None
self._save_checkpoints_steps = save_checkpoints_steps
# TODO(weiho): Remove these after ModelFn refactoring, when users can
# create Scaffold and Saver in their model_fn to set these.
self._keep_checkpoint_max = keep_checkpoint_max
self._keep_checkpoint_every_n_hours = keep_checkpoint_every_n_hours
self._model_dir = _get_model_dir(model_dir)
@experimental
def uid(self, whitelist=None):
"""Generates a 'Unique Identifier' based on all internal fields.
    Caller should use the uid string to check `RunConfig` instance integrity
    within one session's use, but should not rely on the implementation
    details, which are subject to change.
Args:
      whitelist: A list of the string names of the properties uid should not
        include. If `None`, defaults to `_DEFAULT_UID_WHITE_LIST`, which
        includes most properties users are allowed to change.
Returns:
A uid string.
"""
if whitelist is None:
whitelist = _DEFAULT_UID_WHITE_LIST
state = {k: v for k, v in self.__dict__.items() if not k.startswith('__')}
# Pop out the keys in whitelist.
for k in whitelist:
state.pop('_' + k, None)
ordered_state = collections.OrderedDict(
sorted(state.items(), key=lambda t: t[0]))
    # For class instances without __repr__, special care is required;
    # otherwise the object address would be used.
if '_cluster_spec' in ordered_state:
ordered_state['_cluster_spec'] = ordered_state['_cluster_spec'].as_dict()
return ', '.join(
'%s=%r' % (k, v) for (k, v) in six.iteritems(ordered_state))
@property
def model_dir(self):
return self._model_dir
@property
def tf_config(self):
return self._tf_config
@property
def tf_random_seed(self):
return self._tf_random_seed
@property
def save_summary_steps(self):
return self._save_summary_steps
@property
def save_checkpoints_secs(self):
return self._save_checkpoints_secs
@property
def save_checkpoints_steps(self):
return self._save_checkpoints_steps
@property
def session_config(self):
return self._session_config
@property
def keep_checkpoint_max(self):
return self._keep_checkpoint_max
@property
def keep_checkpoint_every_n_hours(self):
return self._keep_checkpoint_every_n_hours
@property
def log_step_count_steps(self):
return self._log_step_count_steps
def _count_ps(cluster_spec):
"""Counts the number of parameter servers in cluster_spec."""
return len(cluster_spec.as_dict().get('ps', [])) if cluster_spec else 0
def _count_worker(cluster_spec):
"""Counts the number of workers in cluster_spec."""
return len(cluster_spec.as_dict().get('worker', [])) if cluster_spec else 0
def _get_master(cluster_spec, task_type, task_id):
"""Returns the appropriate string for the TensorFlow master."""
if not cluster_spec:
return ''
# If there is only one node in the cluster, do things locally.
jobs = cluster_spec.jobs
if len(jobs) == 1 and len(cluster_spec.job_tasks(jobs[0])) == 1:
return ''
# Lookup the master in cluster_spec using task_type and task_id,
# if possible.
if task_type:
if task_type not in jobs:
raise ValueError(
'%s is not a valid task_type in the cluster_spec:\n'
'%s\n\n'
'Note that these values may be coming from the TF_CONFIG environment '
'variable.' % (task_type, cluster_spec))
addresses = cluster_spec.job_tasks(task_type)
if task_id >= len(addresses) or task_id < 0:
raise ValueError(
'%d is not a valid task_id for task_type %s in the '
'cluster_spec:\n'
'%s\n\n'
          'Note that these values may be coming from the TF_CONFIG environment '
'variable.' % (task_id, task_type, cluster_spec))
return 'grpc://' + addresses[task_id]
# For backwards compatibility, we return empty string if task_type was
# not set (task_type did not previously exist).
return ''
def _get_model_dir(model_dir):
"""Returns `model_dir` based user provided `model_dir` or `TF_CONFIG`."""
model_dir_in_tf_config = json.loads(
os.environ.get('TF_CONFIG') or '{}').get('model_dir', None)
if model_dir_in_tf_config is not None:
if model_dir is not None and model_dir_in_tf_config != model_dir:
raise ValueError(
        '`model_dir` provided in the RunConfig constructor, if set, '
'must have the same value as the model_dir in TF_CONFIG. '
'model_dir: {}\nTF_CONFIG["model_dir"]: {}.\n'.format(
model_dir, model_dir_in_tf_config))
logging.info('Using model_dir in TF_CONFIG: %s', model_dir_in_tf_config)
return model_dir or model_dir_in_tf_config
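# --- hedged usage sketch (not part of the original module) ---
# Mirrors the ClusterConfig docstring example above: properties such as
# `master` and `is_chief` are derived from the TF_CONFIG environment
# variable (host names are illustrative).
if __name__ == '__main__':
  _cluster = {'ps': ['host1:2222'],
              'worker': ['host3:2222', 'host4:2222']}
  os.environ['TF_CONFIG'] = json.dumps(
      {'cluster': _cluster,
       'environment': Environment.CLOUD,
       'task': {'type': TaskType.WORKER, 'index': 1}})
  _config = RunConfig()
  assert _config.master == 'grpc://host4:2222'
  assert _config.num_ps_replicas == 1
  assert _config.num_worker_replicas == 2
  assert not _config.is_chief  # in CLOUD, only master/0 is the chief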
|
Seek/ValueTrackerQT
|
refs/heads/master
|
hearthstone/objects.py
|
1
|
import sqlalchemy
from .enums import CardClass, CardSet, CardType, Rarity
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
from sqlalchemy import Column, Integer, String, Enum, DateTime, Boolean
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext import mutable
import datetime
from bisect import insort
# class JsonEncodedDict(sqlalchemy.TypeDecorator):
# """Enables JSON storage by encoding and decoding on the fly."""
# impl = sqlalchemy.String
# def process_bind_param(self, value, dialect):
# return json.dumps(value)
# def process_result_value(self, value, dialect):
# return json.loads(value)
# mutable.MutableDict.associate_with(JsonEncodedDict)
class Card(Base):
__tablename__ = "cards"
id = Column(String, primary_key=True)
name = Column(String)
text = Column(String)
type = Column(Integer)
rarity = Column(Integer)
cost = Column(Integer)
attack = Column(Integer)
health = Column(Integer)
collectible = Column(Integer)
set = Column(Integer)
flavor = Column(String)
playerClass = Column(Integer)
def __repr__(self):
return "<Card(name='%s', id='%s')>" % (
self.name, self.id)
def __lt__(self, other):
if self.cost == other.cost:
if self.type == other.type:
if self.name == other.name:
return False
else:
return self.name < other.name
else:
return self.type > other.type
else:
return self.cost < other.cost
class Deck(Base):
__tablename__ = 'decks'
id = Column(Integer, primary_key=True)
name = Column(String)
playerClass = Column(Integer)
date_created = Column(DateTime)
date_modified = Column(DateTime)
cards = association_proxy('deck_cards', 'card')
def __repr__(self):
return "<Deck(name='%s', id='%s')>" % (
self.name, self.id)
class DeckCard(Base):
__tablename__ = 'deck_cards'
deck_id = Column(Integer, sqlalchemy.ForeignKey('decks.id'),
primary_key=True)
card_id = Column(Integer, sqlalchemy.ForeignKey('cards.id'),
primary_key=True)
number = Column(Integer)
deck = relationship(Deck,
backref=backref("deck_cards",
cascade="all, delete-orphan"))
card = relationship(Card)
def __init__(self, card=None, deck=None, number=None):
self.card = card
self.deck = deck
self.number = number
class Player(Base):
__tablename__ = 'players'
id = Column(Integer, primary_key=True)
name = Column(String)
high = Column(Integer)
low = Column(Integer)
def __repr__(self):
return "<Player(name='%s', low='%s')>" % (
self.name, self.low)
class Match(Base):
__tablename__ = 'matches'
id = Column(Integer, primary_key=True)
opponent_id = Column(Integer, sqlalchemy.ForeignKey('players.id'))
won = Column(Boolean)
first = Column(Boolean)
date = Column(DateTime)
duration = Column(Integer)
turns = Column(Integer)
deck_id = Column(Integer, sqlalchemy.ForeignKey('decks.id'))
playerClass = Column(Integer)
opponentClass = Column(Integer)
deck = relationship("Deck")
opponent = relationship("Player")
    def __repr__(self):
        # Match has no `name` column; report its primary key instead
        return "<Match(id='%s', date='%s')>" % (
            self.id, self.date)
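# --- hedged usage sketch (not part of the original module) ---
# Shows the Deck <-> Card link through DeckCard and the `cards`
# association proxy, using an in-memory SQLite database.
if __name__ == '__main__':
    engine = sqlalchemy.create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sqlalchemy.orm.sessionmaker(bind=engine)()
    card = Card(id='EX1_001', name='Example Card', cost=1)
    deck = Deck(name='Test Deck', date_created=datetime.datetime.now())
    DeckCard(card=card, deck=deck, number=2)  # backref appends to deck.deck_cards
    session.add(deck)
    session.commit()
    assert deck.cards[0] is card  # proxy resolves DeckCard -> Card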
|
richardcornish/richlearnspythonthehardway
|
refs/heads/master
|
exercises/ex02.py
|
2
|
# Exercise 2
# https://learnpythonthehardway.org/book/ex2.html
# A comment, this is so you can read your program later.
# Anything after the # is ignored by python.
print "I could have code like this." # and the comment after is ignored
# You can also use a comment to "disable" or comment out a piece of code:
# print "This won't run."
print "This will run."
|
dasseclab/dasseclab
|
refs/heads/master
|
clones/routersploit/tests/creds/cameras/dlink/test_ssh_default_creds.py
|
1
|
from routersploit.modules.creds.cameras.dlink.ssh_default_creds import Exploit
def test_check_success(target):
""" Test scenario - testing against SSH server """
exploit = Exploit()
assert exploit.target == ""
assert exploit.port == 22
assert exploit.threads == 1
assert exploit.defaults == ["admin:admin"]
assert exploit.stop_on_success is True
assert exploit.verbosity is True
exploit.target = target.host
exploit.port = target.port
assert exploit.check() is False
assert exploit.check_default() is None
assert exploit.run() is None
|
Aravind-Sundararajan/kongcoin
|
refs/heads/master
|
test/functional/wallet-dump.py
|
23
|
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the dumpwallet RPC."""
import os
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
def read_dump(file_name, addrs, hd_master_addr_old):
"""
Read the given dump, count the addrs that match, count change and reserve.
    Also check that the old hd_master is inactive.
"""
with open(file_name, encoding='utf8') as inputfile:
found_addr = 0
found_addr_chg = 0
found_addr_rsv = 0
hd_master_addr_ret = None
for line in inputfile:
# only read non comment lines
if line[0] != "#" and len(line) > 10:
# split out some data
key_label, comment = line.split("#")
# key = key_label.split(" ")[0]
keytype = key_label.split(" ")[2]
if len(comment) > 1:
addr_keypath = comment.split(" addr=")[1]
addr = addr_keypath.split(" ")[0]
keypath = None
if keytype == "inactivehdmaster=1":
# ensure the old master is still available
assert(hd_master_addr_old == addr)
elif keytype == "hdmaster=1":
# ensure we have generated a new hd master key
assert(hd_master_addr_old != addr)
hd_master_addr_ret = addr
else:
keypath = addr_keypath.rstrip().split("hdkeypath=")[1]
# count key types
for addrObj in addrs:
if addrObj['address'] == addr and addrObj['hdkeypath'] == keypath and keytype == "label=":
found_addr += 1
break
elif keytype == "change=1":
found_addr_chg += 1
break
elif keytype == "reserve=1":
found_addr_rsv += 1
break
return found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_ret
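# --- hedged note (not part of the original test) ---
# Shape of the dump lines read_dump() parses (values are illustrative):
#   <privkey> <timestamp> label= # addr=<address> hdkeypath=m/0'/0'/0'
#   <privkey> <timestamp> reserve=1 # addr=<address> hdkeypath=m/0'/0'/1'
# i.e. key_label.split(" ")[2] yields the keytype, and the text after "#"
# carries the address and keypath.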
class WalletDumpTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = False
self.num_nodes = 1
self.extra_args = [["-keypool=90"]]
def setup_network(self, split=False):
# Use 1 minute timeout because the initial getnewaddress RPC can take
# longer than the default 30 seconds due to an expensive
# CWallet::TopUpKeyPool call, and the encryptwallet RPC made later in
# the test often takes even longer.
self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, self.extra_args, timewait=60)
    def run_test(self):
tmpdir = self.options.tmpdir
# generate 20 addresses to compare against the dump
test_addr_count = 20
addrs = []
        for i in range(0, test_addr_count):
            addr = self.nodes[0].getnewaddress()
            vaddr = self.nodes[0].validateaddress(addr)  # required to get hd keypath
addrs.append(vaddr)
# Should be a no-op:
self.nodes[0].keypoolrefill()
# dump unencrypted wallet
result = self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.unencrypted.dump")
assert_equal(result['filename'], os.path.abspath(tmpdir + "/node0/wallet.unencrypted.dump"))
found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
read_dump(tmpdir + "/node0/wallet.unencrypted.dump", addrs, None)
assert_equal(found_addr, test_addr_count) # all keys must be in the dump
        assert_equal(found_addr_chg, 50)  # 50 blocks were mined
assert_equal(found_addr_rsv, 90*2) # 90 keys plus 100% internal keys
#encrypt wallet, restart, unlock and dump
self.nodes[0].encryptwallet('test')
self.bitcoind_processes[0].wait()
self.nodes[0] = self.start_node(0, self.options.tmpdir, self.extra_args[0])
self.nodes[0].walletpassphrase('test', 10)
# Should be a no-op:
self.nodes[0].keypoolrefill()
self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")
found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_enc = \
read_dump(tmpdir + "/node0/wallet.encrypted.dump", addrs, hd_master_addr_unenc)
assert_equal(found_addr, test_addr_count)
assert_equal(found_addr_chg, 90*2 + 50) # old reserve keys are marked as change now
assert_equal(found_addr_rsv, 90*2)
if __name__ == '__main__':
    WalletDumpTest().main()
|
SpaceGroupUCL/qgisSpaceSyntaxToolkit
|
refs/heads/master
|
esstoolkit/external/pyqtgraph/util/garbage_collector.py
|
33
|
import gc
from ..Qt import QtCore
class GarbageCollector(object):
'''
Disable automatic garbage collection and instead collect manually
on a timer.
This is done to ensure that garbage collection only happens in the GUI
thread, as otherwise Qt can crash.
Credit: Erik Janssens
Source: http://pydev.blogspot.com/2014/03/should-python-garbage-collector-be.html
'''
def __init__(self, interval=1.0, debug=False):
self.debug = debug
if debug:
gc.set_debug(gc.DEBUG_LEAK)
self.timer = QtCore.QTimer()
self.timer.timeout.connect(self.check)
self.threshold = gc.get_threshold()
gc.disable()
self.timer.start(interval * 1000)
def check(self):
#return self.debug_cycles() # uncomment to just debug cycles
l0, l1, l2 = gc.get_count()
if self.debug:
print('gc_check called:', l0, l1, l2)
if l0 > self.threshold[0]:
num = gc.collect(0)
if self.debug:
print('collecting gen 0, found: %d unreachable' % num)
if l1 > self.threshold[1]:
num = gc.collect(1)
if self.debug:
print('collecting gen 1, found: %d unreachable' % num)
if l2 > self.threshold[2]:
num = gc.collect(2)
if self.debug:
print('collecting gen 2, found: %d unreachable' % num)
def debug_cycles(self):
gc.collect()
for obj in gc.garbage:
print(obj, repr(obj), type(obj))
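# --- hedged usage sketch (not part of the original module) ---
# The collector must live in the GUI thread and stay referenced for the
# application's lifetime (the import path depends on your setup), e.g.:
#
#   from pyqtgraph.Qt import QtGui
#   from pyqtgraph.util.garbage_collector import GarbageCollector
#   app = QtGui.QApplication([])
#   collector = GarbageCollector(interval=1.0)
#   # ... build widgets ...
#   app.exec_()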
|
blademainer/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyMethodMayBeStaticInspection/empty.py
|
83
|
__author__ = 'ktisha'
class Child(Base):
def f(self):
pass
|
nhicher/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/amazon/sqs_queue.py
|
17
|
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: sqs_queue
short_description: Creates or deletes AWS SQS queues.
description:
- Create or delete AWS SQS queues.
- Update attributes on existing queues.
version_added: "2.0"
author:
- Alan Loi (@loia)
- Fernando Jose Pando (@nand0p)
- Nadir Lloret (@nadirollo)
requirements:
- "boto >= 2.33.0"
options:
state:
description:
- Create or delete the queue
required: false
choices: ['present', 'absent']
default: 'present'
name:
description:
- Name of the queue.
required: true
default_visibility_timeout:
description:
- The default visibility timeout in seconds.
message_retention_period:
description:
- The message retention period in seconds.
maximum_message_size:
description:
- The maximum message size in bytes.
delivery_delay:
description:
- The delivery delay in seconds.
receive_message_wait_time:
description:
- The receive message wait time in seconds.
policy:
description:
      - The JSON dict policy to attach to the queue.
version_added: "2.1"
redrive_policy:
description:
      - JSON dict with the redrive_policy (see example).
version_added: "2.2"
extends_documentation_fragment:
- aws
- ec2
"""
RETURN = '''
default_visibility_timeout:
description: The default visibility timeout in seconds.
type: int
returned: always
sample: 30
delivery_delay:
description: The delivery delay in seconds.
type: int
returned: always
sample: 0
maximum_message_size:
description: The maximum message size in bytes.
type: int
returned: always
sample: 262144
message_retention_period:
description: The message retention period in seconds.
type: int
returned: always
sample: 345600
name:
description: Name of the SQS Queue
type: string
returned: always
sample: "queuename-987d2de0"
queue_arn:
description: The queue's Amazon resource name (ARN).
type: string
returned: on successful creation or update of the queue
sample: 'arn:aws:sqs:us-east-1:199999999999:queuename-987d2de0'
receive_message_wait_time:
description: The receive message wait time in seconds.
type: int
returned: always
sample: 0
region:
description: Region that the queue was created within
type: string
returned: always
sample: 'us-east-1'
'''
EXAMPLES = '''
# Create SQS queue with redrive policy
- sqs_queue:
name: my-queue
region: ap-southeast-2
default_visibility_timeout: 120
message_retention_period: 86400
maximum_message_size: 1024
delivery_delay: 30
receive_message_wait_time: 20
policy: "{{ json_dict }}"
redrive_policy:
maxReceiveCount: 5
deadLetterTargetArn: arn:aws:sqs:eu-west-1:123456789012:my-dead-queue
# Delete SQS queue
- sqs_queue:
name: my-queue
region: ap-southeast-2
state: absent
'''
import json
import traceback
try:
import boto.sqs
from boto.exception import BotoServerError, NoAuthHandlerFound
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AnsibleAWSError, connect_to_aws, ec2_argument_spec, get_aws_connection_info
def create_or_update_sqs_queue(connection, module):
queue_name = module.params.get('name')
queue_attributes = dict(
default_visibility_timeout=module.params.get('default_visibility_timeout'),
message_retention_period=module.params.get('message_retention_period'),
maximum_message_size=module.params.get('maximum_message_size'),
delivery_delay=module.params.get('delivery_delay'),
receive_message_wait_time=module.params.get('receive_message_wait_time'),
policy=module.params.get('policy'),
redrive_policy=module.params.get('redrive_policy')
)
result = dict(
region=module.params.get('region'),
name=queue_name,
)
result.update(queue_attributes)
try:
queue = connection.get_queue(queue_name)
if queue:
# Update existing
result['changed'] = update_sqs_queue(queue, check_mode=module.check_mode, **queue_attributes)
else:
# Create new
if not module.check_mode:
queue = connection.create_queue(queue_name)
update_sqs_queue(queue, **queue_attributes)
result['changed'] = True
if not module.check_mode:
result['queue_arn'] = queue.get_attributes('QueueArn')['QueueArn']
result['default_visibility_timeout'] = queue.get_attributes('VisibilityTimeout')['VisibilityTimeout']
result['message_retention_period'] = queue.get_attributes('MessageRetentionPeriod')['MessageRetentionPeriod']
result['maximum_message_size'] = queue.get_attributes('MaximumMessageSize')['MaximumMessageSize']
result['delivery_delay'] = queue.get_attributes('DelaySeconds')['DelaySeconds']
result['receive_message_wait_time'] = queue.get_attributes('ReceiveMessageWaitTimeSeconds')['ReceiveMessageWaitTimeSeconds']
except BotoServerError:
result['msg'] = 'Failed to create/update sqs queue due to error: ' + traceback.format_exc()
module.fail_json(**result)
else:
module.exit_json(**result)
def update_sqs_queue(queue,
check_mode=False,
default_visibility_timeout=None,
message_retention_period=None,
maximum_message_size=None,
delivery_delay=None,
receive_message_wait_time=None,
policy=None,
redrive_policy=None):
changed = False
changed = set_queue_attribute(queue, 'VisibilityTimeout', default_visibility_timeout,
check_mode=check_mode) or changed
changed = set_queue_attribute(queue, 'MessageRetentionPeriod', message_retention_period,
check_mode=check_mode) or changed
changed = set_queue_attribute(queue, 'MaximumMessageSize', maximum_message_size,
check_mode=check_mode) or changed
changed = set_queue_attribute(queue, 'DelaySeconds', delivery_delay,
check_mode=check_mode) or changed
changed = set_queue_attribute(queue, 'ReceiveMessageWaitTimeSeconds', receive_message_wait_time,
check_mode=check_mode) or changed
changed = set_queue_attribute(queue, 'Policy', policy,
check_mode=check_mode) or changed
changed = set_queue_attribute(queue, 'RedrivePolicy', redrive_policy,
check_mode=check_mode) or changed
return changed
def set_queue_attribute(queue, attribute, value, check_mode=False):
if not value and value != 0:
return False
try:
existing_value = queue.get_attributes(attributes=attribute)[attribute]
    except (BotoServerError, KeyError):
        # attribute not present (or not retrievable); treat as unset
        existing_value = ''
# convert dict attributes to JSON strings (sort keys for comparing)
if attribute in ['Policy', 'RedrivePolicy']:
value = json.dumps(value, sort_keys=True)
if existing_value:
existing_value = json.dumps(json.loads(existing_value), sort_keys=True)
if str(value) != existing_value:
if not check_mode:
queue.set_attribute(attribute, value)
return True
return False
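# --- hedged note (not part of the original module) ---
# Dict-valued attributes are compared after canonical JSON serialization,
# so key order in a playbook cannot cause a spurious "changed" result, e.g.:
#   json.dumps({'b': 1, 'a': 2}, sort_keys=True) == '{"a": 2, "b": 1}'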
def delete_sqs_queue(connection, module):
queue_name = module.params.get('name')
result = dict(
region=module.params.get('region'),
name=queue_name,
)
try:
queue = connection.get_queue(queue_name)
if queue:
if not module.check_mode:
connection.delete_queue(queue)
result['changed'] = True
else:
result['changed'] = False
except BotoServerError:
result['msg'] = 'Failed to delete sqs queue due to error: ' + traceback.format_exc()
module.fail_json(**result)
else:
module.exit_json(**result)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(default='present', choices=['present', 'absent']),
name=dict(required=True, type='str'),
default_visibility_timeout=dict(type='int'),
message_retention_period=dict(type='int'),
maximum_message_size=dict(type='int'),
delivery_delay=dict(type='int'),
receive_message_wait_time=dict(type='int'),
policy=dict(type='dict', required=False),
redrive_policy=dict(type='dict', required=False),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if not region:
module.fail_json(msg='region must be specified')
try:
connection = connect_to_aws(boto.sqs, region, **aws_connect_params)
except (NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
state = module.params.get('state')
if state == 'present':
create_or_update_sqs_queue(connection, module)
elif state == 'absent':
delete_sqs_queue(connection, module)
if __name__ == '__main__':
main()
|
PiscesDream/Ideas
|
refs/heads/master
|
ML/guess_next/ann.py
|
1
|
'''
update:
2014/09/03:
softmax in the last layer
'''
import theano
import theano.tensor as T
import gzip
import cPickle
import numpy
import time
class HiddenLayer(object):
def __init__(self, rng, input, n_in, n_out, W=None, b=None,
activation=T.tanh):
self.input = input
if W is None:
W_values = numpy.asarray(rng.uniform(
low=-numpy.sqrt(6. / (n_in + n_out)),
high=numpy.sqrt(6. / (n_in + n_out)),
size=(n_in, n_out)), dtype=theano.config.floatX)
if activation == theano.tensor.nnet.sigmoid:
W_values *= 4
W = theano.shared(value=W_values, name='W', borrow=True)
if b is None:
b_values = numpy.zeros((n_out,), dtype=theano.config.floatX)
b = theano.shared(value=b_values, name='b', borrow=True)
self.W = W
self.b = b
lin_output = T.dot(input, self.W) + self.b
self.output = (lin_output if activation is None
else activation(lin_output))
# parameters of the model
self.params = [self.W, self.b]
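# --- hedged note (not part of the original file) ---
# HiddenLayer computes activation(x.W + b) with a Glorot-style init:
# W ~ U(-sqrt(6/(n_in+n_out)), +sqrt(6/(n_in+n_out))), scaled by 4 for
# sigmoid units as done above; e.g. a 784->10 sigmoid layer draws from
# roughly U(-0.348, 0.348), since 4*sqrt(6/794) ~= 0.348.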
class ANN(object):
def __init__(self, n_in, n_out, lmbd = 0.01, hiddens = [10]):
x = T.matrix('x')
y = T.ivector('y')
lr = T.scalar('lr')
rng = numpy.random.RandomState(numpy.random.randint(2 ** 30))
params = []
hid_layers = []
L2 = .0
n_hid = hiddens + [n_out]
for ind, ele in enumerate(n_hid):
if ind == 0:
input = x
n_in = n_in
else:
input = hid_layers[-1].output
n_in = n_hid[ind-1]
if ind == len(n_hid) - 1:
activation = T.nnet.softmax
else:
activation = T.nnet.sigmoid
layer = HiddenLayer(rng, input = input, n_in = n_in, n_out = ele, activation = activation)
hid_layers.append( layer)
L2 += T.sum(layer.W ** 2)
params.extend([layer.W, layer.b])
nl = -T.mean(T.log(hid_layers[-1].output)[T.arange(y.shape[0]), y])
cost = nl + L2 * lmbd
grads = T.grad(cost, params)
updates = []
for param_i, grad_i in zip(params, grads):
updates.append((param_i, param_i - lr * grad_i))
y_pred = T.argmax(hid_layers[-1].output, 1)
errors = T.mean(T.neq(y_pred, y))
self.n_in = n_in
self.n_out = n_out
self.hiddens = hiddens
self.hid_layers = hid_layers
self.x = x
self.y = y
self.lr = lr
self.cost = cost
self.errors = errors
self.updates = updates
        self.y_pred = y_pred  # renamed: `self.pred` would shadow the pred() method
self.time = []
def fit(self, datasets, batch_size = 500, n_epochs = 200, lr = 0.01):
''' without validation'''
index = T.lscalar()
train_set_x, train_set_y = datasets[0]
test_set_x, test_set_y = datasets[1]
n_train_batches = train_set_x.get_value(borrow=True).shape[0]
n_test_batches = test_set_x.get_value(borrow=True).shape[0]
n_train_batches /= batch_size
n_test_batches /= batch_size
train_model = theano.function([index], self.cost,
updates = self.updates,
givens = {
self.x: train_set_x[index * batch_size: (index + 1) * batch_size],
self.y: train_set_y[index * batch_size: (index + 1) * batch_size],
self.lr: lr})
test_model = theano.function([], self.errors,
givens = {
self.x: test_set_x,
self.y: test_set_y})
train_error = theano.function([], self.errors,
givens = {
self.x: train_set_x,
self.y: train_set_y})
debug_f = theano.function([index], self.errors,
givens = {
self.x: test_set_x[index * batch_size : (index+1) * batch_size],
self.y: test_set_y[index * batch_size : (index+1) * batch_size]})
# print numpy.mean([debug_f(i) for i in xrange(n_test_batches)])
print(test_model())
print '...training'
maxiter = n_epochs
iteration = 0
while iteration < maxiter:
start_time = time.time()
iteration += 1
print 'iteration %d' % iteration
for minibatch_index in xrange(n_train_batches):
print '\tL of (%03d/%03d) = %f\r' % (minibatch_index, n_train_batches, train_model(minibatch_index)),
print ''
print '\ttrain error = %f' % train_error()
print '\ttest error = %f' % test_model()
self.time.append(time.time()-start_time)
def pred(self, x):
return theano.function([], T.argmax(self.hid_layers[-1].output, 1),
givens = {self.x: x})()
def prob(self, x):
return theano.function([], self.hid_layers[-1].output,
givens = {self.x: x})()
def __repr__(self):
return '<ANN:%r-%r-%r>' % (self.n_in, self.hiddens, self.n_out)
def load_data(dataset, num = None):
print '... loading data'
f = gzip.open(dataset, 'rb')
train_set, valid_set, test_set = cPickle.load(f)
train_set = (numpy.concatenate([train_set[0], valid_set[0]], 0), numpy.concatenate([train_set[1], valid_set[1]], 0))
f.close()
def shared_dataset(data_xy, borrow=True, num = None):
data_x, data_y = data_xy
if num:
data_x = data_x[:num]
data_y = data_y[:num]
# data_y = boarden(10, data_y)
size = int(data_x.shape[1]**.5)
# data_x = data_x.reshape(data_x.shape[0], -1)
print data_x.shape, data_y.shape
shared_x = theano.shared(numpy.asarray(data_x,
dtype=theano.config.floatX),
borrow=borrow)
shared_y = theano.shared(numpy.asarray(data_y,
dtype=theano.config.floatX),
borrow=borrow)
return shared_x, T.cast(shared_y, 'int32')
test_set_x, test_set_y = shared_dataset(test_set, num = num)
# valid_set_x, valid_set_y = shared_dataset(valid_set, num = num)
train_set_x, train_set_y = shared_dataset(train_set, num = num)
rval = [(train_set_x, train_set_y), #(valid_set_x, valid_set_y),
(test_set_x, test_set_y)]
return rval
if __name__ == '__main__':
theano.config.exception_verbosity='high'
theano.config.on_unused_input='ignore'
datasets = load_data('../../Data/mnist/mnist.pkl.gz')
cl = ANN(28 * 28, 10, hiddens = [1])
cl.fit(datasets, lr = 0.1)
|
michaelpacer/linkchecker
|
refs/heads/master
|
third_party/dnspython/dns/rdtypes/__init__.py
|
109
|
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS rdata type classes"""
__all__ = [
'ANY',
'IN',
'mxbase',
'nsbase',
]
|
ashray/VTK-EVM
|
refs/heads/yiq
|
ThirdParty/Twisted/twisted/conch/test/test_knownhosts.py
|
30
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.conch.client.knownhosts}.
"""
import os
from binascii import Error as BinasciiError, b2a_base64, a2b_base64
try:
import Crypto
import pyasn1
except ImportError:
skip = "PyCrypto and PyASN1 required for twisted.conch.knownhosts."
else:
from twisted.conch.ssh.keys import Key, BadKeyError
from twisted.conch.client.knownhosts import \
PlainEntry, HashedEntry, KnownHostsFile, UnparsedEntry, ConsoleUI
from twisted.conch.client import default
from zope.interface.verify import verifyObject
from twisted.python.filepath import FilePath
from twisted.trial.unittest import TestCase
from twisted.internet.defer import Deferred
from twisted.conch.interfaces import IKnownHostEntry
from twisted.conch.error import HostKeyChanged, UserRejectedKey, InvalidEntry
from twisted.test.testutils import ComparisonTestsMixin
sampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAQEAsV0VMRbGmzhqxxayLRHmvnFvtyNqgbNKV46dU1bVFB+3y'
'tNvue4Riqv/SVkPRNwMb7eWH29SviXaBxUhYyzKkDoNUq3rTNnH1Vnif6d6X4JCrUb5d3W+Dm'
'YClyJrZ5HgD/hUpdSkTRqdbQ2TrvSAxRacj+vHHT4F4dm1bJSewm3B2D8HVOoi/CbVh3dsIiC'
'dp8VltdZx4qYVfYe2LwVINCbAa3d3tj9ma7RVfw3OH2Mfb+toLd1N5tBQFb7oqTt2nC6I/6Bd'
'4JwPUld+IEitw/suElq/AIJVQXXujeyiZlea90HE65U2mF1ytr17HTAIT2ySokJWyuBANGACk'
'6iIaw==')
otherSampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAIEAwaeCZd3UCuPXhX39+/p9qO028jTF76DMVd9mPvYVDVXuf'
'WckKZauF7+0b7qm+ChT7kan6BzRVo4++gCVNfAlMzLysSt3ylmOR48tFpAfygg9UCX3DjHz0E'
'lOOUKh3iifc9aUShD0OPaK3pR5JJ8jfiBfzSYWt/hDi/iZ4igsSs8=')
thirdSampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAQEAl/TQakPkePlnwCBRPitIVUTg6Z8VzN1en+DGkyo/evkmLw'
'7o4NWR5qbysk9A9jXW332nxnEuAnbcCam9SHe1su1liVfyIK0+3bdn0YRB0sXIbNEtMs2LtCho'
'/aV3cXPS+Cf1yut3wvIpaRnAzXxuKPCTXQ7/y0IXa8TwkRBH58OJa3RqfQ/NsSp5SAfdsrHyH2'
'aitiVKm2jfbTKzSEqOQG/zq4J9GXTkq61gZugory/Tvl5/yPgSnOR6C9jVOMHf27ZPoRtyj9SY'
'343Hd2QHiIE0KPZJEgCynKeWoKz8v6eTSK8n4rBnaqWdp8MnGZK1WGy05MguXbyCDuTC8AmJXQ'
'==')
sampleKey = a2b_base64(sampleEncodedKey)
otherSampleKey = a2b_base64(otherSampleEncodedKey)
thirdSampleKey = a2b_base64(thirdSampleEncodedKey)
samplePlaintextLine = (
"www.twistedmatrix.com ssh-rsa " + sampleEncodedKey + "\n")
otherSamplePlaintextLine = (
"divmod.com ssh-rsa " + otherSampleEncodedKey + "\n")
sampleHostIPLine = (
"www.twistedmatrix.com,198.49.126.131 ssh-rsa " + sampleEncodedKey + "\n")
sampleHashedLine = (
"|1|gJbSEPBG9ZSBoZpHNtZBD1bHKBA=|bQv+0Xa0dByrwkA1EB0E7Xop/Fo= ssh-rsa " +
sampleEncodedKey + "\n")
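# --- hedged note (not part of the original tests) ---
# Anatomy of sampleHashedLine above: "|1|<salt>|<hash> ssh-rsa <key>", where
# <salt> and <hash> are base64 and <hash> is the HMAC-SHA1 of the hostname
# keyed with the decoded salt, so the hostname cannot be read back from the
# file but can still be checked against a candidate host.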
class EntryTestsMixin:
"""
Tests for implementations of L{IKnownHostEntry}. Subclasses must set the
'entry' attribute to a provider of that interface, the implementation of
that interface under test.
@ivar entry: a provider of L{IKnownHostEntry} with a hostname of
www.twistedmatrix.com and an RSA key of sampleKey.
"""
def test_providesInterface(self):
"""
The given entry should provide IKnownHostEntry.
"""
verifyObject(IKnownHostEntry, self.entry)
def test_fromString(self):
"""
Constructing a plain text entry from an unhashed known_hosts entry will
result in an L{IKnownHostEntry} provider with 'keyString', 'hostname',
and 'keyType' attributes. While outside the interface in question,
these attributes are held in common by L{PlainEntry} and L{HashedEntry}
implementations; other implementations should override this method in
subclasses.
"""
entry = self.entry
self.assertEqual(entry.publicKey, Key.fromString(sampleKey))
self.assertEqual(entry.keyType, "ssh-rsa")
def test_matchesKey(self):
"""
L{IKnownHostEntry.matchesKey} checks to see if an entry matches a given
SSH key.
"""
twistedmatrixDotCom = Key.fromString(sampleKey)
divmodDotCom = Key.fromString(otherSampleKey)
self.assertEqual(
True,
self.entry.matchesKey(twistedmatrixDotCom))
self.assertEqual(
False,
self.entry.matchesKey(divmodDotCom))
def test_matchesHost(self):
"""
L{IKnownHostEntry.matchesHost} checks to see if an entry matches a
given hostname.
"""
self.assertEqual(True, self.entry.matchesHost(
"www.twistedmatrix.com"))
self.assertEqual(False, self.entry.matchesHost(
"www.divmod.com"))
class PlainEntryTests(EntryTestsMixin, TestCase):
"""
Test cases for L{PlainEntry}.
"""
plaintextLine = samplePlaintextLine
hostIPLine = sampleHostIPLine
def setUp(self):
"""
Set 'entry' to a sample plain-text entry with sampleKey as its key.
"""
self.entry = PlainEntry.fromString(self.plaintextLine)
def test_matchesHostIP(self):
"""
A "hostname,ip" formatted line will match both the host and the IP.
"""
self.entry = PlainEntry.fromString(self.hostIPLine)
self.assertEqual(True, self.entry.matchesHost("198.49.126.131"))
self.test_matchesHost()
def test_toString(self):
"""
        L{PlainEntry.toString} generates the serialized OpenSSH format string
        for the entry, sans newline.
"""
self.assertEqual(self.entry.toString(), self.plaintextLine.rstrip("\n"))
multiHostEntry = PlainEntry.fromString(self.hostIPLine)
self.assertEqual(multiHostEntry.toString(),
self.hostIPLine.rstrip("\n"))
class PlainTextWithCommentTests(PlainEntryTests):
"""
Test cases for L{PlainEntry} when parsed from a line with a comment.
"""
plaintextLine = samplePlaintextLine[:-1] + " plain text comment.\n"
hostIPLine = sampleHostIPLine[:-1] + " text following host/IP line\n"
class HashedEntryTests(EntryTestsMixin, ComparisonTestsMixin, TestCase):
"""
Tests for L{HashedEntry}.
This suite doesn't include any tests for host/IP pairs because hashed
    entries store IP addresses the same way as hostnames and do not support
comma-separated lists. (If you hash the IP and host together you can't
tell if you've got the key already for one or the other.)
"""
hashedLine = sampleHashedLine
def setUp(self):
"""
Set 'entry' to a sample hashed entry for twistedmatrix.com with
sampleKey as its key.
"""
self.entry = HashedEntry.fromString(self.hashedLine)
def test_toString(self):
"""
        L{HashedEntry.toString} generates the serialized OpenSSH format string
for the entry, sans the newline.
"""
self.assertEqual(self.entry.toString(), self.hashedLine.rstrip("\n"))
def test_equality(self):
"""
Two L{HashedEntry} instances compare equal if and only if they represent
the same host and key in exactly the same way: the host salt, host hash,
public key type, public key, and comment fields must all be equal.
"""
hostSalt = "gJbSEPBG9ZSBoZpHNtZBD1bHKBA"
hostHash = "bQv+0Xa0dByrwkA1EB0E7Xop/Fo"
publicKey = Key.fromString(sampleKey)
comment = "hello, world"
entry = HashedEntry(
hostSalt, hostHash, publicKey.type(), publicKey, comment)
duplicate = HashedEntry(
hostSalt, hostHash, publicKey.type(), publicKey, comment)
# Vary the host salt
self.assertNormalEqualityImplementation(
entry, duplicate,
HashedEntry(
hostSalt[::-1], hostHash, publicKey.type(), publicKey,
comment))
# Vary the host hash
self.assertNormalEqualityImplementation(
entry, duplicate,
HashedEntry(
hostSalt, hostHash[::-1], publicKey.type(), publicKey,
comment))
# Vary the key type
self.assertNormalEqualityImplementation(
entry, duplicate,
HashedEntry(
hostSalt, hostHash, publicKey.type()[::-1], publicKey,
comment))
# Vary the key
self.assertNormalEqualityImplementation(
entry, duplicate,
HashedEntry(
hostSalt, hostHash, publicKey.type(),
Key.fromString(otherSampleKey), comment))
# Vary the comment
self.assertNormalEqualityImplementation(
entry, duplicate,
HashedEntry(
hostSalt, hostHash, publicKey.type(), publicKey,
comment[::-1]))
class HashedEntryWithCommentTests(HashedEntryTests):
"""
Test cases for L{PlainEntry} when parsed from a line with a comment.
"""
hashedLine = sampleHashedLine[:-1] + " plain text comment.\n"
class UnparsedEntryTests(TestCase, EntryTestsMixin):
"""
Tests for L{UnparsedEntry}
"""
def setUp(self):
"""
Set up the 'entry' to be an unparsed entry for some random text.
"""
self.entry = UnparsedEntry(" This is a bogus entry. \n")
def test_fromString(self):
"""
Creating an L{UnparsedEntry} should simply record the string it was
passed.
"""
self.assertEqual(" This is a bogus entry. \n",
self.entry._string)
def test_matchesHost(self):
"""
An unparsed entry can't match any hosts.
"""
self.assertEqual(False, self.entry.matchesHost("www.twistedmatrix.com"))
def test_matchesKey(self):
"""
An unparsed entry can't match any keys.
"""
self.assertEqual(False, self.entry.matchesKey(Key.fromString(sampleKey)))
def test_toString(self):
"""
L{UnparsedEntry.toString} returns its input string, sans trailing
newline.
"""
self.assertEqual(" This is a bogus entry. ", self.entry.toString())
class ParseErrorTests(TestCase):
"""
    L{HashedEntry.fromString} and L{PlainEntry.fromString} can raise a variety
    of errors depending on how certain strings are misformatted. These tests
    make sure those errors are caught. Since many of the ways that this can go
    wrong are in the lower-level APIs being invoked by the parsing logic,
    several of these are integration tests with the C{base64} and
    L{twisted.conch.ssh.keys} modules.
"""
def invalidEntryTest(self, cls):
"""
If there are fewer than three elements, C{fromString} should raise
L{InvalidEntry}.
"""
self.assertRaises(InvalidEntry, cls.fromString, "invalid")
def notBase64Test(self, cls):
"""
If the key is not base64, C{fromString} should raise L{BinasciiError}.
"""
self.assertRaises(BinasciiError, cls.fromString, "x x x")
def badKeyTest(self, cls, prefix):
"""
If the key portion of the entry is valid base64, but is not actually an
SSH key, C{fromString} should raise L{BadKeyError}.
"""
self.assertRaises(BadKeyError, cls.fromString, ' '.join(
[prefix, "ssh-rsa", b2a_base64(
"Hey, this isn't an SSH key!").strip()]))
def test_invalidPlainEntry(self):
"""
If there are fewer than three whitespace-separated elements in an
entry, L{PlainEntry.fromString} should raise L{InvalidEntry}.
"""
self.invalidEntryTest(PlainEntry)
def test_invalidHashedEntry(self):
"""
If there are fewer than three whitespace-separated elements in an
entry, or the hostname salt/hash portion has more than two elements,
L{HashedEntry.fromString} should raise L{InvalidEntry}.
"""
self.invalidEntryTest(HashedEntry)
a, b, c = sampleHashedLine.split()
self.assertRaises(InvalidEntry, HashedEntry.fromString, ' '.join(
[a + "||", b, c]))
def test_plainNotBase64(self):
"""
If the key portion of a plain entry is not decodable as base64,
C{fromString} should raise L{BinasciiError}.
"""
self.notBase64Test(PlainEntry)
def test_hashedNotBase64(self):
"""
        If the key, host salt, or host hash portion of a hashed entry is not
        decodable as base64, C{fromString} should raise L{BinasciiError}.
"""
self.notBase64Test(HashedEntry)
a, b, c = sampleHashedLine.split()
# Salt not valid base64.
self.assertRaises(
BinasciiError, HashedEntry.fromString,
' '.join(["|1|x|" + b2a_base64("stuff").strip(), b, c]))
# Host hash not valid base64.
self.assertRaises(
BinasciiError, HashedEntry.fromString,
' '.join([HashedEntry.MAGIC + b2a_base64("stuff").strip() + "|x",
b, c]))
# Neither salt nor hash valid base64.
self.assertRaises(
BinasciiError, HashedEntry.fromString,
' '.join(["|1|x|x", b, c]))
def test_hashedBadKey(self):
"""
If the key portion of the entry is valid base64, but is not actually an
SSH key, C{HashedEntry.fromString} should raise L{BadKeyError}.
"""
a, b, c = sampleHashedLine.split()
self.badKeyTest(HashedEntry, a)
def test_plainBadKey(self):
"""
If the key portion of the entry is valid base64, but is not actually an
SSH key, C{PlainEntry.fromString} should raise L{BadKeyError}.
"""
self.badKeyTest(PlainEntry, "hostname")
class KnownHostsDatabaseTests(TestCase):
"""
Tests for L{KnownHostsFile}.
"""
def pathWithContent(self, content):
"""
Return a FilePath with the given initial content.
"""
fp = FilePath(self.mktemp())
fp.setContent(content)
return fp
def loadSampleHostsFile(self, content=(
sampleHashedLine + otherSamplePlaintextLine +
"\n# That was a blank line.\n"
"This is just unparseable.\n"
"|1|This also unparseable.\n")):
"""
Return a sample hosts file, with keys for www.twistedmatrix.com and
divmod.com present.
"""
return KnownHostsFile.fromPath(self.pathWithContent(content))
def test_readOnlySavePath(self):
"""
L{KnownHostsFile.savePath} is read-only; if an assignment is made to
it, L{AttributeError} is raised and the value is unchanged.
"""
path = FilePath(self.mktemp())
new = FilePath(self.mktemp())
hostsFile = KnownHostsFile(path)
self.assertRaises(AttributeError, setattr, hostsFile, "savePath", new)
self.assertEqual(path, hostsFile.savePath)
def test_defaultInitializerIgnoresExisting(self):
"""
The default initializer for L{KnownHostsFile} disregards any existing
contents in the save path.
"""
hostsFile = KnownHostsFile(self.pathWithContent(sampleHashedLine))
self.assertEqual([], list(hostsFile.iterentries()))
def test_defaultInitializerClobbersExisting(self):
"""
After using the default initializer for L{KnownHostsFile}, the first use
of L{KnownHostsFile.save} overwrites any existing contents in the save
path.
"""
path = self.pathWithContent(sampleHashedLine)
hostsFile = KnownHostsFile(path)
entry = hostsFile.addHostKey(
"www.example.com", Key.fromString(otherSampleKey))
hostsFile.save()
# Check KnownHostsFile to see what it thinks the state is
self.assertEqual([entry], list(hostsFile.iterentries()))
# And also directly check the underlying file itself
self.assertEqual(entry.toString() + "\n", path.getContent())
def test_saveResetsClobberState(self):
"""
After L{KnownHostsFile.save} is used once with an instance initialized
by the default initializer, contents of the save path are respected and
preserved.
"""
hostsFile = KnownHostsFile(self.pathWithContent(sampleHashedLine))
preSave = hostsFile.addHostKey(
"www.example.com", Key.fromString(otherSampleKey))
hostsFile.save()
postSave = hostsFile.addHostKey(
"another.example.com", Key.fromString(thirdSampleKey))
hostsFile.save()
self.assertEqual([preSave, postSave], list(hostsFile.iterentries()))
def test_loadFromPath(self):
"""
Loading a L{KnownHostsFile} from a path with six entries in it will
result in a L{KnownHostsFile} object with six L{IKnownHostEntry}
providers in it.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(6, len(list(hostsFile.iterentries())))
def test_iterentriesUnsaved(self):
"""
If the save path for a L{KnownHostsFile} does not exist,
L{KnownHostsFile.iterentries} still returns added but unsaved entries.
"""
hostsFile = KnownHostsFile(FilePath(self.mktemp()))
hostsFile.addHostKey("www.example.com", Key.fromString(sampleKey))
self.assertEqual(1, len(list(hostsFile.iterentries())))
def test_verifyHashedEntry(self):
"""
Loading a L{KnownHostsFile} from a path containing a single valid
L{HashedEntry} entry will result in a L{KnownHostsFile} object
with one L{IKnownHostEntry} provider.
"""
        hostsFile = self.loadSampleHostsFile(sampleHashedLine)
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], HashedEntry)
self.assertEqual(True, entries[0].matchesHost("www.twistedmatrix.com"))
self.assertEqual(1, len(entries))
def test_verifyPlainEntry(self):
"""
Loading a L{KnownHostsFile} from a path containing a single valid
L{PlainEntry} entry will result in a L{KnownHostsFile} object
with one L{IKnownHostEntry} provider.
"""
        hostsFile = self.loadSampleHostsFile(otherSamplePlaintextLine)
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], PlainEntry)
self.assertEqual(True, entries[0].matchesHost("divmod.com"))
self.assertEqual(1, len(entries))
def test_verifyUnparsedEntry(self):
"""
        Loading a L{KnownHostsFile} from a path that only contains '\n' will
        result in a L{KnownHostsFile} object containing an L{UnparsedEntry}
        object.
"""
hostsFile = self.loadSampleHostsFile(("\n"))
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], UnparsedEntry)
self.assertEqual(entries[0].toString(), "")
self.assertEqual(1, len(entries))
def test_verifyUnparsedComment(self):
"""
        Loading a L{KnownHostsFile} from a path that contains a comment will
        result in a L{KnownHostsFile} object containing an L{UnparsedEntry}
        object.
"""
hostsFile = self.loadSampleHostsFile(("# That was a blank line.\n"))
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], UnparsedEntry)
self.assertEqual(entries[0].toString(), "# That was a blank line.")
def test_verifyUnparsableLine(self):
"""
        Loading a L{KnownHostsFile} from a path that contains an unparseable
        line will result in that line being represented as an L{UnparsedEntry}
        instance.
"""
hostsFile = self.loadSampleHostsFile(("This is just unparseable.\n"))
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], UnparsedEntry)
self.assertEqual(entries[0].toString(), "This is just unparseable.")
self.assertEqual(1, len(entries))
def test_verifyUnparsableEncryptionMarker(self):
"""
        Loading a L{KnownHostsFile} from a path containing an unparseable line
        that starts with an encryption marker will result in that line being
        represented as an L{UnparsedEntry} instance.
"""
hostsFile = self.loadSampleHostsFile(("|1|This is unparseable.\n"))
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], UnparsedEntry)
self.assertEqual(entries[0].toString(), "|1|This is unparseable.")
self.assertEqual(1, len(entries))
def test_loadNonExistent(self):
"""
Loading a L{KnownHostsFile} from a path that does not exist should
result in an empty L{KnownHostsFile} that will save back to that path.
"""
pn = self.mktemp()
knownHostsFile = KnownHostsFile.fromPath(FilePath(pn))
entries = list(knownHostsFile.iterentries())
self.assertEqual([], entries)
self.assertEqual(False, FilePath(pn).exists())
knownHostsFile.save()
self.assertEqual(True, FilePath(pn).exists())
def test_loadNonExistentParent(self):
"""
Loading a L{KnownHostsFile} from a path whose parent directory does not
exist should result in an empty L{KnownHostsFile} that will save back
        to that path, creating any missing parent directories in the process.
"""
thePath = FilePath(self.mktemp())
knownHostsPath = thePath.child("foo").child("known_hosts")
knownHostsFile = KnownHostsFile.fromPath(knownHostsPath)
knownHostsFile.save()
knownHostsPath.restat(False)
self.assertEqual(True, knownHostsPath.exists())
def test_savingAddsEntry(self):
"""
L{KnownHostsFile.save} will write out a new file with any entries
that have been added.
"""
path = self.pathWithContent(sampleHashedLine +
otherSamplePlaintextLine)
knownHostsFile = KnownHostsFile.fromPath(path)
newEntry = knownHostsFile.addHostKey("some.example.com",
Key.fromString(thirdSampleKey))
expectedContent = (
sampleHashedLine +
otherSamplePlaintextLine + HashedEntry.MAGIC +
b2a_base64(newEntry._hostSalt).strip() + "|" +
b2a_base64(newEntry._hostHash).strip() + " ssh-rsa " +
thirdSampleEncodedKey + "\n")
# Sanity check, let's make sure the base64 API being used for the test
# isn't inserting spurious newlines.
self.assertEqual(3, expectedContent.count("\n"))
knownHostsFile.save()
self.assertEqual(expectedContent, path.getContent())
def test_savingAvoidsDuplication(self):
"""
L{KnownHostsFile.save} only writes new entries to the save path, not
entries which were added and already written by a previous call to
C{save}.
"""
path = FilePath(self.mktemp())
knownHosts = KnownHostsFile(path)
entry = knownHosts.addHostKey(
"some.example.com", Key.fromString(sampleKey))
knownHosts.save()
knownHosts.save()
knownHosts = KnownHostsFile.fromPath(path)
self.assertEqual([entry], list(knownHosts.iterentries()))
    def test_savingPreservesExisting(self):
"""
L{KnownHostsFile.save} will not overwrite existing entries in its save
path, even if they were only added after the L{KnownHostsFile} instance
was initialized.
"""
# Start off with one host/key pair in the file
path = self.pathWithContent(sampleHashedLine)
knownHosts = KnownHostsFile.fromPath(path)
# After initializing the KnownHostsFile instance, add a second host/key
# pair to the file directly - without the instance's help or knowledge.
with path.open("a") as hostsFileObj:
hostsFileObj.write(otherSamplePlaintextLine)
# Add a third host/key pair using the KnownHostsFile instance
key = Key.fromString(thirdSampleKey)
knownHosts.addHostKey("brandnew.example.com", key)
knownHosts.save()
# Check that all three host/key pairs are present.
knownHosts = KnownHostsFile.fromPath(path)
self.assertEqual([True, True, True], [
knownHosts.hasHostKey(
"www.twistedmatrix.com", Key.fromString(sampleKey)),
knownHosts.hasHostKey(
"divmod.com", Key.fromString(otherSampleKey)),
knownHosts.hasHostKey("brandnew.example.com", key)])
def test_hasPresentKey(self):
"""
L{KnownHostsFile.hasHostKey} returns C{True} when a key for the given
hostname is present and matches the expected key.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(True, hostsFile.hasHostKey(
"www.twistedmatrix.com", Key.fromString(sampleKey)))
def test_hasNonPresentKey(self):
"""
L{KnownHostsFile.hasHostKey} returns C{False} when a key for the given
hostname is not present.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(False, hostsFile.hasHostKey(
"non-existent.example.com", Key.fromString(sampleKey)))
def test_hasLaterAddedKey(self):
"""
L{KnownHostsFile.hasHostKey} returns C{True} when a key for the given
hostname is present in the file, even if it is only added to the file
after the L{KnownHostsFile} instance is initialized.
"""
key = Key.fromString(sampleKey)
entry = PlainEntry(["brandnew.example.com"], key.sshType(), key, "")
hostsFile = self.loadSampleHostsFile()
with hostsFile.savePath.open("a") as hostsFileObj:
hostsFileObj.write(entry.toString() + "\n")
self.assertEqual(
True, hostsFile.hasHostKey("brandnew.example.com", key))
def test_savedEntryHasKeyMismatch(self):
"""
L{KnownHostsFile.hasHostKey} raises L{HostKeyChanged} if the host key is
present in the underlying file, but different from the expected one.
The resulting exception should have an C{offendingEntry} indicating the
given entry.
"""
hostsFile = self.loadSampleHostsFile()
entries = list(hostsFile.iterentries())
exception = self.assertRaises(
HostKeyChanged, hostsFile.hasHostKey,
"www.twistedmatrix.com", Key.fromString(otherSampleKey))
self.assertEqual(exception.offendingEntry, entries[0])
self.assertEqual(exception.lineno, 1)
self.assertEqual(exception.path, hostsFile.savePath)
def test_savedEntryAfterAddHasKeyMismatch(self):
"""
Even after a new entry has been added in memory but not yet saved, the
L{HostKeyChanged} exception raised by L{KnownHostsFile.hasHostKey} has a
C{lineno} attribute which indicates the 1-based line number of the
offending entry in the underlying file when the given host key does not
match the expected host key.
"""
hostsFile = self.loadSampleHostsFile()
hostsFile.addHostKey(
"www.example.com", Key.fromString(otherSampleKey))
exception = self.assertRaises(
HostKeyChanged, hostsFile.hasHostKey,
"www.twistedmatrix.com", Key.fromString(otherSampleKey))
self.assertEqual(exception.lineno, 1)
self.assertEqual(exception.path, hostsFile.savePath)
def test_unsavedEntryHasKeyMismatch(self):
"""
L{KnownHostsFile.hasHostKey} raises L{HostKeyChanged} if the host key is
present in memory (but not yet saved), but different from the expected
        one. The resulting exception has an C{offendingEntry} indicating the
given entry, but no filename or line number information (reflecting the
fact that the entry exists only in memory).
"""
hostsFile = KnownHostsFile(FilePath(self.mktemp()))
entry = hostsFile.addHostKey(
"www.example.com", Key.fromString(otherSampleKey))
exception = self.assertRaises(
HostKeyChanged, hostsFile.hasHostKey,
"www.example.com", Key.fromString(thirdSampleKey))
self.assertEqual(exception.offendingEntry, entry)
self.assertEqual(exception.lineno, None)
self.assertEqual(exception.path, None)
def test_addHostKey(self):
"""
L{KnownHostsFile.addHostKey} adds a new L{HashedEntry} to the host
file, and returns it.
"""
hostsFile = self.loadSampleHostsFile()
aKey = Key.fromString(thirdSampleKey)
self.assertEqual(False,
hostsFile.hasHostKey("somewhere.example.com", aKey))
newEntry = hostsFile.addHostKey("somewhere.example.com", aKey)
# The code in OpenSSH requires host salts to be 20 characters long.
# This is the required length of a SHA-1 HMAC hash, so it's just a
# sanity check.
self.assertEqual(20, len(newEntry._hostSalt))
self.assertEqual(True,
newEntry.matchesHost("somewhere.example.com"))
self.assertEqual(newEntry.keyType, "ssh-rsa")
self.assertEqual(aKey, newEntry.publicKey)
self.assertEqual(True,
hostsFile.hasHostKey("somewhere.example.com", aKey))
def test_randomSalts(self):
"""
L{KnownHostsFile.addHostKey} generates a random salt for each new key,
so subsequent salts will be different.
"""
hostsFile = self.loadSampleHostsFile()
aKey = Key.fromString(thirdSampleKey)
self.assertNotEqual(
hostsFile.addHostKey("somewhere.example.com", aKey)._hostSalt,
hostsFile.addHostKey("somewhere-else.example.com", aKey)._hostSalt)
def test_verifyValidKey(self):
"""
Verifying a valid key should return a L{Deferred} which fires with
True.
"""
hostsFile = self.loadSampleHostsFile()
hostsFile.addHostKey("1.2.3.4", Key.fromString(sampleKey))
ui = FakeUI()
d = hostsFile.verifyHostKey(ui, "www.twistedmatrix.com", "1.2.3.4",
Key.fromString(sampleKey))
l = []
d.addCallback(l.append)
self.assertEqual(l, [True])
def test_verifyInvalidKey(self):
"""
        Verifying an invalid key should return a L{Deferred} which fires with a
L{HostKeyChanged} failure.
"""
hostsFile = self.loadSampleHostsFile()
wrongKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
hostsFile.addHostKey("1.2.3.4", Key.fromString(sampleKey))
d = hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "1.2.3.4", wrongKey)
return self.assertFailure(d, HostKeyChanged)
def verifyNonPresentKey(self):
"""
Set up a test to verify a key that isn't present. Return a 3-tuple of
the UI, a list set up to collect the result of the verifyHostKey call,
and the sample L{KnownHostsFile} being used.
        This utility method avoids returning a L{Deferred}, and records results
        in the returned list instead, because the events which get generated
        here are pre-recorded in the 'ui' object. If the L{Deferred} in
        question does not fire, the test will fail quickly with an empty list.
"""
hostsFile = self.loadSampleHostsFile()
absentKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
l = []
d = hostsFile.verifyHostKey(
ui, "sample-host.example.com", "4.3.2.1", absentKey)
d.addBoth(l.append)
self.assertEqual([], l)
self.assertEqual(
ui.promptText,
"The authenticity of host 'sample-host.example.com (4.3.2.1)' "
"can't be established.\n"
"RSA key fingerprint is "
"89:4e:cc:8c:57:83:96:48:ef:63:ad:ee:99:00:4c:8f.\n"
"Are you sure you want to continue connecting (yes/no)? ")
return ui, l, hostsFile
def test_verifyNonPresentKey_Yes(self):
"""
Verifying a key where neither the hostname nor the IP are present
should result in the UI being prompted with a message explaining as
much. If the UI says yes, the Deferred should fire with True.
"""
ui, l, knownHostsFile = self.verifyNonPresentKey()
ui.promptDeferred.callback(True)
self.assertEqual([True], l)
reloaded = KnownHostsFile.fromPath(knownHostsFile.savePath)
self.assertEqual(
True,
reloaded.hasHostKey("4.3.2.1", Key.fromString(thirdSampleKey)))
self.assertEqual(
True,
reloaded.hasHostKey("sample-host.example.com",
Key.fromString(thirdSampleKey)))
def test_verifyNonPresentKey_No(self):
"""
Verifying a key where neither the hostname nor the IP are present
should result in the UI being prompted with a message explaining as
much. If the UI says no, the Deferred should fail with
UserRejectedKey.
"""
ui, l, knownHostsFile = self.verifyNonPresentKey()
ui.promptDeferred.callback(False)
l[0].trap(UserRejectedKey)
def test_verifyHostIPMismatch(self):
"""
        Verifying a key where the host is present (and correct), but the IP is
        present and different, should result in the deferred firing with a
        L{HostKeyChanged} failure.
"""
hostsFile = self.loadSampleHostsFile()
wrongKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
d = hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "4.3.2.1", wrongKey)
return self.assertFailure(d, HostKeyChanged)
def test_verifyKeyForHostAndIP(self):
"""
Verifying a key where the hostname is present but the IP is not should
result in the key being added for the IP and the user being warned
about the change.
"""
ui = FakeUI()
hostsFile = self.loadSampleHostsFile()
expectedKey = Key.fromString(sampleKey)
hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "5.4.3.2", expectedKey)
self.assertEqual(
True, KnownHostsFile.fromPath(hostsFile.savePath).hasHostKey(
"5.4.3.2", expectedKey))
self.assertEqual(
["Warning: Permanently added the RSA host key for IP address "
"'5.4.3.2' to the list of known hosts."],
ui.userWarnings)
class FakeFile(object):
"""
A fake file-like object that acts enough like a file for
L{ConsoleUI.prompt}.
"""
def __init__(self):
self.inlines = []
self.outchunks = []
self.closed = False
def readline(self):
"""
Return a line from the 'inlines' list.
"""
return self.inlines.pop(0)
def write(self, chunk):
"""
Append the given item to the 'outchunks' list.
"""
if self.closed:
raise IOError("the file was closed")
self.outchunks.append(chunk)
def close(self):
"""
Set the 'closed' flag to True, explicitly marking that it has been
closed.
"""
self.closed = True
class ConsoleUITests(TestCase):
"""
Test cases for L{ConsoleUI}.
"""
def setUp(self):
"""
Create a L{ConsoleUI} pointed at a L{FakeFile}.
"""
self.fakeFile = FakeFile()
self.ui = ConsoleUI(self.openFile)
def openFile(self):
"""
Return the current fake file.
"""
return self.fakeFile
def newFile(self, lines):
"""
Create a new fake file (the next file that self.ui will open) with the
given list of lines to be returned from readline().
"""
self.fakeFile = FakeFile()
self.fakeFile.inlines = lines
def test_promptYes(self):
"""
L{ConsoleUI.prompt} writes a message to the console, then reads a line.
If that line is 'yes', then it returns a L{Deferred} that fires with
True.
"""
for okYes in ['yes', 'Yes', 'yes\n']:
self.newFile([okYes])
l = []
self.ui.prompt("Hello, world!").addCallback(l.append)
self.assertEqual(["Hello, world!"], self.fakeFile.outchunks)
self.assertEqual([True], l)
self.assertEqual(True, self.fakeFile.closed)
def test_promptNo(self):
"""
L{ConsoleUI.prompt} writes a message to the console, then reads a line.
If that line is 'no', then it returns a L{Deferred} that fires with
False.
"""
for okNo in ['no', 'No', 'no\n']:
self.newFile([okNo])
l = []
self.ui.prompt("Goodbye, world!").addCallback(l.append)
self.assertEqual(["Goodbye, world!"], self.fakeFile.outchunks)
self.assertEqual([False], l)
self.assertEqual(True, self.fakeFile.closed)
def test_promptRepeatedly(self):
"""
L{ConsoleUI.prompt} writes a message to the console, then reads a line.
        If that line is neither 'yes' nor 'no', then it says "Please type
        'yes' or 'no':" until it gets a 'yes' or a 'no', at which point it
        returns a Deferred that answers either True or False.
"""
self.newFile(['what', 'uh', 'okay', 'yes'])
l = []
self.ui.prompt("Please say something useful.").addCallback(l.append)
self.assertEqual([True], l)
self.assertEqual(self.fakeFile.outchunks,
["Please say something useful."] +
["Please type 'yes' or 'no': "] * 3)
self.assertEqual(True, self.fakeFile.closed)
self.newFile(['blah', 'stuff', 'feh', 'no'])
l = []
self.ui.prompt("Please say something negative.").addCallback(l.append)
self.assertEqual([False], l)
self.assertEqual(self.fakeFile.outchunks,
["Please say something negative."] +
["Please type 'yes' or 'no': "] * 3)
self.assertEqual(True, self.fakeFile.closed)
def test_promptOpenFailed(self):
"""
If the C{opener} passed to L{ConsoleUI} raises an exception, that
exception will fail the L{Deferred} returned from L{ConsoleUI.prompt}.
"""
def raiseIt():
raise IOError()
ui = ConsoleUI(raiseIt)
d = ui.prompt("This is a test.")
return self.assertFailure(d, IOError)
def test_warn(self):
"""
L{ConsoleUI.warn} should output a message to the console object.
"""
self.ui.warn("Test message.")
self.assertEqual(["Test message."], self.fakeFile.outchunks)
self.assertEqual(True, self.fakeFile.closed)
def test_warnOpenFailed(self):
"""
L{ConsoleUI.warn} should log a traceback if the output can't be opened.
"""
def raiseIt():
1 / 0
ui = ConsoleUI(raiseIt)
ui.warn("This message never makes it.")
self.assertEqual(len(self.flushLoggedErrors(ZeroDivisionError)), 1)
class FakeUI(object):
"""
A fake UI object, adhering to the interface expected by
    L{KnownHostsFile.verifyHostKey}.

@ivar userWarnings: inputs provided to 'warn'.
@ivar promptDeferred: last result returned from 'prompt'.
@ivar promptText: the last input provided to 'prompt'.
"""
def __init__(self):
self.userWarnings = []
self.promptDeferred = None
self.promptText = None
def prompt(self, text):
"""
Issue the user an interactive prompt, which they can accept or deny.
"""
self.promptText = text
self.promptDeferred = Deferred()
return self.promptDeferred
def warn(self, text):
"""
Issue a non-interactive warning to the user.
"""
self.userWarnings.append(text)
class FakeObject(object):
"""
A fake object that can have some attributes. Used to fake
L{SSHClientTransport} and L{SSHClientFactory}.
"""
class DefaultAPITests(TestCase):
"""
The API in L{twisted.conch.client.default.verifyHostKey} is the integration
point between the code in the rest of conch and L{KnownHostsFile}.
"""
def patchedOpen(self, fname, mode):
"""
The patched version of 'open'; this returns a L{FakeFile} that the
instantiated L{ConsoleUI} can use.
"""
self.assertEqual(fname, "/dev/tty")
self.assertEqual(mode, "r+b")
return self.fakeFile
def setUp(self):
"""
Patch 'open' in verifyHostKey.
"""
self.fakeFile = FakeFile()
self.patch(default, "_open", self.patchedOpen)
self.hostsOption = self.mktemp()
knownHostsFile = KnownHostsFile(FilePath(self.hostsOption))
knownHostsFile.addHostKey("exists.example.com",
Key.fromString(sampleKey))
knownHostsFile.addHostKey("4.3.2.1", Key.fromString(sampleKey))
knownHostsFile.save()
self.fakeTransport = FakeObject()
self.fakeTransport.factory = FakeObject()
self.options = self.fakeTransport.factory.options = {
'host': "exists.example.com",
'known-hosts': self.hostsOption
}
def test_verifyOKKey(self):
"""
L{default.verifyHostKey} should return a L{Deferred} which fires with
C{1} when passed a host, IP, and key which already match the
known_hosts file it is supposed to check.
"""
l = []
default.verifyHostKey(self.fakeTransport, "4.3.2.1", sampleKey,
"I don't care.").addCallback(l.append)
self.assertEqual([1], l)
def replaceHome(self, tempHome):
"""
Replace the HOME environment variable until the end of the current
test, with the given new home-directory, so that L{os.path.expanduser}
will yield controllable, predictable results.
@param tempHome: the pathname to replace the HOME variable with.
@type tempHome: L{str}
"""
oldHome = os.environ.get('HOME')
def cleanupHome():
if oldHome is None:
del os.environ['HOME']
else:
os.environ['HOME'] = oldHome
self.addCleanup(cleanupHome)
os.environ['HOME'] = tempHome
def test_noKnownHostsOption(self):
"""
L{default.verifyHostKey} should find your known_hosts file in
~/.ssh/known_hosts if you don't specify one explicitly on the command
line.
"""
l = []
tmpdir = self.mktemp()
oldHostsOption = self.hostsOption
hostsNonOption = FilePath(tmpdir).child(".ssh").child("known_hosts")
hostsNonOption.parent().makedirs()
FilePath(oldHostsOption).moveTo(hostsNonOption)
self.replaceHome(tmpdir)
self.options['known-hosts'] = None
default.verifyHostKey(self.fakeTransport, "4.3.2.1", sampleKey,
"I don't care.").addCallback(l.append)
self.assertEqual([1], l)
def test_verifyHostButNotIP(self):
"""
L{default.verifyHostKey} should return a L{Deferred} which fires with
        C{1} when passed a host which matches a known key but whose IP address
        is not present in its known_hosts file, and should also warn the user
        that it has added the IP address.
"""
l = []
default.verifyHostKey(self.fakeTransport, "8.7.6.5", sampleKey,
"Fingerprint not required.").addCallback(l.append)
self.assertEqual(
["Warning: Permanently added the RSA host key for IP address "
"'8.7.6.5' to the list of known hosts."],
self.fakeFile.outchunks)
self.assertEqual([1], l)
knownHostsFile = KnownHostsFile.fromPath(FilePath(self.hostsOption))
self.assertEqual(True, knownHostsFile.hasHostKey("8.7.6.5",
Key.fromString(sampleKey)))
def test_verifyQuestion(self):
"""
        L{default.verifyHostKey} should return a L{Deferred} which fails with
        L{UserRejectedKey} when passed an unknown host that the user refuses
        to acknowledge.
"""
self.fakeTransport.factory.options['host'] = 'fake.example.com'
self.fakeFile.inlines.append("no")
d = default.verifyHostKey(
self.fakeTransport, "9.8.7.6", otherSampleKey, "No fingerprint!")
self.assertEqual(
["The authenticity of host 'fake.example.com (9.8.7.6)' "
"can't be established.\n"
"RSA key fingerprint is "
"57:a1:c2:a1:07:a0:2b:f4:ce:b5:e5:b7:ae:cc:e1:99.\n"
"Are you sure you want to continue connecting (yes/no)? "],
self.fakeFile.outchunks)
return self.assertFailure(d, UserRejectedKey)
def test_verifyBadKey(self):
"""
L{default.verifyHostKey} should return a L{Deferred} which fails with
L{HostKeyChanged} if the host key is incorrect.
"""
d = default.verifyHostKey(
self.fakeTransport, "4.3.2.1", otherSampleKey,
"Again, not required.")
return self.assertFailure(d, HostKeyChanged)
|
tuzhaopeng/NMT-Coverage
|
refs/heads/master
|
build/lib/experiments/nmt/data_state.py
|
2
|
dict(
source=["/home/zptu/research/nmt.coverage/data/fbis/binarized_text.zh.h5"],
target=["/home/zptu/research/nmt.coverage/data/fbis/binarized_text.en.h5"],
indx_word="/home/zptu/research/nmt.coverage/data/fbis/ivocab.zh.pkl",
indx_word_target="/home/zptu/research/nmt.coverage/data/fbis/ivocab.en.pkl",
word_indx="/home/zptu/research/nmt.coverage/data/fbis/vocab.zh.pkl",
word_indx_trgt="/home/zptu/research/nmt.coverage/data/fbis/vocab.en.pkl",
null_sym_source=30000,
null_sym_target=30000,
n_sym_source=30001,
n_sym_target=30001,
loopIters=200000,
seqlen=50,
bs=80,
dim=1000,
saveFreq=30,
    last_forward=False,
    forward=True,
    backward=True,
    last_backward=False,
##########
# for coverage
maintain_coverage=True,
# for accumulated coverage, the dim can only be 1
coverage_dim=1,
    use_coverage_cost=False,
    # Hard alignment: at each step, set the alignment with the highest
    # probability to 1.0 and all other alignments to 0.0; the default is soft
    # coverage, which uses the real alignment probabilities.
use_hard_alignment=False,
use_coverage_for_alignment=True,
    # not recommended: using coverage for alignment only yields better
    # performance
use_coverage_for_decoding=False,
#-----------------------
use_accumulated_coverage=False,
# all the below options are for coverage model I -- simple coverage
    # Upper bound of the value is 1.0 (for additive) or 0.0 (for subtractive),
    # to eliminate the effect of input sentence length.
use_accumulated_coverage_bound=False,
    # we define 4 types of accumulated operation, applied at each source
    # position (a toy sketch of these operations follows after this dict):
# additive: sum up the alignment probabilities in the past (coverage starts with 0.0) (default)
# subtractive: minus the alignment probabilities in the past (coverage starts with 1.0)
# max-pooling: use the most representative value (the highest probability till now)(coverage starts with 0.0)
# mean-pooling: use the mean of the alignment probabilities in the past (coverage starts with 0.0)
coverage_accumulated_operation = "additive",
#-----------------------
# settings for recurrent_coverage
use_recurrent_coverage=False,
use_input_annotations_for_recurrent_coverage=False,
use_decoding_state_for_recurrent_coverage=False,
use_recurrent_gating_coverage=True,
##########
)
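# A minimal sketch (an illustration, not part of the original config or
# model code) of how the four coverage_accumulated_operation values described
# above could fold the current alignment probabilities into the running
# coverage; `coverage` and `align` are lists with one entry per source
# position, and `step` is the 0-based decoding step.
def accumulate_coverage(coverage, align, step, operation="additive"):
    if operation == "additive":
        # coverage starts at 0.0 and sums past alignment probabilities
        return [c + a for c, a in zip(coverage, align)]
    if operation == "subtractive":
        # coverage starts at 1.0 and subtracts past alignment probabilities
        return [c - a for c, a in zip(coverage, align)]
    if operation == "max-pooling":
        # coverage starts at 0.0 and keeps the highest probability seen
        return [max(c, a) for c, a in zip(coverage, align)]
    if operation == "mean-pooling":
        # coverage starts at 0.0 and keeps a running mean of probabilities
        return [(c * step + a) / (step + 1.0) for c, a in zip(coverage, align)]
    raise ValueError("unknown accumulation operation: %r" % operation)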
|
BeATz-UnKNoWN/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/lib2to3/fixes/fix_methodattrs.py
|
203
|
"""Fix bound method attributes (method.im_? -> method.__?__).
"""
# Author: Christian Heimes
# Local imports
from .. import fixer_base
from ..fixer_util import Name
MAP = {
"im_func" : "__func__",
"im_self" : "__self__",
"im_class" : "__self__.__class__"
}
class FixMethodattrs(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """
power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
"""
def transform(self, node, results):
attr = results["attr"][0]
new = MAP[attr.value]
attr.replace(Name(new, prefix=attr.prefix))
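# Illustrative effect (a sketch of assumed usage, not part of the original
# file): running 2to3 with this fixer rewrites Python 2 bound-method
# attribute access, e.g.
#   meth.im_func   ->  meth.__func__
#   meth.im_self   ->  meth.__self__
#   meth.im_class  ->  meth.__self__.__class__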
|